repo_name (string, 5–100) | ref (string, 12–67) | path (string, 4–244) | copies (string, 1–8) | content (string, 0–1.05M, ⌀ = null) |
---|---|---|---|---|
khagler/boto | refs/heads/develop | boto/pyami/launch_ami.py | 153 | #!/usr/bin/env python
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import getopt
import sys
import imp
import time
import boto
usage_string = """
SYNOPSIS
launch_ami.py -a ami_id [-b script_bucket] [-s script_name]
[-m module] [-c class_name] [-r]
[-g group] [-k key_name] [-n num_instances]
[-w] [extra_data]
Where:
ami_id - the id of the AMI you wish to launch
module - The name of the Python module containing the class you
want to run when the instance is started. If you use this
option the Python module must already be stored on the
instance in a location that is on the Python path.
        script_name - The name of a local Python module that you would like
to have copied to S3 and then run on the instance
when it is started. The specified module must be
import'able (i.e. in your local Python path). It
will then be copied to the specified bucket in S3
(see the -b option). Once the new instance(s)
start up the script will be copied from S3 and then
run locally on the instance.
class_name - The name of the class to be instantiated within the
module or script file specified.
script_bucket - the name of the bucket in which the script will be
stored
group - the name of the security group the instance will run in
key_name - the name of the keypair to use when launching the AMI
num_instances - how many instances of the AMI to launch (default 1)
input_queue_name - Name of SQS to read input messages from
output_queue_name - Name of SQS to write output messages to
extra_data - additional name-value pairs that will be passed as
userdata to the newly launched instance. These should
be of the form "name=value"
The -r option reloads the Python module to S3 without launching
another instance. This can be useful during debugging to allow
you to test a new version of your script without shutting down
your instance and starting up another one.
The -w option tells the script to run synchronously, meaning to
wait until the instance is actually up and running. It then prints
the IP address and internal and external DNS names before exiting.
"""
def usage():
print(usage_string)
sys.exit()
def main():
try:
        opts, args = getopt.getopt(sys.argv[1:], 'a:b:c:g:hi:k:m:n:o:rs:w',
                                   ['ami=', 'bucket=', 'class=', 'group=',
                                    'help', 'inputqueue=', 'keypair=',
                                    'module=', 'numinstances=', 'outputqueue=',
                                    'reload', 'script_name=', 'wait'])
    except getopt.GetoptError:
usage()
params = {'module_name': None,
'script_name': None,
'class_name': None,
'script_bucket': None,
'group': 'default',
'keypair': None,
'ami': None,
'num_instances': 1,
'input_queue_name': None,
'output_queue_name': None}
reload = None
wait = None
for o, a in opts:
if o in ('-a', '--ami'):
params['ami'] = a
if o in ('-b', '--bucket'):
params['script_bucket'] = a
if o in ('-c', '--class'):
params['class_name'] = a
if o in ('-g', '--group'):
params['group'] = a
if o in ('-h', '--help'):
usage()
if o in ('-i', '--inputqueue'):
params['input_queue_name'] = a
if o in ('-k', '--keypair'):
params['keypair'] = a
if o in ('-m', '--module'):
params['module_name'] = a
        if o in ('-n', '--numinstances'):
params['num_instances'] = int(a)
if o in ('-o', '--outputqueue'):
params['output_queue_name'] = a
if o in ('-r', '--reload'):
reload = True
        if o in ('-s', '--script_name'):
params['script_name'] = a
if o in ('-w', '--wait'):
wait = True
# check required fields
required = ['ami']
for pname in required:
if not params.get(pname, None):
print('%s is required' % pname)
usage()
if params['script_name']:
# first copy the desired module file to S3 bucket
if reload:
print('Reloading module %s to S3' % params['script_name'])
else:
print('Copying module %s to S3' % params['script_name'])
l = imp.find_module(params['script_name'])
c = boto.connect_s3()
bucket = c.get_bucket(params['script_bucket'])
key = bucket.new_key(params['script_name'] + '.py')
key.set_contents_from_file(l[0])
params['script_md5'] = key.md5
# we have everything we need, now build userdata string
l = []
for k, v in params.items():
if v:
l.append('%s=%s' % (k, v))
c = boto.connect_ec2()
l.append('aws_access_key_id=%s' % c.aws_access_key_id)
l.append('aws_secret_access_key=%s' % c.aws_secret_access_key)
for kv in args:
l.append(kv)
s = '|'.join(l)
if not reload:
rs = c.get_all_images([params['ami']])
img = rs[0]
r = img.run(user_data=s, key_name=params['keypair'],
security_groups=[params['group']],
max_count=params.get('num_instances', 1))
print('AMI: %s - %s (Started)' % (params['ami'], img.location))
print('Reservation %s contains the following instances:' % r.id)
for i in r.instances:
print('\t%s' % i.id)
if wait:
running = False
while not running:
time.sleep(30)
[i.update() for i in r.instances]
status = [i.state for i in r.instances]
print(status)
if status.count('running') == len(r.instances):
running = True
for i in r.instances:
print('Instance: %s' % i.ami_launch_index)
print('Public DNS Name: %s' % i.public_dns_name)
print('Private DNS Name: %s' % i.private_dns_name)
if __name__ == "__main__":
main()
|
buntyke/GPy | refs/heads/master | GPy/util/multioutput.py | 13 | import numpy as np
import warnings
import GPy
def get_slices(input_list):
num_outputs = len(input_list)
_s = [0] + [ _x.shape[0] for _x in input_list ]
_s = np.cumsum(_s)
slices = [slice(a,b) for a,b in zip(_s[:-1],_s[1:])]
return slices
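# Illustrative example (hypothetical shapes): for two outputs with 3 and 2
# rows respectively, get_slices returns [slice(0, 3), slice(3, 5)], i.e. the
# row ranges each output occupies in the stacked multioutput arrays.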
def build_XY(input_list,output_list=None,index=None):
num_outputs = len(input_list)
if output_list is not None:
assert num_outputs == len(output_list)
Y = np.vstack(output_list)
else:
Y = None
if index is not None:
assert len(index) == num_outputs
I = np.hstack( [np.repeat(j,_x.shape[0]) for _x,j in zip(input_list,index)] )
else:
I = np.hstack( [np.repeat(j,_x.shape[0]) for _x,j in zip(input_list,range(num_outputs))] )
X = np.vstack(input_list)
X = np.hstack([X,I[:,None]])
    return X, Y, I[:, None]
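# Illustrative example (hypothetical inputs): for input_list = [X0, X1] with 3
# and 2 rows, build_XY stacks them into a (5, d+1) array whose appended last
# column holds the output index (0 for X0's rows, 1 for X1's); Y is the
# stacked output_list, or None if no outputs were given.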
def build_likelihood(Y_list,noise_index,likelihoods_list=None):
Ny = len(Y_list)
if likelihoods_list is None:
likelihoods_list = [GPy.likelihoods.Gaussian(name="Gaussian_noise_%s" %j) for y,j in zip(Y_list,range(Ny))]
else:
assert len(likelihoods_list) == Ny
#likelihood = GPy.likelihoods.mixed_noise.MixedNoise(likelihoods_list=likelihoods_list, noise_index=noise_index)
likelihood = GPy.likelihoods.mixed_noise.MixedNoise(likelihoods_list=likelihoods_list)
return likelihood
def ICM(input_dim, num_outputs, kernel, W_rank=1,W=None,kappa=None,name='ICM'):
"""
Builds a kernel for an Intrinsic Coregionalization Model
:input_dim: Input dimensionality (does not include dimension of indices)
:num_outputs: Number of outputs
:param kernel: kernel that will be multiplied by the coregionalize kernel (matrix B).
:type kernel: a GPy kernel
    :param W_rank: rank (number of columns) of the coregionalization matrix W
    :type W_rank: integer
"""
if kernel.input_dim != input_dim:
kernel.input_dim = input_dim
warnings.warn("kernel's input dimension overwritten to fit input_dim parameter.")
K = kernel.prod(GPy.kern.Coregionalize(1, num_outputs, active_dims=[input_dim], rank=W_rank,W=W,kappa=kappa,name='B'),name=name)
return K
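# A minimal usage sketch (illustrative; assumes GPy's standard RBF kernel):
# build an ICM kernel over 2 outputs on 1-dimensional inputs, where the last
# input column carries the output index.
#
#   k = ICM(input_dim=1, num_outputs=2, kernel=GPy.kern.RBF(1), W_rank=1)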
def LCM(input_dim, num_outputs, kernels_list, W_rank=1,name='ICM'):
"""
    Builds a kernel for a Linear Coregionalization Model
:input_dim: Input dimensionality (does not include dimension of indices)
:num_outputs: Number of outputs
    :param kernels_list: list of kernels, one per latent function; each is
        multiplied by its own coregionalization kernel (matrix B).
    :type kernels_list: list of GPy kernels
    :param W_rank: rank (number of columns) of the coregionalization matrices W
    :type W_rank: integer
"""
Nk = len(kernels_list)
K = ICM(input_dim,num_outputs,kernels_list[0],W_rank,name='%s%s' %(name,0))
j = 1
for kernel in kernels_list[1:]:
K += ICM(input_dim,num_outputs,kernel,W_rank,name='%s%s' %(name,j))
j += 1
return K
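# A minimal usage sketch (illustrative): combine two base kernels into a
# 2-output LCM kernel on 1-dimensional inputs.
#
#   k = LCM(input_dim=1, num_outputs=2,
#           kernels_list=[GPy.kern.RBF(1), GPy.kern.Bias(1)])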
def Private(input_dim, num_outputs, kernel, output, kappa=None,name='X'):
"""
Builds a kernel for an Intrinsic Coregionalization Model
:input_dim: Input dimensionality
:num_outputs: Number of outputs
:param kernel: kernel that will be multiplied by the coregionalize kernel (matrix B).
:type kernel: a GPy kernel
:param W_rank: number tuples of the corregionalization parameters 'W'
:type W_rank: integer
"""
K = ICM(input_dim,num_outputs,kernel,W_rank=1,kappa=kappa,name=name)
K.B.W.fix(0)
    _range = list(range(num_outputs))
    _range.pop(output)
for j in _range:
K.B.kappa[j] = 0
K.B.kappa[j].fix()
return K
|
nikhilprathapani/python-for-android | refs/heads/master | python3-alpha/python3-src/Lib/idlelib/FileList.py | 55 | import os
from tkinter import *
import tkinter.messagebox as tkMessageBox
class FileList:
# N.B. this import overridden in PyShellFileList.
from idlelib.EditorWindow import EditorWindow
def __init__(self, root):
self.root = root
self.dict = {}
self.inversedict = {}
self.vars = {} # For EditorWindow.getrawvar (shared Tcl variables)
def open(self, filename, action=None):
assert filename
filename = self.canonize(filename)
if os.path.isdir(filename):
# This can happen when bad filename is passed on command line:
tkMessageBox.showerror(
"File Error",
"%r is a directory." % (filename,),
master=self.root)
return None
key = os.path.normcase(filename)
if key in self.dict:
edit = self.dict[key]
edit.top.wakeup()
return edit
if action:
# Don't create window, perform 'action', e.g. open in same window
return action(filename)
else:
edit = self.EditorWindow(self, filename, key)
if edit.good_load:
return edit
else:
edit._close()
return None
def gotofileline(self, filename, lineno=None):
edit = self.open(filename)
if edit is not None and lineno is not None:
edit.gotoline(lineno)
def new(self, filename=None):
return self.EditorWindow(self, filename)
def close_all_callback(self, *args, **kwds):
for edit in list(self.inversedict):
reply = edit.close()
if reply == "cancel":
break
return "break"
def unregister_maybe_terminate(self, edit):
try:
key = self.inversedict[edit]
except KeyError:
print("Don't know this EditorWindow object. (close)")
return
if key:
del self.dict[key]
del self.inversedict[edit]
if not self.inversedict:
self.root.quit()
def filename_changed_edit(self, edit):
edit.saved_change_hook()
try:
key = self.inversedict[edit]
except KeyError:
print("Don't know this EditorWindow object. (rename)")
return
filename = edit.io.filename
if not filename:
if key:
del self.dict[key]
self.inversedict[edit] = None
return
filename = self.canonize(filename)
newkey = os.path.normcase(filename)
if newkey == key:
return
if newkey in self.dict:
conflict = self.dict[newkey]
self.inversedict[conflict] = None
tkMessageBox.showerror(
"Name Conflict",
"You now have multiple edit windows open for %r" % (filename,),
master=self.root)
self.dict[newkey] = edit
self.inversedict[edit] = newkey
if key:
try:
del self.dict[key]
except KeyError:
pass
def canonize(self, filename):
if not os.path.isabs(filename):
try:
pwd = os.getcwd()
except os.error:
pass
else:
filename = os.path.join(pwd, filename)
return os.path.normpath(filename)
def _test():
from idlelib.EditorWindow import fixwordbreaks
import sys
root = Tk()
fixwordbreaks(root)
root.withdraw()
flist = FileList(root)
if sys.argv[1:]:
for filename in sys.argv[1:]:
flist.open(filename)
else:
flist.new()
if flist.inversedict:
root.mainloop()
if __name__ == '__main__':
_test()
|
EHeneman/google-python-exercises | refs/heads/master | logpuzzle/logpuzzle.py | 1 | #!/usr/bin/python
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
import os
import re
import sys
import urllib
"""Logpuzzle exercise
Given an apache logfile, find the puzzle urls and download the images.
Here's what a puzzle url looks like:
10.254.254.28 - - [06/Aug/2007:00:13:48 -0700] "GET /~foo/puzzle-bar-aaab.jpg HTTP/1.0" 302 528 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.6) Gecko/20070725 Firefox/2.0.0.6"
"""
def read_urls(filename):
"""Returns a list of the puzzle urls from the given log file,
extracting the hostname from the filename itself.
Screens out duplicate urls and returns the urls sorted into
increasing order."""
# +++your code here+++
with open(filename) as f:
lines = f.readlines()
protocol = r'http://'
puzzle, hostname = filename.split('_')
full_urls_set = set()
for line in lines:
match = re.search(r'\S+(puzzle*).\S+', line)
if match:
url = match.group()
full_url = '{0}{1}{2}'.format(protocol, hostname, url)
if puzzle == 'place':
start = full_url.find('puzzle') + len('puzzle') + 1
url_path, url_extension = os.path.splitext(full_url)
url_name = url_path[start:]
trash, prefix, suffix = url_name.split('-')
full_urls_set.add((full_url, suffix))
else:
full_urls_set.add(full_url)
if puzzle == 'place':
urls = [url for (url, suffix) in sorted(full_urls_set, key=lambda t: t[1])]
else:
urls = sorted(full_urls_set)
return urls
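# Illustrative behavior (hypothetical logs): for "animal_code.google.com" the
# urls come back in plain sorted order; for "place_code.google.com" they are
# ordered by the last hyphen-separated word of the puzzle filename so the
# image slices assemble in the right order.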
def download_images(img_urls, dest_dir):
"""Given the urls already in the correct order, downloads
each image into the given directory.
Gives the images local filenames img0, img1, and so on.
Creates an index.html in the directory
with an img tag to show each local image file.
Creates the directory if necessary.
"""
# +++your code here+++
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
index_html = "<html><body>[IMG]</body></html>"
index_img = list()
for id, img_url in enumerate(img_urls):
img_id = "img{0}.jpg".format(id)
dest_dir_img = os.path.join(dest_dir, img_id)
img_html = "<img src='{0}'>".format(dest_dir_img)
index_img.append(img_html)
print('Retrieving... {0}: {1}'.format(img_id, img_url))
urllib.urlretrieve(img_url, dest_dir_img)
index = index_html.replace('[IMG]', ''.join(index_img))
index_filename = os.path.join(dest_dir, 'index.html')
with open(index_filename, 'w') as f:
f.write(index)
def main():
args = sys.argv[1:]
if not args:
print('usage: [--todir dir] logfile ')
sys.exit(1)
todir = ''
if args[0] == '--todir':
todir = args[1]
del args[0:2]
img_urls = read_urls(args[0])
if todir:
download_images(img_urls, todir)
else:
print('\n'.join(img_urls))
if __name__ == '__main__':
main()
|
liangjg/openmc | refs/heads/develop | tests/unit_tests/test_surface_composite.py | 8 | from random import uniform
import numpy as np
import openmc
import pytest
def test_rectangular_parallelepiped():
xmin = uniform(-5., 5.)
xmax = xmin + uniform(0., 5.)
ymin = uniform(-5., 5.)
ymax = ymin + uniform(0., 5.)
zmin = uniform(-5., 5.)
zmax = zmin + uniform(0., 5.)
s = openmc.model.RectangularParallelepiped(xmin, xmax, ymin, ymax, zmin, zmax)
assert isinstance(s.xmin, openmc.XPlane)
assert isinstance(s.xmax, openmc.XPlane)
assert isinstance(s.ymin, openmc.YPlane)
assert isinstance(s.ymax, openmc.YPlane)
assert isinstance(s.zmin, openmc.ZPlane)
assert isinstance(s.zmax, openmc.ZPlane)
# Make sure boundary condition propagates
s.boundary_type = 'reflective'
assert s.boundary_type == 'reflective'
for axis in 'xyz':
assert getattr(s, '{}min'.format(axis)).boundary_type == 'reflective'
assert getattr(s, '{}max'.format(axis)).boundary_type == 'reflective'
# Check bounding box
ll, ur = (+s).bounding_box
assert np.all(np.isinf(ll))
assert np.all(np.isinf(ur))
ll, ur = (-s).bounding_box
assert ur == pytest.approx((xmax, ymax, zmax))
assert ll == pytest.approx((xmin, ymin, zmin))
# __contains__ on associated half-spaces
assert (xmin - 0.1, 0., 0.) in +s
assert (xmin - 0.1, 0., 0.) not in -s
dx, dy, dz = xmax - xmin, ymax - ymin, zmax - zmin
assert (xmin + dx/2, ymin + dy/2, zmin + dz/2) in -s
assert (xmin + dx/2, ymin + dy/2, zmin + dz/2) not in +s
# translate method
t = uniform(-5.0, 5.0)
s_t = s.translate((t, t, t))
ll_t, ur_t = (-s_t).bounding_box
assert ur_t == pytest.approx(ur + t)
assert ll_t == pytest.approx(ll + t)
# Make sure repr works
repr(s)
@pytest.mark.parametrize(
"axis, indices", [
("X", [0, 1, 2]),
("Y", [1, 2, 0]),
("Z", [2, 0, 1]),
]
)
def test_right_circular_cylinder(axis, indices):
x, y, z = 1.0, -2.5, 3.0
h, r = 5.0, 3.0
s = openmc.model.RightCircularCylinder((x, y, z), h, r, axis=axis.lower())
assert isinstance(s.cyl, getattr(openmc, axis + "Cylinder"))
assert isinstance(s.top, getattr(openmc, axis + "Plane"))
assert isinstance(s.bottom, getattr(openmc, axis + "Plane"))
# Make sure boundary condition propagates
s.boundary_type = 'reflective'
assert s.boundary_type == 'reflective'
assert s.cyl.boundary_type == 'reflective'
assert s.bottom.boundary_type == 'reflective'
assert s.top.boundary_type == 'reflective'
# Check bounding box
ll, ur = (+s).bounding_box
assert np.all(np.isinf(ll))
assert np.all(np.isinf(ur))
ll, ur = (-s).bounding_box
assert ll == pytest.approx((x, y, z) + np.roll([0, -r, -r], indices[0]))
assert ur == pytest.approx((x, y, z) + np.roll([h, r, r], indices[0]))
# __contains__ on associated half-spaces
point_pos = (x, y, z) + np.roll([h/2, r+1, r+1], indices[0])
assert point_pos in +s
assert point_pos not in -s
point_neg = (x, y, z) + np.roll([h/2, 0, 0], indices[0])
assert point_neg in -s
assert point_neg not in +s
# translate method
t = uniform(-5.0, 5.0)
s_t = s.translate((t, t, t))
ll_t, ur_t = (-s_t).bounding_box
assert ur_t == pytest.approx(ur + t)
assert ll_t == pytest.approx(ll + t)
# Make sure repr works
repr(s)
@pytest.mark.parametrize(
"axis, point_pos, point_neg, ll_true", [
("X", (8., 0., 0.), (12., 0., 0.), (10., -np.inf, -np.inf)),
("Y", (10., -2., 0.), (10., 2., 0.), (-np.inf, 0., -np.inf)),
("Z", (10., 0., -3.), (10., 0., 3.), (-np.inf, -np.inf, 0.))
]
)
def test_cone_one_sided(axis, point_pos, point_neg, ll_true):
cone_oneside = getattr(openmc.model, axis + "ConeOneSided")
cone_twoside = getattr(openmc, axis + "Cone")
plane = getattr(openmc, axis + "Plane")
x, y, z = 10., 0., 0.
r2 = 4.
s = cone_oneside(x, y, z, r2, True)
assert isinstance(s.cone, cone_twoside)
assert isinstance(s.plane, plane)
assert s.up
# Make sure boundary condition propagates
s.boundary_type = 'reflective'
assert s.boundary_type == 'reflective'
assert s.cone.boundary_type == 'reflective'
assert s.plane.boundary_type == 'transmission'
# Check bounding box
ll, ur = (+s).bounding_box
assert np.all(np.isinf(ll))
assert np.all(np.isinf(ur))
ll, ur = (-s).bounding_box
assert np.all(np.isinf(ur))
assert ll == pytest.approx(ll_true)
# __contains__ on associated half-spaces
assert point_pos in +s
assert point_pos not in -s
assert point_neg in -s
assert point_neg not in +s
# translate method
t = uniform(-5.0, 5.0)
s_t = s.translate((t, t, t))
ll_t, ur_t = (-s_t).bounding_box
assert ur_t == pytest.approx(ur + t)
assert ll_t == pytest.approx(ll + t)
# Make sure repr works
repr(s)
|
toscanini/maestro | refs/heads/master | maestro/template.py | 2 | import exceptions, utils, container, py_backend
import StringIO, copy, logging, sys
from requests.exceptions import HTTPError
class Template:
def __init__(self, name, config, service, version):
self.name = name
self.config = config
self.service = service
self.version = version
self.log = logging.getLogger('maestro')
self.backend = py_backend.PyBackend()
def build(self):
# If there is a docker file or url hand off to Docker builder
if 'buildspec' in self.config:
if self.config['buildspec']:
if 'dockerfile' in self.config['buildspec']:
self._build(dockerfile=self.config['buildspec']['dockerfile'])
elif 'url' in self.config['buildspec']:
self._build(url=self.config['buildspec']['url'])
else:
raise exceptions.TemplateError("Template: " + self.name + " Buildspec specified but no dockerfile or url found.")
else:
# verify the base image and pull it if necessary
try:
base = self.config['base_image']
self.backend.inspect_image(base)
except HTTPError:
# Attempt to pull the image.
self.log.info('Attempting to pull base: %s', base)
result = self.backend.pull_image(base)
if 'error' in result:
self.log.error('No base image could be pulled under the name: %s', base)
raise exceptions.TemplateError("No base image could be pulled under the name: " + base)
except KeyError:
raise exceptions.TemplateError("Template: " + self.name + "No base image specified.")
# There doesn't seem to be a way to currently remove tags so we'll generate a new image.
# More consistent for all cases this way too but it does feel kinda wrong.
dockerfile = """
FROM %s
MAINTAINER %s
""" % (base, self._mid())
self._build(dockerfile=dockerfile)
return True
# Launches an instance of the template in a new container
def instantiate(self, name, command=None):
config = copy.deepcopy(self.config['config'])
# Setup bind mounts to the host system
bind_mounts = {}
if 'mounts' in self.config:
bind_mounts = self.config['mounts']
for src, dest in self.config['mounts'].items():
if 'volumes' not in config:
config['volumes'] = {}
config['volumes'][dest] = {}
if command:
if isinstance(command, basestring):
config['command'] = command
else:
config['command'] = " ".join(command)
return container.Container(name, {'template': self.name, 'image_id': self.config['image_id']}, config, mounts=bind_mounts)
def destroy(self):
# If there is an image_id then we need to destroy the image.
if 'image_id' in self.config:
self.backend.remove_image(self.config['image_id'])
def full_name(self):
return self.service + "." + self.name
def _base_id(self, base):
tag = 'latest'
if ':' in base:
base, tag = base.split(':')
result = self.backend.images(name=base)
for image in result:
if image['Tag'] == tag:
return image['Id']
return None
# Generate the meastro specific ID for this template.
def _mid(self):
return self.service + "." + self.name + ":" + self.version
def _build(self, dockerfile=None, url=None):
self.log.info('Building container: %s', self._mid())
if (dockerfile):
result = self.backend.build_image(fileobj=StringIO.StringIO(dockerfile))
elif (url):
result = self.backend.build_image(path=url)
else:
raise exceptions.TemplateError("Can't build if no buildspec is provided: " + self.name)
    if result[0] is None:
# TODO: figure out what to do with the result of this execution
print result
raise exceptions.TemplateError("Build failed for template: " + self.name)
self.config['image_id'] = result[0]
self._tag(self.config['image_id'])
self.log.info('Container registered with tag: %s', self._mid())
def _tag(self, image_id):
# Tag the container with the name and process id
self.backend.tag_image(image_id, self.service + "." + self.name, tag=self.version)
# TODO: make sure this is always appropriate to do as there may be cases where tagging a build as latest isn't desired.
self.backend.tag_image(image_id, self.service + "." + self.name, tag='latest')
|
jwren/intellij-community | refs/heads/master | python/testData/inspections/PyUnresolvedReferencesInspection/UnusedImportBeforeStarImport/m2.py | 80 | import m1
|
ZachGoldberg/django-oscar-paypal | refs/heads/master | paypal/payflow/dashboard/app.py | 10 | from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
class PayFlowDashboardApplication(Application):
name = None
list_view = views.TransactionListView
detail_view = views.TransactionDetailView
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='paypal-payflow-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='paypal-payflow-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = PayFlowDashboardApplication()
|
mdaniel/intellij-community | refs/heads/master | python/testData/copyPaste/TopLevelIfStatementWithMultilineCondition.after.py | 35 | if (True or (True or
False)):
x = 1
y = 2 |
pdellaert/ansible | refs/heads/devel | test/units/modules/network/netvisor/test_pn_port_config.py | 23 | # Copyright: (c) 2018, Pluribus Networks
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch
from ansible.modules.network.netvisor import pn_port_config
from units.modules.utils import set_module_args
from .nvos_module import TestNvosModule
class TestPortConfigModule(TestNvosModule):
module = pn_port_config
def setUp(self):
self.mock_run_nvos_commands = patch('ansible.modules.network.netvisor.pn_port_config.run_cli')
self.run_nvos_commands = self.mock_run_nvos_commands.start()
def tearDown(self):
self.mock_run_nvos_commands.stop()
def run_cli_patch(self, module, cli, state_map):
if state_map['update'] == 'port-config-modify':
results = dict(
changed=True,
cli_cmd=cli
)
module.exit_json(**results)
def load_fixtures(self, commands=None, state=None, transport='cli'):
self.run_nvos_commands.side_effect = self.run_cli_patch
def test_pn_port_config_modify_t1(self):
set_module_args({'pn_cliswitch': 'sw01', 'pn_port': '1,2',
'pn_speed': '10g', 'pn_jumbo': True, 'state': 'update'})
result = self.execute_module(changed=True, state='update')
expected_cmd = ' switch sw01 port-config-modify speed 10g port 1,2 jumbo '
self.assertEqual(result['cli_cmd'], expected_cmd)
def test_pn_port_config_modify_t2(self):
set_module_args({'pn_cliswitch': 'sw01', 'pn_port': 'all',
'pn_host_enable': True, 'state': 'update'})
result = self.execute_module(changed=True, state='update')
expected_cmd = ' switch sw01 port-config-modify port all host-enable '
self.assertEqual(result['cli_cmd'], expected_cmd)
def test_pn_port_config_modify_t3(self):
set_module_args({'pn_cliswitch': 'sw01', 'pn_port': '5',
'pn_crc_check_enable': True, 'pn_vxlan_termination': False, 'state': 'update'})
result = self.execute_module(changed=True, state='update')
expected_cmd = ' switch sw01 port-config-modify port 5 crc-check-enable no-vxlan-termination '
self.assertEqual(result['cli_cmd'], expected_cmd)
def test_pn_port_config_modify_t4(self):
set_module_args({'pn_cliswitch': 'sw01', 'pn_port': '10,11,12',
'pn_pause': False, 'pn_enable': True, 'state': 'update'})
result = self.execute_module(changed=True, state='update')
expected_cmd = ' switch sw01 port-config-modify port 10,11,12 no-pause enable '
self.assertEqual(result['cli_cmd'], expected_cmd)
|
openhatch/oh-mainline | refs/heads/master | vendor/packages/python-openid/openid/store/__init__.py | 173 | """
This package contains the modules related to this library's use of
persistent storage.
@sort: interface, filestore, sqlstore, memstore
"""
__all__ = ['interface', 'filestore', 'sqlstore', 'memstore', 'nonce']
|
DARKPOP/external_chromium_org | refs/heads/dark-5.1 | tools/telemetry/telemetry/core/system_info.py | 58 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core import gpu_info
class SystemInfo(object):
"""Provides low-level system information."""
def __init__(self, model_name, gpu_dict):
    if model_name is None or gpu_dict is None:
raise Exception("Missing model_name or gpu_dict argument")
self._model_name = model_name
self._gpu = gpu_info.GPUInfo.FromDict(gpu_dict)
@classmethod
def FromDict(cls, attrs):
"""Constructs a SystemInfo from a dictionary of attributes.
Attributes currently required to be present in the dictionary:
model_name (string): a platform-dependent string
describing the model of machine, or the empty string if not
supported.
gpu (object containing GPUInfo's required attributes)
"""
return cls(attrs["model_name"], attrs["gpu"])
@property
def model_name(self):
"""A string describing the machine model.
This is a highly platform-dependent value and not currently
specified for any machine type aside from Macs. On Mac OS, this
is the model identifier, reformatted slightly; for example,
'MacBookPro 10.1'."""
return self._model_name
@property
def gpu(self):
"""A GPUInfo object describing the graphics processor(s) on the system."""
return self._gpu
|
afandria/sky_engine | refs/heads/master | sky/tools/android_library_cacher.py | 13 | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import logging
import os
import re
import skypy.paths
import subprocess
import sys
SRC_ROOT = skypy.paths.Paths('ignored').src_root
ADB_PATH = os.path.join(SRC_ROOT,
'third_party/android_tools/sdk/platform-tools/adb')
# TODO(eseidel): This should be shared with adb_gdb
def main():
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(
description='Pull all libraries used by a pid on android into a cache.')
parser.add_argument('cache_root', type=str)
parser.add_argument('pid', type=int)
args = parser.parse_args()
if not os.path.exists(args.cache_root):
os.makedirs(args.cache_root)
subprocess.check_call([ADB_PATH, 'root'])
# TODO(eseidel): Check the build.props, or find some way to avoid
# re-pulling every library every time. adb_gdb has code to do this
# but doesn't seem to notice when the set of needed libraries changed.
library_regexp = re.compile(r'(?P<library_path>/system/.*\.so)')
cat_maps_cmd = [ADB_PATH, 'shell', 'cat', '/proc/%s/maps' % args.pid]
maps_lines = subprocess.check_output(cat_maps_cmd).strip().split('\n')
# adb shell doesn't return the return code from the shell?
if not maps_lines or 'No such file or directory' in maps_lines[0]:
print 'Failed to get maps for pid %s on device.' % args.pid
sys.exit(1)
def library_from_line(line):
result = library_regexp.search(line)
if not result:
return None
return result.group('library_path')
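  # Illustrative /proc/<pid>/maps line (hypothetical addresses) and the path
  # library_from_line extracts from it:
  #   "b6e7a000-b6f8d000 r-xp 00000000 b3:17 1234  /system/lib/libc.so"
  #   -> "/system/lib/libc.so"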
dev_null = open(os.devnull, 'w') # Leaking.
to_pull = set(filter(None, map(library_from_line, maps_lines)))
to_pull.add('/system/bin/linker') # Unclear why but adb_gdb pulls this too.
for library_path in sorted(to_pull):
# Not using os.path.join since library_path is absolute.
dest_file = os.path.normpath("%s/%s" % (args.cache_root, library_path))
dest_dir = os.path.dirname(dest_file)
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
if os.path.exists(dest_file):
continue
print '%s -> %s' % (library_path, dest_file)
pull_cmd = [ADB_PATH, 'pull', library_path, dest_file]
subprocess.check_call(pull_cmd, stderr=dev_null)
if __name__ == '__main__':
main()
|
quietcoolwu/learn-python3-master | refs/heads/master | samples/multitask/task_worker.py | 19 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import time, sys, queue
from multiprocessing.managers import BaseManager
# Create a QueueManager similar to the one in task_master.py:
class QueueManager(BaseManager):
pass
# Since this QueueManager only fetches Queues over the network, register by name only:
QueueManager.register('get_task_queue')
QueueManager.register('get_result_queue')
# Connect to the server, i.e. the machine running task_master.py:
server_addr = '127.0.0.1'
print('Connect to server %s...' % server_addr)
# The port and authkey must match the settings in task_master.py exactly:
m = QueueManager(address=(server_addr, 5000), authkey=b'abc')
# Connect over the network:
m.connect()
# Get the Queue objects:
task = m.get_task_queue()
result = m.get_result_queue()
# Fetch tasks from the task queue and write results into the result queue:
for i in range(10):
try:
n = task.get(timeout=1)
print('run task %d * %d...' % (n, n))
r = '%d * %d = %d' % (n, n, n*n)
time.sleep(1)
result.put(r)
    except queue.Empty:
print('task queue is empty.')
# Done processing:
print('worker exit.')
|
jeetsukumaran/archipelago | refs/heads/master | bin/archipelago-summarize.py | 1 | #! /usr/bin/env python
import sys
import os
import argparse
import json
import collections
import csv
import dendropy
import re
from archipelago import summarize
from archipelago import utility
from archipelago.utility import USER_SPECIFIED_TRAIT_TYPE_INDEX_START_VALUE
def parse_trait_states(labels):
if not labels:
return collections.OrderedDict()
trait_states = []
for label in labels:
match = re.match(r"\s*(.*?)\s*:\s*(.*)\s*", label)
if not match:
raise ValueError("Cannot parse fieldname and label (format required: fieldname:value): {}".format(label))
fieldname, value = match.groups(0)
# The trait states need to be an integer if
# ArchipelagoModel.decode_label coerces the labels to integers.
# The reason we do NOT want it parsed to an integer value
# is to allow null traits 'NA', 'null', etc.
trait_states.append( (int(fieldname), value,) )
return trait_states
def parse_fieldname_and_value(labels):
if not labels:
return collections.OrderedDict()
fieldname_value_map = collections.OrderedDict()
for label in labels:
match = re.match(r"\s*(.*?)\s*:\s*(.*)\s*", label)
if not match:
raise ValueError("Cannot parse fieldname and label (format required: fieldname:value): {}".format(label))
fieldname, value = match.groups(0)
fieldname_value_map[fieldname] = value
return fieldname_value_map
def main():
parser = argparse.ArgumentParser()
source_options = parser.add_argument_group("Source Options")
source_options.add_argument(
"source_paths",
nargs="+",
help="Path(s) to simulated tree files.")
source_options.add_argument("-f", "--format",
dest="schema",
type=str,
default="newick",
choices=["nexus", "newick"],
help="Input data format (default: '%(default)s').")
source_options.add_argument("--no-preserve-underscores",
action="store_true",
default=False,
help="Convert unquoted underscores to spaces, as dictated by the Newick/NEXUS standards.")
summarization_options = parser.add_argument_group("Summarization Options")
summarization_options.add_argument("-x", "--exclude-trait",
action="append",
help="Index of trait to exclude, with first trait indexed with value {}; multiple traits can be specified by repeating the option (e.g., '--exclude-trait {} --ingore-trait {}').".format(
USER_SPECIFIED_TRAIT_TYPE_INDEX_START_VALUE,
USER_SPECIFIED_TRAIT_TYPE_INDEX_START_VALUE,
USER_SPECIFIED_TRAIT_TYPE_INDEX_START_VALUE+1,
))
summarization_options.add_argument("-X", "--exclude-trait-state",
action="append",
help="States of traits to exclude, (in format <TRAIT-INDEX:STATE-INDEX>. Not that traits are {}-based indexed, and states are 0-based indexed. E.g. '--exclude-trait-state 1:0 --exclude-trait-state 1:3').".format(
USER_SPECIFIED_TRAIT_TYPE_INDEX_START_VALUE,
))
summarization_options.add_argument("--no-drop-trees-not-spanning-all-areas",
action="store_true",
default=False,
help="Do NOT skip trees that do not span all areas.")
summarization_options.add_argument("--drop-trees-with-single-lineage-areas",
action="store_true",
default=False,
help="Skip trees that have areas with only one lineage.")
summarization_options.add_argument("--drop-trees-with-single-lineage-trait-states",
action="store_true",
default=False,
help="Skip trees that have trait states with only one lineage.")
    output_options = parser.add_argument_group("Output Options")
output_options.add_argument("-l", "--labels",
action="append",
help="Labels to append to output (in format <FIELD-NAME>:value;)")
output_options.add_argument(
"-o", "--output-filepath",
default=None,
help="Path to output file.")
output_options.add_argument( "--no-header-row",
action="store_true",
default=False,
help="Do not write a header row.")
output_options.add_argument( "--append",
action="store_true",
default=False,
help="Append to output file if it already exists instead of overwriting.")
run_options = parser.add_argument_group("Run Options")
run_options.add_argument("-q", "--quiet",
action="store_true",
default=False,
help="Suppress progress messages.")
args = parser.parse_args()
args.group_processed_trees_by_model = False
if args.quiet:
        run_logger = None
else:
run_logger = utility.RunLogger(
name="archipelago",
stderr_logging_level="info",
log_to_file=False,
)
# log_frequency_percentage = 1
# def _progress_update_fn(current_idx, total):
if args.exclude_trait:
trait_indexes_to_exclude = [int(i) - USER_SPECIFIED_TRAIT_TYPE_INDEX_START_VALUE for i in args.exclude_trait]
assert -1 not in trait_indexes_to_exclude
else:
trait_indexes_to_exclude = None
trait_states_to_exclude = parse_trait_states(args.exclude_trait_state)
tree_summarizer = summarize.TreeSummarizer(
drop_trees_not_spanning_all_areas=not args.no_drop_trees_not_spanning_all_areas,
trait_indexes_to_exclude=trait_indexes_to_exclude,
trait_states_to_exclude=trait_states_to_exclude,
drop_trees_with_single_lineage_areas=args.drop_trees_with_single_lineage_areas,
drop_trees_with_single_lineage_trait_states=args.drop_trees_with_single_lineage_trait_states,
run_logger=run_logger,
)
summary_results = []
output_root_dir = "."
output_dir = output_root_dir
# if not os.path.exists(output_dir):
# os.makedirs(output_dir)
extra_fields = parse_fieldname_and_value(args.labels)
stats_fields = set()
try:
for source_idx, tree_filepath in enumerate(args.source_paths):
if not args.quiet:
sys.stderr.write("Processing job {} of {}: {}\n".format(source_idx+1, len(args.source_paths), tree_filepath))
trees = dendropy.TreeList.get_from_path(
tree_filepath,
schema=args.schema,
preserve_underscores=not args.no_preserve_underscores,
suppress_internal_node_taxa=True,
suppress_external_node_taxa=True,
)
processed_trees, sub_stats_fields, sub_results = tree_summarizer.summarize_trees(
trees,
# progress_update_fn=_progress_update_fn,
# lineage_data_source=lineage_data_source,
# traits_filepath=traits_filepath,
# areas_filepath=areas_filepath,
)
stats_fields.update(sub_stats_fields)
if extra_fields:
for r in sub_results:
r.update(extra_fields)
summary_results.extend(sub_results)
except KeyboardInterrupt:
pass
stats_fields = sorted(list(stats_fields))
all_fields = list(extra_fields.keys()) + stats_fields
out = utility.open_output_file_for_csv_writer(
filepath=args.output_filepath,
append=args.append)
with out:
writer = csv.DictWriter(
out,
fieldnames=all_fields,
restval="NA",
delimiter=",",
lineterminator=os.linesep,
)
if not args.no_header_row:
writer.writeheader()
writer.writerows(summary_results)
if __name__ == "__main__":
main()
|
davidszotten/pytest-django | refs/heads/master | pytest_django/client.py | 10 | from django.core.handlers.wsgi import WSGIRequest
from django.test.client import RequestFactory as VanillaRequestFactory
from django.test.client import FakePayload
class PytestDjangoRequestFactory(VanillaRequestFactory):
"""
Based on Django 1.3's RequestFactory, but fixes an issue that causes an
error to be thrown when creating a WSGIRequest instance with a plain call
to RequestFactory.rf().
This issue is fixed in Django 1.4, so this class will be unnecessary when
support for Django 1.3 is dropped.
https://code.djangoproject.com/ticket/15898
Incorporates code from https://code.djangoproject.com/changeset/16933.
"""
def request(self, **request):
environ = {
'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
'PATH_INFO': str('/'),
'REMOTE_ADDR': str('127.0.0.1'),
'REQUEST_METHOD': str('GET'),
'SCRIPT_NAME': str(''),
'SERVER_NAME': str('testserver'),
'SERVER_PORT': str('80'),
'SERVER_PROTOCOL': str('HTTP/1.1'),
'wsgi.version': (1, 0),
'wsgi.url_scheme': str('http'),
'wsgi.input': FakePayload(b''),
'wsgi.errors': self.errors,
'wsgi.multiprocess': True,
'wsgi.multithread': False,
'wsgi.run_once': False,
}
environ.update(self.defaults)
environ.update(request)
return WSGIRequest(environ)
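# Illustrative usage: PytestDjangoRequestFactory().request() returns a bare
# GET WSGIRequest for '/', the same plain call that raises KeyError on
# Django 1.3's vanilla RequestFactory (probed in the try/except below).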
try:
VanillaRequestFactory().request()
RequestFactory = VanillaRequestFactory
except KeyError:
RequestFactory = PytestDjangoRequestFactory
|
cuihantao/cvxopt | refs/heads/master | examples/book/chap6/cvxfit.py | 4 | # Figure 6.24, page 339.
# Least-squares fit of a convex function.
from cvxopt import solvers, matrix, spmatrix, mul
from pickle import load
#solvers.options['show_progress'] = 0
data = load(open('cvxfit.bin','rb'))
u, y = data['u'], data['y']
m = len(u)
# minimize (1/2) * || yhat - y ||_2^2
# subject to yhat[j] >= yhat[i] + g[i]' * (u[j] - u[i]), j, i = 0,...,m-1
#
# Variables yhat (m), g (m).
nvars = 2*m
P = spmatrix(1.0, range(m), range(m), (nvars, nvars))
q = matrix(0.0, (nvars,1))
q[:m] = -y
# m blocks (i = 0,...,m-1) of linear inequalities
#
# yhat[i] + g[i]' * (u[j] - u[i]) <= yhat[j], j = 0,...,m-1.
G = spmatrix([],[],[], (m**2, nvars))
I = spmatrix(1.0, range(m), range(m))
for i in range(m):
# coefficients of yhat[i]
G[list(range(i*m, (i+1)*m)), i] = 1.0
# coefficients of g[i]
G[list(range(i*m, (i+1)*m)), m+i] = u - u[i]
# coefficients of yhat[j]
G[list(range(i*m, (i+1)*m)), list(range(m))] -= I
h = matrix(0.0, (m**2,1))
sol = solvers.qp(P, q, G, h)
yhat = sol['x'][:m]
g = sol['x'][m:]
nopts = 1000
ts = [ 2.2/nopts * t for t in range(nopts) ]
f = [ max(yhat + mul(g, t-u)) for t in ts ]
try: import pylab
except ImportError: pass
else:
pylab.figure(1, facecolor='w')
pylab.plot(u, y, 'wo', markeredgecolor='b')
pylab.plot(ts, f, '-g')
pylab.axis([-0.1, 2.3, -1.1, 7.2])
pylab.axis('off')
pylab.title('Least-squares fit of convex function (fig. 6.24)')
pylab.show()
|
HiroIshikawa/21playground | refs/heads/master | microblog/flask/lib/python3.5/site-packages/coverage/bytecode.py | 45 | # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""Bytecode manipulation for coverage.py"""
import opcode
import types
from coverage.backward import byte_to_int
class ByteCode(object):
"""A single bytecode."""
def __init__(self):
# The offset of this bytecode in the code object.
self.offset = -1
# The opcode, defined in the `opcode` module.
self.op = -1
# The argument, a small integer, whose meaning depends on the opcode.
self.arg = -1
# The offset in the code object of the next bytecode.
self.next_offset = -1
# The offset to jump to.
self.jump_to = -1
class ByteCodes(object):
"""Iterator over byte codes in `code`.
This handles the logic of EXTENDED_ARG byte codes internally. Those byte
codes are not returned by this iterator.
Returns `ByteCode` objects.
"""
def __init__(self, code):
self.code = code
def __getitem__(self, i):
return byte_to_int(self.code[i])
def __iter__(self):
offset = 0
ext_arg = 0
while offset < len(self.code):
bc = ByteCode()
bc.op = self[offset]
bc.offset = offset
next_offset = offset+1
if bc.op >= opcode.HAVE_ARGUMENT:
bc.arg = ext_arg + self[offset+1] + 256*self[offset+2]
next_offset += 2
label = -1
if bc.op in opcode.hasjrel:
label = next_offset + bc.arg
elif bc.op in opcode.hasjabs:
label = bc.arg
bc.jump_to = label
bc.next_offset = offset = next_offset
if bc.op == opcode.EXTENDED_ARG:
ext_arg = bc.arg * 256*256
else:
ext_arg = 0
yield bc
class CodeObjects(object):
"""Iterate over all the code objects in `code`."""
def __init__(self, code):
self.stack = [code]
def __iter__(self):
while self.stack:
# We're going to return the code object on the stack, but first
# push its children for later returning.
code = self.stack.pop()
for c in code.co_consts:
if isinstance(c, types.CodeType):
self.stack.append(c)
yield code
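# Illustrative usage: CodeObjects(compile("def f(): pass", "<s>", "exec"))
# yields the module's code object first, then the nested code object for f.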
|
rjawor/concordia-server | refs/heads/master | mgiza-aligner/mgiza/experimental/dual-model/MGIZA/scripts/plain2snt-hasvcb.py | 8 | #!/usr/bin/env python
from sys import *
def loadvcb(fname, out):
    # Read an existing vocabulary file, echo it into the extended-vocabulary
    # output file, and return a word -> id mapping.
    vcb = {}
    df = open(fname, "r")
    for line in df:
        out.write(line)
        ws = line.strip().split()
        wid = int(ws[0])
        wd = ws[1]
        vcb[wd] = wid
    return vcb

if len(argv) < 9:
    stderr.write("Error, the input should be \n")
    stderr.write("%s evcb fvcb etxt ftxt esnt(out) fsnt(out) evcbx(out) fvcbx(out)\n" % argv[0])
    stderr.write("You should concatenate the evcbx and fvcbx to existing vcb files\n")
    exit()

ein = open(argv[3], "r")
fin = open(argv[4], "r")
eout = open(argv[5], "w")
fout = open(argv[6], "w")
evcbx = open(argv[7], "w")
fvcbx = open(argv[8], "w")
evcb = loadvcb(argv[1], evcbx)
fvcb = loadvcb(argv[2], fvcbx)

i = 0
while True:
    i += 1
    eline = ein.readline()
    fline = fin.readline()
    if len(eline) == 0 or len(fline) == 0:
        break
    ewords = eline.strip().split()
    fwords = fline.strip().split()
    el = []
    fl = []
    for w in ewords:
        if w in evcb:
            el.append(evcb[w])
        elif w.lower() in evcb:
            el.append(evcb[w.lower()])
        else:
            # Unseen word: assign a fresh id and append it to the extended
            # vocabulary file.
            nid = len(evcb) + 1
            evcb[w.lower()] = nid
            evcbx.write("%d %s 1\n" % (nid, w))
            el.append(nid)
    for w in fwords:
        if w in fvcb:
            fl.append(fvcb[w])
        elif w.lower() in fvcb:
            fl.append(fvcb[w.lower()])
        else:
            nid = len(fvcb) + 1
            fvcb[w.lower()] = nid
            fvcbx.write("%d %s 1\n" % (nid, w))
            fl.append(nid)
    # snt format: a count line, then source-sentence ids, then target-sentence
    # ids. esnt gets (el, fl); fsnt gets (fl, el).
    eout.write("1\n")
    fout.write("1\n")
    for I in el:
        eout.write("%d " % I)
    eout.write("\n")
    for I in fl:
        eout.write("%d " % I)
        fout.write("%d " % I)
    eout.write("\n")
    fout.write("\n")
    for I in el:
        fout.write("%d " % I)
    fout.write("\n")

fout.close()
eout.close()
fvcbx.close()
evcbx.close()
|
Peddle/hue | refs/heads/master | desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/SelfTest/Random/Fortuna/test_FortunaGenerator.py | 120 | # -*- coding: utf-8 -*-
#
# SelfTest/Random/Fortuna/test_FortunaGenerator.py: Self-test for the FortunaGenerator module
#
# Written in 2008 by Dwayne C. Litzenberger <[email protected]>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-tests for Crypto.Random.Fortuna.FortunaGenerator"""
__revision__ = "$Id$"
import sys
if sys.version_info[0] == 2 and sys.version_info[1] == 1:
from Crypto.Util.py21compat import *
from Crypto.Util.py3compat import *
import unittest
from binascii import b2a_hex
class FortunaGeneratorTests(unittest.TestCase):
def setUp(self):
global FortunaGenerator
from Crypto.Random.Fortuna import FortunaGenerator
def test_generator(self):
"""FortunaGenerator.AESGenerator"""
fg = FortunaGenerator.AESGenerator()
# We shouldn't be able to read data until we've seeded the generator
self.assertRaises(Exception, fg.pseudo_random_data, 1)
self.assertEqual(0, fg.counter.next_value())
# Seed the generator, which should set the key and increment the counter.
fg.reseed(b("Hello"))
self.assertEqual(b("0ea6919d4361551364242a4ba890f8f073676e82cf1a52bb880f7e496648b565"), b2a_hex(fg.key))
self.assertEqual(1, fg.counter.next_value())
# Read 2 full blocks from the generator
self.assertEqual(b("7cbe2c17684ac223d08969ee8b565616") + # counter=1
b("717661c0d2f4758bd6ba140bf3791abd"), # counter=2
b2a_hex(fg.pseudo_random_data(32)))
# Meanwhile, the generator will have re-keyed itself and incremented its counter
self.assertEqual(b("33a1bb21987859caf2bbfc5615bef56d") + # counter=3
b("e6b71ff9f37112d0c193a135160862b7"), # counter=4
b2a_hex(fg.key))
self.assertEqual(5, fg.counter.next_value())
# Read another 2 blocks from the generator
self.assertEqual(b("fd6648ba3086e919cee34904ef09a7ff") + # counter=5
b("021f77580558b8c3e9248275f23042bf"), # counter=6
b2a_hex(fg.pseudo_random_data(32)))
# Try to read more than 2**20 bytes using the internal function. This should fail.
self.assertRaises(AssertionError, fg._pseudo_random_data, 2**20+1)
def get_tests(config={}):
from Crypto.SelfTest.st_common import list_test_cases
return list_test_cases(FortunaGeneratorTests)
if __name__ == '__main__':
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
|
delta2323/chainer | refs/heads/master | tests/chainer_tests/functions_tests/loss_tests/test_squared_error.py | 4 | import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
@testing.parameterize(
{'shape': (4, 3)},
{'shape': (4, 3, 2)},
{'shape': (4,)},
{'shape': ()},
{'shape': (1,)},
{'shape': (1, 1)},
)
class TestSquaredError(unittest.TestCase):
def setUp(self):
self.x0 = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
self.x1 = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
self.gy = numpy.random.random(self.shape).astype(numpy.float32)
def check_forward(self, x0_data, x1_data):
x0 = chainer.Variable(x0_data)
x1 = chainer.Variable(x1_data)
loss = functions.squared_error(x0, x1)
loss_value = cuda.to_cpu(loss.data)
self.assertEqual(loss_value.dtype, numpy.float32)
self.assertEqual(loss_value.shape, x0_data.shape)
for i in numpy.ndindex(self.x0.shape):
# Compute expected value
loss_expect = (self.x0[i] - self.x1[i]) ** 2
self.assertAlmostEqual(loss_value[i], loss_expect, places=5)
@condition.retry(3)
def test_forward_cpu(self):
self.check_forward(self.x0, self.x1)
@attr.gpu
@condition.retry(3)
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x0), cuda.to_gpu(self.x1))
def check_backward(self, x0_data, x1_data, y_grad):
gradient_check.check_backward(
functions.SquaredError(),
(x0_data, x1_data), y_grad, eps=1e-2)
@condition.retry(3)
def test_backward_cpu(self):
self.check_backward(self.x0, self.x1, self.gy)
@attr.gpu
@condition.retry(3)
def test_backward_gpu(self):
self.check_backward(
cuda.to_gpu(self.x0), cuda.to_gpu(self.x1), cuda.to_gpu(self.gy))
testing.run_module(__name__, __file__)
|
edxnercel/edx-platform | refs/heads/master | common/djangoapps/util/memcache.py | 251 | """
This module provides a KEY_FUNCTION suitable for use with a memcache backend
so that we can cache any keys, not just ones that memcache would ordinarily accept
"""
from django.utils.encoding import smart_str
import hashlib
import urllib
def fasthash(string):
"""
Hashes `string` into a string representation of a 128-bit digest.
"""
md4 = hashlib.new("md4")
md4.update(string)
return md4.hexdigest()
def cleaned_string(val):
"""
Converts `val` to unicode and URL-encodes special characters
(including quotes and spaces)
"""
return urllib.quote_plus(smart_str(val))
def safe_key(key, key_prefix, version):
"""
Given a `key`, `key_prefix`, and `version`,
return a key that is safe to use with memcache.
`key`, `key_prefix`, and `version` can be numbers, strings, or unicode.
"""
# Clean for whitespace and control characters, which
# cause memcache to raise an exception
key = cleaned_string(key)
key_prefix = cleaned_string(key_prefix)
version = cleaned_string(version)
# Attempt to combine the prefix, version, and key
combined = ":".join([key_prefix, version, key])
# If the total length is too long for memcache, hash it
if len(combined) > 250:
combined = fasthash(combined)
# Return the result
return combined
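# Illustrative example (hypothetical key): safe_key('user profile', 'myapp', 1)
# URL-encodes the space and returns 'myapp:1:user+profile'; a combined key
# longer than 250 characters would instead be replaced by its MD4 hex digest.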
|
DirkHoffmann/indico | refs/heads/master | indico/modules/events/sessions/models/sessions.py | 1 | # This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from datetime import timedelta
from operator import attrgetter
from sqlalchemy.ext.declarative import declared_attr
from indico.core.db import db
from indico.core.db.sqlalchemy.attachments import AttachedItemsMixin
from indico.core.db.sqlalchemy.colors import ColorMixin, ColorTuple
from indico.core.db.sqlalchemy.descriptions import DescriptionMixin, RenderMode
from indico.core.db.sqlalchemy.locations import LocationMixin
from indico.core.db.sqlalchemy.notes import AttachedNotesMixin
from indico.core.db.sqlalchemy.protection import ProtectionManagersMixin
from indico.core.db.sqlalchemy.util.models import auto_table_args
from indico.core.db.sqlalchemy.util.queries import increment_and_get
from indico.modules.events.management.util import get_non_inheriting_objects
from indico.modules.events.timetable.models.entries import TimetableEntry, TimetableEntryType
from indico.util.caching import memoize_request
from indico.util.locators import locator_property
from indico.util.string import format_repr
def _get_next_friendly_id(context):
"""Get the next friendly id for a session."""
from indico.modules.events import Event
event_id = context.current_parameters['event_id']
assert event_id is not None
return increment_and_get(Event._last_friendly_session_id, Event.id == event_id)
class Session(DescriptionMixin, ColorMixin, ProtectionManagersMixin, LocationMixin, AttachedItemsMixin,
AttachedNotesMixin, db.Model):
__tablename__ = 'sessions'
__auto_table_args = (db.Index(None, 'friendly_id', 'event_id', unique=True,
postgresql_where=db.text('NOT is_deleted')),
{'schema': 'events'})
location_backref_name = 'sessions'
disallowed_protection_modes = frozenset()
inheriting_have_acl = True
default_colors = ColorTuple('#202020', '#e3f2d3')
allow_relationship_preloading = True
PRELOAD_EVENT_ATTACHED_ITEMS = True
PRELOAD_EVENT_NOTES = True
ATTACHMENT_FOLDER_ID_COLUMN = 'session_id'
possible_render_modes = {RenderMode.markdown}
default_render_mode = RenderMode.markdown
@declared_attr
def __table_args__(cls):
return auto_table_args(cls)
id = db.Column(
db.Integer,
primary_key=True
)
#: The human-friendly ID for the session
friendly_id = db.Column(
db.Integer,
nullable=False,
default=_get_next_friendly_id
)
event_id = db.Column(
db.Integer,
db.ForeignKey('events.events.id'),
index=True,
nullable=False
)
type_id = db.Column(
db.Integer,
db.ForeignKey('events.session_types.id'),
index=True,
nullable=True
)
title = db.Column(
db.String,
nullable=False
)
code = db.Column(
db.String,
nullable=False,
default=''
)
default_contribution_duration = db.Column(
db.Interval,
nullable=False,
default=timedelta(minutes=20)
)
is_deleted = db.Column(
db.Boolean,
nullable=False,
default=False
)
event = db.relationship(
'Event',
lazy=True,
backref=db.backref(
'sessions',
primaryjoin='(Session.event_id == Event.id) & ~Session.is_deleted',
cascade='all, delete-orphan',
lazy=True
)
)
acl_entries = db.relationship(
'SessionPrincipal',
lazy=True,
cascade='all, delete-orphan',
collection_class=set,
backref='session'
)
blocks = db.relationship(
'SessionBlock',
lazy=True,
cascade='all, delete-orphan',
backref=db.backref(
'session',
lazy=False
)
)
type = db.relationship(
'SessionType',
lazy=True,
backref=db.backref(
'sessions',
lazy=True
)
)
# relationship backrefs:
# - attachment_folders (AttachmentFolder.session)
# - contributions (Contribution.session)
# - default_for_tracks (Track.default_session)
# - legacy_mapping (LegacySessionMapping.session)
# - note (EventNote.session)
def __init__(self, **kwargs):
# explicitly initialize this relationship with None to avoid
# an extra query to check whether there is an object associated
# when assigning a new one (e.g. during cloning)
kwargs.setdefault('note', None)
super().__init__(**kwargs)
@classmethod
def preload_acl_entries(cls, event):
cls.preload_relationships(cls.query.with_parent(event), 'acl_entries')
@property
def location_parent(self):
return self.event
@property
def protection_parent(self):
return self.event
@property
def session(self):
"""Convenience property so all event entities have it."""
return self
@property
@memoize_request
def start_dt(self):
from indico.modules.events.sessions.models.blocks import SessionBlock
start_dt = (self.event.timetable_entries
.with_entities(TimetableEntry.start_dt)
.join('session_block')
.filter(TimetableEntry.type == TimetableEntryType.SESSION_BLOCK,
SessionBlock.session == self)
.order_by(TimetableEntry.start_dt)
.first())
return start_dt[0] if start_dt else None
@property
@memoize_request
def end_dt(self):
sorted_blocks = sorted(self.blocks, key=attrgetter('timetable_entry.end_dt'), reverse=True)
return sorted_blocks[0].timetable_entry.end_dt if sorted_blocks else None
@property
@memoize_request
def conveners(self):
from indico.modules.events.sessions.models.blocks import SessionBlock
from indico.modules.events.sessions.models.persons import SessionBlockPersonLink
return (SessionBlockPersonLink.query
.join(SessionBlock)
.filter(SessionBlock.session_id == self.id)
.distinct(SessionBlockPersonLink.person_id)
.all())
@property
def is_poster(self):
return self.type.is_poster if self.type else False
@locator_property
def locator(self):
return dict(self.event.locator, session_id=self.id)
def get_non_inheriting_objects(self):
"""Get a set of child objects that do not inherit protection."""
return get_non_inheriting_objects(self)
def __repr__(self):
return format_repr(self, 'id', is_deleted=False, _text=self.title)
def can_manage_contributions(self, user, allow_admin=True):
"""Check whether a user can manage contributions within the session."""
from indico.modules.events.sessions.util import session_coordinator_priv_enabled
if user is None:
return False
elif self.session.can_manage(user, allow_admin=allow_admin):
return True
elif (self.session.can_manage(user, 'coordinate') and
session_coordinator_priv_enabled(self.event, 'manage-contributions')):
return True
else:
return False
def can_manage_blocks(self, user, allow_admin=True):
"""Check whether a user can manage session blocks.
This only applies to the blocks themselves, not to contributions inside them.
"""
from indico.modules.events.sessions.util import session_coordinator_priv_enabled
if user is None:
return False
# full session manager can always manage blocks. this also includes event managers and higher.
elif self.session.can_manage(user, allow_admin=allow_admin):
return True
        # session coordinator if block management is allowed
elif (self.session.can_manage(user, 'coordinate') and
session_coordinator_priv_enabled(self.event, 'manage-blocks')):
return True
else:
return False
Session.register_location_events()
Session.register_protection_events()
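# Usage sketch (hypothetical; assumes a running Indico app context with an
# existing ``event`` and ``user`` — not part of the model itself):
#
#   sess = Session(event=event, title='Plenary')
#   db.session.add(sess)
#   db.session.flush()               # friendly_id is assigned via the default
#   sess.can_manage_blocks(user)     # True for managers, or for coordinators
#                                    # when the 'manage-blocks' priv is enabled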
|
density215/d215-miniblog | refs/heads/master | django/contrib/localflavor/sk/forms.py | 344 | """
Slovak-specific form helpers
"""
from django.forms.fields import Select, RegexField
from django.utils.translation import ugettext_lazy as _
class SKRegionSelect(Select):
"""
    A select widget with a list of Slovak regions as choices.
"""
def __init__(self, attrs=None):
from sk_regions import REGION_CHOICES
super(SKRegionSelect, self).__init__(attrs, choices=REGION_CHOICES)
class SKDistrictSelect(Select):
"""
    A select widget with a list of Slovak districts as choices.
"""
def __init__(self, attrs=None):
from sk_districts import DISTRICT_CHOICES
super(SKDistrictSelect, self).__init__(attrs, choices=DISTRICT_CHOICES)
class SKPostalCodeField(RegexField):
"""
    A form field that validates its input as a Slovak postal code.
    Valid formats are XXXXX or XXX XX, where X is a digit.
"""
default_error_messages = {
'invalid': _(u'Enter a postal code in the format XXXXX or XXX XX.'),
}
def __init__(self, *args, **kwargs):
super(SKPostalCodeField, self).__init__(r'^\d{5}$|^\d{3} \d{2}$',
max_length=None, min_length=None, *args, **kwargs)
def clean(self, value):
"""
Validates the input and returns a string that contains only numbers.
Returns an empty string for empty values.
"""
v = super(SKPostalCodeField, self).clean(value)
return v.replace(' ', '')
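# A minimal usage sketch (requires Django on the path and a configured
# settings module; the input values are only examples):
if __name__ == '__main__':
    from django.conf import settings
    settings.configure()
    field = SKPostalCodeField()
    assert field.clean(u'841 04') == u'84104'
    assert field.clean(u'84104') == u'84104'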
|
mzdaniel/pycon | refs/heads/2012 | pycon_project/apps/boxes/views.py | 2 | from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from django.contrib.admin.views.decorators import staff_member_required
from boxes.forms import BoxForm
from boxes.models import Box
@staff_member_required
def box_edit(request, pk):
box = get_object_or_404(Box, pk=pk)
if request.method == "POST":
form = BoxForm(request.POST, instance=box)
if form.is_valid():
form.save()
return render_to_response("boxes/refresh.html", {})
else:
form = BoxForm(instance=box)
ctx = {
"form": form,
"box": box,
}
ctx = RequestContext(request, ctx)
return render_to_response("boxes/box_edit.html", ctx)
@staff_member_required
def box_create(request, label):
if request.method == "POST":
form = BoxForm(request.POST)
if form.is_valid():
box = form.save(commit=False)
box.label = label
box.created_by = request.user
box.last_updated_by = request.user
box.save()
return render_to_response("boxes/refresh.html", {})
else:
form = BoxForm()
ctx = {
"form": form,
"label": label
}
ctx = RequestContext(request, ctx)
return render_to_response("boxes/box_create.html", ctx)
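# How these staff-only views might be wired up in a urls.py (a sketch; the
# URL patterns and names here are hypothetical, not part of the original app):
#
# from django.conf.urls.defaults import patterns, url
#
# urlpatterns = patterns("",
#     url(r"^box/(?P<pk>\d+)/edit/$", "boxes.views.box_edit", name="box_edit"),
#     url(r"^box/(?P<label>[-\w]+)/$", "boxes.views.box_create", name="box_create"),
# )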
|
ITURO/ituro | refs/heads/master | ituro/projects/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
iandriver/RNA-sequence-tools | refs/heads/master | Tophat_Cluster_submission/qsub_cuffdiff.py | 2 | import fnmatch
import os
import csv
import subprocess
def write_file(filename, contents):
"""Write the given contents to a text file.
ARGUMENTS
filename (string) - name of the file to write to, creating if it doesn't exist
contents (string) - contents of the file to be written
"""
    # Write the file contents; the context manager closes the file for us
    with open(filename, 'w') as outfile:
        outfile.write(contents)
    return
def qsub_submit(command_filename, hold_jobid = None, fname = None):
"""Submit the given command filename to the queue.
ARGUMENTS
command_filename (string) - the name of the command file to submit
OPTIONAL ARGUMENTS
hold_jobid (int) - job id to hold on as a prerequisite for execution
RETURNS
jobid (integer) - the jobid
"""
# Form command
command = 'qsub'
if fname: command += ' -N %s' % fname
if hold_jobid: command += ' -hold_jid %d' % hold_jobid
command += ' %s' % command_filename
# Submit the job and capture output.
print "> " + command
process = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
out, err = process.communicate()
print(out)
# Match job id
jobid = out.split(' ')[2]
return int(jobid)
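# Example of chaining jobs with hold_jobid (illustrative only; the script
# names below are hypothetical and this is not executed as part of this file):
#
#   write_file('align.sh', align_command)
#   align_id = qsub_submit('align.sh', fname='align')
#   write_file('count.sh', count_command)
#   qsub_submit('count.sh', hold_jobid=align_id, fname='count')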
result_file_name = ['results_cindy_rna']
cuffdiff_files_ctrl = ''
cuffdiff_files_7exp = ''
cuffdiff_files_14exp = ''
path = os.path.join('/netapp/home/idriver', result_file_name[0])
for root, dirnames, filenames in os.walk(path):
for filename in fnmatch.filter(filenames, 'accepted_hits.bam'):
sample_name = root.split('/')[-1]
if 'D7-CTL' in sample_name or 'D14-CTL' in sample_name:
cuffdiff_files_ctrl += os.path.join(root, filename)+','
elif 'D7-E' in sample_name:
cuffdiff_files_7exp += os.path.join(root, filename)+','
elif 'D14-E' in sample_name:
cuffdiff_files_14exp += os.path.join(root, filename)+','
cuff_diff_files = cuffdiff_files_ctrl.strip(',')+' '+ cuffdiff_files_7exp.strip(',')+' '+ cuffdiff_files_14exp.strip(',')
annotation_file = '/netapp/home/idriver/genes_E_RS.gtf'
index_gen_loc = '/netapp/home/idriver/Norm_mm10_ERCC_RS.fa'
cuff_name = 'cuffdiff_'+result_file_name[0]
cuffdiff_cmd = 'cuffdiff -p 8 -u -b '+ index_gen_loc+ ' -o '+cuff_name+' '+annotation_file+' '+cuff_diff_files
print cuffdiff_cmd
mk_dir = 'mkdir -p '+os.path.join('/netapp/home/idriver', cuff_name)
subprocess.call(mk_dir, shell=True)
command = """\
#!/bin/sh
#$ -l arch=linux-x64
#$ -S /bin/bash
#$ -o /netapp/home/idriver/%(cuff_name)s
#$ -e /netapp/home/idriver/error_spc
#$ -cwd
#$ -r y
#$ -j y
#$ -l netapp=10G,scratch=20G,mem_total=22G
#$ -pe smp 8
#$ -R yes
#$ -l h_rt=6:59:00
set echo on
date
hostname
pwd
export PATH=$PATH:${HOME}/bin
PATH=$PATH:/netapp/home/idriver/cufflinks-2.2.1.Linux_x86_64
PATH=$PATH:/netapp/home/idriver/bin/bowtie2-2.2.3
PATH=$PATH:/netapp/home/idriver/bin/samtools-0.1.19_2
PATH=$PATH:/netapp/home/idriver/bin/tophat-2.0.13.Linux_x86_64
PATH=$PATH:/usr/bin/gunzip
export PATH
export TMPDIR=/scratch
echo $TMPDIR
cd $TMPDIR
mkdir %(cuff_name)s
%(cuffdiff_cmd)s
# Copy the results back to the project directory:
cd $TMPDIR
cp -r %(cuff_name)s/* /netapp/home/idriver/%(cuff_name)s
""" % vars()
filename = 'cuffdiff_'+result_file_name[0]+'.sh'
write_file(filename, command)
jobid = qsub_submit(filename, fname=cuff_name)
print "Submitted. jobid = %d" % jobid
# Write jobid to a file.
process = subprocess.Popen('echo %d > jobids' % jobid, stdout=subprocess.PIPE, shell = True)
out, err = process.communicate()
print(out)
|
cwtaylor/viper | refs/heads/master | viper/modules/peepdf/colorama/ansi.py | 81 | '''
This module generates ANSI character codes for printing colors to terminals.
See: http://en.wikipedia.org/wiki/ANSI_escape_code
'''
CSI = '\033['
def code_to_chars(code):
return CSI + str(code) + 'm'
class AnsiCodes(object):
def __init__(self, codes):
for name in dir(codes):
if not name.startswith('_'):
value = getattr(codes, name)
setattr(self, name, code_to_chars(value))
class AnsiFore:
BLACK = 30
RED = 31
GREEN = 32
YELLOW = 33
BLUE = 34
MAGENTA = 35
CYAN = 36
WHITE = 37
RESET = 39
class AnsiBack:
BLACK = 40
RED = 41
GREEN = 42
YELLOW = 43
BLUE = 44
MAGENTA = 45
CYAN = 46
WHITE = 47
RESET = 49
class AnsiStyle:
BRIGHT = 1
DIM = 2
NORMAL = 22
RESET_ALL = 0
Fore = AnsiCodes( AnsiFore )
Back = AnsiCodes( AnsiBack )
Style = AnsiCodes( AnsiStyle )
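# A small usage sketch: the generated attributes are plain escape-code
# strings, so they can be concatenated directly into terminal output.
if __name__ == '__main__':
    print(Fore.RED + Style.BRIGHT + 'error' + Style.RESET_ALL)
    print(Back.GREEN + 'ok' + Back.RESET)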
|
ryannathans/micropython | refs/heads/master | examples/hwapi/hwconfig_esp8266_esp12.py | 41 | from machine import Pin, Signal
# ESP12 module as used by many boards
# Blue LED on pin 2, active low (inverted)
LED = Signal(2, Pin.OUT, invert=True)
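# Example usage from the board's REPL (timings are arbitrary; the Signal
# object hides the inverted polarity of the pin):
#
#   import time
#   for _ in range(5):
#       LED.on()
#       time.sleep_ms(250)
#       LED.off()
#       time.sleep_ms(250)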
|
rizumu/django | refs/heads/master | django/test/html.py | 220 | """
Comparing two html documents.
"""
from __future__ import unicode_literals
import re
from django.utils import six
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.html_parser import HTMLParseError, HTMLParser
WHITESPACE = re.compile('\s+')
def normalize_whitespace(string):
return WHITESPACE.sub(' ', string)
@python_2_unicode_compatible
class Element(object):
def __init__(self, name, attributes):
self.name = name
self.attributes = sorted(attributes)
self.children = []
def append(self, element):
if isinstance(element, six.string_types):
element = force_text(element)
element = normalize_whitespace(element)
if self.children:
if isinstance(self.children[-1], six.string_types):
self.children[-1] += element
self.children[-1] = normalize_whitespace(self.children[-1])
return
elif self.children:
            # remove the last child if it is only whitespace
# this can result in incorrect dom representations since
# whitespace between inline tags like <span> is significant
if isinstance(self.children[-1], six.string_types):
if self.children[-1].isspace():
self.children.pop()
if element:
self.children.append(element)
def finalize(self):
def rstrip_last_element(children):
if children:
if isinstance(children[-1], six.string_types):
children[-1] = children[-1].rstrip()
if not children[-1]:
children.pop()
children = rstrip_last_element(children)
return children
rstrip_last_element(self.children)
for i, child in enumerate(self.children):
if isinstance(child, six.string_types):
self.children[i] = child.strip()
elif hasattr(child, 'finalize'):
child.finalize()
def __eq__(self, element):
if not hasattr(element, 'name'):
return False
if hasattr(element, 'name') and self.name != element.name:
return False
if len(self.attributes) != len(element.attributes):
return False
if self.attributes != element.attributes:
# attributes without a value is same as attribute with value that
# equals the attributes name:
# <input checked> == <input checked="checked">
for i in range(len(self.attributes)):
attr, value = self.attributes[i]
other_attr, other_value = element.attributes[i]
if value is None:
value = attr
if other_value is None:
other_value = other_attr
if attr != other_attr or value != other_value:
return False
if self.children != element.children:
return False
return True
def __hash__(self):
return hash((self.name,) + tuple(a for a in self.attributes))
def __ne__(self, element):
return not self.__eq__(element)
def _count(self, element, count=True):
if not isinstance(element, six.string_types):
if self == element:
return 1
i = 0
for child in self.children:
            # if both child and element are text content, do a simple
            # "text" in "text" substring check
if isinstance(child, six.string_types):
if isinstance(element, six.string_types):
if count:
i += child.count(element)
elif element in child:
return 1
else:
i += child._count(element, count=count)
if not count and i:
return i
return i
def __contains__(self, element):
return self._count(element, count=False) > 0
def count(self, element):
return self._count(element, count=True)
def __getitem__(self, key):
return self.children[key]
def __str__(self):
output = '<%s' % self.name
for key, value in self.attributes:
if value:
output += ' %s="%s"' % (key, value)
else:
output += ' %s' % key
if self.children:
output += '>\n'
output += ''.join(six.text_type(c) for c in self.children)
output += '\n</%s>' % self.name
else:
output += ' />'
return output
def __repr__(self):
return six.text_type(self)
@python_2_unicode_compatible
class RootElement(Element):
def __init__(self):
super(RootElement, self).__init__(None, ())
def __str__(self):
return ''.join(six.text_type(c) for c in self.children)
class Parser(HTMLParser):
SELF_CLOSING_TAGS = ('br', 'hr', 'input', 'img', 'meta', 'spacer',
'link', 'frame', 'base', 'col')
def __init__(self):
HTMLParser.__init__(self)
self.root = RootElement()
self.open_tags = []
self.element_positions = {}
def error(self, msg):
raise HTMLParseError(msg, self.getpos())
def format_position(self, position=None, element=None):
if not position and element:
position = self.element_positions[element]
if position is None:
position = self.getpos()
if hasattr(position, 'lineno'):
position = position.lineno, position.offset
return 'Line %d, Column %d' % position
@property
def current(self):
if self.open_tags:
return self.open_tags[-1]
else:
return self.root
def handle_startendtag(self, tag, attrs):
self.handle_starttag(tag, attrs)
if tag not in self.SELF_CLOSING_TAGS:
self.handle_endtag(tag)
def handle_starttag(self, tag, attrs):
# Special case handling of 'class' attribute, so that comparisons of DOM
# instances are not sensitive to ordering of classes.
attrs = [
(name, " ".join(sorted(value.split(" "))))
if name == "class"
else (name, value)
for name, value in attrs
]
element = Element(tag, attrs)
self.current.append(element)
if tag not in self.SELF_CLOSING_TAGS:
self.open_tags.append(element)
self.element_positions[element] = self.getpos()
def handle_endtag(self, tag):
if not self.open_tags:
self.error("Unexpected end tag `%s` (%s)" % (
tag, self.format_position()))
element = self.open_tags.pop()
while element.name != tag:
if not self.open_tags:
self.error("Unexpected end tag `%s` (%s)" % (
tag, self.format_position()))
element = self.open_tags.pop()
def handle_data(self, data):
self.current.append(data)
def handle_charref(self, name):
self.current.append('&%s;' % name)
def handle_entityref(self, name):
self.current.append('&%s;' % name)
def parse_html(html):
"""
Takes a string that contains *valid* HTML and turns it into a Python object
structure that can be easily compared against other HTML on semantic
    equivalence. Syntactic differences, such as which quotation marks are
    used for attribute values, are ignored.
"""
parser = Parser()
parser.feed(html)
parser.close()
document = parser.root
document.finalize()
# Removing ROOT element if it's not necessary
if len(document.children) == 1:
if not isinstance(document.children[0], six.string_types):
document = document.children[0]
return document
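# A quick illustration of the semantic comparison (requires Django on the
# path; the markup is only an example): attribute order, class order and
# quoting style do not affect equality.
if __name__ == '__main__':
    html_a = parse_html('<p class="b a" id="x">hi</p>')
    html_b = parse_html("<p id='x' class='a b'>hi</p>")
    assert html_a == html_b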
|
wglass/zoonado | refs/heads/master | zoonado/connection.py | 1 | from __future__ import unicode_literals
import collections
import logging
import re
import struct
import sys
from tornado import ioloop, iostream, gen, concurrent, tcpclient
from zoonado import protocol, iterables, exc
version_regex = re.compile(r'Zookeeper version: (\d)\.(\d)\.(\d)-.*')
# all requests and responses are prefixed with a 32-bit int denoting size
size_struct = struct.Struct("!i")
# replies are prefixed with an xid, zxid and error code
reply_header_struct = struct.Struct("!iqi")
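# Framing sketch: an outgoing request is serialized and then prefixed with
# its length, e.g.
#
#   payload = request.serialize(xid)
#   framed = size_struct.pack(len(payload)) + payload
#
# and the reader first consumes size_struct.size bytes to learn how many
# payload bytes follow.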
log = logging.getLogger(__name__)
payload_log = logging.getLogger(__name__ + ".payload")
if payload_log.level == logging.NOTSET:
payload_log.setLevel(logging.INFO)
class Connection(object):
def __init__(self, host, port, watch_handler):
self.host = host
self.port = int(port)
self.stream = None
self.closing = False
self.version_info = None
self.start_read_only = None
self.watch_handler = watch_handler
self.opcode_xref = {}
self.pending = {}
self.pending_specials = collections.defaultdict(list)
self.watches = collections.defaultdict(list)
@gen.coroutine
def connect(self):
client = tcpclient.TCPClient()
log.debug("Initial connection to server %s:%d", self.host, self.port)
stream = yield client.connect(self.host, self.port)
log.debug("Sending 'srvr' command to %s:%d", self.host, self.port)
yield stream.write(b"srvr")
answer = yield stream.read_until_close()
answer = answer.decode("utf8")
version_line = answer.split("\n")[0]
self.version_info = tuple(
map(int, version_regex.match(version_line).groups())
)
self.start_read_only = bool("READ_ONLY" in answer)
log.debug("Version info: %s", self.version_info)
log.debug("Read-only mode: %s", self.start_read_only)
log.debug("Actual connection to server %s:%d", self.host, self.port)
self.stream = yield client.connect(self.host, self.port)
@gen.coroutine
def send_connect(self, request):
# meant to be used before the read_loop starts
payload_log.debug("[SEND] (initial) %s", request)
payload = request.serialize()
payload = size_struct.pack(len(payload)) + payload
yield self.stream.write(payload)
try:
_, zxid, response = yield self.read_response(initial_connect=True)
except Exception:
log.exception("Error reading connect response.")
return
payload_log.debug("[RECV] (initial) %s", response)
raise gen.Return((zxid, response))
def start_read_loop(self):
ioloop.IOLoop.current().add_callback(self.read_loop)
def send(self, request, xid=None):
f = concurrent.Future()
if self.closing:
f.set_exception(exc.ConnectError(self.host, self.port))
return f
if request.special_xid:
xid = request.special_xid
payload_log.debug("[SEND] (xid: %s) %s", xid, request)
payload = request.serialize(xid)
payload = size_struct.pack(len(payload)) + payload
self.opcode_xref[xid] = request.opcode
if xid in protocol.SPECIAL_XIDS:
self.pending_specials[xid].append(f)
else:
self.pending[xid] = f
def handle_write(write_future):
try:
write_future.result()
except Exception:
self.abort()
try:
self.stream.write(payload).add_done_callback(handle_write)
except Exception:
self.abort()
return f
@gen.coroutine
def read_loop(self):
"""
Infinite loop that reads messages off of the socket while not closed.
When a message is received its corresponding pending Future is set
to have the message as its result.
        This is never used directly and is fired as a separate callback on the
        I/O loop via the `start_read_loop()` method.
"""
while not self.closing:
try:
xid, zxid, response = yield self.read_response()
except iostream.StreamClosedError:
return
except Exception:
log.exception("Error reading response.")
self.abort()
return
payload_log.debug("[RECV] (xid: %s) %s", xid, response)
if xid == protocol.WATCH_XID:
self.watch_handler(response)
continue
elif xid in protocol.SPECIAL_XIDS:
f = self.pending_specials[xid].pop()
else:
f = self.pending.pop(xid)
if isinstance(response, Exception):
f.set_exception(response)
else:
f.set_result((zxid, response))
@gen.coroutine
def read_response(self, initial_connect=False):
raw_size = yield self.stream.read_bytes(size_struct.size)
size = size_struct.unpack(raw_size)[0]
# connect and close op replies don't contain a reply header
if initial_connect or self.pending_specials[protocol.CLOSE_XID]:
raw_payload = yield self.stream.read_bytes(size)
response = protocol.ConnectResponse.deserialize(raw_payload)
raise gen.Return((None, None, response))
raw_header = yield self.stream.read_bytes(reply_header_struct.size)
xid, zxid, error_code = reply_header_struct.unpack_from(raw_header)
if error_code:
raise gen.Return((xid, zxid, exc.get_response_error(error_code)))
size -= reply_header_struct.size
raw_payload = yield self.stream.read_bytes(size)
if xid == protocol.WATCH_XID:
response = protocol.WatchEvent.deserialize(raw_payload)
else:
opcode = self.opcode_xref.pop(xid)
response = protocol.response_xref[opcode].deserialize(raw_payload)
raise gen.Return((xid, zxid, response))
def abort(self, exception=exc.ConnectError):
"""
Aborts a connection and puts all pending futures into an error state.
If ``sys.exc_info()`` is set (i.e. this is being called in an exception
handler) then pending futures will have that exc info set. Otherwise
the given ``exception`` parameter is used (defaults to
``ConnectError``).
"""
log.warn("Aborting connection to %s:%s", self.host, self.port)
def abort_pending(f):
exc_info = sys.exc_info()
if any(exc_info):
f.set_exc_info(exc_info)
else:
f.set_exception(exception(self.host, self.port))
for pending in self.drain_all_pending():
abort_pending(pending)
def drain_all_pending(self):
for special_xid in protocol.SPECIAL_XIDS:
for f in iterables.drain(self.pending_specials[special_xid]):
yield f
for _, f in iterables.drain(self.pending):
yield f
@gen.coroutine
def close(self, timeout):
if self.closing:
return
self.closing = True
pending_with_timeouts = []
for pending in self.drain_all_pending():
pending_with_timeouts.append(gen.with_timeout(timeout, pending))
try:
yield list(pending_with_timeouts)
except gen.TimeoutError:
yield self.abort(exception=exc.TimeoutError)
finally:
self.stream.close()
|
mozilla/verbatim | refs/heads/master | vendor/lib/python/webassets/__init__.py | 1 | __version__ = (0, 8)
# Make a couple frequently used things available right here.
from bundle import Bundle
from env import Environment
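# A minimal usage sketch (paths and names are hypothetical): register a
# bundle so templates can refer to it by name.
#
#   env = Environment('./static', '/static')
#   js = Bundle('jquery.js', 'site.js', output='gen/packed.js')
#   env.register('js_all', js)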
|
wcmitchell/insights-core | refs/heads/master | insights/client/support.py | 1 | '''
Module responsible for running the --support option for collecting debug information
'''
import logging
import shlex
import re
import os
import requests
from subprocess import Popen, PIPE, STDOUT
from constants import InsightsConstants as constants
from connection import InsightsConnection
from config import CONFIG as config
APP_NAME = constants.app_name
logger = logging.getLogger(__name__)
def registration_check():
# check local registration record
unreg_date = None
unreachable = False
if os.path.isfile(constants.registered_file):
local_record = 'System is registered locally via .registered file.'
with open(constants.registered_file) as reg_file:
local_record += ' Registered at ' + reg_file.readline()
else:
local_record = 'System is NOT registered locally via .registered file.'
if os.path.isfile(constants.unregistered_file):
with open(constants.unregistered_file) as reg_file:
local_record += ' Unregistered at ' + reg_file.readline()
pconn = InsightsConnection()
api_reg_status = pconn.api_registration_check()
logger.debug('Registration status: %s', api_reg_status)
if type(api_reg_status) is bool:
if api_reg_status:
api_record = 'Insights API confirms registration.'
else:
api_record = 'Insights API could not be reached to confirm registration status.'
unreachable = True
elif api_reg_status is None:
api_record = 'Insights API says this machine is NOT registered.'
api_reg_status = False
else:
api_record = 'Insights API says this machine was unregistered at ' + api_reg_status
unreg_date = api_reg_status
api_reg_status = False
return {'messages': [local_record, api_record],
'status': api_reg_status,
'unreg_date': unreg_date,
'unreachable': unreachable}
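# The dict returned above has the shape (values illustrative):
#   {'messages': [local_record, api_record], 'status': True/False,
#    'unreg_date': None or a date string, 'unreachable': True/False}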
class InsightsSupport(object):
'''
Build the support logfile
'''
def __init__(self):
pass
def collect_support_info(self):
'''
Collect log info for debug
'''
# check insights config
cfg_block = []
logger.info('Insights version: %s' % (constants.version))
        cfg_block += registration_check()['messages']
lastupload = 'never'
if os.path.isfile(constants.lastupload_file):
with open(constants.lastupload_file) as upl_file:
lastupload = upl_file.readline().strip()
cfg_block.append('Last successful upload was ' + lastupload)
cfg_block.append('auto_config: ' + str(config['auto_config']))
if config['proxy']:
obfuscated_proxy = re.sub(r'(.*)(:)(.*)(@.*)',
r'\1\2********\4',
config['proxy'])
else:
obfuscated_proxy = 'None'
cfg_block.append('proxy: ' + obfuscated_proxy)
logger.info('\n'.join(cfg_block))
logger.info('python-requests: %s', requests.__version__)
# run commands
commands = ['insights-client --test-connection --quiet',
'uname -a',
'cat /etc/redhat-release',
'env',
'sestatus',
'subscription-manager identity']
for cmd in commands:
logger.info("Running command: %s", cmd)
try:
proc = Popen(
shlex.split(cmd), shell=False, stdout=PIPE, stderr=STDOUT, close_fds=True)
stdout, stderr = proc.communicate()
except Exception as e:
logger.info("Process failed: %s", e)
if 'test-connection' in cmd:
if proc.returncode == 0:
logger.info('Connection test: PASS\n')
else:
logger.info('Connection test: FAIL\n')
else:
logger.info("Process output: \n%s", stdout)
# check available disk space for /var/tmp
tmp_dir = '/var/tmp'
dest_dir_stat = os.statvfs(tmp_dir)
dest_dir_size = (dest_dir_stat.f_bavail * dest_dir_stat.f_frsize)
logger.info('Available space in %s:\t%s bytes\t%.1f 1K-blocks\t%.1f MB',
tmp_dir, dest_dir_size,
dest_dir_size / 1024.0,
(dest_dir_size / 1024.0) / 1024.0)
|
SrNetoChan/QGIS | refs/heads/master | python/plugins/processing/core/ProcessingConfig.py | 4 | # -*- coding: utf-8 -*-
"""
***************************************************************************
ProcessingConfig.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
import os
import tempfile
from qgis.PyQt.QtCore import QCoreApplication, QObject, pyqtSignal
from qgis.core import (NULL,
QgsApplication,
QgsSettings,
QgsVectorFileWriter,
QgsRasterFileWriter,
QgsProcessingUtils)
from processing.tools.system import defaultOutputFolder
import processing.tools.dataobjects
from multiprocessing import cpu_count
class SettingsWatcher(QObject):
settingsChanged = pyqtSignal()
settingsWatcher = SettingsWatcher()
class ProcessingConfig:
OUTPUT_FOLDER = 'OUTPUTS_FOLDER'
RASTER_STYLE = 'RASTER_STYLE'
VECTOR_POINT_STYLE = 'VECTOR_POINT_STYLE'
VECTOR_LINE_STYLE = 'VECTOR_LINE_STYLE'
VECTOR_POLYGON_STYLE = 'VECTOR_POLYGON_STYLE'
FILTER_INVALID_GEOMETRIES = 'FILTER_INVALID_GEOMETRIES'
PREFER_FILENAME_AS_LAYER_NAME = 'PREFER_FILENAME_AS_LAYER_NAME'
KEEP_DIALOG_OPEN = 'KEEP_DIALOG_OPEN'
PRE_EXECUTION_SCRIPT = 'PRE_EXECUTION_SCRIPT'
POST_EXECUTION_SCRIPT = 'POST_EXECUTION_SCRIPT'
SHOW_CRS_DEF = 'SHOW_CRS_DEF'
WARN_UNMATCHING_CRS = 'WARN_UNMATCHING_CRS'
SHOW_PROVIDERS_TOOLTIP = 'SHOW_PROVIDERS_TOOLTIP'
SHOW_ALGORITHMS_KNOWN_ISSUES = 'SHOW_ALGORITHMS_KNOWN_ISSUES'
MAX_THREADS = 'MAX_THREADS'
DEFAULT_OUTPUT_RASTER_LAYER_EXT = 'DefaultOutputRasterLayerExt'
DEFAULT_OUTPUT_VECTOR_LAYER_EXT = 'DefaultOutputVectorLayerExt'
TEMP_PATH = 'TEMP_PATH2'
settings = {}
settingIcons = {}
@staticmethod
def initialize():
icon = QgsApplication.getThemeIcon("/processingAlgorithm.svg")
ProcessingConfig.settingIcons['General'] = icon
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.KEEP_DIALOG_OPEN,
ProcessingConfig.tr('Keep dialog open after running an algorithm'), True))
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.PREFER_FILENAME_AS_LAYER_NAME,
ProcessingConfig.tr('Prefer output filename for layer names'), True))
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.SHOW_PROVIDERS_TOOLTIP,
ProcessingConfig.tr('Show tooltip when there are disabled providers'), True))
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.OUTPUT_FOLDER,
ProcessingConfig.tr('Output folder'), defaultOutputFolder(),
valuetype=Setting.FOLDER))
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.SHOW_CRS_DEF,
ProcessingConfig.tr('Show layer CRS definition in selection boxes'), True))
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.WARN_UNMATCHING_CRS,
ProcessingConfig.tr("Warn before executing if parameter CRS's do not match"), True))
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.SHOW_ALGORITHMS_KNOWN_ISSUES,
ProcessingConfig.tr("Show algorithms with known issues"), False))
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.RASTER_STYLE,
ProcessingConfig.tr('Style for raster layers'), '',
valuetype=Setting.FILE))
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.VECTOR_POINT_STYLE,
ProcessingConfig.tr('Style for point layers'), '',
valuetype=Setting.FILE))
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.VECTOR_LINE_STYLE,
ProcessingConfig.tr('Style for line layers'), '',
valuetype=Setting.FILE))
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.VECTOR_POLYGON_STYLE,
ProcessingConfig.tr('Style for polygon layers'), '',
valuetype=Setting.FILE))
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.PRE_EXECUTION_SCRIPT,
ProcessingConfig.tr('Pre-execution script'), '',
valuetype=Setting.FILE))
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.POST_EXECUTION_SCRIPT,
ProcessingConfig.tr('Post-execution script'), '',
valuetype=Setting.FILE))
invalidFeaturesOptions = [ProcessingConfig.tr('Do not filter (better performance)'),
ProcessingConfig.tr('Skip (ignore) features with invalid geometries'),
ProcessingConfig.tr('Stop algorithm execution when a geometry is invalid')]
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.FILTER_INVALID_GEOMETRIES,
ProcessingConfig.tr('Invalid features filtering'),
invalidFeaturesOptions[2],
valuetype=Setting.SELECTION,
options=invalidFeaturesOptions))
        threads = QgsApplication.maxThreads() # if user specified limit for rendering, let's keep that as default here, otherwise max
threads = cpu_count() if threads == -1 else threads # if unset, maxThreads() returns -1
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.MAX_THREADS,
ProcessingConfig.tr('Max Threads'), threads,
valuetype=Setting.INT))
extensions = QgsVectorFileWriter.supportedFormatExtensions()
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.DEFAULT_OUTPUT_VECTOR_LAYER_EXT,
ProcessingConfig.tr('Default output vector layer extension'),
QgsVectorFileWriter.supportedFormatExtensions()[0],
valuetype=Setting.SELECTION,
options=extensions))
extensions = QgsRasterFileWriter.supportedFormatExtensions()
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.DEFAULT_OUTPUT_RASTER_LAYER_EXT,
ProcessingConfig.tr('Default output raster layer extension'),
'tif',
valuetype=Setting.SELECTION,
options=extensions))
ProcessingConfig.addSetting(Setting(
ProcessingConfig.tr('General'),
ProcessingConfig.TEMP_PATH,
ProcessingConfig.tr('Override temporary output folder path (leave blank for default)'), None,
valuetype=Setting.FOLDER))
@staticmethod
def setGroupIcon(group, icon):
ProcessingConfig.settingIcons[group] = icon
@staticmethod
def getGroupIcon(group):
if group == ProcessingConfig.tr('General'):
return QgsApplication.getThemeIcon("/processingAlgorithm.svg")
if group in ProcessingConfig.settingIcons:
return ProcessingConfig.settingIcons[group]
else:
return QgsApplication.getThemeIcon("/processingAlgorithm.svg")
@staticmethod
def addSetting(setting):
ProcessingConfig.settings[setting.name] = setting
@staticmethod
def removeSetting(name):
del ProcessingConfig.settings[name]
@staticmethod
def getSettings():
'''Return settings as a dict with group names as keys and lists of settings as values'''
settings = {}
for setting in list(ProcessingConfig.settings.values()):
if setting.group not in settings:
group = []
settings[setting.group] = group
else:
group = settings[setting.group]
group.append(setting)
return settings
@staticmethod
def readSettings():
for setting in list(ProcessingConfig.settings.values()):
setting.read()
@staticmethod
def getSetting(name, readable=False):
if name in list(ProcessingConfig.settings.keys()):
v = ProcessingConfig.settings[name].value
try:
if v == NULL:
v = None
except:
pass
if ProcessingConfig.settings[name].valuetype == Setting.SELECTION:
if readable:
return v
return ProcessingConfig.settings[name].options.index(v)
else:
return v
else:
return None
@staticmethod
def setSettingValue(name, value):
if name in list(ProcessingConfig.settings.keys()):
if ProcessingConfig.settings[name].valuetype == Setting.SELECTION:
ProcessingConfig.settings[name].setValue(ProcessingConfig.settings[name].options[value])
else:
ProcessingConfig.settings[name].setValue(value)
ProcessingConfig.settings[name].save()
@staticmethod
def tr(string, context=''):
if context == '':
context = 'ProcessingConfig'
return QCoreApplication.translate(context, string)
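# Typical read/write pattern from plugin code (inside a QGIS session), e.g.:
#
#   ProcessingConfig.setSettingValue(ProcessingConfig.KEEP_DIALOG_OPEN, False)
#   out_folder = ProcessingConfig.getSetting(ProcessingConfig.OUTPUT_FOLDER)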
class Setting:
"""A simple config parameter that will appear on the config dialog.
"""
STRING = 0
FILE = 1
FOLDER = 2
SELECTION = 3
FLOAT = 4
INT = 5
MULTIPLE_FOLDERS = 6
def __init__(self, group, name, description, default, hidden=False, valuetype=None,
validator=None, options=None):
self.group = group
self.name = name
self.qname = "Processing/Configuration/" + self.name
self.description = description
self.default = default
self.hidden = hidden
self.valuetype = valuetype
self.options = options
if self.valuetype is None:
if isinstance(default, int):
self.valuetype = self.INT
elif isinstance(default, float):
self.valuetype = self.FLOAT
if validator is None:
if self.valuetype == self.FLOAT:
def checkFloat(v):
try:
float(v)
except ValueError:
raise ValueError(self.tr('Wrong parameter value:\n{0}').format(v))
validator = checkFloat
elif self.valuetype == self.INT:
def checkInt(v):
try:
int(v)
except ValueError:
raise ValueError(self.tr('Wrong parameter value:\n{0}').format(v))
validator = checkInt
elif self.valuetype in [self.FILE, self.FOLDER]:
def checkFileOrFolder(v):
if v and not os.path.exists(v):
raise ValueError(self.tr('Specified path does not exist:\n{0}').format(v))
validator = checkFileOrFolder
elif self.valuetype == self.MULTIPLE_FOLDERS:
def checkMultipleFolders(v):
folders = v.split(';')
for f in folders:
if f and not os.path.exists(f):
raise ValueError(self.tr('Specified path does not exist:\n{0}').format(f))
validator = checkMultipleFolders
else:
def validator(x):
return True
self.validator = validator
self.value = default
def setValue(self, value):
self.validator(value)
self.value = value
def read(self, qsettings=None):
if not qsettings:
qsettings = QgsSettings()
value = qsettings.value(self.qname, None)
if value is not None:
if isinstance(self.value, bool):
value = str(value).lower() == str(True).lower()
if self.valuetype == self.SELECTION:
try:
self.value = self.options[int(value)]
except:
self.value = self.options[0]
else:
self.value = value
def save(self, qsettings=None):
if not qsettings:
qsettings = QgsSettings()
if self.valuetype == self.SELECTION:
qsettings.setValue(self.qname, self.options.index(self.value))
else:
qsettings.setValue(self.qname, self.value)
def __str__(self):
return self.name + '=' + str(self.value)
def tr(self, string, context=''):
if context == '':
context = 'ProcessingConfig'
return QCoreApplication.translate(context, string)
|
google/orchestra | refs/heads/master | orchestra/google/marketing_platform/utils/schema/sdf/__init__.py | 1 | ###########################################################################
#
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
from orchestra.google.marketing_platform.utils.schema.sdf.v4_2 import (
SDF_V4_2_SCHEMA_TYPES
)
from orchestra.google.marketing_platform.utils.schema.sdf.v5 import (
SDF_V5_SCHEMA_TYPES
)
SDF_VERSIONED_SCHEMA_TYPES = {'4.2':SDF_V4_2_SCHEMA_TYPES,'5':SDF_V5_SCHEMA_TYPES}
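# Usage sketch: select the schema set matching an SDF version string, e.g.
#
#   schema_types = SDF_VERSIONED_SCHEMA_TYPES['5']
#
# The per-file-type entries inside each set are defined in the versioned
# modules imported above.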
|
jeremypogue/ansible | refs/heads/devel | lib/ansible/playbook/become.py | 63 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.playbook.attribute import Attribute, FieldAttribute
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class Become:
# Privilege escalation
_become = FieldAttribute(isa='bool')
_become_method = FieldAttribute(isa='string')
_become_user = FieldAttribute(isa='string')
_become_flags = FieldAttribute(isa='string')
def __init__(self):
return super(Become, self).__init__()
def _detect_privilege_escalation_conflict(self, ds):
# Fail out if user specifies conflicting privilege escalations
        has_become = 'become' in ds or 'become_user' in ds
has_sudo = 'sudo' in ds or 'sudo_user' in ds
has_su = 'su' in ds or 'su_user' in ds
if has_become:
msg = 'The become params ("become", "become_user") and'
if has_sudo:
raise AnsibleParserError('%s sudo params ("sudo", "sudo_user") cannot be used together' % msg)
elif has_su:
raise AnsibleParserError('%s su params ("su", "su_user") cannot be used together' % msg)
elif has_sudo and has_su:
raise AnsibleParserError('sudo params ("sudo", "sudo_user") and su params ("su", "su_user") cannot be used together')
def _preprocess_data_become(self, ds):
"""Preprocess the playbook data for become attributes
This is called from the Base object's preprocess_data() method which
in turn is called pretty much anytime any sort of playbook object
(plays, tasks, blocks, etc) is created.
"""
self._detect_privilege_escalation_conflict(ds)
# Privilege escalation, backwards compatibility for sudo/su
if 'sudo' in ds or 'sudo_user' in ds:
ds['become_method'] = 'sudo'
if 'sudo' in ds:
ds['become'] = ds['sudo']
del ds['sudo']
if 'sudo_user' in ds:
ds['become_user'] = ds['sudo_user']
del ds['sudo_user']
display.deprecated("Instead of sudo/sudo_user, use become/become_user and make sure become_method is 'sudo' (default)")
elif 'su' in ds or 'su_user' in ds:
ds['become_method'] = 'su'
if 'su' in ds:
ds['become'] = ds['su']
del ds['su']
if 'su_user' in ds:
ds['become_user'] = ds['su_user']
del ds['su_user']
display.deprecated("Instead of su/su_user, use become/become_user and set become_method to 'su' (default is sudo)")
return ds
def set_become_defaults(self, become, become_method, become_user):
''' if we are becoming someone else, but some fields are unset,
make sure they're initialized to the default config values '''
if become:
if become_method is None:
become_method = C.DEFAULT_BECOME_METHOD
if become_user is None:
become_user = C.DEFAULT_BECOME_USER
|
whereismyjetpack/ansible | refs/heads/devel | lib/ansible/modules/network/eos/eos_command.py | 2 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {
'status': ['preview'],
'supported_by': 'core',
'version': '1.0'
}
DOCUMENTATION = """
---
module: eos_command
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Run arbitrary commands on an Arista EOS device
description:
- Sends an arbitrary set of commands to an EOS node and returns the results
read from the device. This module includes an
argument that will cause the module to wait for a specific condition
before returning or timing out if the condition is not met.
options:
commands:
description:
- The commands to send to the remote EOS device over the
configured provider. The resulting output from the command
is returned. If the I(wait_for) argument is provided, the
        module does not return until the condition is satisfied or
the number of I(retries) has been exceeded.
required: true
wait_for:
description:
- Specifies what to evaluate from the output of the command
and what conditionals to apply. This argument will cause
the task to wait for a particular conditional to be true
before moving forward. If the conditional is not true
by the configured retries, the task fails. See examples.
required: false
default: null
aliases: ['waitfor']
version_added: "2.2"
match:
description:
- The I(match) argument is used in conjunction with the
I(wait_for) argument to specify the match policy. Valid
values are C(all) or C(any). If the value is set to C(all)
then all conditionals in the I(wait_for) must be satisfied. If
the value is set to C(any) then only one of the values must be
satisfied.
required: false
default: all
choices: ['any', 'all']
version_added: "2.2"
retries:
description:
- Specifies the number of retries a command should be tried
before it is considered failed. The command is run on the
target device every retry and evaluated against the I(wait_for)
conditionals.
required: false
default: 10
interval:
description:
- Configures the interval in seconds to wait between retries
of the command. If the command does not pass the specified
        conditional, the interval indicates how long to wait before
trying the command again.
required: false
default: 1
"""
EXAMPLES = """
- name: run show version on remote devices
eos_command:
commands: show version
- name: run show version and check to see if output contains Arista
eos_command:
commands: show version
wait_for: result[0] contains Arista
- name: run multiple commands on remote nodes
eos_command:
commands:
- show version
- show interfaces
- name: run multiple commands and evaluate the output
eos_command:
commands:
- show version
- show interfaces
wait_for:
- result[0] contains Arista
- result[1] contains Loopback0
- name: run commands and specify the output format
eos_command:
commands:
- command: show version
output: json
"""
RETURN = """
failed_conditions:
description: the conditionals that failed
returned: failed
type: list
sample: ['...', '...']
"""
import time
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.six import string_types
from ansible.module_utils.netcli import Conditional
from ansible.module_utils.network_common import ComplexList
from ansible.module_utils.eos import run_commands
from ansible.module_utils.eos import eos_argument_spec, check_args
VALID_KEYS = ['command', 'output', 'prompt', 'response']
def to_lines(stdout):
lines = list()
for item in stdout:
if isinstance(item, string_types):
item = str(item).split('\n')
lines.append(item)
return lines
def parse_commands(module, warnings):
spec = dict(
command=dict(key=True),
output=dict(),
prompt=dict(),
response=dict()
)
transform = ComplexList(spec, module)
commands = transform(module.params['commands'])
for index, item in enumerate(commands):
if module.check_mode and not item['command'].startswith('show'):
warnings.append(
'Only show commands are supported when using check_mode, not '
'executing %s' % item['command']
)
return commands
def to_cli(obj):
cmd = obj['command']
if obj.get('output') == 'json':
cmd += ' | json'
return cmd
def main():
"""entry point for module execution
"""
argument_spec = dict(
commands=dict(type='list', required=True),
wait_for=dict(type='list', aliases=['waitfor']),
match=dict(default='all', choices=['all', 'any']),
retries=dict(default=10, type='int'),
interval=dict(default=1, type='int')
)
argument_spec.update(eos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
result = {'changed': False}
warnings = list()
check_args(module, warnings)
commands = parse_commands(module, warnings)
if warnings:
result['warnings'] = warnings
wait_for = module.params['wait_for'] or list()
try:
conditionals = [Conditional(c) for c in wait_for]
except AttributeError:
exc = get_exception()
module.fail_json(msg=str(exc))
retries = module.params['retries']
interval = module.params['interval']
match = module.params['match']
while retries > 0:
responses = run_commands(module, commands)
for item in list(conditionals):
if item(responses):
if match == 'any':
conditionals = list()
break
conditionals.remove(item)
if not conditionals:
break
time.sleep(interval)
retries -= 1
if conditionals:
failed_conditions = [item.raw for item in conditionals]
        msg = 'One or more conditional statements have not been satisfied'
module.fail_json(msg=msg, failed_conditions=failed_conditions)
result.update({
'changed': False,
'stdout': responses,
'stdout_lines': to_lines(responses)
})
module.exit_json(**result)
if __name__ == '__main__':
main()
|
busyStone/ardupilot | refs/heads/master | Tools/autotest/common.py | 13 | import util, pexpect, time, math
from pymavlink import mavwp
# a list of pexpect objects to read while waiting for
# messages. This keeps the output to stdout flowing
expect_list = []
def expect_list_clear():
'''clear the expect list'''
global expect_list
for p in expect_list[:]:
expect_list.remove(p)
def expect_list_extend(list):
'''extend the expect list'''
global expect_list
expect_list.extend(list)
def idle_hook(mav):
'''called when waiting for a mavlink message'''
global expect_list
for p in expect_list:
util.pexpect_drain(p)
def message_hook(mav, msg):
'''called as each mavlink msg is received'''
idle_hook(mav)
def expect_callback(e):
'''called when waiting for a expect pattern'''
global expect_list
for p in expect_list:
if p == e:
continue
util.pexpect_drain(p)
def get_distance(loc1, loc2):
'''get ground distance between two locations'''
dlat = loc2.lat - loc1.lat
dlong = loc2.lng - loc1.lng
return math.sqrt((dlat*dlat) + (dlong*dlong)) * 1.113195e5
def get_bearing(loc1, loc2):
'''get bearing from loc1 to loc2'''
off_x = loc2.lng - loc1.lng
off_y = loc2.lat - loc1.lat
bearing = 90.00 + math.atan2(-off_y, off_x) * 57.2957795
if bearing < 0:
bearing += 360.00
    return bearing
def wait_seconds(mav, seconds_to_wait):
tstart = get_sim_time(mav)
tnow = tstart
while tstart + seconds_to_wait > tnow:
tnow = get_sim_time(mav)
def get_sim_time(mav):
m = mav.recv_match(type='SYSTEM_TIME', blocking=True)
return m.time_boot_ms * 1.0e-3
def wait_altitude(mav, alt_min, alt_max, timeout=30):
    '''wait for a given altitude range'''
    climb_rate = 0
    previous_alt = 0
tstart = get_sim_time(mav)
print("Waiting for altitude between %u and %u" % (alt_min, alt_max))
while get_sim_time(mav) < tstart + timeout:
m = mav.recv_match(type='VFR_HUD', blocking=True)
climb_rate = m.alt - previous_alt
previous_alt = m.alt
print("Wait Altitude: Cur:%u, min_alt:%u, climb_rate: %u" % (m.alt, alt_min , climb_rate))
if m.alt >= alt_min and m.alt <= alt_max:
print("Altitude OK")
return True
print("Failed to attain altitude range")
return False
def wait_groundspeed(mav, gs_min, gs_max, timeout=30):
'''wait for a given ground speed range'''
tstart = get_sim_time(mav)
print("Waiting for groundspeed between %.1f and %.1f" % (gs_min, gs_max))
while get_sim_time(mav) < tstart + timeout:
m = mav.recv_match(type='VFR_HUD', blocking=True)
print("Wait groundspeed %.1f, target:%.1f" % (m.groundspeed, gs_min))
if m.groundspeed >= gs_min and m.groundspeed <= gs_max:
return True
print("Failed to attain groundspeed range")
return False
def wait_roll(mav, roll, accuracy, timeout=30):
'''wait for a given roll in degrees'''
tstart = get_sim_time(mav)
print("Waiting for roll of %d at %s" % (roll, time.ctime()))
while get_sim_time(mav) < tstart + timeout:
m = mav.recv_match(type='ATTITUDE', blocking=True)
p = math.degrees(m.pitch)
r = math.degrees(m.roll)
print("Roll %d Pitch %d" % (r, p))
if math.fabs(r - roll) <= accuracy:
print("Attained roll %d" % roll)
return True
print("Failed to attain roll %d" % roll)
return False
def wait_pitch(mav, pitch, accuracy, timeout=30):
'''wait for a given pitch in degrees'''
tstart = get_sim_time(mav)
print("Waiting for pitch of %u at %s" % (pitch, time.ctime()))
while get_sim_time(mav) < tstart + timeout:
m = mav.recv_match(type='ATTITUDE', blocking=True)
p = math.degrees(m.pitch)
r = math.degrees(m.roll)
print("Pitch %d Roll %d" % (p, r))
if math.fabs(p - pitch) <= accuracy:
print("Attained pitch %d" % pitch)
return True
print("Failed to attain pitch %d" % pitch)
return False
def wait_heading(mav, heading, accuracy=5, timeout=30):
'''wait for a given heading'''
tstart = get_sim_time(mav)
print("Waiting for heading %u with accuracy %u" % (heading, accuracy))
while get_sim_time(mav) < tstart + timeout:
m = mav.recv_match(type='VFR_HUD', blocking=True)
print("Heading %u" % m.heading)
if math.fabs(m.heading - heading) <= accuracy:
print("Attained heading %u" % heading)
return True
print("Failed to attain heading %u" % heading)
return False
def wait_distance(mav, distance, accuracy=5, timeout=30):
'''wait for flight of a given distance'''
tstart = get_sim_time(mav)
start = mav.location()
while get_sim_time(mav) < tstart + timeout:
pos = mav.location()
delta = get_distance(start, pos)
print("Distance %.2f meters" % delta)
if math.fabs(delta - distance) <= accuracy:
print("Attained distance %.2f meters OK" % delta)
return True
if delta > (distance + accuracy):
print("Failed distance - overshoot delta=%f distance=%f" % (delta, distance))
return False
print("Failed to attain distance %u" % distance)
return False
def wait_location(mav, loc, accuracy=5, timeout=30, target_altitude=None, height_accuracy=-1):
'''wait for arrival at a location'''
tstart = get_sim_time(mav)
if target_altitude is None:
target_altitude = loc.alt
print("Waiting for location %.4f,%.4f at altitude %.1f height_accuracy=%.1f" % (
loc.lat, loc.lng, target_altitude, height_accuracy))
while get_sim_time(mav) < tstart + timeout:
pos = mav.location()
delta = get_distance(loc, pos)
print("Distance %.2f meters alt %.1f" % (delta, pos.alt))
if delta <= accuracy:
if height_accuracy != -1 and math.fabs(pos.alt - target_altitude) > height_accuracy:
continue
print("Reached location (%.2f meters)" % delta)
return True
print("Failed to attain location")
return False
def wait_waypoint(mav, wpnum_start, wpnum_end, allow_skip=True, max_dist=2, timeout=400, mode=None):
'''wait for waypoint ranges'''
tstart = get_sim_time(mav)
# this message arrives after we set the current WP
start_wp = mav.waypoint_current()
current_wp = start_wp
print("\ntest: wait for waypoint ranges start=%u end=%u\n\n" % (wpnum_start, wpnum_end))
# if start_wp != wpnum_start:
# print("test: Expected start waypoint %u but got %u" % (wpnum_start, start_wp))
# return False
while get_sim_time(mav) < tstart + timeout:
seq = mav.waypoint_current()
m = mav.recv_match(type='NAV_CONTROLLER_OUTPUT', blocking=True)
wp_dist = m.wp_dist
m = mav.recv_match(type='VFR_HUD', blocking=True)
# if we exited the required mode, finish
if mode is not None and mav.flightmode != mode:
print('Exited %s mode' % mode)
return True
print("test: WP %u (wp_dist=%u Alt=%d), current_wp: %u, wpnum_end: %u" % (seq, wp_dist, m.alt, current_wp, wpnum_end))
if seq == current_wp+1 or (seq > current_wp+1 and allow_skip):
print("test: Starting new waypoint %u" % seq)
tstart = get_sim_time(mav)
current_wp = seq
# the wp_dist check is a hack until we can sort out the right seqnum
# for end of mission
#if current_wp == wpnum_end or (current_wp == wpnum_end-1 and wp_dist < 2):
if (current_wp == wpnum_end and wp_dist < max_dist):
print("Reached final waypoint %u" % seq)
return True
if (seq >= 255):
print("Reached final waypoint %u" % seq)
return True
if seq > current_wp+1:
print("Failed: Skipped waypoint! Got wp %u expected %u" % (seq, current_wp+1))
return False
print("Failed: Timed out waiting for waypoint %u of %u" % (wpnum_end, wpnum_end))
return False
def save_wp(mavproxy, mav):
mavproxy.send('rc 7 1000\n')
mav.recv_match(condition='RC_CHANNELS_RAW.chan7_raw==1000', blocking=True)
wait_seconds(mav, 1)
mavproxy.send('rc 7 2000\n')
mav.recv_match(condition='RC_CHANNELS_RAW.chan7_raw==2000', blocking=True)
wait_seconds(mav, 1)
mavproxy.send('rc 7 1000\n')
mav.recv_match(condition='RC_CHANNELS_RAW.chan7_raw==1000', blocking=True)
wait_seconds(mav, 1)
def wait_mode(mav, mode, timeout=None):
print("Waiting for mode %s" % mode)
mav.recv_match(condition='MAV.flightmode.upper()=="%s".upper()' % mode, timeout=timeout, blocking=True)
print("Got mode %s" % mode)
return mav.flightmode
def mission_count(filename):
'''load a mission from a file and return number of waypoints'''
wploader = mavwp.MAVWPLoader()
wploader.load(filename)
num_wp = wploader.count()
return num_wp
def sim_location(mav):
'''return current simulator location'''
from pymavlink import mavutil
m = mav.recv_match(type='SIMSTATE', blocking=True)
return mavutil.location(m.lat*1.0e-7, m.lng*1.0e-7, 0, math.degrees(m.yaw))
def log_download(mavproxy, mav, filename, timeout=360):
'''download latest log'''
mavproxy.send("log list\n")
mavproxy.expect("numLogs")
mav.wait_heartbeat()
mav.wait_heartbeat()
mavproxy.send("set shownoise 0\n")
mavproxy.send("log download latest %s\n" % filename)
mavproxy.expect("Finished downloading", timeout=timeout)
mav.wait_heartbeat()
mav.wait_heartbeat()
return True
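if __name__ == '__main__':
    # Quick self-check of the geo helpers (illustrative values; callers
    # normally pass pymavlink location objects with .lat/.lng attributes).
    from collections import namedtuple
    Loc = namedtuple('Loc', ['lat', 'lng'])
    home = Loc(-35.362938, 149.165085)
    north = Loc(home.lat + 0.001, home.lng)
    print("distance: %.1f m" % get_distance(home, north))   # ~111.3 m
    print("bearing: %.1f deg" % get_bearing(home, north))   # ~0 deg (due north)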
|
kmod/icbd | refs/heads/master | icbd/type_analyzer/tests/import_test/f.py | 1 | from . import dup as dup1
dup1 # 0 <module 'dup'|num>
import dup as dup2
dup2 # 0 module 'dup'
from .d import e as e1
e1
from d import e as e2
e2
from . import g
xg = g.xg
from .d.e import x as x2
print x2
|
Axam/nsx-web | refs/heads/master | nailgun/nailgun/openstack/common/periodic_task.py | 6 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import time
from oslo.config import cfg
import six
from nailgun.openstack.common.gettextutils import _ # noqa
from nailgun.openstack.common import log as logging
from nailgun.openstack.common import timeutils
periodic_opts = [
cfg.BoolOpt('run_external_periodic_tasks',
default=True,
help=('Some periodic tasks can be run in a separate process. '
'Should we run them here?')),
]
CONF = cfg.CONF
CONF.register_opts(periodic_opts)
LOG = logging.getLogger(__name__)
DEFAULT_INTERVAL = 60.0
class InvalidPeriodicTaskArg(Exception):
message = _("Unexpected argument for periodic task creation: %(arg)s.")
def periodic_task(*args, **kwargs):
"""Decorator to indicate that a method is a periodic task.
This decorator can be used in two ways:
1. Without arguments '@periodic_task', this will be run on every cycle
of the periodic scheduler.
2. With arguments:
@periodic_task(spacing=N [, run_immediately=[True|False]])
this will be run on approximately every N seconds. If this number is
negative the periodic task will be disabled. If the run_immediately
argument is provided and has a value of 'True', the first run of the
       task will be shortly after the task scheduler starts. If
run_immediately is omitted or set to 'False', the first time the
task runs will be approximately N seconds after the task scheduler
starts.
"""
def decorator(f):
# Test for old style invocation
if 'ticks_between_runs' in kwargs:
raise InvalidPeriodicTaskArg(arg='ticks_between_runs')
# Control if run at all
f._periodic_task = True
f._periodic_external_ok = kwargs.pop('external_process_ok', False)
if f._periodic_external_ok and not CONF.run_external_periodic_tasks:
f._periodic_enabled = False
else:
f._periodic_enabled = kwargs.pop('enabled', True)
# Control frequency
f._periodic_spacing = kwargs.pop('spacing', 0)
f._periodic_immediate = kwargs.pop('run_immediately', False)
if f._periodic_immediate:
f._periodic_last_run = None
else:
f._periodic_last_run = timeutils.utcnow()
return f
# NOTE(sirp): The `if` is necessary to allow the decorator to be used with
# and without parens.
#
# In the 'with-parens' case (with kwargs present), this function needs to
# return a decorator function since the interpreter will invoke it like:
#
# periodic_task(*args, **kwargs)(f)
#
# In the 'without-parens' case, the original function will be passed
# in as the first argument, like:
#
# periodic_task(f)
if kwargs:
return decorator
else:
return decorator(args[0])
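# --- Hedged usage sketch (editorial addition) ---
# Illustrates the two invocation styles described in the decorator's
# docstring. The function names are hypothetical; nothing in this module
# calls this helper.
def _demo_periodic_task_styles():
    @periodic_task
    def run_every_cycle(self, context):
        pass

    @periodic_task(spacing=30, run_immediately=True)
    def run_every_30s(self, context):
        pass

    return run_every_cycle, run_every_30s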
class _PeriodicTasksMeta(type):
def __init__(cls, names, bases, dict_):
"""Metaclass that allows us to collect decorated periodic tasks."""
super(_PeriodicTasksMeta, cls).__init__(names, bases, dict_)
# NOTE(sirp): if the attribute is not present then we must be the base
        # class, so, go ahead and initialize it. If the attribute is present,
# then we're a subclass so make a copy of it so we don't step on our
# parent's toes.
try:
cls._periodic_tasks = cls._periodic_tasks[:]
except AttributeError:
cls._periodic_tasks = []
try:
cls._periodic_last_run = cls._periodic_last_run.copy()
except AttributeError:
cls._periodic_last_run = {}
try:
cls._periodic_spacing = cls._periodic_spacing.copy()
except AttributeError:
cls._periodic_spacing = {}
for value in cls.__dict__.values():
if getattr(value, '_periodic_task', False):
task = value
name = task.__name__
if task._periodic_spacing < 0:
LOG.info(_('Skipping periodic task %(task)s because '
'its interval is negative'),
{'task': name})
continue
if not task._periodic_enabled:
LOG.info(_('Skipping periodic task %(task)s because '
'it is disabled'),
{'task': name})
continue
# A periodic spacing of zero indicates that this task should
# be run every pass
if task._periodic_spacing == 0:
task._periodic_spacing = None
cls._periodic_tasks.append((name, task))
cls._periodic_spacing[name] = task._periodic_spacing
cls._periodic_last_run[name] = task._periodic_last_run
@six.add_metaclass(_PeriodicTasksMeta)
class PeriodicTasks(object):
def run_periodic_tasks(self, context, raise_on_error=False):
"""Tasks to be run at a periodic interval."""
idle_for = DEFAULT_INTERVAL
for task_name, task in self._periodic_tasks:
full_task_name = '.'.join([self.__class__.__name__, task_name])
now = timeutils.utcnow()
spacing = self._periodic_spacing[task_name]
last_run = self._periodic_last_run[task_name]
# If a periodic task is _nearly_ due, then we'll run it early
if spacing is not None and last_run is not None:
due = last_run + datetime.timedelta(seconds=spacing)
if not timeutils.is_soon(due, 0.2):
idle_for = min(idle_for, timeutils.delta_seconds(now, due))
continue
if spacing is not None:
idle_for = min(idle_for, spacing)
LOG.debug(_("Running periodic task %(full_task_name)s"),
{"full_task_name": full_task_name})
self._periodic_last_run[task_name] = timeutils.utcnow()
try:
task(self, context)
except Exception as e:
if raise_on_error:
raise
LOG.exception(_("Error during %(full_task_name)s: %(e)s"),
{"full_task_name": full_task_name, "e": e})
time.sleep(0)
return idle_for
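# --- Hedged usage sketch (editorial addition) ---
# A minimal driver under the assumption that the manager below is the only
# consumer: subclass PeriodicTasks, decorate methods, then let
# run_periodic_tasks() tell you how long you may sleep before a task could
# next be due.
def _demo_run_periodic_tasks():
    class _DemoManager(PeriodicTasks):
        @periodic_task(spacing=5)
        def heartbeat(self, context):
            LOG.debug("heartbeat")

    manager = _DemoManager()
    idle_for = manager.run_periodic_tasks(context=None)
    time.sleep(max(idle_for, 0))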
|
kenshay/ImageScripter | refs/heads/master | ProgramData/SystemFiles/Python/Lib/lib2to3/tests/test_pytree.py | 48 | # Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Unit tests for pytree.py.
NOTE: Please *don't* add doc strings to individual test methods!
In verbose mode, printing of the module, class and method name is much
more helpful than printing of (the first line of) the docstring,
especially when debugging a test.
"""
from __future__ import with_statement
import sys
import warnings
# Testing imports
from . import support
from lib2to3 import pytree
try:
sorted
except NameError:
def sorted(lst):
l = list(lst)
l.sort()
return l
class TestNodes(support.TestCase):
"""Unit tests for nodes (Base, Leaf, Node)."""
if sys.version_info >= (2,6):
# warnings.catch_warnings is new in 2.6.
def test_deprecated_prefix_methods(self):
l = pytree.Leaf(100, "foo")
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always", DeprecationWarning)
self.assertEqual(l.get_prefix(), "")
l.set_prefix("hi")
self.assertEqual(l.prefix, "hi")
self.assertEqual(len(w), 2)
for warning in w:
self.assertTrue(warning.category is DeprecationWarning)
self.assertEqual(str(w[0].message), "get_prefix() is deprecated; " \
"use the prefix property")
self.assertEqual(str(w[1].message), "set_prefix() is deprecated; " \
"use the prefix property")
def test_instantiate_base(self):
if __debug__:
# Test that instantiating Base() raises an AssertionError
self.assertRaises(AssertionError, pytree.Base)
def test_leaf(self):
l1 = pytree.Leaf(100, "foo")
self.assertEqual(l1.type, 100)
self.assertEqual(l1.value, "foo")
def test_leaf_repr(self):
l1 = pytree.Leaf(100, "foo")
self.assertEqual(repr(l1), "Leaf(100, 'foo')")
def test_leaf_str(self):
l1 = pytree.Leaf(100, "foo")
self.assertEqual(str(l1), "foo")
l2 = pytree.Leaf(100, "foo", context=(" ", (10, 1)))
self.assertEqual(str(l2), " foo")
def test_leaf_str_numeric_value(self):
# Make sure that the Leaf's value is stringified. Failing to
# do this can cause a TypeError in certain situations.
l1 = pytree.Leaf(2, 5)
l1.prefix = "foo_"
self.assertEqual(str(l1), "foo_5")
def test_leaf_equality(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "foo", context=(" ", (1, 0)))
self.assertEqual(l1, l2)
l3 = pytree.Leaf(101, "foo")
l4 = pytree.Leaf(100, "bar")
self.assertNotEqual(l1, l3)
self.assertNotEqual(l1, l4)
def test_leaf_prefix(self):
l1 = pytree.Leaf(100, "foo")
self.assertEqual(l1.prefix, "")
self.assertFalse(l1.was_changed)
l1.prefix = " ##\n\n"
self.assertEqual(l1.prefix, " ##\n\n")
self.assertTrue(l1.was_changed)
def test_node(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(200, "bar")
n1 = pytree.Node(1000, [l1, l2])
self.assertEqual(n1.type, 1000)
self.assertEqual(n1.children, [l1, l2])
def test_node_repr(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar", context=(" ", (1, 0)))
n1 = pytree.Node(1000, [l1, l2])
self.assertEqual(repr(n1),
"Node(1000, [%s, %s])" % (repr(l1), repr(l2)))
def test_node_str(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar", context=(" ", (1, 0)))
n1 = pytree.Node(1000, [l1, l2])
self.assertEqual(str(n1), "foo bar")
def test_node_prefix(self):
l1 = pytree.Leaf(100, "foo")
self.assertEqual(l1.prefix, "")
n1 = pytree.Node(1000, [l1])
self.assertEqual(n1.prefix, "")
n1.prefix = " "
self.assertEqual(n1.prefix, " ")
self.assertEqual(l1.prefix, " ")
def test_get_suffix(self):
l1 = pytree.Leaf(100, "foo", prefix="a")
l2 = pytree.Leaf(100, "bar", prefix="b")
n1 = pytree.Node(1000, [l1, l2])
self.assertEqual(l1.get_suffix(), l2.prefix)
self.assertEqual(l2.get_suffix(), "")
self.assertEqual(n1.get_suffix(), "")
l3 = pytree.Leaf(100, "bar", prefix="c")
n2 = pytree.Node(1000, [n1, l3])
self.assertEqual(n1.get_suffix(), l3.prefix)
self.assertEqual(l3.get_suffix(), "")
self.assertEqual(n2.get_suffix(), "")
def test_node_equality(self):
n1 = pytree.Node(1000, ())
n2 = pytree.Node(1000, [], context=(" ", (1, 0)))
self.assertEqual(n1, n2)
n3 = pytree.Node(1001, ())
self.assertNotEqual(n1, n3)
def test_node_recursive_equality(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "foo")
n1 = pytree.Node(1000, [l1])
n2 = pytree.Node(1000, [l2])
self.assertEqual(n1, n2)
l3 = pytree.Leaf(100, "bar")
n3 = pytree.Node(1000, [l3])
self.assertNotEqual(n1, n3)
def test_replace(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "+")
l3 = pytree.Leaf(100, "bar")
n1 = pytree.Node(1000, [l1, l2, l3])
self.assertEqual(n1.children, [l1, l2, l3])
self.assertIsInstance(n1.children, list)
self.assertFalse(n1.was_changed)
l2new = pytree.Leaf(100, "-")
l2.replace(l2new)
self.assertEqual(n1.children, [l1, l2new, l3])
self.assertIsInstance(n1.children, list)
self.assertTrue(n1.was_changed)
def test_replace_with_list(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "+")
l3 = pytree.Leaf(100, "bar")
n1 = pytree.Node(1000, [l1, l2, l3])
l2.replace([pytree.Leaf(100, "*"), pytree.Leaf(100, "*")])
self.assertEqual(str(n1), "foo**bar")
self.assertIsInstance(n1.children, list)
def test_leaves(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar")
l3 = pytree.Leaf(100, "fooey")
n2 = pytree.Node(1000, [l1, l2])
n3 = pytree.Node(1000, [l3])
n1 = pytree.Node(1000, [n2, n3])
self.assertEqual(list(n1.leaves()), [l1, l2, l3])
def test_depth(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar")
n2 = pytree.Node(1000, [l1, l2])
n3 = pytree.Node(1000, [])
n1 = pytree.Node(1000, [n2, n3])
self.assertEqual(l1.depth(), 2)
self.assertEqual(n3.depth(), 1)
self.assertEqual(n1.depth(), 0)
def test_post_order(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar")
l3 = pytree.Leaf(100, "fooey")
c1 = pytree.Node(1000, [l1, l2])
n1 = pytree.Node(1000, [c1, l3])
self.assertEqual(list(n1.post_order()), [l1, l2, c1, l3, n1])
def test_pre_order(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar")
l3 = pytree.Leaf(100, "fooey")
c1 = pytree.Node(1000, [l1, l2])
n1 = pytree.Node(1000, [c1, l3])
self.assertEqual(list(n1.pre_order()), [n1, c1, l1, l2, l3])
def test_changed(self):
l1 = pytree.Leaf(100, "f")
self.assertFalse(l1.was_changed)
l1.changed()
self.assertTrue(l1.was_changed)
l1 = pytree.Leaf(100, "f")
n1 = pytree.Node(1000, [l1])
self.assertFalse(n1.was_changed)
n1.changed()
self.assertTrue(n1.was_changed)
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "+")
l3 = pytree.Leaf(100, "bar")
n1 = pytree.Node(1000, [l1, l2, l3])
n2 = pytree.Node(1000, [n1])
self.assertFalse(l1.was_changed)
self.assertFalse(n1.was_changed)
self.assertFalse(n2.was_changed)
n1.changed()
self.assertTrue(n1.was_changed)
self.assertTrue(n2.was_changed)
self.assertFalse(l1.was_changed)
def test_leaf_constructor_prefix(self):
for prefix in ("xyz_", ""):
l1 = pytree.Leaf(100, "self", prefix=prefix)
            self.assertEqual(str(l1), prefix + "self")
self.assertEqual(l1.prefix, prefix)
def test_node_constructor_prefix(self):
for prefix in ("xyz_", ""):
l1 = pytree.Leaf(100, "self")
l2 = pytree.Leaf(100, "foo", prefix="_")
n1 = pytree.Node(1000, [l1, l2], prefix=prefix)
            self.assertEqual(str(n1), prefix + "self_foo")
self.assertEqual(n1.prefix, prefix)
self.assertEqual(l1.prefix, prefix)
self.assertEqual(l2.prefix, "_")
def test_remove(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "foo")
n1 = pytree.Node(1000, [l1, l2])
n2 = pytree.Node(1000, [n1])
self.assertEqual(n1.remove(), 0)
self.assertEqual(n2.children, [])
self.assertEqual(l1.parent, n1)
self.assertEqual(n1.parent, None)
self.assertEqual(n2.parent, None)
self.assertFalse(n1.was_changed)
self.assertTrue(n2.was_changed)
self.assertEqual(l2.remove(), 1)
self.assertEqual(l1.remove(), 0)
self.assertEqual(n1.children, [])
self.assertEqual(l1.parent, None)
self.assertEqual(n1.parent, None)
self.assertEqual(n2.parent, None)
self.assertTrue(n1.was_changed)
self.assertTrue(n2.was_changed)
def test_remove_parentless(self):
n1 = pytree.Node(1000, [])
n1.remove()
self.assertEqual(n1.parent, None)
l1 = pytree.Leaf(100, "foo")
l1.remove()
self.assertEqual(l1.parent, None)
def test_node_set_child(self):
l1 = pytree.Leaf(100, "foo")
n1 = pytree.Node(1000, [l1])
l2 = pytree.Leaf(100, "bar")
n1.set_child(0, l2)
self.assertEqual(l1.parent, None)
self.assertEqual(l2.parent, n1)
self.assertEqual(n1.children, [l2])
n2 = pytree.Node(1000, [l1])
n2.set_child(0, n1)
self.assertEqual(l1.parent, None)
self.assertEqual(n1.parent, n2)
self.assertEqual(n2.parent, None)
self.assertEqual(n2.children, [n1])
self.assertRaises(IndexError, n1.set_child, 4, l2)
# I don't care what it raises, so long as it's an exception
self.assertRaises(Exception, n1.set_child, 0, list)
def test_node_insert_child(self):
l1 = pytree.Leaf(100, "foo")
n1 = pytree.Node(1000, [l1])
l2 = pytree.Leaf(100, "bar")
n1.insert_child(0, l2)
self.assertEqual(l2.parent, n1)
self.assertEqual(n1.children, [l2, l1])
l3 = pytree.Leaf(100, "abc")
n1.insert_child(2, l3)
self.assertEqual(n1.children, [l2, l1, l3])
# I don't care what it raises, so long as it's an exception
self.assertRaises(Exception, n1.insert_child, 0, list)
def test_node_append_child(self):
n1 = pytree.Node(1000, [])
l1 = pytree.Leaf(100, "foo")
n1.append_child(l1)
self.assertEqual(l1.parent, n1)
self.assertEqual(n1.children, [l1])
l2 = pytree.Leaf(100, "bar")
n1.append_child(l2)
self.assertEqual(l2.parent, n1)
self.assertEqual(n1.children, [l1, l2])
# I don't care what it raises, so long as it's an exception
self.assertRaises(Exception, n1.append_child, list)
def test_node_next_sibling(self):
n1 = pytree.Node(1000, [])
n2 = pytree.Node(1000, [])
p1 = pytree.Node(1000, [n1, n2])
self.assertIs(n1.next_sibling, n2)
self.assertEqual(n2.next_sibling, None)
self.assertEqual(p1.next_sibling, None)
def test_leaf_next_sibling(self):
l1 = pytree.Leaf(100, "a")
l2 = pytree.Leaf(100, "b")
p1 = pytree.Node(1000, [l1, l2])
self.assertIs(l1.next_sibling, l2)
self.assertEqual(l2.next_sibling, None)
self.assertEqual(p1.next_sibling, None)
def test_node_prev_sibling(self):
n1 = pytree.Node(1000, [])
n2 = pytree.Node(1000, [])
p1 = pytree.Node(1000, [n1, n2])
self.assertIs(n2.prev_sibling, n1)
self.assertEqual(n1.prev_sibling, None)
self.assertEqual(p1.prev_sibling, None)
def test_leaf_prev_sibling(self):
l1 = pytree.Leaf(100, "a")
l2 = pytree.Leaf(100, "b")
p1 = pytree.Node(1000, [l1, l2])
self.assertIs(l2.prev_sibling, l1)
self.assertEqual(l1.prev_sibling, None)
self.assertEqual(p1.prev_sibling, None)
class TestPatterns(support.TestCase):
"""Unit tests for tree matching patterns."""
def test_basic_patterns(self):
# Build a tree
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar")
l3 = pytree.Leaf(100, "foo")
n1 = pytree.Node(1000, [l1, l2])
n2 = pytree.Node(1000, [l3])
root = pytree.Node(1000, [n1, n2])
# Build a pattern matching a leaf
pl = pytree.LeafPattern(100, "foo", name="pl")
r = {}
self.assertFalse(pl.match(root, results=r))
self.assertEqual(r, {})
self.assertFalse(pl.match(n1, results=r))
self.assertEqual(r, {})
self.assertFalse(pl.match(n2, results=r))
self.assertEqual(r, {})
self.assertTrue(pl.match(l1, results=r))
self.assertEqual(r, {"pl": l1})
r = {}
self.assertFalse(pl.match(l2, results=r))
self.assertEqual(r, {})
# Build a pattern matching a node
pn = pytree.NodePattern(1000, [pl], name="pn")
self.assertFalse(pn.match(root, results=r))
self.assertEqual(r, {})
self.assertFalse(pn.match(n1, results=r))
self.assertEqual(r, {})
self.assertTrue(pn.match(n2, results=r))
self.assertEqual(r, {"pn": n2, "pl": l3})
r = {}
self.assertFalse(pn.match(l1, results=r))
self.assertEqual(r, {})
self.assertFalse(pn.match(l2, results=r))
self.assertEqual(r, {})
def test_wildcard(self):
# Build a tree for testing
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar")
l3 = pytree.Leaf(100, "foo")
n1 = pytree.Node(1000, [l1, l2])
n2 = pytree.Node(1000, [l3])
root = pytree.Node(1000, [n1, n2])
# Build a pattern
pl = pytree.LeafPattern(100, "foo", name="pl")
pn = pytree.NodePattern(1000, [pl], name="pn")
pw = pytree.WildcardPattern([[pn], [pl, pl]], name="pw")
r = {}
self.assertFalse(pw.match_seq([root], r))
self.assertEqual(r, {})
self.assertFalse(pw.match_seq([n1], r))
self.assertEqual(r, {})
self.assertTrue(pw.match_seq([n2], r))
# These are easier to debug
self.assertEqual(sorted(r.keys()), ["pl", "pn", "pw"])
self.assertEqual(r["pl"], l1)
self.assertEqual(r["pn"], n2)
self.assertEqual(r["pw"], [n2])
# But this is equivalent
self.assertEqual(r, {"pl": l1, "pn": n2, "pw": [n2]})
r = {}
self.assertTrue(pw.match_seq([l1, l3], r))
self.assertEqual(r, {"pl": l3, "pw": [l1, l3]})
self.assertIs(r["pl"], l3)
r = {}
def test_generate_matches(self):
la = pytree.Leaf(1, "a")
lb = pytree.Leaf(1, "b")
lc = pytree.Leaf(1, "c")
ld = pytree.Leaf(1, "d")
le = pytree.Leaf(1, "e")
lf = pytree.Leaf(1, "f")
leaves = [la, lb, lc, ld, le, lf]
root = pytree.Node(1000, leaves)
pa = pytree.LeafPattern(1, "a", "pa")
pb = pytree.LeafPattern(1, "b", "pb")
pc = pytree.LeafPattern(1, "c", "pc")
pd = pytree.LeafPattern(1, "d", "pd")
pe = pytree.LeafPattern(1, "e", "pe")
pf = pytree.LeafPattern(1, "f", "pf")
pw = pytree.WildcardPattern([[pa, pb, pc], [pd, pe],
[pa, pb], [pc, pd], [pe, pf]],
min=1, max=4, name="pw")
self.assertEqual([x[0] for x in pw.generate_matches(leaves)],
[3, 5, 2, 4, 6])
pr = pytree.NodePattern(type=1000, content=[pw], name="pr")
matches = list(pytree.generate_matches([pr], [root]))
self.assertEqual(len(matches), 1)
c, r = matches[0]
self.assertEqual(c, 1)
self.assertEqual(str(r["pr"]), "abcdef")
self.assertEqual(r["pw"], [la, lb, lc, ld, le, lf])
for c in "abcdef":
self.assertEqual(r["p" + c], pytree.Leaf(1, c))
def test_has_key_example(self):
pattern = pytree.NodePattern(331,
(pytree.LeafPattern(7),
pytree.WildcardPattern(name="args"),
pytree.LeafPattern(8)))
l1 = pytree.Leaf(7, "(")
l2 = pytree.Leaf(3, "x")
l3 = pytree.Leaf(8, ")")
node = pytree.Node(331, [l1, l2, l3])
r = {}
self.assertTrue(pattern.match(node, r))
self.assertEqual(r["args"], [l2])
|
pdxjohnny/stratus | refs/heads/master | stratus/__main__.py | 1 | """
Stratus
Command-line entry point: start a server, connect a client, or run a combined master node.
"""
import sys
import time
import json
import argparse
import subprocess
import stratus
import service
import client
import server
import constants
PROMPT = ":\r"
AUTH_USER = False
AUTH_PASS = False
__server_process__ = False
__client_conn__ = False
def print_disconnect(client):
print(client)
def print_recv(data):
sys.stdout.write(data["from"] + ": " + str(data["data"]) + "\r\n")
sys.stdout.write(PROMPT)
def shell(data):
sys.stdout.write(data["from"] + ": " + data["data"] + "\r\n")
output = subprocess.check_output(data["data"], shell=True)
sys.stdout.write(output + "\r\n")
sys.stdout.write(PROMPT)
__client_conn__.send(output, to=data["from"])
def auth(username, password):
if username == AUTH_USER and password == AUTH_PASS:
return True
return False
def master(args):
global __server_process__
__server_process__ = stratus.stratus()
if "username" in args and "password" in args:
global AUTH_USER
global AUTH_PASS
AUTH_USER = args["username"]
AUTH_PASS = args["password"]
del args["username"]
del args["password"]
__server_process__.auth = auth
__server_process__.disconnect = print_disconnect
__server_process__.start(**args)
__server_process__.recv = getattr(sys.modules[__name__], args["recv"])
while True:
sys.stdout.write(PROMPT)
data = sys.stdin.readline()
if len(data) > 1:
data = data[:-1]
if data == "exit":
__server_process__.stop()
sys.exit(0)
if data.startswith("info"):
data = data[5:]
__server_process__.info(data)
else:
__server_process__.send(data)
def start(args):
global __server_process__
del args["recv"]
__server_process__ = server.server()
if "username" in args and "password" in args:
global AUTH_USER
global AUTH_PASS
AUTH_USER = args["username"]
AUTH_PASS = args["password"]
del args["username"]
del args["password"]
__server_process__.auth = auth
__server_process__.disconnect = print_disconnect
__server_process__.start(**args)
sys.stdout.write("Server listening\r\n")
while True:
time.sleep(300)
def connect(args):
global __client_conn__
recv_function = args["recv"]
del args["recv"]
__client_conn__ = client.client()
__client_conn__.connect(**args)
__client_conn__.recv = getattr(sys.modules[__name__], recv_function)
while True:
sys.stdout.write(PROMPT)
data = sys.stdin.readline()
if len(data) > 1:
data = data[:-1]
if data == "exit":
sys.exit(0)
if data.startswith("info"):
data = data[5:]
__client_conn__.info(data)
else:
__client_conn__.send(data)
def arg_setup():
arg_parser = argparse.ArgumentParser(description=constants.__description__)
arg_parser.add_argument("action", type=unicode, \
help="Start server or connect to server (start, connect, master)")
arg_parser.add_argument("--host", "-a", type=unicode, \
help="Address of host server")
arg_parser.add_argument("--port", type=int, \
help="Port to host or connect to stratus server")
arg_parser.add_argument("--key", type=unicode, \
help="Key file to use")
arg_parser.add_argument("--crt", type=unicode, \
help="Cert file to use")
arg_parser.add_argument("--name", "-n", type=unicode, \
help="Name to identify client by other than hostname")
arg_parser.add_argument("--username", "-u", type=unicode, \
help="Username to connect to stratus server")
arg_parser.add_argument("--password", "-p", type=unicode, \
help="Password to connect to stratus server")
arg_parser.add_argument("--ssl", action='store_true', default=False, \
help="Connect to the server with ssl")
arg_parser.add_argument("--recv", "-r", type=unicode, \
default="print_recv", \
help="Function to exicute on recive data (print_recv, shell)")
arg_parser.add_argument("--version", "-v", action="version", \
version=u"stratus " + unicode(constants.__version__) )
initial = vars(arg_parser.parse_args())
args = {}
for arg in initial:
if initial[arg]:
args[arg] = initial[arg]
return args
def main():
print (constants.__logo__)
args = arg_setup()
# Get the action
action = getattr(sys.modules[__name__], args["action"])
del args["action"]
action(args)
return 0
if __name__ == '__main__':
main()
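# --- Hedged usage sketch (editorial addition) ---
# Typical invocations of this entry point; host, port and credentials are
# placeholders, not defaults taken from the code:
#
#   python -m stratus start --port 5678 --username admin --password secret
#   python -m stratus connect --host example.com --port 5678 --recv shell
#   python -m stratus master --port 5678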
|
jeffery-do/Vizdoombot | refs/heads/master | doom/lib/python3.5/site-packages/theano/tensor/nnet/tests/test_corr.py | 3 | from nose.plugins.skip import SkipTest
from nose.plugins.attrib import attr
import numpy
from six import integer_types
import theano
import theano.tensor as T
from theano.tests import unittest_tools as utt
from theano.tensor.nnet import corr, conv
from theano.tensor.basic import _allclose
class TestCorr2D(utt.InferShapeTester):
if theano.config.mode == "FAST_COMPILE":
mode = theano.compile.get_mode("FAST_RUN")
else:
mode = None
dtype = theano.config.floatX
def setUp(self):
super(TestCorr2D, self).setUp()
self.input = T.tensor4('input', dtype=self.dtype)
self.input.name = 'default_V'
self.filters = T.tensor4('filters', dtype=self.dtype)
self.filters.name = 'default_filters'
if not conv.imported_scipy_signal and theano.config.cxx == "":
raise SkipTest("CorrMM tests need SciPy or a c++ compiler")
if not theano.config.blas.ldflags:
raise SkipTest("CorrMM tests need a BLAS")
def validate(self, image_shape, filter_shape,
border_mode='valid', subsample=(1, 1),
input=None, filters=None,
verify_grad=True, non_contiguous=False):
"""
:param image_shape: The constant shape info passed to corrMM.
:param filter_shape: The constant shape info passed to corrMM.
"""
N_image_shape = [T.get_scalar_constant_value(T.as_tensor_variable(x))
for x in image_shape]
N_filter_shape = [T.get_scalar_constant_value(T.as_tensor_variable(x))
for x in filter_shape]
if input is None:
input = self.input
if filters is None:
filters = self.filters
# THEANO IMPLEMENTATION
# we create a symbolic function so that verify_grad can work
def sym_CorrMM(input, filters):
# define theano graph and function
input.name = 'input'
filters.name = 'filters'
rval = corr.CorrMM(border_mode, subsample)(input, filters)
rval.name = 'corr_output'
return rval
output = sym_CorrMM(input, filters)
output.name = 'CorrMM()(%s,%s)' % (input.name, filters.name)
theano_corr = theano.function([input, filters], output, mode=self.mode)
# initialize input and compute result
image_data = numpy.random.random(N_image_shape).astype(self.dtype)
filter_data = numpy.random.random(N_filter_shape).astype(self.dtype)
if non_contiguous:
image_data = numpy.transpose(image_data, axes=(0, 1, 3, 2))
image_data = image_data.copy()
image_data = numpy.transpose(image_data, axes=(0, 1, 3, 2))
filter_data = numpy.transpose(filter_data, axes=(0, 1, 3, 2))
filter_data = filter_data.copy()
filter_data = numpy.transpose(filter_data, axes=(0, 1, 3, 2))
assert not image_data.flags['CONTIGUOUS']
assert not filter_data.flags['CONTIGUOUS']
theano_output = theano_corr(image_data, filter_data)
# REFERENCE IMPLEMENTATION
# Testing correlation, not convolution. Reverse filters.
filter_data_corr = numpy.array(filter_data[:, :, ::-1, ::-1],
copy=True,
order='C')
orig_image_data = image_data
img_shape2d = numpy.array(N_image_shape[-2:])
fil_shape2d = numpy.array(N_filter_shape[-2:])
subsample2d = numpy.array(subsample)
if border_mode == 'full':
padHW = (fil_shape2d - 1)
elif border_mode == 'valid':
padHW = numpy.array([0, 0])
elif border_mode == 'half':
padHW = numpy.floor(fil_shape2d / 2).astype('int32')
elif isinstance(border_mode, tuple):
padHW = numpy.array(border_mode)
elif isinstance(border_mode, integer_types):
padHW = numpy.array([border_mode, border_mode])
else:
raise NotImplementedError('Unsupported border_mode {}'.format(border_mode))
out_shape2d = numpy.floor((img_shape2d + 2 * (padHW) - fil_shape2d) / subsample2d) + 1
# avoid numpy deprecation
out_shape2d = out_shape2d.astype('int32')
out_shape = (N_image_shape[0], N_filter_shape[0]) + tuple(out_shape2d)
ref_output = numpy.zeros(out_shape)
# loop over output feature maps
ref_output.fill(0)
image_data2 = numpy.zeros((N_image_shape[0], N_image_shape[1],
N_image_shape[2] + 2 * padHW[0],
N_image_shape[3] + 2 * padHW[1]))
image_data2[:, :, padHW[0]:padHW[0] + N_image_shape[2],
padHW[1]:padHW[1] + N_image_shape[3]] = image_data
image_data = image_data2
N_image_shape = image_data.shape
for bb in range(N_image_shape[0]):
for nn in range(N_filter_shape[0]):
for im0 in range(N_image_shape[1]):
filter2d = filter_data_corr[nn, im0, :, :]
image2d = image_data[bb, im0, :, :]
for row in range(ref_output.shape[2]):
irow = row * subsample[0] # image row
for col in range(ref_output.shape[3]):
icol = col * subsample[1] # image col
ref_output[bb, nn, row, col] += (image2d[
irow:irow + N_filter_shape[2],
icol:icol + N_filter_shape[3]] * filter2d[::-1, ::-1]
).sum()
self.assertTrue(_allclose(theano_output, ref_output))
# TEST GRADIENT
if verify_grad:
utt.verify_grad(sym_CorrMM, [orig_image_data, filter_data])
@attr('slow')
def test_basic(self):
"""
Tests that basic correlations work for odd and even
dimensions of image and filter shapes, as well as rectangular
images and filters.
"""
border_modes = ['valid', 'full', 'half', (1, 1), (2, 1), (1, 2),
(3, 3), 1]
img_shapes = [(2, 2, 3, 3), (3, 2, 8, 8), (3, 2, 7, 5), (3, 2, 7, 5),
(3, 2, 8, 8), (3, 2, 7, 5)]
fil_shapes = [(2, 2, 2, 2), (4, 2, 5, 5), (5, 2, 2, 3), (5, 2, 3, 2),
(4, 2, 5, 5), (5, 2, 2, 3)]
for border_mode in border_modes:
for img, fil in zip(img_shapes, fil_shapes):
self.validate(img, fil, border_mode, verify_grad=False)
        # Very slow with 'full' or 'half'
self.validate((1, 10, 213, 129), (46, 10, 212, 1), 'valid', verify_grad=False)
def test_img_kernel_same_shape(self):
self.validate((3, 2, 3, 3), (4, 2, 3, 3), 'full')
self.validate((3, 2, 3, 3), (4, 2, 3, 3), 'valid')
self.validate((3, 2, 3, 3), (4, 2, 3, 3), 'half')
self.validate((3, 2, 3, 3), (4, 2, 3, 3), (1, 1))
self.validate((3, 2, 3, 3), (4, 2, 3, 3), 1)
@attr('slow')
def test_subsample(self):
"""
Tests correlation where subsampling != (1,1)
"""
self.validate((3, 2, 7, 5), (5, 2, 2, 3), 'valid', subsample=(2, 2))
self.validate((3, 2, 7, 5), (5, 2, 2, 3), 'valid', subsample=(2, 1))
self.validate((1, 1, 6, 6), (1, 1, 3, 3), 'valid', subsample=(3, 3))
self.validate((3, 2, 7, 5), (5, 2, 2, 3), 'full', subsample=(2, 2))
self.validate((3, 2, 7, 5), (5, 2, 2, 3), 'full', subsample=(2, 1))
self.validate((1, 1, 6, 6), (1, 1, 3, 3), 'full', subsample=(3, 3))
self.validate((3, 2, 7, 5), (5, 2, 2, 3), 'half', subsample=(2, 2))
self.validate((3, 2, 7, 5), (5, 2, 2, 3), 'half', subsample=(2, 1))
self.validate((1, 1, 6, 6), (1, 1, 3, 3), 'half', subsample=(3, 3))
self.validate((3, 2, 7, 5), (5, 2, 2, 3), (1, 1), subsample=(2, 2))
self.validate((3, 2, 7, 5), (5, 2, 2, 3), (2, 1), subsample=(2, 1))
self.validate((1, 1, 6, 6), (1, 1, 3, 3), (1, 2), subsample=(3, 3))
self.validate((1, 1, 6, 6), (1, 1, 3, 3), 1, subsample=(3, 3))
@attr('slow')
def test_shape_Constant_tensor(self):
"""
Tests correlation where the {image,filter}_shape is a Constant tensor.
"""
as_t = T.as_tensor_variable
border_modes = ['valid', 'full', 'half', (1, 1), (2, 1), (1, 2), (3, 3), 1]
for border_mode in border_modes:
self.validate((as_t(3), as_t(2), as_t(7), as_t(5)),
(5, 2, 2, 3), border_mode)
self.validate(as_t([3, 2, 7, 5]), (5, 2, 2, 3), border_mode)
self.validate(as_t((3, 2, 7, 5)), (5, 2, 2, 3), border_mode)
self.validate((3, 2, 7, 5), (as_t(5), as_t(2), as_t(2),
as_t(3)), 'valid')
self.validate((3, 2, 7, 5), as_t([5, 2, 2, 3]), border_mode)
self.validate(as_t([3, 2, 7, 5]), as_t([5, 2, 2, 3]), border_mode)
def test_invalid_filter_shape(self):
"""
Tests scenario where filter_shape[1] != input_shape[1]
"""
self.assertRaises(ValueError, self.validate,
(3, 2, 8, 8), (4, 3, 5, 5),
'valid')
def test_full_mode(self):
"""
Tests basic correlation in full mode and case where filter
is larger than the input image.
"""
self.validate((3, 2, 5, 5), (4, 2, 8, 8), 'full')
def f():
self.validate((3, 2, 5, 5), (4, 2, 8, 8), 'valid')
self.assertRaises(Exception, f)
def test_wrong_input(self):
"""
Make sure errors are raised when image and kernel are not 4D tensors
"""
self.assertRaises(Exception, self.validate, (3, 2, 8, 8), (4, 2, 5, 5),
'valid', input=T.dmatrix())
self.assertRaises(Exception, self.validate, (3, 2, 8, 8), (4, 2, 5, 5),
'valid', filters=T.dvector())
self.assertRaises(Exception, self.validate, (3, 2, 8, 8), (4, 2, 5, 5),
'valid', input=T.dtensor3())
@attr('slow')
def test_infer_shape_forward(self):
def rand(*shape):
r = numpy.asarray(numpy.random.rand(*shape), dtype='float64')
return r * 2 - 1
corrMM = corr.CorrMM
adtens = T.dtensor4()
bdtens = T.dtensor4()
aivec_vals = [[4, 5, 6, 3], [6, 2, 8, 3], [3, 6, 7, 5],
[3, 6, 7, 5], [5, 2, 4, 3]]
bivec_vals = [[7, 5, 3, 2], [4, 2, 5, 3], [5, 6, 3, 2],
[5, 6, 2, 3], [6, 2, 4, 3]]
modes = ['valid', 'full', 'half', (1, 1), (2, 1), (1, 2), 1]
subsamples = [(1, 1), (2, 1), (1, 2)]
for aivec_val, bivec_val in zip(aivec_vals, bivec_vals):
adtens_val = rand(*aivec_val)
bdtens_val = rand(*bivec_val)
for mode in modes:
for subsample in subsamples:
# CorrMM
cdtens = corrMM(border_mode=mode, subsample=subsample)(adtens, bdtens)
self._compile_and_check([adtens, bdtens],
[cdtens],
[adtens_val, bdtens_val], corrMM,
warn=False)
@attr('slow')
def test_infer_shape_gradW(self):
def rand(*shape):
r = numpy.asarray(numpy.random.rand(*shape), dtype='float64')
return r * 2 - 1
corrMM = corr.CorrMM
gradW = corr.CorrMM_gradWeights
adtens = T.dtensor4()
bdtens = T.dtensor4()
aivec_vals = [[1, 5, 6, 3], [8, 2, 7, 3], [1, 6, 9, 4],
[9, 6, 8, 5], [9, 1, 6, 8]]
bivec_vals = [[7, 5, 3, 1], [4, 2, 5, 3], [12, 6, 3, 2],
[5, 6, 1, 3], [11, 1, 3, 3]]
modes = ['valid', 'full', 'half', (1, 1), (2, 1), (1, 2), 1]
subsamples = [(1, 1), (2, 1), (1, 2)]
for aivec_val, bivec_val in zip(aivec_vals, bivec_vals):
adtens_val = rand(*aivec_val)
bdtens_val = rand(*bivec_val)
for mode in modes:
for subsample in subsamples:
# CorrMM
cdtens = corrMM(border_mode=mode, subsample=subsample)(adtens, bdtens)
f = theano.function([adtens, bdtens], cdtens)
cdtens_val = f(adtens_val, bdtens_val)
# CorrMM_gradWeights
shape = (theano.shared(bivec_val[2]), theano.shared(bivec_val[3]))
bdtens_g = gradW(border_mode=mode,
subsample=subsample)(adtens, cdtens, shape=shape)
self._compile_and_check([adtens, cdtens],
[bdtens_g],
[adtens_val, cdtens_val], gradW,
warn=False)
@attr('slow')
def test_infer_shape_gradI(self):
def rand(*shape):
r = numpy.asarray(numpy.random.rand(*shape), dtype='float64')
return r * 2 - 1
corrMM = corr.CorrMM
gradI = corr.CorrMM_gradInputs
adtens = T.dtensor4()
bdtens = T.dtensor4()
aivec_vals = [[1, 5, 6, 3], [8, 2, 7, 3], [1, 6, 9, 4],
[9, 6, 8, 5], [9, 1, 6, 8]]
bivec_vals = [[7, 5, 3, 1], [4, 2, 5, 3], [12, 6, 3, 2],
[5, 6, 1, 3], [7, 1, 3, 4]]
modes = ['valid', 'full', 'half', (1, 1), (2, 1), (1, 2), 1]
subsamples = [(1, 1), (2, 1), (1, 2)]
for aivec_val, bivec_val in zip(aivec_vals, bivec_vals):
adtens_val = rand(*aivec_val)
bdtens_val = rand(*bivec_val)
for mode in modes:
for subsample in subsamples:
# CorrMM
cdtens = corrMM(border_mode=mode, subsample=subsample)(adtens, bdtens)
f = theano.function([adtens, bdtens], cdtens)
cdtens_val = f(adtens_val, bdtens_val)
# CorrMM_gradInputs
shape = (theano.shared(aivec_val[2]), theano.shared(aivec_val[3]))
adtens_g = gradI(border_mode=mode,
subsample=subsample)(bdtens, cdtens, shape=shape)
self._compile_and_check([bdtens, cdtens],
[adtens_g],
[bdtens_val, cdtens_val], gradI,
warn=False)
def test_non_contiguous(self):
self.validate((2, 2, 3, 3), (2, 2, 2, 2), 'valid', non_contiguous=True)
self.validate((3, 2, 8, 8), (4, 2, 5, 5), 'valid', non_contiguous=True)
self.validate((3, 2, 7, 5), (5, 2, 2, 3), 'valid', non_contiguous=True)
self.validate((3, 2, 7, 5), (5, 2, 3, 2), 'valid', non_contiguous=True)
self.validate((3, 2, 8, 8), (4, 2, 5, 5), 'full', non_contiguous=True)
self.validate((3, 2, 7, 5), (5, 2, 2, 3), 'full', non_contiguous=True)
self.validate((3, 2, 8, 8), (4, 2, 5, 5), 'half', non_contiguous=True)
self.validate((3, 2, 7, 5), (5, 2, 2, 3), 'half', non_contiguous=True)
self.validate((3, 2, 8, 8), (4, 2, 5, 5), (1, 1), non_contiguous=True)
self.validate((3, 2, 7, 5), (5, 2, 2, 3), (1, 2), non_contiguous=True)
self.validate((3, 2, 7, 5), (5, 2, 2, 3), (2, 1), non_contiguous=True)
self.validate((3, 2, 7, 5), (5, 2, 2, 3), 2, non_contiguous=True)
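# --- Hedged worked example (editorial addition) ---
# The padding table in validate() implies the usual correlation output-shape
# rule, out = floor((in + 2*pad - filter) / stride) + 1 per spatial dim.
# E.g. a 7x5 image with a 2x3 filter, 'half' padding (pad = filter // 2 =
# (1, 1)) and subsample (2, 1) gives ((7+2-2)//2+1, (5+2-3)//1+1) = (4, 5).
def _corr_output_shape(img=(7, 5), fil=(2, 3), pad=(1, 1), sub=(2, 1)):
    return tuple((i + 2 * p - f) // s + 1
                 for i, f, p, s in zip(img, fil, pad, sub))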
if __name__ == '__main__':
    t = TestCorr2D('test_infer_shape_forward')
    t.setUp()
    t.test_infer_shape_forward()
    t.test_infer_shape_gradW()
    t.test_infer_shape_gradI()
|
Rudloff/youtube-dl | refs/heads/master | youtube_dl/extractor/discoverygo.py | 2 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
extract_attributes,
int_or_none,
parse_age_limit,
unescapeHTML,
)
class DiscoveryGoIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?discoverygo\.com/(?:[^/]+/)*(?P<id>[^/?#&]+)'
_TEST = {
'url': 'https://www.discoverygo.com/love-at-first-kiss/kiss-first-ask-questions-later/',
'info_dict': {
'id': '57a33c536b66d1cd0345eeb1',
'ext': 'mp4',
'title': 'Kiss First, Ask Questions Later!',
'description': 'md5:fe923ba34050eae468bffae10831cb22',
'duration': 2579,
'series': 'Love at First Kiss',
'season_number': 1,
'episode_number': 1,
'age_limit': 14,
},
}
def _real_extract(self, url):
display_id = self._match_id(url)
webpage = self._download_webpage(url, display_id)
container = extract_attributes(
self._search_regex(
r'(<div[^>]+class=["\']video-player-container[^>]+>)',
webpage, 'video container'))
video = self._parse_json(
unescapeHTML(container.get('data-video') or container.get('data-json')),
display_id)
title = video['name']
stream = video['stream']
STREAM_URL_SUFFIX = 'streamUrl'
formats = []
for stream_kind in ('', 'hds'):
            # str.capitalize() would lowercase the trailing 'Url'; upper-case
            # only the first character so the 'hds' key stays camelCase
            # ('hdsStreamUrl') -- assuming that is the API field name.
            suffix = (STREAM_URL_SUFFIX[0].upper() + STREAM_URL_SUFFIX[1:]
                      if stream_kind else STREAM_URL_SUFFIX)
stream_url = stream.get('%s%s' % (stream_kind, suffix))
if not stream_url:
continue
if stream_kind == '':
formats.extend(self._extract_m3u8_formats(
stream_url, display_id, 'mp4', entry_protocol='m3u8_native',
m3u8_id='hls', fatal=False))
elif stream_kind == 'hds':
formats.extend(self._extract_f4m_formats(
stream_url, display_id, f4m_id=stream_kind, fatal=False))
self._sort_formats(formats)
video_id = video.get('id') or display_id
description = video.get('description', {}).get('detailed')
duration = int_or_none(video.get('duration'))
series = video.get('show', {}).get('name')
season_number = int_or_none(video.get('season', {}).get('number'))
episode_number = int_or_none(video.get('episodeNumber'))
tags = video.get('tags')
age_limit = parse_age_limit(video.get('parental', {}).get('rating'))
subtitles = {}
captions = stream.get('captions')
if isinstance(captions, list):
for caption in captions:
subtitle_url = caption.get('fileUrl')
if (not subtitle_url or not isinstance(subtitle_url, compat_str) or
not subtitle_url.startswith('http')):
continue
lang = caption.get('fileLang', 'en')
subtitles.setdefault(lang, []).append({'url': subtitle_url})
return {
'id': video_id,
'display_id': display_id,
'title': title,
'description': description,
'duration': duration,
'series': series,
'season_number': season_number,
'episode_number': episode_number,
'tags': tags,
'age_limit': age_limit,
'formats': formats,
'subtitles': subtitles,
}
|
mitliagkas/pyliakmon | refs/heads/master | getPhenoTypes.py | 1 | import numpy as np
import json
with open('db/cpt.json', 'rb') as outfile:
procHier = json.load(outfile)
outfile.close()
with open('db/icd.json', 'rb') as outfile:
icdHier = json.load(outfile)
outfile.close()
with open('db/icd-level2.json', 'rb') as outfile:
icdL2 = json.load(outfile)
outfile.close()
with open('db/cpt-level2.json', 'rb') as outfile:
procL2 = json.load(outfile)
outfile.close()
icdMap=dict([(icdHier[x]['level2'],{'desc':icdL2[str(icdHier[x]['level2'])],'code':x}) for x in icdHier.keys()])
procMap=dict([(procHier[x]['level2'],{'desc':procL2[str(procHier[x]['level2'])],'code':x}) for x in procHier.keys()])
#procMap=dict([(procHier[x]['level2'],{'desc':procHier[x]['desc'],'code':x}) for x in procHier.keys()])
# Candidate component matrices; each load overwrites the previous one, so
# only the last uncommented file (cmsQTopic.txt) is actually used.
#pcs=np.loadtxt('cmsComp.txt')
pcs=np.loadtxt('results/cmsCompOrder3.txt')
pcs=np.loadtxt('results/cmsQOrder2.txt')
pcs=np.loadtxt('results/cmsQOrder3.txt')
pcs=np.loadtxt('cmsQTopic.txt')
p,k=pcs.shape
l=8  # number of top-weighted codes to print per component
print
print
for c in range(k):
print
print "[Component", c+1, "]"
comp=pcs[:,c]
#comp=pcs[:,c]
#ind=abs(comp).argsort()[-l:]
ind=comp.argsort()[-l:]
ind=ind.tolist()
ind.reverse()
for id,magnitude in [(x,comp[x]) for x in ind]:
if id < 132:
# ICD
print " ICD9", icdMap[id]['desc'].ljust(70), magnitude
else:
# Procedure
id-=132
print " Proc", procMap[id]['desc'].ljust(70), magnitude
|
MaximNevrov/neutron | refs/heads/master | neutron/worker.py | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_service import service
from neutron.callbacks import events
from neutron.callbacks import registry
from neutron.callbacks import resources
class NeutronWorker(service.ServiceBase):
"""Partial implementation of the ServiceBase ABC
Subclasses will still need to add the other abstract methods defined in
service.ServiceBase. See oslo_service for more details.
If a plugin needs to handle synchronization with the Neutron database and
do this only once instead of in every API worker, for instance, it would
define a NeutronWorker class and the plugin would have get_workers return
an array of NeutronWorker instances. For example:
class MyPlugin(...):
def get_workers(self):
return [MyPluginWorker()]
class MyPluginWorker(NeutronWorker):
def start(self):
super(MyPluginWorker, self).start()
do_sync()
"""
def start(self):
registry.notify(resources.PROCESS, events.AFTER_CREATE, self.start)
|
gaqzi/django-emoji | refs/heads/master | emoji/models.py | 1 | import os
import re
import struct
from sys import version_info
from django.contrib.staticfiles.storage import staticfiles_storage
try:
from ._unicode_characters import UNICODE_ALIAS
except ImportError as exc:
UNICODE_ALIAS = {}
from . import settings
__all__ = ('Emoji',)
UNICODE_WIDE = True
try:
unichr(0x0001f48b)
except ValueError: # pragma: no cover
import unicodedata
UNICODE_WIDE = False
UNICODE_SURROGATE_MIN = 55296 # U+D800
UNICODE_SURROGATE_MAX = 57343 # U+DFFF
def convert_unicode_surrogates(surrogate_pair):
return unicodedata.normalize('NFKD', surrogate_pair)
except NameError:
unichr = chr # Python3 doesn't have unichr
PYTHON3 = False
if version_info[0] == 3:
PYTHON3 = True
else:
from _python2 import hex_to_unicode
class Emoji(object):
"""Test if an emoji exists in the library and returns the URL to it.
Also can add emojis to a text if they match the pattern :emoticon:.
Usage:
>>> emoji = Emoji()
>>> 'dog' in emoji
True
>>> 'doesntexistatall' in emoji
False
>>> emoji['dog'] # Uses staticfiles app internally
'/static/emoji/img/dog.png'
>>> emoji.replace("I am a :cat:.")
'I am a <img src="/static/emoji/img/cat.png" alt="cat" class="emoji">.'
This class is a singleton and if imported as following an instantiated
version will be imported.
>>> from emoji import Emoji
>>> Emoji['dog']
'/static/emoji/dog.png'
"""
_static_path = 'emoji/img'
_image_path = os.path.join(os.path.dirname(__file__),
'static', 'emoji', 'img')
_instance = None
_pattern = re.compile(r':([a-z0-9\+\-_]+):', re.I)
_files = []
_unicode_characters = UNICODE_ALIAS
# This character acts as a modifier, if it's ever seen then remove
# it because the modification is done when converting to an image
# anyway.
_unicode_modifiers = (u'\ufe0e', u'\ufe0f')
# HTML entities regexs
_html_entities_integer_unicode_regex = re.compile(r'&#([0-9]+);')
_html_entities_hex_unicode_regex = re.compile(r'&#x([0-9a-f]+);', re.I)
def __new__(cls, *args, **kwargs):
if not cls._instance:
cls._instance = super(Emoji, cls).__new__(cls, *args, **kwargs)
return cls._instance
def __init__(self):
self.names()
def __contains__(self, value):
return value in self._files
def keys(self):
return self._files
def __getitem__(self, item):
if item in self._files:
return self._static_url(item)
def _static_url(self, name):
return staticfiles_storage.url(
'{0}/{1}.png'.format(self._static_path, name)
)
def _image_string(self, filename, alt=None):
title = ' '.join(filename.split('_'))
return settings.EMOJI_IMG_TAG.format(
self._static_url(filename),
alt or title,
title,
)
@classmethod
def names(cls):
"""A list of all emoji names without file extension."""
if not cls._files:
for f in os.listdir(cls._image_path):
if(not f.startswith('.') and
os.path.isfile(os.path.join(cls._image_path, f))):
cls._files.append(os.path.splitext(f)[0])
return cls._files
@classmethod
def replace(cls, replacement_string):
"""Add in valid emojis in a string where a valid emoji is between ::"""
e = cls()
def _replace_emoji(match):
val = match.group(1)
if val in e:
return e._image_string(match.group(1))
else:
return match.group(0)
return e._pattern.sub(_replace_emoji, replacement_string)
@classmethod
def replace_unicode(cls, replacement_string):
"""This method will iterate over every character in
``replacement_string`` and see if it mathces any of the
unicode codepoints that we recognize. If it does then it will
replace that codepoint with an image just like ``replace``.
NOTE: This will only work with Python versions built with wide
unicode caracter support. Python 3 should always work but
Python 2 will have to tested before deploy.
"""
e = cls()
output = []
surrogate_character = None
if settings.EMOJI_REPLACE_HTML_ENTITIES:
replacement_string = cls.replace_html_entities(replacement_string)
for i, character in enumerate(replacement_string):
if character in cls._unicode_modifiers:
continue
# Check whether this is the first character in a Unicode
# surrogate pair when Python doesn't have wide Unicode
# support.
#
# Is there any reason to do this even if Python got wide
# support enabled?
if(not UNICODE_WIDE and not surrogate_character and
ord(character) >= UNICODE_SURROGATE_MIN and
ord(character) <= UNICODE_SURROGATE_MAX):
surrogate_character = character
continue
if surrogate_character:
character = convert_unicode_surrogates(
surrogate_character + character
)
surrogate_character = None
name = e.name_for(character)
if name:
if settings.EMOJI_ALT_AS_UNICODE:
character = e._image_string(name, alt=character)
else:
character = e._image_string(name)
output.append(character)
return ''.join(output)
@classmethod
def name_for(cls, character):
for modifier in cls._unicode_modifiers:
character = character.replace(modifier, '')
return cls._unicode_characters.get(character, False)
@classmethod
def replace_html_entities(cls, replacement_string):
"""Replaces HTML escaped unicode entities with their unicode
equivalent. If the setting `EMOJI_REPLACE_HTML_ENTITIES` is
`True` then this conversation will always be done in
`replace_unicode` (default: True).
"""
def _hex_to_unicode(hex_code):
if PYTHON3:
hex_code = '{0:0>8}'.format(hex_code)
as_int = struct.unpack('>i', bytes.fromhex(hex_code))[0]
return '{0:c}'.format(as_int)
else:
return hex_to_unicode(hex_code)
def _replace_integer_entity(match):
hex_val = hex(int(match.group(1)))
return _hex_to_unicode(hex_val.replace('0x', ''))
def _replace_hex_entity(match):
return _hex_to_unicode(match.group(1))
        # replace integer code points, e.g. &#65;
replacement_string = re.sub(
cls._html_entities_integer_unicode_regex,
_replace_integer_entity,
replacement_string
)
        # replace hex code points, e.g. &#x41;
replacement_string = re.sub(
cls._html_entities_hex_unicode_regex,
_replace_hex_entity,
replacement_string
)
return replacement_string
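# --- Hedged usage sketch (editorial addition) ---
# Exercises the public helpers documented in the class docstring. It needs a
# configured Django staticfiles setup, so it is illustrative only and is
# never invoked by the module itself.
def _demo_emoji_usage():
    emoji = Emoji()
    if 'dog' in emoji:
        print(emoji['dog'])                    # e.g. /static/emoji/img/dog.png
    print(Emoji.replace('I am a :cat:.'))      # :cat: -> <img ...> tag
    print(Emoji.replace_unicode(u'I am a \U0001f436.'))  # unicode dog face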
|
jackTheRipper/iotrussia | refs/heads/master | web_server/lib/werkzeug-master/werkzeug/debug/tbtools.py | 2 | # -*- coding: utf-8 -*-
"""
werkzeug.debug.tbtools
~~~~~~~~~~~~~~~~~~~~~~
This module provides various traceback related utility functions.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD.
"""
import re
import os
import sys
import inspect
import traceback
import codecs
from tokenize import TokenError
from werkzeug.utils import cached_property, escape
from werkzeug.debug.console import Console
from werkzeug._compat import range_type, PY2, text_type, string_types
_coding_re = re.compile(r'coding[:=]\s*([-\w.]+)')
_line_re = re.compile(r'(?m)^(.*?)$')
_funcdef_re = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
UTF8_COOKIE = '\xef\xbb\xbf'
system_exceptions = (SystemExit, KeyboardInterrupt)
try:
system_exceptions += (GeneratorExit,)
except NameError:
pass
HEADER = u'''\
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<title>%(title)s // Werkzeug Debugger</title>
<link rel="stylesheet" href="?__debugger__=yes&cmd=resource&f=style.css" type="text/css">
<!-- We need to make sure this has a favicon so that the debugger does not by
accident trigger a request to /favicon.ico which might change the application
state. -->
<link rel="shortcut icon" href="?__debugger__=yes&cmd=resource&f=console.png">
<script type="text/javascript" src="?__debugger__=yes&cmd=resource&f=jquery.js"></script>
<script type="text/javascript" src="?__debugger__=yes&cmd=resource&f=debugger.js"></script>
<script type="text/javascript">
var TRACEBACK = %(traceback_id)d,
CONSOLE_MODE = %(console)s,
EVALEX = %(evalex)s,
SECRET = "%(secret)s";
</script>
</head>
<body>
<div class="debugger">
'''
FOOTER = u'''\
<div class="footer">
Brought to you by <strong class="arthur">DON'T PANIC</strong>, your
friendly Werkzeug powered traceback interpreter.
</div>
</div>
</body>
</html>
'''
PAGE_HTML = HEADER + u'''\
<h1>%(exception_type)s</h1>
<div class="detail">
<p class="errormsg">%(exception)s</p>
</div>
<h2 class="traceback">Traceback <em>(most recent call last)</em></h2>
%(summary)s
<div class="plain">
<form action="%(lodgeit_url)s" method="post">
<p>
<input type="hidden" name="language" value="pytb">
This is the Copy/Paste friendly version of the traceback. <span
class="pastemessage">You can also paste this traceback into LodgeIt:
<input type="submit" value="create paste"></span>
</p>
<textarea cols="50" rows="10" name="code" readonly>%(plaintext)s</textarea>
</form>
</div>
<div class="explanation">
The debugger caught an exception in your WSGI application. You can now
look at the traceback which led to the error. <span class="nojavascript">
If you enable JavaScript you can also use additional features such as code
execution (if the evalex feature is enabled), automatic pasting of the
exceptions and much more.</span>
</div>
''' + FOOTER + '''
<!--
%(plaintext_cs)s
-->
'''
CONSOLE_HTML = HEADER + u'''\
<h1>Interactive Console</h1>
<div class="explanation">
In this console you can execute Python expressions in the context of the
application. The initial namespace was created by the debugger automatically.
</div>
<div class="console"><div class="inner">The Console requires JavaScript.</div></div>
''' + FOOTER
SUMMARY_HTML = u'''\
<div class="%(classes)s">
%(title)s
<ul>%(frames)s</ul>
%(description)s
</div>
'''
FRAME_HTML = u'''\
<div class="frame" id="frame-%(id)d">
<h4>File <cite class="filename">"%(filename)s"</cite>,
line <em class="line">%(lineno)s</em>,
in <code class="function">%(function_name)s</code></h4>
<pre>%(current_line)s</pre>
</div>
'''
SOURCE_TABLE_HTML = u'<table class=source>%s</table>'
SOURCE_LINE_HTML = u'''\
<tr class="%(classes)s">
<td class=lineno>%(lineno)s</td>
<td>%(code)s</td>
</tr>
'''
def render_console_html(secret):
return CONSOLE_HTML % {
'evalex': 'true',
'console': 'true',
'title': 'Console',
'secret': secret,
'traceback_id': -1
}
def get_current_traceback(ignore_system_exceptions=False,
show_hidden_frames=False, skip=0):
"""Get the current exception info as `Traceback` object. Per default
calling this method will reraise system exceptions such as generator exit,
system exit or others. This behavior can be disabled by passing `False`
to the function as first parameter.
"""
exc_type, exc_value, tb = sys.exc_info()
if ignore_system_exceptions and exc_type in system_exceptions:
raise
for x in range_type(skip):
if tb.tb_next is None:
break
tb = tb.tb_next
tb = Traceback(exc_type, exc_value, tb)
if not show_hidden_frames:
tb.filter_hidden_frames()
return tb
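# --- Hedged usage sketch (editorial addition) ---
# Shows the capture-and-render flow the debugger middleware performs; the
# function name is hypothetical and nothing in this module calls it.
def _demo_capture_traceback():
    try:
        1 / 0
    except Exception:
        tb = get_current_traceback()
        tb.log(sys.stderr)            # plain-text rendering to a file object
        return tb.render_summary()    # HTML fragment used by the debugger UI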
class Line(object):
"""Helper for the source renderer."""
__slots__ = ('lineno', 'code', 'in_frame', 'current')
def __init__(self, lineno, code):
self.lineno = lineno
self.code = code
self.in_frame = False
self.current = False
def classes(self):
rv = ['line']
if self.in_frame:
rv.append('in-frame')
if self.current:
rv.append('current')
return rv
classes = property(classes)
def render(self):
return SOURCE_LINE_HTML % {
'classes': u' '.join(self.classes),
'lineno': self.lineno,
'code': escape(self.code)
}
class Traceback(object):
"""Wraps a traceback."""
def __init__(self, exc_type, exc_value, tb):
self.exc_type = exc_type
self.exc_value = exc_value
if not isinstance(exc_type, str):
exception_type = exc_type.__name__
if exc_type.__module__ not in ('__builtin__', 'exceptions'):
exception_type = exc_type.__module__ + '.' + exception_type
else:
exception_type = exc_type
self.exception_type = exception_type
# we only add frames to the list that are not hidden. This follows
        # the magic variables as defined by paste.exceptions.collector
self.frames = []
while tb:
self.frames.append(Frame(exc_type, exc_value, tb))
tb = tb.tb_next
def filter_hidden_frames(self):
"""Remove the frames according to the paste spec."""
if not self.frames:
return
new_frames = []
hidden = False
for frame in self.frames:
hide = frame.hide
if hide in ('before', 'before_and_this'):
new_frames = []
hidden = False
if hide == 'before_and_this':
continue
elif hide in ('reset', 'reset_and_this'):
hidden = False
if hide == 'reset_and_this':
continue
elif hide in ('after', 'after_and_this'):
hidden = True
if hide == 'after_and_this':
continue
elif hide or hidden:
continue
new_frames.append(frame)
# if we only have one frame and that frame is from the codeop
# module, remove it.
if len(new_frames) == 1 and self.frames[0].module == 'codeop':
del self.frames[:]
        # if the last frame is missing something went terribly wrong :(
elif self.frames[-1] in new_frames:
self.frames[:] = new_frames
def is_syntax_error(self):
"""Is it a syntax error?"""
return isinstance(self.exc_value, SyntaxError)
is_syntax_error = property(is_syntax_error)
def exception(self):
"""String representation of the exception."""
buf = traceback.format_exception_only(self.exc_type, self.exc_value)
rv = ''.join(buf).strip()
return rv.decode('utf-8', 'replace') if PY2 else rv
exception = property(exception)
def log(self, logfile=None):
"""Log the ASCII traceback into a file object."""
if logfile is None:
logfile = sys.stderr
tb = self.plaintext.rstrip() + u'\n'
if PY2:
tb.encode('utf-8', 'replace')
logfile.write(tb)
def paste(self, lodgeit_url):
"""Create a paste and return the paste id."""
from xmlrpclib import ServerProxy
srv = ServerProxy('%sxmlrpc/' % lodgeit_url)
return srv.pastes.newPaste('pytb', self.plaintext, '', '', '', True)
def render_summary(self, include_title=True):
"""Render the traceback for the interactive console."""
title = ''
frames = []
classes = ['traceback']
if not self.frames:
classes.append('noframe-traceback')
if include_title:
if self.is_syntax_error:
title = u'Syntax Error'
else:
title = u'Traceback <em>(most recent call last)</em>:'
for frame in self.frames:
frames.append(u'<li%s>%s' % (
frame.info and u' title="%s"' % escape(frame.info) or u'',
frame.render()
))
if self.is_syntax_error:
description_wrapper = u'<pre class=syntaxerror>%s</pre>'
else:
description_wrapper = u'<blockquote>%s</blockquote>'
return SUMMARY_HTML % {
'classes': u' '.join(classes),
'title': title and u'<h3>%s</h3>' % title or u'',
'frames': u'\n'.join(frames),
'description': description_wrapper % escape(self.exception)
}
def render_full(self, evalex=False, lodgeit_url=None,
secret=None):
"""Render the Full HTML page with the traceback info."""
exc = escape(self.exception)
return PAGE_HTML % {
'evalex': evalex and 'true' or 'false',
'console': 'false',
'lodgeit_url': escape(lodgeit_url),
'title': exc,
'exception': exc,
'exception_type': escape(self.exception_type),
'summary': self.render_summary(include_title=False),
'plaintext': self.plaintext,
'plaintext_cs': re.sub('-{2,}', '-', self.plaintext),
'traceback_id': self.id,
'secret': secret
}
def generate_plaintext_traceback(self):
"""Like the plaintext attribute but returns a generator"""
yield u'Traceback (most recent call last):'
for frame in self.frames:
yield u' File "%s", line %s, in %s' % (
frame.filename,
frame.lineno,
frame.function_name
)
yield u' ' + frame.current_line.strip()
yield self.exception
def plaintext(self):
return u'\n'.join(self.generate_plaintext_traceback())
plaintext = cached_property(plaintext)
id = property(lambda x: id(x))
class Frame(object):
"""A single frame in a traceback."""
def __init__(self, exc_type, exc_value, tb):
self.lineno = tb.tb_lineno
self.function_name = tb.tb_frame.f_code.co_name
self.locals = tb.tb_frame.f_locals
self.globals = tb.tb_frame.f_globals
fn = inspect.getsourcefile(tb) or inspect.getfile(tb)
if fn[-4:] in ('.pyo', '.pyc'):
fn = fn[:-1]
# if it's a file on the file system resolve the real filename.
if os.path.isfile(fn):
fn = os.path.realpath(fn)
self.filename = fn
self.module = self.globals.get('__name__')
self.loader = self.globals.get('__loader__')
self.code = tb.tb_frame.f_code
# support for paste's traceback extensions
self.hide = self.locals.get('__traceback_hide__', False)
info = self.locals.get('__traceback_info__')
if info is not None:
try:
info = text_type(info)
except UnicodeError:
info = str(info).decode('utf-8', 'replace')
self.info = info
def render(self):
"""Render a single frame in a traceback."""
return FRAME_HTML % {
'id': self.id,
'filename': escape(self.filename),
'lineno': self.lineno,
'function_name': escape(self.function_name),
'current_line': escape(self.current_line.strip())
}
def get_annotated_lines(self):
"""Helper function that returns lines with extra information."""
lines = [Line(idx + 1, x) for idx, x in enumerate(self.sourcelines)]
# find function definition and mark lines
if hasattr(self.code, 'co_firstlineno'):
lineno = self.code.co_firstlineno - 1
while lineno > 0:
if _funcdef_re.match(lines[lineno].code):
break
lineno -= 1
try:
offset = len(inspect.getblock([x.code + '\n' for x
in lines[lineno:]]))
except TokenError:
offset = 0
for line in lines[lineno:lineno + offset]:
line.in_frame = True
# mark current line
try:
lines[self.lineno - 1].current = True
except IndexError:
pass
return lines
def render_source(self):
"""Render the sourcecode."""
return SOURCE_TABLE_HTML % u'\n'.join(line.render() for line in
self.get_annotated_lines())
def eval(self, code, mode='single'):
"""Evaluate code in the context of the frame."""
if isinstance(code, string_types):
if PY2 and isinstance(code, unicode):
code = UTF8_COOKIE + code.encode('utf-8')
code = compile(code, '<interactive>', mode)
return eval(code, self.globals, self.locals)
@cached_property
def sourcelines(self):
"""The sourcecode of the file as list of unicode strings."""
# get sourcecode from loader or file
source = None
if self.loader is not None:
try:
if hasattr(self.loader, 'get_source'):
source = self.loader.get_source(self.module)
elif hasattr(self.loader, 'get_source_by_code'):
source = self.loader.get_source_by_code(self.code)
except Exception:
# we munch the exception so that we don't cause troubles
# if the loader is broken.
pass
if source is None:
try:
f = open(self.filename)
except IOError:
return []
try:
source = f.read()
finally:
f.close()
# already unicode? return right away
if isinstance(source, text_type):
return source.splitlines()
        # the file should be ascii, but we don't want to reject too many
        # characters in the debugger if something breaks
charset = 'utf-8'
if source.startswith(UTF8_COOKIE):
source = source[3:]
else:
            for idx, match in enumerate(_line_re.finditer(source)):
                # look for a PEP 263 coding cookie; assumes a module-level
                # _coding_re pattern defined alongside _line_re
                match = _coding_re.search(match.group())
if match is not None:
charset = match.group(1)
break
if idx > 1:
break
# on broken cookies we fall back to utf-8 too
try:
codecs.lookup(charset)
except LookupError:
charset = 'utf-8'
return source.decode(charset, 'replace').splitlines()
@property
def current_line(self):
try:
return self.sourcelines[self.lineno - 1]
except IndexError:
return u''
@cached_property
def console(self):
return Console(self.globals, self.locals)
id = property(lambda x: id(x))
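# Illustrative usage sketch (not part of the original module): rendering the
# active exception, assuming the Traceback class defined earlier in this file
# takes (exc_type, exc_value, tb) just like Frame does.
#
#     import sys
#     try:
#         1 / 0
#     except Exception:
#         exc_type, exc_value, tb = sys.exc_info()
#         html_page = Traceback(exc_type, exc_value, tb).render_full()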
|
swift-lang/swift-e-lab | refs/heads/master | parsl/app/python.py | 1 | import logging
import tblib.pickling_support
tblib.pickling_support.install()
from parsl.app.futures import DataFuture
from parsl.app.app import AppBase
from parsl.app.errors import wrap_error
from parsl.dataflow.dflow import DataFlowKernelLoader
logger = logging.getLogger(__name__)
class PythonApp(AppBase):
"""Extends AppBase to cover the Python App."""
def __init__(self, func, data_flow_kernel=None, walltime=60, cache=False, executors='all'):
super().__init__(
wrap_error(func),
data_flow_kernel=data_flow_kernel,
walltime=walltime,
executors=executors,
cache=cache
)
def __call__(self, *args, **kwargs):
"""This is where the call to a python app is handled.
Args:
- Arbitrary
Kwargs:
- Arbitrary
Returns:
If outputs=[...] was a kwarg then:
App_fut, [Data_Futures...]
else:
App_fut
"""
if self.data_flow_kernel is None:
dfk = DataFlowKernelLoader.dfk()
else:
dfk = self.data_flow_kernel
app_fut = dfk.submit(self.func, *args,
executors=self.executors,
fn_hash=self.func_hash,
cache=self.cache,
**kwargs)
# logger.debug("App[{}] assigned Task[{}]".format(self.func.__name__,
# app_fut.tid))
out_futs = [DataFuture(app_fut, o, parent=app_fut, tid=app_fut.tid)
for o in kwargs.get('outputs', [])]
app_fut._outputs = out_futs
return app_fut
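# Illustrative usage sketch (not part of the original module); assumes a
# DataFlowKernel has already been loaded, e.g. via parsl.load(config):
#
#     def double(x):
#         return x * 2
#
#     double_app = PythonApp(double)
#     fut = double_app(21)          # returns an AppFuture immediately
#     print(fut.result())           # blocks until the task finishes -> 42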
|
factorylabs/f_closure_linter | refs/heads/master | build/lib/closure_linter/tokenutil.py | 13 | #!/usr/bin/env python
#
# Copyright 2007 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Token utility functions."""
__author__ = ('[email protected] (Robert Walker)',
'[email protected] (Andy Perelson)')
from closure_linter.common import tokens
from closure_linter import javascripttokens
import copy
# Shorthand
JavaScriptToken = javascripttokens.JavaScriptToken
Type = tokens.TokenType
def GetFirstTokenInSameLine(token):
"""Returns the first token in the same line as token.
Args:
token: Any token in the line.
Returns:
The first token in the same line as token.
"""
while not token.IsFirstInLine():
token = token.previous
return token
def CustomSearch(start_token, func, end_func=None, distance=None,
reverse=False):
"""Returns the first token where func is True within distance of this token.
Args:
start_token: The token to start searching from
func: The function to call to test a token for applicability
end_func: The function to call to test a token to determine whether to abort
the search.
distance: The number of tokens to look through before failing search. Must
be positive. If unspecified, will search until the end of the token
chain
reverse: When true, search the tokens before this one instead of the tokens
after it
Returns:
The first token matching func within distance of this token, or None if no
such token is found.
"""
token = start_token
if reverse:
while token and (distance is None or distance > 0):
previous = token.previous
if previous:
if func(previous):
return previous
if end_func and end_func(previous):
return None
token = previous
if distance is not None:
distance -= 1
else:
while token and (distance is None or distance > 0):
next = token.next
if next:
if func(next):
return next
if end_func and end_func(next):
return None
token = next
if distance is not None:
distance -= 1
return None
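# Illustrative sketch (not part of the original module): search backwards for
# the nearest comment token, giving up at the first token that starts a line.
# The Type.COMMENT constant used here is an assumption for the example.
#
#   previous_comment = CustomSearch(
#       some_token,
#       lambda t: t.type == Type.COMMENT,
#       end_func=lambda t: t.IsFirstInLine(),
#       distance=20,
#       reverse=True)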
def Search(start_token, token_types, distance=None, reverse=False):
"""Returns the first token of type in token_types within distance.
Args:
start_token: The token to start searching from
token_types: The allowable types of the token being searched for
distance: The number of tokens to look through before failing search. Must
be positive. If unspecified, will search until the end of the token
chain
reverse: When true, search the tokens before this one instead of the tokens
after it
Returns:
The first token of any type in token_types within distance of this token, or
None if no such token is found.
"""
return CustomSearch(start_token, lambda token: token.IsAnyType(token_types),
None, distance, reverse)
def SearchExcept(start_token, token_types, distance=None, reverse=False):
"""Returns the first token not of any type in token_types within distance.
Args:
start_token: The token to start searching from
token_types: The unallowable types of the token being searched for
distance: The number of tokens to look through before failing search. Must
be positive. If unspecified, will search until the end of the token
chain
reverse: When true, search the tokens before this one instead of the tokens
after it
Returns:
    The first token not of any type in token_types within distance of this
    token, or None if no such token is found.
"""
return CustomSearch(start_token,
lambda token: not token.IsAnyType(token_types),
None, distance, reverse)
def SearchUntil(start_token, token_types, end_types, distance=None,
reverse=False):
"""Returns the first token of type in token_types before a token of end_type.
Args:
start_token: The token to start searching from.
token_types: The allowable types of the token being searched for.
end_types: Types of tokens to abort search if we find.
distance: The number of tokens to look through before failing search. Must
be positive. If unspecified, will search until the end of the token
chain
reverse: When true, search the tokens before this one instead of the tokens
after it
Returns:
The first token of any type in token_types within distance of this token
before any tokens of type in end_type, or None if no such token is found.
"""
return CustomSearch(start_token, lambda token: token.IsAnyType(token_types),
lambda token: token.IsAnyType(end_types),
distance, reverse)
def DeleteToken(token):
"""Deletes the given token from the linked list.
Args:
token: The token to delete
"""
if token.previous:
token.previous.next = token.next
if token.next:
token.next.previous = token.previous
following_token = token.next
while following_token and following_token.metadata.last_code == token:
following_token.metadata.last_code = token.metadata.last_code
following_token = following_token.next
def DeleteTokens(token, tokenCount):
"""Deletes the given number of tokens starting with the given token.
Args:
token: The token to start deleting at.
tokenCount: The total number of tokens to delete.
"""
for i in xrange(1, tokenCount):
DeleteToken(token.next)
DeleteToken(token)
def InsertTokenAfter(new_token, token):
"""Insert new_token after token
Args:
new_token: A token to be added to the stream
token: A token already in the stream
"""
new_token.previous = token
new_token.next = token.next
new_token.metadata = copy.copy(token.metadata)
if token.IsCode():
new_token.metadata.last_code = token
if new_token.IsCode():
following_token = token.next
while following_token and following_token.metadata.last_code == token:
following_token.metadata.last_code = new_token
following_token = following_token.next
token.next = new_token
if new_token.next:
new_token.next.previous = new_token
if new_token.start_index is None:
if new_token.line_number == token.line_number:
new_token.start_index = token.start_index + len(token.string)
else:
new_token.start_index = 0
iterator = new_token.next
while iterator and iterator.line_number == new_token.line_number:
iterator.start_index += len(new_token.string)
iterator = iterator.next
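# Illustrative sketch (not part of the original module): inserting a new token
# after an existing one shifts the start_index of every later token on that
# line by the new token's length. The token construction is an assumption.
#
#   comma = JavaScriptToken(',', Type.NORMAL, token.line, token.line_number)
#   InsertTokenAfter(comma, token)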
def InsertSpaceTokenAfter(token):
  """Inserts a space token after the given token.
  Args:
    token: The token to insert a space token after
  Returns:
    The newly inserted space token"""
  space_token = JavaScriptToken(' ', Type.WHITESPACE, token.line,
                                token.line_number)
  InsertTokenAfter(space_token, token)
  return space_token
def InsertLineAfter(token):
  """Inserts a blank line after the given token.
  Args:
    token: The token to insert a blank line after
  Returns:
    The newly inserted blank line token"""
  blank_token = JavaScriptToken('', Type.BLANK_LINE, '',
                                token.line_number + 1)
  InsertTokenAfter(blank_token, token)
  # Update all subsequent line numbers.
  iterator = blank_token.next
  while iterator:
    iterator.line_number += 1
    iterator = iterator.next
  return blank_token
def SplitToken(token, position):
"""Splits the token into two tokens at position.
Args:
token: The token to split
position: The position to split at. Will be the beginning of second token.
Returns:
The new second token.
"""
new_string = token.string[position:]
token.string = token.string[:position]
new_token = JavaScriptToken(new_string, token.type, token.line,
token.line_number)
InsertTokenAfter(new_token, token)
return new_token
def Compare(token1, token2):
"""Compares two tokens and determines their relative order.
Returns:
A negative integer, zero, or a positive integer as the first token is
before, equal, or after the second in the token stream.
"""
if token2.line_number != token1.line_number:
return token1.line_number - token2.line_number
else:
return token1.start_index - token2.start_index
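# The contract mirrors Python 2's cmp(): for hypothetical tokens t1 occurring
# before t2 in the stream,
#   Compare(t1, t2) < 0, Compare(t2, t1) > 0, Compare(t1, t1) == 0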
|
maryklayne/Funcao | refs/heads/master | examples/beginner/precision.py | 116 | #!/usr/bin/env python
"""Precision Example
Demonstrates SymPy's arbitrary integer precision abilities
"""
import sympy
from sympy import Mul, Pow, S
def main():
x = Pow(2, 50, evaluate=False)
y = Pow(10, -50, evaluate=False)
# A large, unevaluated expression
m = Mul(x, y, evaluate=False)
# Evaluating the expression
e = S(2)**50/S(10)**50
print("%s == %s" % (m, e))
if __name__ == "__main__":
main()
|
gisce/OCB | refs/heads/7.0 | addons/point_of_sale/wizard/pos_session_opening.py | 46 |
from openerp import netsvc
from openerp.osv import osv, fields
from openerp.tools.translate import _
from openerp.addons.point_of_sale.point_of_sale import pos_session
class pos_session_opening(osv.osv_memory):
_name = 'pos.session.opening'
_columns = {
'pos_config_id' : fields.many2one('pos.config', 'Point of Sale', required=True),
'pos_session_id' : fields.many2one('pos.session', 'PoS Session'),
'pos_state' : fields.related('pos_session_id', 'state',
type='selection',
selection=pos_session.POS_SESSION_STATE,
string='Session Status', readonly=True),
'pos_state_str' : fields.char('Status', 32, readonly=True),
'show_config' : fields.boolean('Show Config', readonly=True),
'pos_session_name' : fields.related('pos_session_id', 'name',
type='char', size=64, readonly=True),
'pos_session_username' : fields.related('pos_session_id', 'user_id', 'name',
type='char', size=64, readonly=True)
}
def open_ui(self, cr, uid, ids, context=None):
context = context or {}
data = self.browse(cr, uid, ids[0], context=context)
context['active_id'] = data.pos_session_id.id
return {
'type' : 'ir.actions.client',
'name' : _('Start Point Of Sale'),
'tag' : 'pos.ui',
'context' : context
}
def open_existing_session_cb_close(self, cr, uid, ids, context=None):
wf_service = netsvc.LocalService("workflow")
wizard = self.browse(cr, uid, ids[0], context=context)
wf_service.trg_validate(uid, 'pos.session', wizard.pos_session_id.id, 'cashbox_control', cr)
return self.open_session_cb(cr, uid, ids, context)
def open_session_cb(self, cr, uid, ids, context=None):
assert len(ids) == 1, "you can open only one session at a time"
proxy = self.pool.get('pos.session')
wizard = self.browse(cr, uid, ids[0], context=context)
if not wizard.pos_session_id:
values = {
'user_id' : uid,
'config_id' : wizard.pos_config_id.id,
}
session_id = proxy.create(cr, uid, values, context=context)
s = proxy.browse(cr, uid, session_id, context=context)
if s.state=='opened':
return self.open_ui(cr, uid, ids, context=context)
return self._open_session(session_id)
return self._open_session(wizard.pos_session_id.id)
def open_existing_session_cb(self, cr, uid, ids, context=None):
assert len(ids) == 1
wizard = self.browse(cr, uid, ids[0], context=context)
return self._open_session(wizard.pos_session_id.id)
def _open_session(self, session_id):
return {
'name': _('Session'),
'view_type': 'form',
'view_mode': 'form,tree',
'res_model': 'pos.session',
'res_id': session_id,
'view_id': False,
'type': 'ir.actions.act_window',
}
def on_change_config(self, cr, uid, ids, config_id, context=None):
result = {
'pos_session_id': False,
'pos_state': False,
'pos_state_str' : '',
'pos_session_username' : False,
'pos_session_name' : False,
}
if not config_id:
return {'value' : result}
proxy = self.pool.get('pos.session')
session_ids = proxy.search(cr, uid, [
('state', '!=', 'closed'),
('config_id', '=', config_id),
('user_id', '=', uid),
], context=context)
if session_ids:
session = proxy.browse(cr, uid, session_ids[0], context=context)
result['pos_state'] = str(session.state)
result['pos_state_str'] = dict(pos_session.POS_SESSION_STATE).get(session.state, '')
result['pos_session_id'] = session.id
result['pos_session_name'] = session.name
result['pos_session_username'] = session.user_id.name
return {'value' : result}
def default_get(self, cr, uid, fieldnames, context=None):
so = self.pool.get('pos.session')
session_ids = so.search(cr, uid, [('state','<>','closed'), ('user_id','=',uid)], context=context)
if session_ids:
result = so.browse(cr, uid, session_ids[0], context=context).config_id.id
else:
current_user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
result = current_user.pos_config and current_user.pos_config.id or False
if not result:
r = self.pool.get('pos.config').search(cr, uid, [], context=context)
result = r and r[0] or False
count = self.pool.get('pos.config').search_count(cr, uid, [('state', '=', 'active')], context=context)
show_config = bool(count > 1)
return {
'pos_config_id' : result,
'show_config' : show_config,
}
pos_session_opening()
|
RoboCupULaval/UI-Debug | refs/heads/dev | Model/DataObject/DrawingData/__init__.py | 7 | # Under MIT License, see LICENSE.txt
__author__ = 'RoboCupULaval'
|
JioCloud/nova_test_latest | refs/heads/master | nova/tests/functional/v3/test_hide_server_addresses.py | 29 | # Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from nova.compute import vm_states
from nova.tests.functional.v3 import test_servers
CONF = cfg.CONF
CONF.import_opt('osapi_hide_server_address_states',
'nova.api.openstack.compute.plugins.v3.hide_server_addresses')
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.extensions')
class ServersSampleHideAddressesJsonTest(test_servers.ServersSampleJsonTest):
extension_name = 'os-hide-server-addresses'
# Override the sample dirname because
# test_servers.ServersSampleJsonTest does and so it won't default
# to the extension name
sample_dir = extension_name
def _get_flags(self):
f = super(ServersSampleHideAddressesJsonTest, self)._get_flags()
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.hide_server_addresses.'
'Hide_server_addresses')
return f
def setUp(self):
# We override osapi_hide_server_address_states in order
# to have an example of in the json samples of the
# addresses being hidden
CONF.set_override("osapi_hide_server_address_states",
[vm_states.ACTIVE])
super(ServersSampleHideAddressesJsonTest, self).setUp()
|
ChanduERP/odoo | refs/heads/8.0 | addons/subscription/__init__.py | 441 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import subscription
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Neppord/py2py | refs/heads/master | py2py_lib/ast/literal.py | 1 | from node import Node
class Literal(Node):
    """Streaming matcher that consumes a fixed string one character at a time."""
    def __init__(self, string):
        self.string = string
        self.clear()
    def clear(self):
        self.failed = False
        # Reversed copy of the string, so pop() yields characters in order.
        self.consume = list(self.string[-1::-1])
    def feed(self, char):
        # Ignore input once the match has failed or is already complete.
        if self.failed or not self.consume:
            return
        if char == self.consume.pop():
            if not self.consume:
                # The whole literal matched: report it.
                return self.string
        else:
            self.failed = True
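# Illustrative usage sketch (not part of the original module):
#
#     lit = Literal("ab")
#     lit.feed("a")   # -> None, still consuming
#     lit.feed("b")   # -> "ab", the literal matched
#     lit.feed("x")   # -> None, input after completion is ignored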
|
saratang/servo | refs/heads/master | python/mozlog/mozlog/structured/__init__.py | 45 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import commandline
import structuredlog
from structuredlog import get_default_logger, set_default_logger
|
pizzathief/scipy | refs/heads/master | scipy/signal/tests/test_result_type.py | 18 | # Regressions tests on result types of some signal functions
import numpy as np
from numpy.testing import assert_
from scipy.signal import (decimate,
lfilter_zi,
lfiltic,
sos2tf,
sosfilt_zi)
def test_decimate():
ones_f32 = np.ones(32, dtype=np.float32)
assert_(decimate(ones_f32, 2).dtype == np.float32)
ones_i64 = np.ones(32, dtype=np.int64)
assert_(decimate(ones_i64, 2).dtype == np.float64)
def test_lfilter_zi():
b_f32 = np.array([1, 2, 3], dtype=np.float32)
a_f32 = np.array([4, 5, 6], dtype=np.float32)
assert_(lfilter_zi(b_f32, a_f32).dtype == np.float32)
def test_lfiltic():
# this would return f32 when given a mix of f32 / f64 args
b_f32 = np.array([1, 2, 3], dtype=np.float32)
a_f32 = np.array([4, 5, 6], dtype=np.float32)
x_f32 = np.ones(32, dtype=np.float32)
b_f64 = b_f32.astype(np.float64)
a_f64 = a_f32.astype(np.float64)
x_f64 = x_f32.astype(np.float64)
assert_(lfiltic(b_f64, a_f32, x_f32).dtype == np.float64)
assert_(lfiltic(b_f32, a_f64, x_f32).dtype == np.float64)
assert_(lfiltic(b_f32, a_f32, x_f64).dtype == np.float64)
assert_(lfiltic(b_f32, a_f32, x_f32, x_f64).dtype == np.float64)
def test_sos2tf():
sos_f32 = np.array([[4, 5, 6, 1, 2, 3]], dtype=np.float32)
b, a = sos2tf(sos_f32)
assert_(b.dtype == np.float32)
assert_(a.dtype == np.float32)
def test_sosfilt_zi():
sos_f32 = np.array([[4, 5, 6, 1, 2, 3]], dtype=np.float32)
assert_(sosfilt_zi(sos_f32).dtype == np.float32)
|
infOpen/ansible-role-mongodb | refs/heads/develop | tests/test_filter_plugins.py | 23 | """
Fake test for filter plugins
"""
def test_fake():
assert True
|
rigdenlab/conkit | refs/heads/master | conkit/io/tests/test_ccmpred.py | 2 | """Testing facility for conkit.io.CCMpredIO"""
__author__ = "Felix Simkovic"
__date__ = "14 Sep 2016"
import os
import sys
import unittest
from conkit.core.contact import Contact
from conkit.core.contactfile import ContactFile
from conkit.core.contactmap import ContactMap
from conkit.core.sequence import Sequence
from conkit.io.ccmpred import CCMpredParser
from conkit.io.tests.helpers import ParserTestCase
class TestCCMpredParser(ParserTestCase):
def test_read_1(self):
content = """0.00000000000000000000e+00 9.05865192413330078125e-01 4.48399752378463745117e-01 3.83993983268737792969e-02 7.80840754508972167969e-01 5.15280842781066894531e-01 2.66545146703720092773e-01 4.99921828508377075195e-01 4.54095661640167236328e-01 7.60651350021362304688e-01
9.05863702297210693359e-01 0.00000000000000000000e+00 7.22257912158966064453e-01 1.90076664090156555176e-01 1.08203485608100891113e-01 1.23369038105010986328e-01 5.28753221035003662109e-01 3.98827701807022094727e-01 7.34628140926361083984e-01 5.52688777446746826172e-01
4.48399752378463745117e-01 7.22256183624267578125e-01 0.00000000000000000000e+00 1.39001503586769104004e-01 8.06087076663970947266e-01 4.15808916091918945312e-01 3.66488158702850341797e-01 5.48547744750976562500e-01 3.75738739967346191406e-01 6.22575163841247558594e-01
3.83996069431304931641e-02 1.90076768398284912109e-01 1.39001443982124328613e-01 0.00000000000000000000e+00 3.90003859996795654297e-01 2.16557279229164123535e-01 4.70980733633041381836e-01 7.48713970184326171875e-01 3.39133590459823608398e-01 3.63562434911727905273e-01
7.80841588973999023438e-01 1.08203165233135223389e-01 8.06088566780090332031e-01 3.90004277229309082031e-01 0.00000000000000000000e+00 1.00000000000000000000e+00 4.70447808504104614258e-01 2.48236447572708129883e-01 1.31565973162651062012e-01 2.77379095554351806641e-01
5.15280425548553466797e-01 1.23368613421916961670e-01 4.15808051824569702148e-01 2.16556847095489501953e-01 9.99999344348907470703e-01 0.00000000000000000000e+00 4.25846457481384277344e-01 2.21113219857215881348e-01 9.12295103073120117188e-01 5.38769721984863281250e-01
2.66544729471206665039e-01 5.28752684593200683594e-01 3.66489529609680175781e-01 4.70980644226074218750e-01 4.70447897911071777344e-01 4.25846666097640991211e-01 0.00000000000000000000e+00 3.21736276149749755859e-01 5.99321126937866210938e-02 5.63172221183776855469e-01
4.99920457601547241211e-01 3.98826628923416137695e-01 5.48546612262725830078e-01 7.48714208602905273438e-01 2.48236656188964843750e-01 2.21113741397857666016e-01 3.21736931800842285156e-01 0.00000000000000000000e+00 6.80750489234924316406e-01 0.00000000000000000000e+00
4.54095035791397094727e-01 7.34628796577453613281e-01 3.75739067792892456055e-01 3.39132964611053466797e-01 1.31566718220710754395e-01 9.12294447422027587891e-01 5.99323771893978118896e-02 6.80750906467437744141e-01 0.00000000000000000000e+00 3.14438492059707641602e-01
7.60651111602783203125e-01 5.52687942981719970703e-01 6.22575819492340087891e-01 3.63562554121017456055e-01 2.77379721403121948242e-01 5.38770556449890136719e-01 5.63172996044158935547e-01 1.05407856665351573611e-07 3.14439445734024047852e-01 0.00000000000000000000e+00
"""
f_name = self.tempfile(content=content)
with open(f_name, "r") as f_in:
contact_file = CCMpredParser().read(f_in)
contact_map1 = contact_file.top_map
self.assertEqual(1, len(contact_file))
self.assertEqual(55, len(contact_map1))
self.assertEqual(
sorted([1] * 10 + [2] * 9 + [3] * 8 + [4] * 7 + [5] * 6 + [6] * 5 + [7] * 4 + [8] * 3 + [9] * 2 + [10] * 1),
sorted([c.res1_seq for c in contact_map1]),
)
self.assertEqual(
[
1.0,
0.9122951030731201,
0.9058651924133301,
0.806087076663971,
0.7808407545089722,
0.7606513500213623,
0.7487139701843262,
0.7346281409263611,
0.7222579121589661,
0.6807504892349243,
0.6225751638412476,
0.5631722211837769,
0.5526887774467468,
0.5485477447509766,
0.5387697219848633,
0.5287532210350037,
0.5152808427810669,
0.4999218285083771,
0.4709807336330414,
0.4704478085041046,
0.45409566164016724,
0.44839975237846375,
0.4258464574813843,
0.41580891609191895,
0.3988277018070221,
0.39000385999679565,
0.3757387399673462,
0.36648815870285034,
0.3635624349117279,
0.3391335904598236,
0.32173627614974976,
0.31443849205970764,
0.2773790955543518,
0.2665451467037201,
0.24823644757270813,
0.22111321985721588,
0.21655727922916412,
0.19007666409015656,
0.1390015035867691,
0.13156597316265106,
0.12336903810501099,
0.10820348560810089,
0.05993211269378662,
0.03839939832687378,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[c.raw_score for c in contact_map1],
)
def test_write_1(self):
contact_file = ContactFile("test")
contact_map = ContactMap("1")
contact_file.add(contact_map)
for c in [(1, 9, 0, 8, 0.7), (1, 10, 0, 8, 0.7), (2, 8, 0, 8, 0.9), (3, 12, 0, 8, 0.4)]:
contact = Contact(c[0], c[1], c[4], distance_bound=(c[2], c[3]))
contact_map.add(contact)
contact_map.sequence = Sequence("1", "HLEGSIGILLKKHEIVFDGCHDFGRTYIWQMSDHLEGSIGILLKKHEIVFDGCHDFGRTYIWQMSD")
f_name = self.tempfile()
        # Not sure if this is a bug in Python 3 numpy or intended behaviour [Implemented: 21.11.2016]
mode = "wb" if sys.version_info.major == 3 else "w"
with open(f_name, mode) as f_out:
CCMpredParser().write(f_out, contact_file)
content = [
"0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t6.999999999999999556e-01\t6.999999999999999556e-01\t0.000000000000000000e+00\t0.000000000000000000e+00",
"0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t9.000000000000000222e-01\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00",
"0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t4.000000000000000222e-01",
"0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00",
"0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00",
"0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00",
"0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00",
"0.000000000000000000e+00\t9.000000000000000222e-01\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00",
"6.999999999999999556e-01\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00",
"6.999999999999999556e-01\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00",
"0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00",
"0.000000000000000000e+00\t0.000000000000000000e+00\t4.000000000000000222e-01\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00\t0.000000000000000000e+00",
]
with open(f_name, "r") as f_in:
output = f_in.read().splitlines()
self.assertEqual(content, output)
def test_write_2(self):
contact_file = ContactFile("test")
contact_map = ContactMap("1")
contact_file.add(contact_map)
for c in [(1, 9, 0, 8, 0.7), (1, 10, 0, 8, 0.7), (2, 8, 0, 8, 0.9), (3, 12, 0, 8, 0.4)]:
contact = Contact(c[0], c[1], c[4], distance_bound=(c[2], c[3]))
contact_map.add(contact)
contact_map.sequence = Sequence("1", "HLEGSIGILLKKHEIVFDGCHDFGRTYIWQMSDHLEGSIGILLKKHEIVFDGCHDFGRTYIWQMSD")
f_name = self.tempfile()
        # Not sure if this is a bug in Python 3 numpy or intended behaviour [Implemented: 21.11.2016]
with open(f_name, "w") as f_out:
if sys.version_info.major == 3:
with self.assertRaises(TypeError):
CCMpredParser().write(f_out, contact_file)
else:
self.assertTrue(True)
if __name__ == "__main__":
unittest.main(verbosity=2)
|
piranha/python-slackclient | refs/heads/master | slackclient/_im.py | 1 | class Im(object):
def __init__(self, server, user, id):
self.server = server
self.user = user
self.id = id
def __eq__(self, compare_str):
return self.id == compare_str or self.user == compare_str
def __str__(self):
data = ""
for key in list(self.__dict__.keys()):
if key != "server":
data += "{0}: {1}\n".format(key, str(self.__dict__[key])[:40])
return data
def __repr__(self):
return self.__str__()
def send_message(self, message):
message_json = {"type": "message", "channel": self.id, "text": message}
self.server.send_to_websocket(message_json)
|
amolenaar/gaphor | refs/heads/master | gaphor/UML/states/transition.py | 1 | """
State transition implementation.
"""
from gaphor import UML
from gaphor.diagram.presentation import LinePresentation, Named
from gaphor.diagram.shapes import Box, EditableText, Text, draw_arrow_tail
from gaphor.diagram.support import represents
from gaphor.UML.modelfactory import stereotypes_str
@represents(UML.Transition)
class TransitionItem(LinePresentation[UML.Transition], Named):
"""
Representation of state transition.
"""
def __init__(self, id=None, model=None):
super().__init__(id, model)
self.shape_tail = Box(
Text(text=lambda: stereotypes_str(self.subject),),
EditableText(text=lambda: self.subject.name or ""),
)
self.watch("subject[NamedElement].name")
self.watch("subject.appliedStereotype.classifier.name")
self.shape_middle = EditableText(
text=lambda: self.subject
and self.subject.guard
and self.subject.guard.specification
or ""
)
self.watch("subject[Transition].guard[Constraint].specification")
self.draw_tail = draw_arrow_tail
|
howardwkim/ctci | refs/heads/master | Pramp/root_number.py | 1 | def root(x, n):
    # For x >= 1 the nth root is at most x; for 0 < x < 1 it is at most 1,
    # so max(1, x) is always a safe upper bound (x / n is not, e.g. root(2, 2)).
    upper_bound = max(1.0, float(x))
    lower_bound = 0.0
return root_helper(lower_bound, upper_bound, x, n)
def root_helper(lower_bound, upper_bound, x, n):
if upper_bound < lower_bound:
        raise ValueError('upper_bound must not be less than lower_bound')
mid = (1. * upper_bound - lower_bound) / 2 + lower_bound
mid_to_nth = mid ** n
if abs(x - mid_to_nth) < 0.001:
return mid
else:
if mid_to_nth > x:
return root_helper(lower_bound, mid, x, n)
else:
return root_helper(mid, upper_bound, x, n)
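# The helper halves the [lower_bound, upper_bound] interval on each call and
# stops once mid ** n is within 0.001 of x, e.g. root(27, 3) returns a value
# very close to 3.0.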
print root(9,2) |
mou4e/zirconium | refs/heads/master | build/android/pylib/instrumentation/instrumentation_test_instance_test.py | 49 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for instrumentation.TestRunner."""
# pylint: disable=W0212
import os
import sys
import unittest
from pylib import constants
from pylib.base import base_test_result
from pylib.instrumentation import instrumentation_test_instance
sys.path.append(os.path.join(
constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
import mock # pylint: disable=F0401
class InstrumentationTestInstanceTest(unittest.TestCase):
def setUp(self):
options = mock.Mock()
options.tool = ''
def testGenerateTestResults_noStatus(self):
results = instrumentation_test_instance.GenerateTestResults(
None, None, [], 0, 1000)
self.assertEqual([], results)
def testGenerateTestResults_testPassed(self):
statuses = [
(1, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
(0, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
]
results = instrumentation_test_instance.GenerateTestResults(
None, None, statuses, 0, 1000)
self.assertEqual(1, len(results))
self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
def testGenerateTestResults_testSkipped_true(self):
statuses = [
(1, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
(0, {
'test_skipped': 'true',
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
(0, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
]
results = instrumentation_test_instance.GenerateTestResults(
None, None, statuses, 0, 1000)
self.assertEqual(1, len(results))
self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType())
def testGenerateTestResults_testSkipped_false(self):
statuses = [
(1, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
(0, {
'test_skipped': 'false',
}),
(0, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
]
results = instrumentation_test_instance.GenerateTestResults(
None, None, statuses, 0, 1000)
self.assertEqual(1, len(results))
self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
def testGenerateTestResults_testFailed(self):
statuses = [
(1, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
(-2, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
]
results = instrumentation_test_instance.GenerateTestResults(
None, None, statuses, 0, 1000)
self.assertEqual(1, len(results))
self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType())
if __name__ == '__main__':
unittest.main(verbosity=2)
|
MrNuggles/HeyBoet-Telegram-Bot | refs/heads/master | temboo/Library/LastFm/User/GetWeeklyChartList.py | 5 | # -*- coding: utf-8 -*-
###############################################################################
#
# GetWeeklyChartList
# Retrieves a list of available charts for this user, expressed as date ranges which can be sent to the chart services.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetWeeklyChartList(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the GetWeeklyChartList Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(GetWeeklyChartList, self).__init__(temboo_session, '/Library/LastFm/User/GetWeeklyChartList')
def new_input_set(self):
return GetWeeklyChartListInputSet()
def _make_result_set(self, result, path):
return GetWeeklyChartListResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return GetWeeklyChartListChoreographyExecution(session, exec_id, path)
class GetWeeklyChartListInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the GetWeeklyChartList
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_APIKey(self, value):
"""
Set the value of the APIKey input for this Choreo. ((string) Your Last.fm API Key.)
"""
super(GetWeeklyChartListInputSet, self)._set_input('APIKey', value)
def set_User(self, value):
"""
Set the value of the User input for this Choreo. ((string) The last.fm username to fetch the charts of.)
"""
super(GetWeeklyChartListInputSet, self)._set_input('User', value)
class GetWeeklyChartListResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the GetWeeklyChartList Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((XML) The response from Last.fm.)
"""
return self._output.get('Response', None)
class GetWeeklyChartListChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return GetWeeklyChartListResultSet(response, path)
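# Illustrative usage sketch (not part of the original module); the session and
# credential values below are assumptions:
#
#     choreo = GetWeeklyChartList(temboo_session)
#     inputs = choreo.new_input_set()
#     inputs.set_APIKey('YOUR_LASTFM_API_KEY')
#     inputs.set_User('some_lastfm_user')
#     results = choreo.execute_with_results(inputs)
#     print(results.get_Response())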
|
pabloborrego93/edx-platform | refs/heads/master | lms/djangoapps/lms_xblock/migrations/0001_initial.py | 87 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='XBlockAsidesConfig',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
('enabled', models.BooleanField(default=False, verbose_name='Enabled')),
('disabled_blocks', models.TextField(default=b'about course_info static_tab', help_text=b'Space-separated list of XBlocks on which XBlockAsides should never render.')),
('changed_by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='Changed by')),
],
options={
'ordering': ('-change_date',),
'abstract': False,
},
),
]
|
GreatSCT/GreatSCT | refs/heads/master | config/update.py | 1 | #!/usr/bin/python
import platform, os, sys, pwd
def which(program):
    """Return the resolved path of an executable found on PATH, or False."""
    path = os.getenv('PATH', '')
for p in path.split(os.path.pathsep):
p = os.path.realpath(os.path.join(p, program))
if os.path.exists(p) and os.access(p, os.X_OK):
return p
return False
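# Illustrative: which('sh') usually resolves to something like '/bin/sh'
# (the result is system-dependent); it returns False when no executable named
# `program` is found on PATH.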
def validate_msfpath():
msfpath = None
while not msfpath:
msfpath = input(" [>] Please enter the path of your metasploit installation: ")
if not os.path.isfile(os.path.join(msfpath, 'msfvenom')):
print("[!] Unable to detect metasploit at this path")
msfpath = None
options["METASPLOIT_PATH"] = msfpath
options["MSFVENOM_PATH"] = msfpath
"""
Take an options dictionary and update /etc/greatsct/settings.py
"""
def generateConfig(options):
config = """#!/usr/bin/python
##################################################################################################
#
# Great Scott configuration file
#
# Run update.py to automatically set all these options to their defaults.
#
##################################################################################################
#################################################
#
# General system options
#
#################################################
"""
print("\n Great Scott configuration:")
config += '# OS to use (Kali/Backtrack/Debian/Windows)\n'
config += 'OPERATING_SYSTEM="' + options['OPERATING_SYSTEM'] + '"\n\n'
print("\n [*] OPERATING_SYSTEM = " + options['OPERATING_SYSTEM'])
config += '# Specific Linux distro\n'
# check /etc/issue for the exact linux distro
issue = open("/etc/issue").read()
if issue.startswith("Debian"):
config += 'DISTRO="Debian"\n\n'
else:
config += 'DISTRO="Linux"\n\n'
config += '# Terminal clearing method to use (use "false" to disable it)\n'
config += 'TERMINAL_CLEAR="' + options['TERMINAL_CLEAR'] + '"\n\n'
print(" [*] TERMINAL_CLEAR = " + options['TERMINAL_CLEAR'])
config += '# Wine environment\n'
config += 'WINEPREFIX="' + options["WINEPREFIX"] + '"\n\n'
print(" [*] WINEPREFIX = " + options["WINEPREFIX"])
config += '# Path to temporary directory\n'
config += 'TEMP_DIR="' + options["TEMP_DIR"] + '"\n\n'
print(" [*] TEMP_DIR = " + options["TEMP_DIR"])
config += '# Default options to pass to msfvenom for shellcode creation\n'
config += 'MSFVENOM_OPTIONS="' + options['MSFVENOM_OPTIONS'] + '"\n\n'
print(" [*] MSFVENOM_OPTIONS = " + options['MSFVENOM_OPTIONS'])
config += '# The path to the metasploit framework, for example: /usr/share/metasploit-framework/\n'
config += 'METASPLOIT_PATH="' + options['METASPLOIT_PATH'] + '"\n\n'
print(" [*] METASPLOIT_PATH = " + options['METASPLOIT_PATH'])
config += '# The path to msfvenom for shellcode generation purposes\n'
config += 'MSFVENOM_PATH="' + options["MSFVENOM_PATH"] + '"\n\n'
print(" [*] MSFVENOM_PATH = " + options["MSFVENOM_PATH"])
config += """
#################################################
#
# GreatSCT-Bypass specific options
#
#################################################
"""
config += '# GreatSCT-Bypass install path\n'
config += 'GREATSCT_BYPASS_PATH="' + options['GREATSCT_BYPASS_PATH'] + '"\n\n'
print(" [*] GREATSCT_BYPASS_PATH = " + options['GREATSCT_BYPASS_PATH'])
source_path = os.path.expanduser(options["PAYLOAD_SOURCE_PATH"])
config += '# Path to output the source of payloads\n'
config += 'PAYLOAD_SOURCE_PATH="' + source_path + '"\n\n'
print(" [*] PAYLOAD_SOURCE_PATH = " + source_path)
# create the output source path if it doesn't exist
if not os.path.exists(source_path):
os.makedirs(source_path)
print(" [*] Path '" + source_path + "' Created")
compiled_path = os.path.expanduser(options["PAYLOAD_COMPILED_PATH"])
config += '# Path to output compiled payloads\n'
config += 'PAYLOAD_COMPILED_PATH="' + compiled_path +'"\n\n'
print(" [*] PAYLOAD_COMPILED_PATH = " + compiled_path)
# create the output compiled path if it doesn't exist
if not os.path.exists( compiled_path ):
os.makedirs( compiled_path )
print(" [*] Path '" + compiled_path + "' Created")
handler_path = os.path.expanduser(options["HANDLER_PATH"])
# create the output compiled path if it doesn't exist
if not os.path.exists( handler_path ):
os.makedirs( handler_path )
print(" [*] Path '" + handler_path + "' Created")
config += '# Whether to generate a msf handler script and where to place it\n'
config += 'GENERATE_HANDLER_SCRIPT="' + options['GENERATE_HANDLER_SCRIPT'] + '"\n'
print(" [*] GENERATE_HANDLER_SCRIPT = " + options['GENERATE_HANDLER_SCRIPT'])
config += 'HANDLER_PATH="' + handler_path + '"\n\n'
print(" [*] HANDLER_PATH = " + handler_path)
hash_path = os.path.expanduser(options["HASH_LIST"])
config += '# Running hash list of all payloads generated\n'
config += 'HASH_LIST="' + hash_path + '"\n\n'
print(" [*] HASH_LIST = " + hash_path + "\n")
if platform.system() == "Linux":
# create the output compiled path if it doesn't exist
if not os.path.exists("/etc/greatsct/"):
os.system("sudo mkdir /etc/greatsct/")
os.system("sudo touch /etc/greatsct/settings.py")
os.system("sudo chmod 777 /etc/greatsct/settings.py")
print(" [*] Path '/etc/greatsct/' Created")
f = open("/etc/greatsct/settings.py", 'w')
f.write(config)
f.close()
print(" Configuration File Written To '/etc/greatsct/settings.py'\n")
else:
print(" [!] ERROR: PLATFORM NOT CURRENTLY SUPPORTED")
sys.exit()
if __name__ == '__main__':
options = {}
if platform.system() == "Linux":
# check /etc/issue for the exact linux distro
issue = open("/etc/issue").read()
        # resolve metasploit & msfvenom paths; which() returns False when the
        # binary is missing, so guard before calling os.path.dirname()
        msfvenom_location = which('msfvenom')
        msfpath = os.path.dirname(msfvenom_location) if msfvenom_location else ''
        if msfpath and os.path.isdir(msfpath) and os.path.isfile(os.path.join(msfpath, 'msfconsole')):
options["METASPLOIT_PATH"] = msfpath
if os.path.isfile(os.path.join(msfpath, 'msfvenom')):
options["MSFVENOM_PATH"] = msfpath
else:
validate_msfpath()
else:
validate_msfpath()
if issue.startswith("Kali"):
options["OPERATING_SYSTEM"] = "Kali"
options["TERMINAL_CLEAR"] = "clear"
else:
options["OPERATING_SYSTEM"] = "Linux"
options["TERMINAL_CLEAR"] = "clear"
# last of the general options
options["TEMP_DIR"] = "/tmp/"
options["MSFVENOM_OPTIONS"] = ""
# Get the real user if we're being ran under sudo
wineprefix = ""
user = os.environ.get("SUDO_USER", pwd.getpwuid(os.getuid()).pw_name)
if user == 'root':
wineprefix = "/root/.greatsct/"
else:
wineprefix = "/home/{0}/.greatsct/".format(user)
options["WINEPREFIX"] = wineprefix
# GreatSCT-Bypass specific options
greatsct_bypass_path = "/".join(os.getcwd().split("/")[:-1]) + "/"
options["GREATSCT_BYPASS_PATH"] = greatsct_bypass_path
options["PAYLOAD_SOURCE_PATH"] = "/usr/share/greatsct-output/source/"
options["PAYLOAD_COMPILED_PATH"] = "/usr/share/greatsct-output/compiled/"
options["GENERATE_HANDLER_SCRIPT"] = "True"
options["HANDLER_PATH"] = "/usr/share/greatsct-output/handlers/"
options["HASH_LIST"] = "/usr/share/greatsct-output/hashes.txt"
# unsupported platform...
else:
print(" [!] ERROR: PLATFORM NOT CURRENTLY SUPPORTED")
sys.exit()
generateConfig(options)
|
seankelly/buildbot | refs/heads/master | master/buildbot/test/unit/test_db_migrate_versions_045_worker_transition.py | 10 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
import sqlalchemy as sa
from sqlalchemy.engine.reflection import Inspector
from twisted.internet import defer
from twisted.trial import unittest
from buildbot.db.types.json import JsonObject
from buildbot.test.util import migration
from buildbot.util import sautils
class Migration(migration.MigrateTestMixin, unittest.TestCase):
def setUp(self):
return self.setUpMigrateTest()
def tearDown(self):
return self.tearDownMigrateTest()
def _define_old_tables(self, metadata):
self.buildrequests = sautils.Table(
'buildrequests', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('buildsetid', sa.Integer, sa.ForeignKey('buildsets.id'),
nullable=False),
# ...
)
self.buildsets = sautils.Table(
'buildsets', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('parent_buildid', sa.Integer,
sa.ForeignKey('builds.id', use_alter=True, name='parent_buildid')),
# ...
)
self.builders = sautils.Table(
'builders', metadata,
sa.Column('id', sa.Integer, primary_key=True),
# ...
)
self.builder_masters = sautils.Table(
'builder_masters', metadata,
sa.Column('id', sa.Integer, primary_key=True, nullable=False),
# ...
)
self.masters = sautils.Table(
"masters", metadata,
sa.Column('id', sa.Integer, primary_key=True),
# ...
)
self.builds = sautils.Table(
'builds', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('number', sa.Integer, nullable=False),
sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id')),
sa.Column('buildrequestid', sa.Integer, sa.ForeignKey('buildrequests.id'),
nullable=False),
sa.Column('buildslaveid', sa.Integer),
sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id'),
nullable=False),
sa.Column('started_at', sa.Integer, nullable=False),
sa.Column('complete_at', sa.Integer),
sa.Column(
'state_string', sa.Text, nullable=False),
sa.Column('results', sa.Integer),
)
self.buildslaves = sautils.Table(
"buildslaves", metadata,
sa.Column("id", sa.Integer, primary_key=True),
sa.Column("name", sa.String(50), nullable=False),
sa.Column("info", JsonObject, nullable=False),
)
self.configured_buildslaves = sautils.Table(
'configured_buildslaves', metadata,
sa.Column('id', sa.Integer, primary_key=True, nullable=False),
sa.Column('buildermasterid', sa.Integer,
sa.ForeignKey('builder_masters.id'), nullable=False),
sa.Column('buildslaveid', sa.Integer, sa.ForeignKey('buildslaves.id'),
nullable=False),
)
self.connected_buildslaves = sautils.Table(
'connected_buildslaves', metadata,
sa.Column('id', sa.Integer, primary_key=True, nullable=False),
sa.Column('masterid', sa.Integer,
sa.ForeignKey('masters.id'), nullable=False),
sa.Column('buildslaveid', sa.Integer, sa.ForeignKey('buildslaves.id'),
nullable=False),
)
def _create_tables_thd(self, conn):
metadata = sa.MetaData()
metadata.bind = conn
self._define_old_tables(metadata)
metadata.create_all()
sa.Index(
'builds_buildrequestid', self.builds.c.buildrequestid).create()
sa.Index('builds_number',
self.builds.c.builderid, self.builds.c.number,
unique=True).create()
sa.Index('builds_buildslaveid', self.builds.c.buildslaveid).create()
sa.Index('builds_masterid', self.builds.c.masterid).create()
sa.Index(
'buildslaves_name', self.buildslaves.c.name, unique=True).create()
sa.Index('configured_slaves_buildmasterid',
self.configured_buildslaves.c.buildermasterid).create()
sa.Index('configured_slaves_slaves',
self.configured_buildslaves.c.buildslaveid).create()
sa.Index('configured_slaves_identity',
self.configured_buildslaves.c.buildermasterid,
self.configured_buildslaves.c.buildslaveid, unique=True).create()
sa.Index('connected_slaves_masterid',
self.connected_buildslaves.c.masterid).create()
sa.Index('connected_slaves_slaves',
self.connected_buildslaves.c.buildslaveid).create()
sa.Index('connected_slaves_identity',
self.connected_buildslaves.c.masterid,
self.connected_buildslaves.c.buildslaveid, unique=True).create()
@defer.inlineCallbacks
def test_update_inconsistent_builds_buildslaves(self):
def setup_thd(conn):
self._create_tables_thd(conn)
conn.execute(self.masters.insert(), [
dict(id=1),
dict(id=2),
])
conn.execute(self.buildsets.insert(), [dict(id=5)])
conn.execute(self.buildrequests.insert(), [
dict(id=3, buildsetid=5),
dict(id=4, buildsetid=5),
])
conn.execute(self.buildslaves.insert(), [
dict(id=30,
name='worker-1',
info={}),
dict(id=31,
name='worker-2',
info={"a": 1}),
])
conn.execute(self.builds.insert(), [
dict(id=10,
number=2,
buildrequestid=3,
buildslaveid=123,
masterid=1,
started_at=0,
state_string='state'),
dict(id=11,
number=1,
buildrequestid=4,
buildslaveid=31,
masterid=2,
started_at=1000,
state_string='state2'),
])
def verify_thd(conn):
metadata = sa.MetaData()
metadata.bind = conn
# Verify database contents.
# 'workers' table contents.
workers = sautils.Table('workers', metadata, autoload=True)
c = workers.c
q = sa.select(
[c.id, c.name, c.info]
).order_by(c.id)
self.assertEqual(
q.execute().fetchall(), [
(30, u'worker-1', u'{}'),
(31, u'worker-2', u'{"a": 1}'),
])
# 'builds' table contents.
builds = sautils.Table('builds', metadata, autoload=True)
c = builds.c
q = sa.select(
[c.id, c.number, c.builderid, c.buildrequestid, c.workerid,
c.masterid, c.started_at, c.complete_at, c.state_string,
c.results]
).order_by(c.id)
            # Check that the build with an invalid buildslave reference now
            # has no worker reference.
self.assertEqual(
q.execute().fetchall(), [
(10, 2, None, 3, None, 1, 0, None, u'state', None),
(11, 1, None, 4, 31, 2, 1000, None, u'state2', None),
])
yield self.do_test_migration(44, 45, setup_thd, verify_thd)
def test_update(self):
def setup_thd(conn):
self._create_tables_thd(conn)
conn.execute(self.masters.insert(), [
dict(id=10),
dict(id=11),
])
conn.execute(self.buildsets.insert(), [
dict(id=90),
dict(id=91),
])
conn.execute(self.buildrequests.insert(), [
dict(id=20, buildsetid=90),
dict(id=21, buildsetid=91),
])
conn.execute(self.builders.insert(), [
dict(id=50)
])
conn.execute(self.buildslaves.insert(), [
dict(id=30,
name='worker-1',
info={}),
dict(id=31,
name='worker-2',
info={"a": 1}),
])
conn.execute(self.builds.insert(), [
dict(id=40,
number=1,
buildrequestid=20,
buildslaveid=30,
masterid=10,
started_at=1000,
state_string='state'),
])
conn.execute(self.builds.insert(), [
dict(id=41,
number=2,
builderid=50,
buildrequestid=21,
masterid=11,
started_at=2000,
complete_at=3000,
state_string='state 2',
results=9),
])
conn.execute(self.builder_masters.insert(), [
dict(id=70),
dict(id=71),
])
conn.execute(self.configured_buildslaves.insert(), [
dict(id=60,
buildermasterid=70,
buildslaveid=30),
dict(id=61,
buildermasterid=71,
buildslaveid=31),
])
conn.execute(self.connected_buildslaves.insert(), [
dict(id=80,
masterid=10,
buildslaveid=30),
dict(id=81,
masterid=11,
buildslaveid=31),
])
def verify_thd(conn):
metadata = sa.MetaData()
metadata.bind = conn
# Verify database contents.
# 'workers' table contents.
workers = sautils.Table('workers', metadata, autoload=True)
c = workers.c
q = sa.select(
[c.id, c.name, c.info]
).order_by(c.id)
self.assertEqual(
q.execute().fetchall(), [
(30, u'worker-1', u'{}'),
(31, u'worker-2', u'{"a": 1}'),
])
# 'builds' table contents.
builds = sautils.Table('builds', metadata, autoload=True)
c = builds.c
q = sa.select(
[c.id, c.number, c.builderid, c.buildrequestid, c.workerid,
c.masterid, c.started_at, c.complete_at, c.state_string,
c.results]
).order_by(c.id)
self.assertEqual(
q.execute().fetchall(), [
(40, 1, None, 20, 30, 10, 1000, None, u'state', None),
(41, 2, 50, 21, None, 11, 2000, 3000, u'state 2', 9),
])
# 'configured_workers' table contents.
configured_workers = sautils.Table(
'configured_workers', metadata, autoload=True)
c = configured_workers.c
q = sa.select(
[c.id, c.buildermasterid, c.workerid]
).order_by(c.id)
self.assertEqual(
q.execute().fetchall(), [
(60, 70, 30),
(61, 71, 31),
])
# 'connected_workers' table contents.
connected_workers = sautils.Table(
'connected_workers', metadata, autoload=True)
c = connected_workers.c
q = sa.select(
[c.id, c.masterid, c.workerid]
).order_by(c.id)
self.assertEqual(
q.execute().fetchall(), [
(80, 10, 30),
(81, 11, 31),
])
# Verify that there is no "slave"-named items in schema.
inspector = Inspector(conn)
def check_name(name, table_name, item_type):
if not name:
return
self.assertTrue(
u"slave" not in name.lower(),
msg=u"'slave'-named {type} in table '{table}': "
u"'{name}'".format(
type=item_type, table=table_name,
name=name))
# Check every table.
for table_name in inspector.get_table_names():
# Check table name.
check_name(table_name, table_name, u"table name")
# Check column names.
for column_info in inspector.get_columns(table_name):
check_name(column_info['name'], table_name, u"column")
# Check foreign key names.
for fk_info in inspector.get_foreign_keys(table_name):
check_name(fk_info['name'], table_name, u"foreign key")
# Check indexes names.
for index_info in inspector.get_indexes(table_name):
check_name(index_info['name'], table_name, u"index")
                # Check primary key constraint names.
pk_info = inspector.get_pk_constraint(table_name)
check_name(pk_info.get('name'), table_name, u"primary key")
            # Test that no "slave"-named items are present in the schema
for name in inspector.get_schema_names():
self.assertTrue(u"slave" not in name.lower())
return self.do_test_migration(44, 45, setup_thd, verify_thd)
|
hyperNURb/ggrc-core | refs/heads/develop | src/ggrc/models/product.py | 5 | # Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
from ggrc import db
from sqlalchemy.orm import validates
from .mixins import deferred, BusinessObject, Timeboxed, CustomAttributable
from .object_document import Documentable
from .object_owner import Ownable
from .object_person import Personable
from .option import Option
from .relationship import Relatable
from .utils import validate_option
from .track_object_state import HasObjectState, track_state_for_class
class Product(HasObjectState, CustomAttributable, Documentable, Personable,
Relatable, Timeboxed, Ownable, BusinessObject, db.Model):
__tablename__ = 'products'
kind_id = deferred(db.Column(db.Integer), 'Product')
version = deferred(db.Column(db.String), 'Product')
kind = db.relationship(
'Option',
primaryjoin='and_(foreign(Product.kind_id) == Option.id, '\
'Option.role == "product_type")',
uselist=False,
)
_publish_attrs = [
'kind',
'version',
]
_sanitize_html = ['version',]
_aliases = {
"url": "Product URL",
"kind": {
"display_name": "Kind/Type",
"filter_by": "_filter_by_kind",
},
}
@validates('kind')
def validate_product_options(self, key, option):
return validate_option(
self.__class__.__name__, key, option, 'product_type')
@classmethod
def _filter_by_kind(cls, predicate):
return Option.query.filter(
(Option.id == cls.kind_id) & predicate(Option.title)
).exists()
@classmethod
def eager_query(cls):
from sqlalchemy import orm
query = super(Product, cls).eager_query()
return query.options(orm.joinedload('kind'))
track_state_for_class(Product)
|
jjmleiro/hue | refs/heads/master | desktop/core/ext-py/python-openid-2.2.5/openid/store/nonce.py | 180 | __all__ = [
'split',
'mkNonce',
'checkTimestamp',
]
from openid import cryptutil
from time import strptime, strftime, gmtime, time
from calendar import timegm
import string
NONCE_CHARS = string.ascii_letters + string.digits
# Keep nonces for five hours (allow five hours for the combination of
# request time and clock skew). This is probably way more than is
# necessary, but there is not much overhead in storing nonces.
SKEW = 60 * 60 * 5
time_fmt = '%Y-%m-%dT%H:%M:%SZ'
time_str_len = len('0000-00-00T00:00:00Z')
def split(nonce_string):
"""Extract a timestamp from the given nonce string
@param nonce_string: the nonce from which to extract the timestamp
@type nonce_string: str
@returns: A pair of a Unix timestamp and the salt characters
@returntype: (int, str)
@raises ValueError: if the nonce does not start with a correctly
formatted time string
"""
timestamp_str = nonce_string[:time_str_len]
try:
timestamp = timegm(strptime(timestamp_str, time_fmt))
except AssertionError: # Python 2.2
timestamp = -1
if timestamp < 0:
raise ValueError('time out of range')
return timestamp, nonce_string[time_str_len:]
def checkTimestamp(nonce_string, allowed_skew=SKEW, now=None):
"""Is the timestamp that is part of the specified nonce string
within the allowed clock-skew of the current time?
@param nonce_string: The nonce that is being checked
@type nonce_string: str
@param allowed_skew: How many seconds should be allowed for
completing the request, allowing for clock skew.
@type allowed_skew: int
@param now: The current time, as a Unix timestamp
@type now: int
@returntype: bool
@returns: Whether the timestamp is correctly formatted and within
the allowed skew of the current time.
"""
try:
stamp, _ = split(nonce_string)
except ValueError:
return False
else:
if now is None:
now = time()
# Time after which we should not use the nonce
past = now - allowed_skew
# Time that is too far in the future for us to allow
future = now + allowed_skew
# the stamp is not too far in the future and is not too far in
# the past
return past <= stamp <= future
def mkNonce(when=None):
"""Generate a nonce with the current timestamp
@param when: Unix timestamp representing the issue time of the
nonce. Defaults to the current time.
@type when: int
@returntype: str
@returns: A string that should be usable as a one-way nonce
@see: time
"""
salt = cryptutil.randomString(6, NONCE_CHARS)
if when is None:
t = gmtime()
else:
t = gmtime(when)
time_str = strftime(time_fmt, t)
return time_str + salt
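# A minimal, hedged usage sketch (not part of the upstream module): generate
# a nonce, split out its timestamp and salt, and validate it against SKEW.
if __name__ == '__main__':
    nonce = mkNonce()
    stamp, salt = split(nonce)
    assert len(salt) == 6
    assert checkTimestamp(nonce)  # a fresh nonce is within the allowed skew
    # a nonce whose timestamp is more than SKEW seconds before "now" fails:
    assert not checkTimestamp(nonce, now=time() + SKEW + 1)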
|
837468220/python-for-android | refs/heads/master | python3-alpha/python3-src/Lib/test/test_collections.py | 47 | """Unit tests for collections.py."""
import unittest, doctest, operator
import inspect
from test import support
from collections import namedtuple, Counter, OrderedDict, _count_elements
from test import mapping_tests
import pickle, copy
from random import randrange, shuffle
import keyword
import re
import sys
from collections import _ChainMap
from collections import Hashable, Iterable, Iterator
from collections import Sized, Container, Callable
from collections import Set, MutableSet
from collections import Mapping, MutableMapping, KeysView, ItemsView, UserDict
from collections import Sequence, MutableSequence
from collections import ByteString
################################################################################
### _ChainMap (helper class for configparser)
################################################################################
ChainMap = _ChainMap # rename to keep test code in sync with 3.3 version
class TestChainMap(unittest.TestCase):
def test_basics(self):
c = ChainMap()
c['a'] = 1
c['b'] = 2
d = c.new_child()
d['b'] = 20
d['c'] = 30
self.assertEqual(d.maps, [{'b':20, 'c':30}, {'a':1, 'b':2}]) # check internal state
self.assertEqual(d.items(), dict(a=1, b=20, c=30).items()) # check items/iter/getitem
self.assertEqual(len(d), 3) # check len
for key in 'abc': # check contains
self.assertIn(key, d)
for k, v in dict(a=1, b=20, c=30, z=100).items(): # check get
self.assertEqual(d.get(k, 100), v)
del d['b'] # unmask a value
self.assertEqual(d.maps, [{'c':30}, {'a':1, 'b':2}]) # check internal state
self.assertEqual(d.items(), dict(a=1, b=2, c=30).items()) # check items/iter/getitem
self.assertEqual(len(d), 3) # check len
for key in 'abc': # check contains
self.assertIn(key, d)
for k, v in dict(a=1, b=2, c=30, z=100).items(): # check get
self.assertEqual(d.get(k, 100), v)
self.assertIn(repr(d), [ # check repr
type(d).__name__ + "({'c': 30}, {'a': 1, 'b': 2})",
type(d).__name__ + "({'c': 30}, {'b': 2, 'a': 1})"
])
for e in d.copy(), copy.copy(d): # check shallow copies
self.assertEqual(d, e)
self.assertEqual(d.maps, e.maps)
self.assertIsNot(d, e)
self.assertIsNot(d.maps[0], e.maps[0])
for m1, m2 in zip(d.maps[1:], e.maps[1:]):
self.assertIs(m1, m2)
for e in [pickle.loads(pickle.dumps(d)),
copy.deepcopy(d),
eval(repr(d))
]: # check deep copies
self.assertEqual(d, e)
self.assertEqual(d.maps, e.maps)
self.assertIsNot(d, e)
for m1, m2 in zip(d.maps, e.maps):
self.assertIsNot(m1, m2, e)
f = d.new_child()
f['b'] = 5
self.assertEqual(f.maps, [{'b': 5}, {'c':30}, {'a':1, 'b':2}])
self.assertEqual(f.parents.maps, [{'c':30}, {'a':1, 'b':2}]) # check parents
self.assertEqual(f['b'], 5) # find first in chain
self.assertEqual(f.parents['b'], 2) # look beyond maps[0]
    def test_constructor(self):
self.assertEqual(ChainMap().maps, [{}]) # no-args --> one new dict
self.assertEqual(ChainMap({1:2}).maps, [{1:2}]) # 1 arg --> list
def test_bool(self):
self.assertFalse(ChainMap())
self.assertFalse(ChainMap({}, {}))
self.assertTrue(ChainMap({1:2}, {}))
self.assertTrue(ChainMap({}, {1:2}))
def test_missing(self):
class DefaultChainMap(ChainMap):
def __missing__(self, key):
return 999
d = DefaultChainMap(dict(a=1, b=2), dict(b=20, c=30))
for k, v in dict(a=1, b=2, c=30, d=999).items():
self.assertEqual(d[k], v) # check __getitem__ w/missing
for k, v in dict(a=1, b=2, c=30, d=77).items():
self.assertEqual(d.get(k, 77), v) # check get() w/ missing
for k, v in dict(a=True, b=True, c=True, d=False).items():
self.assertEqual(k in d, v) # check __contains__ w/missing
self.assertEqual(d.pop('a', 1001), 1, d)
self.assertEqual(d.pop('a', 1002), 1002) # check pop() w/missing
self.assertEqual(d.popitem(), ('b', 2)) # check popitem() w/missing
with self.assertRaises(KeyError):
d.popitem()
def test_dict_coercion(self):
d = ChainMap(dict(a=1, b=2), dict(b=20, c=30))
self.assertEqual(dict(d), dict(a=1, b=2, c=30))
self.assertEqual(dict(d.items()), dict(a=1, b=2, c=30))
################################################################################
### Named Tuples
################################################################################
TestNT = namedtuple('TestNT', 'x y z') # type used for pickle tests
class TestNamedTuple(unittest.TestCase):
def test_factory(self):
Point = namedtuple('Point', 'x y')
self.assertEqual(Point.__name__, 'Point')
self.assertEqual(Point.__slots__, ())
self.assertEqual(Point.__module__, __name__)
self.assertEqual(Point.__getitem__, tuple.__getitem__)
self.assertEqual(Point._fields, ('x', 'y'))
self.assertRaises(ValueError, namedtuple, 'abc%', 'efg ghi') # type has non-alpha char
self.assertRaises(ValueError, namedtuple, 'class', 'efg ghi') # type has keyword
self.assertRaises(ValueError, namedtuple, '9abc', 'efg ghi') # type starts with digit
self.assertRaises(ValueError, namedtuple, 'abc', 'efg g%hi') # field with non-alpha char
self.assertRaises(ValueError, namedtuple, 'abc', 'abc class') # field has keyword
self.assertRaises(ValueError, namedtuple, 'abc', '8efg 9ghi') # field starts with digit
self.assertRaises(ValueError, namedtuple, 'abc', '_efg ghi') # field with leading underscore
self.assertRaises(ValueError, namedtuple, 'abc', 'efg efg ghi') # duplicate field
namedtuple('Point0', 'x1 y2') # Verify that numbers are allowed in names
namedtuple('_', 'a b c') # Test leading underscores in a typename
nt = namedtuple('nt', 'the quick brown fox') # check unicode input
self.assertNotIn("u'", repr(nt._fields))
nt = namedtuple('nt', ('the', 'quick')) # check unicode input
self.assertNotIn("u'", repr(nt._fields))
self.assertRaises(TypeError, Point._make, [11]) # catch too few args
self.assertRaises(TypeError, Point._make, [11, 22, 33]) # catch too many args
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def test_factory_doc_attr(self):
Point = namedtuple('Point', 'x y')
self.assertEqual(Point.__doc__, 'Point(x, y)')
def test_name_fixer(self):
for spec, renamed in [
[('efg', 'g%hi'), ('efg', '_1')], # field with non-alpha char
[('abc', 'class'), ('abc', '_1')], # field has keyword
[('8efg', '9ghi'), ('_0', '_1')], # field starts with digit
[('abc', '_efg'), ('abc', '_1')], # field with leading underscore
[('abc', 'efg', 'efg', 'ghi'), ('abc', 'efg', '_2', 'ghi')], # duplicate field
[('abc', '', 'x'), ('abc', '_1', 'x')], # fieldname is a space
]:
self.assertEqual(namedtuple('NT', spec, rename=True)._fields, renamed)
def test_instance(self):
Point = namedtuple('Point', 'x y')
p = Point(11, 22)
self.assertEqual(p, Point(x=11, y=22))
self.assertEqual(p, Point(11, y=22))
self.assertEqual(p, Point(y=22, x=11))
self.assertEqual(p, Point(*(11, 22)))
self.assertEqual(p, Point(**dict(x=11, y=22)))
self.assertRaises(TypeError, Point, 1) # too few args
self.assertRaises(TypeError, Point, 1, 2, 3) # too many args
self.assertRaises(TypeError, eval, 'Point(XXX=1, y=2)', locals()) # wrong keyword argument
self.assertRaises(TypeError, eval, 'Point(x=1)', locals()) # missing keyword argument
self.assertEqual(repr(p), 'Point(x=11, y=22)')
self.assertNotIn('__weakref__', dir(p))
self.assertEqual(p, Point._make([11, 22])) # test _make classmethod
self.assertEqual(p._fields, ('x', 'y')) # test _fields attribute
self.assertEqual(p._replace(x=1), (1, 22)) # test _replace method
self.assertEqual(p._asdict(), dict(x=11, y=22)) # test _asdict method
self.assertEqual(vars(p), p._asdict()) # verify that vars() works
try:
p._replace(x=1, error=2)
except ValueError:
pass
else:
            self.fail('Did not detect an incorrect fieldname')
# verify that field string can have commas
Point = namedtuple('Point', 'x, y')
p = Point(x=11, y=22)
self.assertEqual(repr(p), 'Point(x=11, y=22)')
# verify that fieldspec can be a non-string sequence
Point = namedtuple('Point', ('x', 'y'))
p = Point(x=11, y=22)
self.assertEqual(repr(p), 'Point(x=11, y=22)')
def test_tupleness(self):
Point = namedtuple('Point', 'x y')
p = Point(11, 22)
self.assertIsInstance(p, tuple)
self.assertEqual(p, (11, 22)) # matches a real tuple
        self.assertEqual(tuple(p), (11, 22))                       # coercible to a real tuple
        self.assertEqual(list(p), [11, 22])                        # coercible to a list
self.assertEqual(max(p), 22) # iterable
self.assertEqual(max(*p), 22) # star-able
x, y = p
self.assertEqual(p, (x, y)) # unpacks like a tuple
self.assertEqual((p[0], p[1]), (11, 22)) # indexable like a tuple
self.assertRaises(IndexError, p.__getitem__, 3)
self.assertEqual(p.x, x)
self.assertEqual(p.y, y)
self.assertRaises(AttributeError, eval, 'p.z', locals())
def test_odd_sizes(self):
Zero = namedtuple('Zero', '')
self.assertEqual(Zero(), ())
self.assertEqual(Zero._make([]), ())
self.assertEqual(repr(Zero()), 'Zero()')
self.assertEqual(Zero()._asdict(), {})
self.assertEqual(Zero()._fields, ())
Dot = namedtuple('Dot', 'd')
self.assertEqual(Dot(1), (1,))
self.assertEqual(Dot._make([1]), (1,))
self.assertEqual(Dot(1).d, 1)
self.assertEqual(repr(Dot(1)), 'Dot(d=1)')
self.assertEqual(Dot(1)._asdict(), {'d':1})
self.assertEqual(Dot(1)._replace(d=999), (999,))
self.assertEqual(Dot(1)._fields, ('d',))
# n = 5000
        n = 254  # SyntaxError: more than 255 arguments
import string, random
names = list(set(''.join([random.choice(string.ascii_letters)
for j in range(10)]) for i in range(n)))
n = len(names)
Big = namedtuple('Big', names)
b = Big(*range(n))
self.assertEqual(b, tuple(range(n)))
self.assertEqual(Big._make(range(n)), tuple(range(n)))
for pos, name in enumerate(names):
self.assertEqual(getattr(b, name), pos)
repr(b) # make sure repr() doesn't blow-up
d = b._asdict()
d_expected = dict(zip(names, range(n)))
self.assertEqual(d, d_expected)
b2 = b._replace(**dict([(names[1], 999),(names[-5], 42)]))
b2_expected = list(range(n))
b2_expected[1] = 999
b2_expected[-5] = 42
self.assertEqual(b2, tuple(b2_expected))
self.assertEqual(b._fields, tuple(names))
def test_pickle(self):
p = TestNT(x=10, y=20, z=30)
for module in (pickle,):
loads = getattr(module, 'loads')
dumps = getattr(module, 'dumps')
for protocol in -1, 0, 1, 2:
q = loads(dumps(p, protocol))
self.assertEqual(p, q)
self.assertEqual(p._fields, q._fields)
def test_copy(self):
p = TestNT(x=10, y=20, z=30)
for copier in copy.copy, copy.deepcopy:
q = copier(p)
self.assertEqual(p, q)
self.assertEqual(p._fields, q._fields)
def test_name_conflicts(self):
# Some names like "self", "cls", "tuple", "itemgetter", and "property"
# failed when used as field names. Test to make sure these now work.
T = namedtuple('T', 'itemgetter property self cls tuple')
t = T(1, 2, 3, 4, 5)
self.assertEqual(t, (1,2,3,4,5))
newt = t._replace(itemgetter=10, property=20, self=30, cls=40, tuple=50)
self.assertEqual(newt, (10,20,30,40,50))
# Broader test of all interesting names in a template
with support.captured_stdout() as template:
T = namedtuple('T', 'x', verbose=True)
words = set(re.findall('[A-Za-z]+', template.getvalue()))
words -= set(keyword.kwlist)
T = namedtuple('T', words)
# test __new__
values = tuple(range(len(words)))
t = T(*values)
self.assertEqual(t, values)
t = T(**dict(zip(T._fields, values)))
self.assertEqual(t, values)
# test _make
t = T._make(values)
self.assertEqual(t, values)
# exercise __repr__
repr(t)
# test _asdict
self.assertEqual(t._asdict(), dict(zip(T._fields, values)))
# test _replace
t = T._make(values)
newvalues = tuple(v*10 for v in values)
newt = t._replace(**dict(zip(T._fields, newvalues)))
self.assertEqual(newt, newvalues)
# test _fields
self.assertEqual(T._fields, tuple(words))
# test __getnewargs__
self.assertEqual(t.__getnewargs__(), values)
def test_repr(self):
with support.captured_stdout() as template:
A = namedtuple('A', 'x', verbose=True)
self.assertEqual(repr(A(1)), 'A(x=1)')
# repr should show the name of the subclass
class B(A):
pass
self.assertEqual(repr(B(1)), 'B(x=1)')
################################################################################
### Abstract Base Classes
################################################################################
class ABCTestCase(unittest.TestCase):
def validate_abstract_methods(self, abc, *names):
methodstubs = dict.fromkeys(names, lambda s, *args: 0)
        # everything should work when all required methods are present
C = type('C', (abc,), methodstubs)
C()
# instantiation should fail if a required method is missing
for name in names:
stubs = methodstubs.copy()
del stubs[name]
C = type('C', (abc,), stubs)
self.assertRaises(TypeError, C, name)
def validate_isinstance(self, abc, name):
stub = lambda s, *args: 0
C = type('C', (object,), {'__hash__': None})
setattr(C, name, stub)
self.assertIsInstance(C(), abc)
self.assertTrue(issubclass(C, abc))
C = type('C', (object,), {'__hash__': None})
self.assertNotIsInstance(C(), abc)
self.assertFalse(issubclass(C, abc))
def validate_comparison(self, instance):
ops = ['lt', 'gt', 'le', 'ge', 'ne', 'or', 'and', 'xor', 'sub']
operators = {}
for op in ops:
name = '__' + op + '__'
operators[name] = getattr(operator, name)
class Other:
def __init__(self):
self.right_side = False
def __eq__(self, other):
self.right_side = True
return True
__lt__ = __eq__
__gt__ = __eq__
__le__ = __eq__
__ge__ = __eq__
__ne__ = __eq__
__ror__ = __eq__
__rand__ = __eq__
__rxor__ = __eq__
__rsub__ = __eq__
for name, op in operators.items():
if not hasattr(instance, name):
continue
other = Other()
op(instance, other)
            self.assertTrue(other.right_side, 'Right side not called for %s.%s'
                            % (type(instance), name))
class TestOneTrickPonyABCs(ABCTestCase):
def test_Hashable(self):
# Check some non-hashables
non_samples = [bytearray(), list(), set(), dict()]
for x in non_samples:
self.assertNotIsInstance(x, Hashable)
self.assertFalse(issubclass(type(x), Hashable), repr(type(x)))
# Check some hashables
samples = [None,
int(), float(), complex(),
str(),
tuple(), frozenset(),
int, list, object, type, bytes()
]
for x in samples:
self.assertIsInstance(x, Hashable)
self.assertTrue(issubclass(type(x), Hashable), repr(type(x)))
self.assertRaises(TypeError, Hashable)
# Check direct subclassing
class H(Hashable):
def __hash__(self):
return super().__hash__()
self.assertEqual(hash(H()), 0)
self.assertFalse(issubclass(int, H))
self.validate_abstract_methods(Hashable, '__hash__')
self.validate_isinstance(Hashable, '__hash__')
def test_Iterable(self):
# Check some non-iterables
non_samples = [None, 42, 3.14, 1j]
for x in non_samples:
self.assertNotIsInstance(x, Iterable)
self.assertFalse(issubclass(type(x), Iterable), repr(type(x)))
# Check some iterables
samples = [bytes(), str(),
tuple(), list(), set(), frozenset(), dict(),
dict().keys(), dict().items(), dict().values(),
(lambda: (yield))(),
(x for x in []),
]
for x in samples:
self.assertIsInstance(x, Iterable)
self.assertTrue(issubclass(type(x), Iterable), repr(type(x)))
# Check direct subclassing
class I(Iterable):
def __iter__(self):
return super().__iter__()
self.assertEqual(list(I()), [])
self.assertFalse(issubclass(str, I))
self.validate_abstract_methods(Iterable, '__iter__')
self.validate_isinstance(Iterable, '__iter__')
def test_Iterator(self):
non_samples = [None, 42, 3.14, 1j, b"", "", (), [], {}, set()]
for x in non_samples:
self.assertNotIsInstance(x, Iterator)
self.assertFalse(issubclass(type(x), Iterator), repr(type(x)))
samples = [iter(bytes()), iter(str()),
iter(tuple()), iter(list()), iter(dict()),
iter(set()), iter(frozenset()),
iter(dict().keys()), iter(dict().items()),
iter(dict().values()),
(lambda: (yield))(),
(x for x in []),
]
for x in samples:
self.assertIsInstance(x, Iterator)
self.assertTrue(issubclass(type(x), Iterator), repr(type(x)))
self.validate_abstract_methods(Iterator, '__next__', '__iter__')
# Issue 10565
class NextOnly:
def __next__(self):
yield 1
raise StopIteration
self.assertNotIsInstance(NextOnly(), Iterator)
def test_Sized(self):
non_samples = [None, 42, 3.14, 1j,
(lambda: (yield))(),
(x for x in []),
]
for x in non_samples:
self.assertNotIsInstance(x, Sized)
self.assertFalse(issubclass(type(x), Sized), repr(type(x)))
samples = [bytes(), str(),
tuple(), list(), set(), frozenset(), dict(),
dict().keys(), dict().items(), dict().values(),
]
for x in samples:
self.assertIsInstance(x, Sized)
self.assertTrue(issubclass(type(x), Sized), repr(type(x)))
self.validate_abstract_methods(Sized, '__len__')
self.validate_isinstance(Sized, '__len__')
def test_Container(self):
non_samples = [None, 42, 3.14, 1j,
(lambda: (yield))(),
(x for x in []),
]
for x in non_samples:
self.assertNotIsInstance(x, Container)
self.assertFalse(issubclass(type(x), Container), repr(type(x)))
samples = [bytes(), str(),
tuple(), list(), set(), frozenset(), dict(),
dict().keys(), dict().items(),
]
for x in samples:
self.assertIsInstance(x, Container)
self.assertTrue(issubclass(type(x), Container), repr(type(x)))
self.validate_abstract_methods(Container, '__contains__')
self.validate_isinstance(Container, '__contains__')
def test_Callable(self):
non_samples = [None, 42, 3.14, 1j,
"", b"", (), [], {}, set(),
(lambda: (yield))(),
(x for x in []),
]
for x in non_samples:
self.assertNotIsInstance(x, Callable)
self.assertFalse(issubclass(type(x), Callable), repr(type(x)))
samples = [lambda: None,
type, int, object,
len,
list.append, [].append,
]
for x in samples:
self.assertIsInstance(x, Callable)
self.assertTrue(issubclass(type(x), Callable), repr(type(x)))
self.validate_abstract_methods(Callable, '__call__')
self.validate_isinstance(Callable, '__call__')
def test_direct_subclassing(self):
for B in Hashable, Iterable, Iterator, Sized, Container, Callable:
class C(B):
pass
self.assertTrue(issubclass(C, B))
self.assertFalse(issubclass(int, C))
def test_registration(self):
for B in Hashable, Iterable, Iterator, Sized, Container, Callable:
class C:
__hash__ = None # Make sure it isn't hashable by default
self.assertFalse(issubclass(C, B), B.__name__)
B.register(C)
self.assertTrue(issubclass(C, B))
class WithSet(MutableSet):
def __init__(self, it=()):
self.data = set(it)
def __len__(self):
return len(self.data)
def __iter__(self):
return iter(self.data)
def __contains__(self, item):
return item in self.data
def add(self, item):
self.data.add(item)
def discard(self, item):
self.data.discard(item)
class TestCollectionABCs(ABCTestCase):
# XXX For now, we only test some virtual inheritance properties.
# We should also test the proper behavior of the collection ABCs
# as real base classes or mix-in classes.
def test_Set(self):
for sample in [set, frozenset]:
self.assertIsInstance(sample(), Set)
self.assertTrue(issubclass(sample, Set))
self.validate_abstract_methods(Set, '__contains__', '__iter__', '__len__')
class MySet(Set):
def __contains__(self, x):
return False
def __len__(self):
return 0
def __iter__(self):
return iter([])
self.validate_comparison(MySet())
def test_hash_Set(self):
class OneTwoThreeSet(Set):
def __init__(self):
self.contents = [1, 2, 3]
def __contains__(self, x):
return x in self.contents
def __len__(self):
return len(self.contents)
def __iter__(self):
return iter(self.contents)
def __hash__(self):
return self._hash()
a, b = OneTwoThreeSet(), OneTwoThreeSet()
self.assertTrue(hash(a) == hash(b))
def test_MutableSet(self):
self.assertIsInstance(set(), MutableSet)
self.assertTrue(issubclass(set, MutableSet))
self.assertNotIsInstance(frozenset(), MutableSet)
self.assertFalse(issubclass(frozenset, MutableSet))
self.validate_abstract_methods(MutableSet, '__contains__', '__iter__', '__len__',
'add', 'discard')
def test_issue_5647(self):
# MutableSet.__iand__ mutated the set during iteration
s = WithSet('abcd')
s &= WithSet('cdef') # This used to fail
self.assertEqual(set(s), set('cd'))
def test_issue_4920(self):
# MutableSet.pop() method did not work
class MySet(MutableSet):
__slots__=['__s']
def __init__(self,items=None):
if items is None:
items=[]
self.__s=set(items)
def __contains__(self,v):
return v in self.__s
def __iter__(self):
return iter(self.__s)
def __len__(self):
return len(self.__s)
def add(self,v):
result=v not in self.__s
self.__s.add(v)
return result
def discard(self,v):
result=v in self.__s
self.__s.discard(v)
return result
def __repr__(self):
return "MySet(%s)" % repr(list(self))
s = MySet([5,43,2,1])
self.assertEqual(s.pop(), 1)
def test_issue8750(self):
empty = WithSet()
full = WithSet(range(10))
s = WithSet(full)
s -= s
self.assertEqual(s, empty)
s = WithSet(full)
s ^= s
self.assertEqual(s, empty)
s = WithSet(full)
s &= s
self.assertEqual(s, full)
s |= s
self.assertEqual(s, full)
def test_Mapping(self):
for sample in [dict]:
self.assertIsInstance(sample(), Mapping)
self.assertTrue(issubclass(sample, Mapping))
self.validate_abstract_methods(Mapping, '__contains__', '__iter__', '__len__',
'__getitem__')
class MyMapping(Mapping):
def __len__(self):
return 0
def __getitem__(self, i):
raise IndexError
def __iter__(self):
return iter(())
self.validate_comparison(MyMapping())
def test_MutableMapping(self):
for sample in [dict]:
self.assertIsInstance(sample(), MutableMapping)
self.assertTrue(issubclass(sample, MutableMapping))
self.validate_abstract_methods(MutableMapping, '__contains__', '__iter__', '__len__',
'__getitem__', '__setitem__', '__delitem__')
def test_MutableMapping_subclass(self):
# Test issue 9214
mymap = UserDict()
mymap['red'] = 5
self.assertIsInstance(mymap.keys(), Set)
self.assertIsInstance(mymap.keys(), KeysView)
self.assertIsInstance(mymap.items(), Set)
self.assertIsInstance(mymap.items(), ItemsView)
mymap = UserDict()
mymap['red'] = 5
z = mymap.keys() | {'orange'}
self.assertIsInstance(z, set)
list(z)
mymap['blue'] = 7 # Shouldn't affect 'z'
self.assertEqual(sorted(z), ['orange', 'red'])
mymap = UserDict()
mymap['red'] = 5
z = mymap.items() | {('orange', 3)}
self.assertIsInstance(z, set)
list(z)
mymap['blue'] = 7 # Shouldn't affect 'z'
self.assertEqual(sorted(z), [('orange', 3), ('red', 5)])
def test_Sequence(self):
for sample in [tuple, list, bytes, str]:
self.assertIsInstance(sample(), Sequence)
self.assertTrue(issubclass(sample, Sequence))
self.assertIsInstance(range(10), Sequence)
self.assertTrue(issubclass(range, Sequence))
self.assertTrue(issubclass(str, Sequence))
self.validate_abstract_methods(Sequence, '__contains__', '__iter__', '__len__',
'__getitem__')
def test_ByteString(self):
for sample in [bytes, bytearray]:
self.assertIsInstance(sample(), ByteString)
self.assertTrue(issubclass(sample, ByteString))
for sample in [str, list, tuple]:
self.assertNotIsInstance(sample(), ByteString)
self.assertFalse(issubclass(sample, ByteString))
self.assertNotIsInstance(memoryview(b""), ByteString)
self.assertFalse(issubclass(memoryview, ByteString))
def test_MutableSequence(self):
for sample in [tuple, str, bytes]:
self.assertNotIsInstance(sample(), MutableSequence)
self.assertFalse(issubclass(sample, MutableSequence))
for sample in [list, bytearray]:
self.assertIsInstance(sample(), MutableSequence)
self.assertTrue(issubclass(sample, MutableSequence))
self.assertFalse(issubclass(str, MutableSequence))
self.validate_abstract_methods(MutableSequence, '__contains__', '__iter__',
'__len__', '__getitem__', '__setitem__', '__delitem__', 'insert')
################################################################################
### Counter
################################################################################
class TestCounter(unittest.TestCase):
def test_basics(self):
c = Counter('abcaba')
self.assertEqual(c, Counter({'a':3 , 'b': 2, 'c': 1}))
self.assertEqual(c, Counter(a=3, b=2, c=1))
self.assertIsInstance(c, dict)
self.assertIsInstance(c, Mapping)
self.assertTrue(issubclass(Counter, dict))
self.assertTrue(issubclass(Counter, Mapping))
self.assertEqual(len(c), 3)
self.assertEqual(sum(c.values()), 6)
self.assertEqual(sorted(c.values()), [1, 2, 3])
self.assertEqual(sorted(c.keys()), ['a', 'b', 'c'])
self.assertEqual(sorted(c), ['a', 'b', 'c'])
self.assertEqual(sorted(c.items()),
[('a', 3), ('b', 2), ('c', 1)])
self.assertEqual(c['b'], 2)
self.assertEqual(c['z'], 0)
self.assertEqual(c.__contains__('c'), True)
self.assertEqual(c.__contains__('z'), False)
self.assertEqual(c.get('b', 10), 2)
self.assertEqual(c.get('z', 10), 10)
self.assertEqual(c, dict(a=3, b=2, c=1))
self.assertEqual(repr(c), "Counter({'a': 3, 'b': 2, 'c': 1})")
self.assertEqual(c.most_common(), [('a', 3), ('b', 2), ('c', 1)])
for i in range(5):
self.assertEqual(c.most_common(i),
[('a', 3), ('b', 2), ('c', 1)][:i])
self.assertEqual(''.join(sorted(c.elements())), 'aaabbc')
c['a'] += 1 # increment an existing value
c['b'] -= 2 # sub existing value to zero
del c['c'] # remove an entry
del c['c'] # make sure that del doesn't raise KeyError
c['d'] -= 2 # sub from a missing value
c['e'] = -5 # directly assign a missing value
c['f'] += 4 # add to a missing value
self.assertEqual(c, dict(a=4, b=0, d=-2, e=-5, f=4))
self.assertEqual(''.join(sorted(c.elements())), 'aaaaffff')
self.assertEqual(c.pop('f'), 4)
self.assertNotIn('f', c)
for i in range(3):
elem, cnt = c.popitem()
self.assertNotIn(elem, c)
c.clear()
self.assertEqual(c, {})
self.assertEqual(repr(c), 'Counter()')
self.assertRaises(NotImplementedError, Counter.fromkeys, 'abc')
self.assertRaises(TypeError, hash, c)
c.update(dict(a=5, b=3))
c.update(c=1)
c.update(Counter('a' * 50 + 'b' * 30))
c.update() # test case with no args
c.__init__('a' * 500 + 'b' * 300)
c.__init__('cdc')
c.__init__()
self.assertEqual(c, dict(a=555, b=333, c=3, d=1))
self.assertEqual(c.setdefault('d', 5), 1)
self.assertEqual(c['d'], 1)
self.assertEqual(c.setdefault('e', 5), 5)
self.assertEqual(c['e'], 5)
def test_copying(self):
        # Check that counters are copyable, deepcopyable, picklable, and
        # have a repr/eval round-trip
words = Counter('which witch had which witches wrist watch'.split())
update_test = Counter()
update_test.update(words)
for i, dup in enumerate([
words.copy(),
copy.copy(words),
copy.deepcopy(words),
pickle.loads(pickle.dumps(words, 0)),
pickle.loads(pickle.dumps(words, 1)),
pickle.loads(pickle.dumps(words, 2)),
pickle.loads(pickle.dumps(words, -1)),
eval(repr(words)),
update_test,
Counter(words),
]):
msg = (i, dup, words)
self.assertTrue(dup is not words)
self.assertEqual(dup, words)
self.assertEqual(len(dup), len(words))
self.assertEqual(type(dup), type(words))
def test_copy_subclass(self):
class MyCounter(Counter):
pass
c = MyCounter('slartibartfast')
d = c.copy()
self.assertEqual(d, c)
self.assertEqual(len(d), len(c))
self.assertEqual(type(d), type(c))
def test_conversions(self):
# Convert to: set, list, dict
s = 'she sells sea shells by the sea shore'
self.assertEqual(sorted(Counter(s).elements()), sorted(s))
self.assertEqual(sorted(Counter(s)), sorted(set(s)))
self.assertEqual(dict(Counter(s)), dict(Counter(s).items()))
self.assertEqual(set(Counter(s)), set(s))
def test_invariant_for_the_in_operator(self):
c = Counter(a=10, b=-2, c=0)
for elem in c:
self.assertTrue(elem in c)
self.assertIn(elem, c)
def test_multiset_operations(self):
# Verify that adding a zero counter will strip zeros and negatives
c = Counter(a=10, b=-2, c=0) + Counter()
self.assertEqual(dict(c), dict(a=10))
elements = 'abcd'
for i in range(1000):
# test random pairs of multisets
p = Counter(dict((elem, randrange(-2,4)) for elem in elements))
p.update(e=1, f=-1, g=0)
q = Counter(dict((elem, randrange(-2,4)) for elem in elements))
q.update(h=1, i=-1, j=0)
for counterop, numberop in [
(Counter.__add__, lambda x, y: max(0, x+y)),
(Counter.__sub__, lambda x, y: max(0, x-y)),
(Counter.__or__, lambda x, y: max(0,x,y)),
(Counter.__and__, lambda x, y: max(0, min(x,y))),
]:
result = counterop(p, q)
for x in elements:
self.assertEqual(numberop(p[x], q[x]), result[x],
(counterop, x, p, q))
# verify that results exclude non-positive counts
                self.assertTrue(all(x > 0 for x in result.values()))
elements = 'abcdef'
for i in range(100):
# verify that random multisets with no repeats are exactly like sets
p = Counter(dict((elem, randrange(0, 2)) for elem in elements))
q = Counter(dict((elem, randrange(0, 2)) for elem in elements))
for counterop, setop in [
(Counter.__sub__, set.__sub__),
(Counter.__or__, set.__or__),
(Counter.__and__, set.__and__),
]:
counter_result = counterop(p, q)
set_result = setop(set(p.elements()), set(q.elements()))
self.assertEqual(counter_result, dict.fromkeys(set_result, 1))
def test_subtract(self):
c = Counter(a=-5, b=0, c=5, d=10, e=15,g=40)
c.subtract(a=1, b=2, c=-3, d=10, e=20, f=30, h=-50)
self.assertEqual(c, Counter(a=-6, b=-2, c=8, d=0, e=-5, f=-30, g=40, h=50))
c = Counter(a=-5, b=0, c=5, d=10, e=15,g=40)
c.subtract(Counter(a=1, b=2, c=-3, d=10, e=20, f=30, h=-50))
self.assertEqual(c, Counter(a=-6, b=-2, c=8, d=0, e=-5, f=-30, g=40, h=50))
c = Counter('aaabbcd')
c.subtract('aaaabbcce')
self.assertEqual(c, Counter(a=-1, b=0, c=-1, d=1, e=-1))
def test_helper_function(self):
# two paths, one for real dicts and one for other mappings
elems = list('abracadabra')
d = dict()
_count_elements(d, elems)
self.assertEqual(d, {'a': 5, 'r': 2, 'b': 2, 'c': 1, 'd': 1})
m = OrderedDict()
_count_elements(m, elems)
self.assertEqual(m,
OrderedDict([('a', 5), ('b', 2), ('r', 2), ('c', 1), ('d', 1)]))
################################################################################
### OrderedDict
################################################################################
class TestOrderedDict(unittest.TestCase):
def test_init(self):
with self.assertRaises(TypeError):
OrderedDict([('a', 1), ('b', 2)], None) # too many args
pairs = [('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5)]
self.assertEqual(sorted(OrderedDict(dict(pairs)).items()), pairs) # dict input
self.assertEqual(sorted(OrderedDict(**dict(pairs)).items()), pairs) # kwds input
self.assertEqual(list(OrderedDict(pairs).items()), pairs) # pairs input
self.assertEqual(list(OrderedDict([('a', 1), ('b', 2), ('c', 9), ('d', 4)],
c=3, e=5).items()), pairs) # mixed input
        # make sure no positional args conflict with possible kwargs
self.assertEqual(inspect.getargspec(OrderedDict.__dict__['__init__']).args,
['self'])
# Make sure that direct calls to __init__ do not clear previous contents
d = OrderedDict([('a', 1), ('b', 2), ('c', 3), ('d', 44), ('e', 55)])
d.__init__([('e', 5), ('f', 6)], g=7, d=4)
self.assertEqual(list(d.items()),
[('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5), ('f', 6), ('g', 7)])
def test_update(self):
with self.assertRaises(TypeError):
OrderedDict().update([('a', 1), ('b', 2)], None) # too many args
pairs = [('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5)]
od = OrderedDict()
od.update(dict(pairs))
self.assertEqual(sorted(od.items()), pairs) # dict input
od = OrderedDict()
od.update(**dict(pairs))
self.assertEqual(sorted(od.items()), pairs) # kwds input
od = OrderedDict()
od.update(pairs)
self.assertEqual(list(od.items()), pairs) # pairs input
od = OrderedDict()
od.update([('a', 1), ('b', 2), ('c', 9), ('d', 4)], c=3, e=5)
self.assertEqual(list(od.items()), pairs) # mixed input
# Issue 9137: Named argument called 'other' or 'self'
# shouldn't be treated specially.
od = OrderedDict()
od.update(self=23)
self.assertEqual(list(od.items()), [('self', 23)])
od = OrderedDict()
od.update(other={})
self.assertEqual(list(od.items()), [('other', {})])
od = OrderedDict()
od.update(red=5, blue=6, other=7, self=8)
self.assertEqual(sorted(list(od.items())),
[('blue', 6), ('other', 7), ('red', 5), ('self', 8)])
# Make sure that direct calls to update do not clear previous contents
        # and that updated items are not moved to the end
d = OrderedDict([('a', 1), ('b', 2), ('c', 3), ('d', 44), ('e', 55)])
d.update([('e', 5), ('f', 6)], g=7, d=4)
self.assertEqual(list(d.items()),
[('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5), ('f', 6), ('g', 7)])
def test_abc(self):
self.assertIsInstance(OrderedDict(), MutableMapping)
self.assertTrue(issubclass(OrderedDict, MutableMapping))
def test_clear(self):
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
shuffle(pairs)
od = OrderedDict(pairs)
self.assertEqual(len(od), len(pairs))
od.clear()
self.assertEqual(len(od), 0)
def test_delitem(self):
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
od = OrderedDict(pairs)
del od['a']
self.assertNotIn('a', od)
with self.assertRaises(KeyError):
del od['a']
self.assertEqual(list(od.items()), pairs[:2] + pairs[3:])
def test_setitem(self):
od = OrderedDict([('d', 1), ('b', 2), ('c', 3), ('a', 4), ('e', 5)])
od['c'] = 10 # existing element
od['f'] = 20 # new element
self.assertEqual(list(od.items()),
[('d', 1), ('b', 2), ('c', 10), ('a', 4), ('e', 5), ('f', 20)])
def test_iterators(self):
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
shuffle(pairs)
od = OrderedDict(pairs)
self.assertEqual(list(od), [t[0] for t in pairs])
self.assertEqual(list(od.keys()), [t[0] for t in pairs])
self.assertEqual(list(od.values()), [t[1] for t in pairs])
self.assertEqual(list(od.items()), pairs)
self.assertEqual(list(reversed(od)),
[t[0] for t in reversed(pairs)])
def test_popitem(self):
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
shuffle(pairs)
od = OrderedDict(pairs)
while pairs:
self.assertEqual(od.popitem(), pairs.pop())
with self.assertRaises(KeyError):
od.popitem()
self.assertEqual(len(od), 0)
def test_pop(self):
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
shuffle(pairs)
od = OrderedDict(pairs)
shuffle(pairs)
while pairs:
k, v = pairs.pop()
self.assertEqual(od.pop(k), v)
with self.assertRaises(KeyError):
od.pop('xyz')
self.assertEqual(len(od), 0)
self.assertEqual(od.pop(k, 12345), 12345)
# make sure pop still works when __missing__ is defined
class Missing(OrderedDict):
def __missing__(self, key):
return 0
m = Missing(a=1)
self.assertEqual(m.pop('b', 5), 5)
self.assertEqual(m.pop('a', 6), 1)
self.assertEqual(m.pop('a', 6), 6)
with self.assertRaises(KeyError):
m.pop('a')
def test_equality(self):
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
shuffle(pairs)
od1 = OrderedDict(pairs)
od2 = OrderedDict(pairs)
self.assertEqual(od1, od2) # same order implies equality
pairs = pairs[2:] + pairs[:2]
od2 = OrderedDict(pairs)
self.assertNotEqual(od1, od2) # different order implies inequality
# comparison to regular dict is not order sensitive
self.assertEqual(od1, dict(od2))
self.assertEqual(dict(od2), od1)
        # different length implies inequality
self.assertNotEqual(od1, OrderedDict(pairs[:-1]))
def test_copying(self):
# Check that ordered dicts are copyable, deepcopyable, picklable,
# and have a repr/eval round-trip
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
od = OrderedDict(pairs)
update_test = OrderedDict()
update_test.update(od)
for i, dup in enumerate([
od.copy(),
copy.copy(od),
copy.deepcopy(od),
pickle.loads(pickle.dumps(od, 0)),
pickle.loads(pickle.dumps(od, 1)),
pickle.loads(pickle.dumps(od, 2)),
pickle.loads(pickle.dumps(od, 3)),
pickle.loads(pickle.dumps(od, -1)),
eval(repr(od)),
update_test,
OrderedDict(od),
]):
self.assertTrue(dup is not od)
self.assertEqual(dup, od)
self.assertEqual(list(dup.items()), list(od.items()))
self.assertEqual(len(dup), len(od))
self.assertEqual(type(dup), type(od))
def test_yaml_linkage(self):
        # Verify that __reduce__ is set up in a way that supports PyYAML's dump() feature.
# In yaml, lists are native but tuples are not.
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
od = OrderedDict(pairs)
# yaml.dump(od) -->
# '!!python/object/apply:__main__.OrderedDict\n- - [a, 1]\n - [b, 2]\n'
self.assertTrue(all(type(pair)==list for pair in od.__reduce__()[1]))
def test_reduce_not_too_fat(self):
# do not save instance dictionary if not needed
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
od = OrderedDict(pairs)
self.assertEqual(len(od.__reduce__()), 2)
od.x = 10
self.assertEqual(len(od.__reduce__()), 3)
def test_repr(self):
od = OrderedDict([('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)])
self.assertEqual(repr(od),
"OrderedDict([('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)])")
self.assertEqual(eval(repr(od)), od)
self.assertEqual(repr(OrderedDict()), "OrderedDict()")
def test_repr_recursive(self):
# See issue #9826
od = OrderedDict.fromkeys('abc')
od['x'] = od
self.assertEqual(repr(od),
"OrderedDict([('a', None), ('b', None), ('c', None), ('x', ...)])")
def test_setdefault(self):
pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
shuffle(pairs)
od = OrderedDict(pairs)
pair_order = list(od.items())
self.assertEqual(od.setdefault('a', 10), 3)
# make sure order didn't change
self.assertEqual(list(od.items()), pair_order)
self.assertEqual(od.setdefault('x', 10), 10)
# make sure 'x' is added to the end
self.assertEqual(list(od.items())[-1], ('x', 10))
# make sure setdefault still works when __missing__ is defined
class Missing(OrderedDict):
def __missing__(self, key):
return 0
self.assertEqual(Missing().setdefault(5, 9), 9)
def test_reinsert(self):
# Given insert a, insert b, delete a, re-insert a,
# verify that a is now later than b.
od = OrderedDict()
od['a'] = 1
od['b'] = 2
del od['a']
od['a'] = 1
self.assertEqual(list(od.items()), [('b', 2), ('a', 1)])
def test_move_to_end(self):
od = OrderedDict.fromkeys('abcde')
self.assertEqual(list(od), list('abcde'))
od.move_to_end('c')
self.assertEqual(list(od), list('abdec'))
od.move_to_end('c', 0)
self.assertEqual(list(od), list('cabde'))
od.move_to_end('c', 0)
self.assertEqual(list(od), list('cabde'))
od.move_to_end('e')
self.assertEqual(list(od), list('cabde'))
with self.assertRaises(KeyError):
od.move_to_end('x')
def test_sizeof(self):
        # Wimpy test: Just verify the reported size is larger than for a regular dict
d = dict(a=1)
od = OrderedDict(**d)
self.assertGreater(sys.getsizeof(od), sys.getsizeof(d))
def test_override_update(self):
# Verify that subclasses can override update() without breaking __init__()
class MyOD(OrderedDict):
def update(self, *args, **kwds):
raise Exception()
items = [('a', 1), ('c', 3), ('b', 2)]
self.assertEqual(list(MyOD(items).items()), items)
class GeneralMappingTests(mapping_tests.BasicTestMappingProtocol):
type2test = OrderedDict
def test_popitem(self):
d = self._empty_mapping()
self.assertRaises(KeyError, d.popitem)
class MyOrderedDict(OrderedDict):
pass
class SubclassMappingTests(mapping_tests.BasicTestMappingProtocol):
type2test = MyOrderedDict
def test_popitem(self):
d = self._empty_mapping()
self.assertRaises(KeyError, d.popitem)
################################################################################
### Run tests
################################################################################
import doctest, collections
def test_main(verbose=None):
NamedTupleDocs = doctest.DocTestSuite(module=collections)
test_classes = [TestNamedTuple, NamedTupleDocs, TestOneTrickPonyABCs,
TestCollectionABCs, TestCounter, TestChainMap,
TestOrderedDict, GeneralMappingTests, SubclassMappingTests]
support.run_unittest(*test_classes)
support.run_doctest(collections, verbose)
if __name__ == "__main__":
test_main(verbose=True)
|
plotly/plotly.py | refs/heads/master | packages/python/plotly/plotly/validators/histogram2d/_ids.py | 1 | import _plotly_utils.basevalidators
class IdsValidator(_plotly_utils.basevalidators.DataArrayValidator):
def __init__(self, plotly_name="ids", parent_name="histogram2d", **kwargs):
super(IdsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
**kwargs
)
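# Hedged usage sketch: validators like this are normally instantiated by
# plotly's generated trace classes rather than by user code; the base-class
# validate_coerce API is assumed here.
#   IdsValidator().validate_coerce(["a", "b", "c"])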
|
encukou/freeipa | refs/heads/master | ipaplatform/redhat/tasks.py | 1 | # Authors: Simo Sorce <[email protected]>
# Alexander Bokovoy <[email protected]>
# Martin Kosek <[email protected]>
# Tomas Babej <[email protected]>
#
# Copyright (C) 2007-2014 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
This module contains default Red Hat OS family-specific implementations of
system tasks.
'''
from __future__ import print_function, absolute_import
import ctypes
import logging
import os
from pathlib import Path
import socket
import traceback
import errno
import urllib
import subprocess
import sys
import textwrap
from ctypes.util import find_library
from functools import total_ordering
from subprocess import CalledProcessError
from pyasn1.error import PyAsn1Error
from ipapython import directivesetter
from ipapython import ipautil
import ipapython.errors
from ipaplatform.constants import constants
from ipaplatform.paths import paths
from ipaplatform.redhat.authconfig import get_auth_tool
from ipaplatform.base.tasks import BaseTaskNamespace
logger = logging.getLogger(__name__)
# /etc/pkcs11/modules override
# base file name, module, list of disabled-in
# 'p11-kit-proxy' disables proxying of module, see man(5) pkcs11.conf
PKCS11_MODULES = [
('softhsm2', paths.LIBSOFTHSM2_SO, ['p11-kit-proxy']),
]
NM_IPA_CONF = textwrap.dedent("""
# auto-generated by IPA installer
[main]
dns=default
[global-dns]
searches={searches}
[global-dns-domain-*]
servers={servers}
""")
@total_ordering
class IPAVersion:
_rpmvercmp_func = None
@classmethod
def _rpmvercmp(cls, a, b):
"""Lazy load and call librpm's rpmvercmp
"""
rpmvercmp_func = cls._rpmvercmp_func
if rpmvercmp_func is None:
librpm = ctypes.CDLL(find_library('rpm'))
rpmvercmp_func = librpm.rpmvercmp
# int rpmvercmp(const char *a, const char *b)
rpmvercmp_func.argtypes = [ctypes.c_char_p, ctypes.c_char_p]
rpmvercmp_func.restype = ctypes.c_int
cls._rpmvercmp_func = rpmvercmp_func
return rpmvercmp_func(a, b)
def __init__(self, version):
self._version = version
self._bytes = version.encode('utf-8')
@property
def version(self):
return self._version
def __eq__(self, other):
if not isinstance(other, IPAVersion):
return NotImplemented
return self._rpmvercmp(self._bytes, other._bytes) == 0
def __lt__(self, other):
if not isinstance(other, IPAVersion):
return NotImplemented
return self._rpmvercmp(self._bytes, other._bytes) < 0
def __hash__(self):
return hash(self._version)
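# Illustrative note (assumes librpm is loadable via find_library('rpm')):
# rpmvercmp compares version segments numerically, so for example
# IPAVersion('4.9.10') > IPAVersion('4.9.2') holds, whereas a plain
# lexicographic string comparison would order '4.9.10' before '4.9.2'.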
class RedHatTaskNamespace(BaseTaskNamespace):
def restore_context(self, filepath, force=False):
"""Restore SELinux security context on the given filepath.
SELinux equivalent is /path/to/restorecon <filepath>
restorecon's return values are not reliable so we have to
ignore them (BZ #739604).
ipautil.run() will do the logging.
"""
restorecon = paths.SBIN_RESTORECON
if not self.is_selinux_enabled() or not os.path.exists(restorecon):
return
# Force reset of context to match file_context for customizable
# files, and the default file context, changing the user, role,
# range portion as well as the type.
args = [restorecon]
if force:
args.append('-F')
args.append(filepath)
ipautil.run(args, raiseonerr=False)
def is_selinux_enabled(self):
"""Check if SELinux is available and enabled
"""
try:
ipautil.run([paths.SELINUXENABLED])
except ipautil.CalledProcessError:
# selinuxenabled returns 1 if not enabled
return False
except OSError:
# selinuxenabled binary not available
return False
else:
return True
def check_selinux_status(self, restorecon=paths.RESTORECON):
"""
        We don't have a specific package requirement for policycoreutils,
        which provides restorecon. This is because we don't require
        SELinux on client installs. However, if SELinux is enabled then
        this package is required.
        This function returns False if SELinux is not enabled and True
        otherwise; it raises a RuntimeError if SELinux is enabled but
        restorecon is not available.
"""
if not self.is_selinux_enabled():
return False
if not os.path.exists(restorecon):
raise RuntimeError('SELinux is enabled but %s does not exist.\n'
'Install the policycoreutils package and start '
'the installation again.' % restorecon)
return True
def check_ipv6_stack_enabled(self):
"""Checks whether IPv6 kernel module is loaded.
Function checks if /proc/net/if_inet6 is present. If IPv6 stack is
enabled, it exists and contains the interfaces configuration.
:raises: RuntimeError when IPv6 stack is disabled
"""
if not os.path.exists(paths.IF_INET6):
raise RuntimeError(
"IPv6 stack has to be enabled in the kernel and some "
"interface has to have ::1 address assigned. Typically "
"this is 'lo' interface. If you do not wish to use IPv6 "
"globally, disable it on the specific interfaces in "
"sysctl.conf except 'lo' interface.")
try:
localhost6 = ipautil.CheckedIPAddress('::1', allow_loopback=True)
if localhost6.get_matching_interface() is None:
raise ValueError("no interface for ::1 address found")
except ValueError:
raise RuntimeError(
"IPv6 stack is enabled in the kernel but there is no "
"interface that has ::1 address assigned. Add ::1 address "
"resolution to 'lo' interface. You might need to enable IPv6 "
"on the interface 'lo' in sysctl.conf.")
def detect_container(self):
"""Check if running inside a container
:returns: container runtime or None
:rtype: str, None
"""
try:
output = subprocess.check_output(
[paths.SYSTEMD_DETECT_VIRT, '--container'],
stderr=subprocess.STDOUT
)
except subprocess.CalledProcessError as e:
if e.returncode == 1:
# No container runtime detected
return None
else:
raise
else:
return output.decode('utf-8').strip()
def restore_pre_ipa_client_configuration(self, fstore, statestore,
was_sssd_installed,
was_sssd_configured):
auth_config = get_auth_tool()
auth_config.unconfigure(
fstore, statestore, was_sssd_installed, was_sssd_configured
)
def set_nisdomain(self, nisdomain):
try:
with open(paths.SYSCONF_NETWORK, 'r') as f:
content = [
line for line in f
if not line.strip().upper().startswith('NISDOMAIN')
]
except IOError:
content = []
content.append("NISDOMAIN={}\n".format(nisdomain))
with open(paths.SYSCONF_NETWORK, 'w') as f:
f.writelines(content)
def modify_nsswitch_pam_stack(self, sssd, mkhomedir, statestore,
sudo=True):
auth_config = get_auth_tool()
auth_config.configure(sssd, mkhomedir, statestore, sudo)
def is_nosssd_supported(self):
        # The flag --no-sssd is not supported anymore for RHEL-based distros
return False
def backup_auth_configuration(self, path):
auth_config = get_auth_tool()
auth_config.backup(path)
def restore_auth_configuration(self, path):
auth_config = get_auth_tool()
auth_config.restore(path)
def migrate_auth_configuration(self, statestore):
"""
Migrate the pam stack configuration from authconfig to an authselect
profile.
"""
# Check if mkhomedir was enabled during installation
mkhomedir = statestore.get_state('authconfig', 'mkhomedir')
# Force authselect 'sssd' profile
authselect_cmd = [paths.AUTHSELECT, "select", "sssd", "with-sudo"]
if mkhomedir:
authselect_cmd.append("with-mkhomedir")
authselect_cmd.append("--force")
ipautil.run(authselect_cmd)
# Remove all remaining keys from the authconfig module
for conf in ('ldap', 'krb5', 'sssd', 'sssdauth', 'mkhomedir'):
statestore.restore_state('authconfig', conf)
# Create new authselect module in the statestore
statestore.backup_state('authselect', 'profile', 'sssd')
statestore.backup_state(
'authselect', 'features_list', '')
statestore.backup_state('authselect', 'mkhomedir', bool(mkhomedir))
def reload_systemwide_ca_store(self):
try:
ipautil.run([paths.UPDATE_CA_TRUST])
except CalledProcessError as e:
logger.error(
"Could not update systemwide CA trust database: %s", e)
return False
else:
logger.info("Systemwide CA database updated.")
return True
def platform_insert_ca_certs(self, ca_certs):
return any([
self.write_p11kit_certs(paths.IPA_P11_KIT, ca_certs),
self.remove_ca_certificates_bundle(
paths.SYSTEMWIDE_IPA_CA_CRT
),
])
def write_p11kit_certs(self, filename, ca_certs):
# pylint: disable=ipa-forbidden-import
from ipalib import x509 # FixMe: break import cycle
from ipalib.errors import CertificateError
# pylint: enable=ipa-forbidden-import
path = Path(filename)
try:
f = open(path, 'w')
except IOError:
logger.error("Failed to open %s", path)
raise
with f:
f.write("# This file was created by IPA. Do not edit.\n"
"\n")
try:
os.fchmod(f.fileno(), 0o644)
except IOError:
logger.error("Failed to set mode of %s", path)
raise
has_eku = set()
for cert, nickname, trusted, _ext_key_usage in ca_certs:
try:
subject = cert.subject_bytes
issuer = cert.issuer_bytes
serial_number = cert.serial_number_bytes
public_key_info = cert.public_key_info_bytes
except (PyAsn1Error, ValueError, CertificateError):
logger.error(
"Failed to decode certificate \"%s\"", nickname)
raise
label = urllib.parse.quote(nickname)
subject = urllib.parse.quote(subject)
issuer = urllib.parse.quote(issuer)
serial_number = urllib.parse.quote(serial_number)
public_key_info = urllib.parse.quote(public_key_info)
obj = ("[p11-kit-object-v1]\n"
"class: certificate\n"
"certificate-type: x-509\n"
"certificate-category: authority\n"
"label: \"%(label)s\"\n"
"subject: \"%(subject)s\"\n"
"issuer: \"%(issuer)s\"\n"
"serial-number: \"%(serial_number)s\"\n"
"x-public-key-info: \"%(public_key_info)s\"\n" %
dict(label=label,
subject=subject,
issuer=issuer,
serial_number=serial_number,
public_key_info=public_key_info))
if trusted is True:
obj += "trusted: true\n"
elif trusted is False:
obj += "x-distrusted: true\n"
obj += "{pem}\n\n".format(
pem=cert.public_bytes(x509.Encoding.PEM).decode('ascii'))
f.write(obj)
if (cert.extended_key_usage is not None and
public_key_info not in has_eku):
try:
ext_key_usage = cert.extended_key_usage_bytes
except PyAsn1Error:
logger.error(
"Failed to encode extended key usage for \"%s\"",
nickname)
raise
value = urllib.parse.quote(ext_key_usage)
obj = ("[p11-kit-object-v1]\n"
"class: x-certificate-extension\n"
"label: \"ExtendedKeyUsage for %(label)s\"\n"
"x-public-key-info: \"%(public_key_info)s\"\n"
"object-id: 2.5.29.37\n"
"value: \"%(value)s\"\n\n" %
dict(label=label,
public_key_info=public_key_info,
value=value))
f.write(obj)
has_eku.add(public_key_info)
return True
def platform_remove_ca_certs(self):
return any([
self.remove_ca_certificates_bundle(paths.IPA_P11_KIT),
self.remove_ca_certificates_bundle(paths.SYSTEMWIDE_IPA_CA_CRT),
])
def remove_ca_certificates_bundle(self, filename):
path = Path(filename)
if not path.is_file():
return False
try:
path.unlink()
except Exception:
logger.error("Could not remove %s", path)
raise
return True
def backup_hostname(self, fstore, statestore):
filepath = paths.ETC_HOSTNAME
if os.path.exists(filepath):
fstore.backup_file(filepath)
# store old hostname
old_hostname = socket.gethostname()
statestore.backup_state('network', 'hostname', old_hostname)
def restore_hostname(self, fstore, statestore):
old_hostname = statestore.restore_state('network', 'hostname')
if old_hostname is not None:
try:
self.set_hostname(old_hostname)
except ipautil.CalledProcessError as e:
logger.debug("%s", traceback.format_exc())
logger.error(
"Failed to restore this machine hostname to %s (%s).",
old_hostname, e
)
filepath = paths.ETC_HOSTNAME
if fstore.has_file(filepath):
fstore.restore_file(filepath)
def set_selinux_booleans(self, required_settings, backup_func=None):
def get_setsebool_args(changes):
args = [paths.SETSEBOOL, "-P"]
args.extend(["%s=%s" % update for update in changes.items()])
return args
if not self.is_selinux_enabled():
return False
updated_vars = {}
failed_vars = {}
for setting, state in required_settings.items():
if state is None:
continue
try:
result = ipautil.run(
[paths.GETSEBOOL, setting],
capture_output=True
)
original_state = result.output.split()[2]
if backup_func is not None:
backup_func(setting, original_state)
if original_state != state:
updated_vars[setting] = state
except ipautil.CalledProcessError as e:
logger.error("Cannot get SELinux boolean '%s': %s", setting, e)
failed_vars[setting] = state
if updated_vars:
args = get_setsebool_args(updated_vars)
try:
ipautil.run(args)
except ipautil.CalledProcessError:
failed_vars.update(updated_vars)
if failed_vars:
raise ipapython.errors.SetseboolError(
failed=failed_vars,
command=' '.join(get_setsebool_args(failed_vars)))
return True
def parse_ipa_version(self, version):
"""
:param version: textual version
:return: object implementing proper __cmp__ method for version compare
"""
return IPAVersion(version)
def configure_httpd_service_ipa_conf(self):
"""Create systemd config for httpd service to work with IPA
"""
if not os.path.exists(paths.SYSTEMD_SYSTEM_HTTPD_D_DIR):
os.mkdir(paths.SYSTEMD_SYSTEM_HTTPD_D_DIR, 0o755)
ipautil.copy_template_file(
os.path.join(paths.USR_SHARE_IPA_DIR, 'ipa-httpd.conf.template'),
paths.SYSTEMD_SYSTEM_HTTPD_IPA_CONF,
dict(
KDCPROXY_CONFIG=paths.KDCPROXY_CONFIG,
IPA_HTTPD_KDCPROXY=paths.IPA_HTTPD_KDCPROXY,
KRB5CC_HTTPD=paths.KRB5CC_HTTPD,
)
)
os.chmod(paths.SYSTEMD_SYSTEM_HTTPD_IPA_CONF, 0o644)
self.restore_context(paths.SYSTEMD_SYSTEM_HTTPD_IPA_CONF)
self.systemd_daemon_reload()
def systemd_daemon_reload(self):
"""Tell systemd to reload config files"""
ipautil.run([paths.SYSTEMCTL, "--system", "daemon-reload"])
def configure_http_gssproxy_conf(self, ipaapi_user):
ipautil.copy_template_file(
os.path.join(paths.USR_SHARE_IPA_DIR, 'gssproxy.conf.template'),
paths.GSSPROXY_CONF,
dict(
HTTP_KEYTAB=paths.HTTP_KEYTAB,
HTTP_CCACHE=paths.HTTP_CCACHE,
HTTPD_USER=constants.HTTPD_USER,
IPAAPI_USER=ipaapi_user,
)
)
os.chmod(paths.GSSPROXY_CONF, 0o600)
self.restore_context(paths.GSSPROXY_CONF)
def configure_httpd_wsgi_conf(self):
"""Configure WSGI for correct Python version (Fedora)
See https://pagure.io/freeipa/issue/7394
"""
conf = paths.HTTPD_IPA_WSGI_MODULES_CONF
if sys.version_info.major == 2:
wsgi_module = constants.MOD_WSGI_PYTHON2
else:
wsgi_module = constants.MOD_WSGI_PYTHON3
if conf is None or wsgi_module is None:
logger.info("Nothing to do for configure_httpd_wsgi_conf")
return
confdir = os.path.dirname(conf)
if not os.path.isdir(confdir):
os.makedirs(confdir)
ipautil.copy_template_file(
os.path.join(
paths.USR_SHARE_IPA_DIR, 'ipa-httpd-wsgi.conf.template'
),
conf,
dict(WSGI_MODULE=wsgi_module)
)
os.chmod(conf, 0o644)
self.restore_context(conf)
def remove_httpd_service_ipa_conf(self):
"""Remove systemd config for httpd service of IPA"""
try:
os.unlink(paths.SYSTEMD_SYSTEM_HTTPD_IPA_CONF)
except OSError as e:
if e.errno == errno.ENOENT:
logger.debug(
'Trying to remove %s but file does not exist',
paths.SYSTEMD_SYSTEM_HTTPD_IPA_CONF
)
else:
logger.error(
'Error removing %s: %s',
paths.SYSTEMD_SYSTEM_HTTPD_IPA_CONF, e
)
return
self.systemd_daemon_reload()
def configure_httpd_protocol(self):
# use default crypto policy for SSLProtocol
directivesetter.set_directive(
paths.HTTPD_SSL_CONF, 'SSLProtocol', None, False
)
def set_hostname(self, hostname):
ipautil.run([paths.BIN_HOSTNAMECTL, 'set-hostname', hostname])
def is_fips_enabled(self):
"""
Checks whether this host is FIPS-enabled.
Returns a boolean indicating if the host is FIPS-enabled, i.e. if the
file /proc/sys/crypto/fips_enabled contains a non-0 value. Otherwise,
or if the file /proc/sys/crypto/fips_enabled does not exist,
the function returns False.
"""
try:
with open(paths.PROC_FIPS_ENABLED, 'r') as f:
if f.read().strip() != '0':
return True
except IOError:
# Consider that the host is not fips-enabled if the file does not
# exist
pass
return False
def setup_httpd_logging(self):
directivesetter.set_directive(paths.HTTPD_SSL_CONF,
'ErrorLog',
'logs/error_log', False)
directivesetter.set_directive(paths.HTTPD_SSL_CONF,
'TransferLog',
'logs/access_log', False)
def configure_dns_resolver(self, nameservers, searchdomains, fstore=None):
"""Configure global DNS resolver (e.g. /etc/resolv.conf)
:param nameservers: list of IP addresses
        :param searchdomains: list of search domains
:param fstore: optional file store for backup
"""
assert nameservers and isinstance(nameservers, list)
assert searchdomains and isinstance(searchdomains, list)
# break circular import
from ipaplatform.services import knownservices
if fstore is not None and not fstore.has_file(paths.RESOLV_CONF):
fstore.backup_file(paths.RESOLV_CONF)
nm = knownservices['NetworkManager']
if nm.is_enabled():
logger.debug(
"Network Manager is enabled, write %s",
paths.NETWORK_MANAGER_IPA_CONF
)
            # Write the DNS override and reload NetworkManager so that it
            # creates a new resolv.conf. The file is prefixed with ``zzz`` so
            # that it sorts last: global DNS options do not stack, the last
            # file wins.
cfg = NM_IPA_CONF.format(
servers=','.join(nameservers),
searches=','.join(searchdomains)
)
with open(paths.NETWORK_MANAGER_IPA_CONF, 'w') as f:
os.fchmod(f.fileno(), 0o644)
f.write(cfg)
# reload NetworkManager
nm.reload_or_restart()
else:
# no NM running, fall back to /etc/resolv.conf
logger.debug(
"Network Manager is not enabled, write %s directly.",
paths.RESOLV_CONF
)
cfg = [
"# auto-generated by IPA installer",
"search {}".format(' '.join(searchdomains)),
]
for nameserver in nameservers:
cfg.append("nameserver {}".format(nameserver))
with open(paths.RESOLV_CONF, 'w') as f:
f.write('\n'.join(cfg))
def unconfigure_dns_resolver(self, fstore=None):
"""Unconfigure global DNS resolver (e.g. /etc/resolv.conf)
:param fstore: optional file store for restore
"""
# break circular import
from ipaplatform.services import knownservices
if fstore is not None and fstore.has_file(paths.RESOLV_CONF):
fstore.restore_file(paths.RESOLV_CONF)
nm = knownservices['NetworkManager']
if os.path.isfile(paths.NETWORK_MANAGER_IPA_CONF):
os.unlink(paths.NETWORK_MANAGER_IPA_CONF)
if nm.is_enabled():
nm.reload_or_restart()
def configure_pkcs11_modules(self, fstore):
"""Disable global p11-kit configuration for NSS
"""
filenames = []
for name, module, disabled_in in PKCS11_MODULES:
filename = os.path.join(
paths.ETC_PKCS11_MODULES_DIR,
"{}.module".format(name)
)
if os.path.isfile(filename):
# Only back up if file is not yet backed up and it does not
# look like a file that is generated by IPA.
with open(filename) as f:
content = f.read()
is_ipa_file = "IPA" in content
if not is_ipa_file and not fstore.has_file(filename):
logger.debug("Backing up existing '%s'.", filename)
fstore.backup_file(filename)
with open(filename, "w") as f:
f.write("# created by IPA installer\n")
f.write("module: {}\n".format(module))
# see man(5) pkcs11.conf
f.write("disable-in: {}\n".format(", ".join(disabled_in)))
os.fchmod(f.fileno(), 0o644)
self.restore_context(filename)
logger.debug("Created PKCS#11 module config '%s'.", filename)
filenames.append(filename)
return filenames
def restore_pkcs11_modules(self, fstore):
"""Restore global p11-kit configuration for NSS
"""
filenames = []
for name, _module, _disabled_in in PKCS11_MODULES:
filename = os.path.join(
paths.ETC_PKCS11_MODULES_DIR,
"{}.module".format(name)
)
try:
os.unlink(filename)
except OSError:
pass
else:
filenames.append(filename)
if fstore.has_file(filename):
fstore.restore_file(filename)
return filenames
def get_pkcs11_modules(self):
"""Return the list of module config files setup by IPA
"""
return tuple(os.path.join(paths.ETC_PKCS11_MODULES_DIR,
"{}.module".format(name))
for name, _module, _disabled in PKCS11_MODULES)
def enable_ldap_automount(self, statestore):
"""
Point automount to ldap in nsswitch.conf.
This function is for non-SSSD setups only.
"""
super(RedHatTaskNamespace, self).enable_ldap_automount(statestore)
authselect_cmd = [paths.AUTHSELECT, "enable-feature",
"with-custom-automount"]
ipautil.run(authselect_cmd)
def disable_ldap_automount(self, statestore):
"""Disable ldap-based automount"""
super(RedHatTaskNamespace, self).disable_ldap_automount(statestore)
authselect_cmd = [paths.AUTHSELECT, "disable-feature",
"with-custom-automount"]
ipautil.run(authselect_cmd)
tasks = RedHatTaskNamespace()
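# Usage sketch (illustrative): other FreeIPA modules typically consume this
# singleton through the platform layer rather than instantiating it, e.g.
#     from ipaplatform.tasks import tasks
#     if tasks.is_fips_enabled():
#         ...  # adjust behavior for FIPS mode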
|
fossevents/fossevents.in | refs/heads/master | fossevents/users/views.py | 1 | from django.contrib.auth import logout as auth_logout
from django.contrib.auth.views import login as django_login
from django.core.urlresolvers import reverse
from django.http.response import HttpResponseRedirect
def login(request, *args, **kwargs):
if request.user.is_authenticated():
return HttpResponseRedirect(reverse('home'))
return django_login(request, template_name='users/login.html', *args, **kwargs)
def logout(request, *args, **kwargs):
auth_logout(request)
return HttpResponseRedirect(reverse('users:login'))
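# Illustrative URL configuration for these views (a sketch; the project's
# actual urls module may differ). The name 'users:login' used in logout()
# above implies the patterns are included under a 'users' namespace:
#     from django.conf.urls import url
#     from fossevents.users import views
#     urlpatterns = [
#         url(r'^login/$', views.login, name='login'),
#         url(r'^logout/$', views.logout, name='logout'),
#     ]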
|
openstack/openstack-health | refs/heads/master | openstack_health/distributed_dbm.py | 1 | # Copyright 2016 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from dogpile.cache.backends import memcached
from dogpile.cache import proxy
from dogpile import util
from dogpile.util import compat
from pymemcache.client import base
class MemcachedLockedDBMProxy(proxy.ProxyBackend):
def __init__(self, url, port=11211, lock_timeout=0):
super(MemcachedLockedDBMProxy, self).__init__()
self.lock_timeout = lock_timeout
self.url = url
self.port = port
@util.memoized_property
def _clients(self):
backend = self
class ClientPool(compat.threading.local):
def __init__(self):
self.memcached = backend._create_client()
return ClientPool()
@property
def client(self):
"""Return the memcached client.
        This uses a threading.local by default, as it appears most modern
        memcached libs aren't inherently threadsafe.
"""
return self._clients.memcached
def _create_client(self):
return base.Client((self.url, self.port))
def get_mutex(self, key):
return memcached.MemcachedLock(lambda: self.client, key,
timeout=self.lock_timeout)
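if __name__ == '__main__':
    # Minimal usage sketch (assumes a memcached server on localhost:11211).
    # Build the proxy directly and take a distributed lock on a key; in
    # practice the proxy is handed to a dogpile.cache region via ``wrap``.
    backend = MemcachedLockedDBMProxy('localhost')
    lock = backend.get_mutex('example-key')
    if lock.acquire():
        try:
            print('lock acquired for example-key')
        finally:
            lock.release()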
|
beswarm/django-social-auth | refs/heads/master | social_auth/backends/contrib/rdio.py | 14 | from social.backends.rdio import RdioOAuth1 as RdioOAuth1Backend, \
RdioOAuth2 as RdioOAuth2Backend
|
ryfeus/lambda-packs | refs/heads/master | pytorch/source/caffe2/python/operator_test/negate_gradient_op_test.py | 1 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import workspace, core
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
from hypothesis import given
import hypothesis.strategies as st
import numpy as np
class TestNegateGradient(serial.SerializedTestCase):
@serial.given(X=hu.tensor(), inplace=st.booleans(), **hu.gcs)
def test_forward(self, X, inplace, gc, dc):
def neg_grad_ref(X):
return (X,)
op = core.CreateOperator("NegateGradient", ["X"], ["Y" if not inplace else "X"])
self.assertReferenceChecks(gc, op, [X], neg_grad_ref)
self.assertDeviceChecks(dc, op, [X], [0])
@given(size=st.lists(st.integers(min_value=1, max_value=20),
min_size=1, max_size=5))
def test_grad(self, size):
X = np.random.random_sample(size)
workspace.ResetWorkspace()
workspace.FeedBlob("X", X.astype(np.float32))
net = core.Net("negate_grad_test")
Y = net.NegateGradient(["X"], ["Y"])
grad_map = net.AddGradientOperators([Y])
workspace.RunNetOnce(net)
        # check that X_grad equals the negation of Y_grad
x_val, y_val = workspace.FetchBlobs(['X', 'Y'])
x_grad_val, y_grad_val = workspace.FetchBlobs([grad_map['X'],
grad_map['Y']])
np.testing.assert_array_equal(x_val, y_val)
np.testing.assert_array_equal(x_grad_val, y_grad_val * (-1))
|
liwangdong/augmented-traffic-control | refs/heads/master | atc/django-atc-profile-storage/atc_profile_storage/urls.py | 17 | #
# Copyright (c) 2014, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
#
from django.conf.urls import url
from atc_profile_storage import views
urlpatterns = [
url(r'^$', views.profile_list),
url(r'^(?P<pk>[0-9]+)/$', views.profile_detail),
]
|
BigDataforYou/movie_recommendation_workshop_1 | refs/heads/master | big_data_4_you_demo_1/venv/lib/python2.7/site-packages/requests/compat.py | 35 | # -*- coding: utf-8 -*-
"""
requests.compat
~~~~~~~~~~~~~~~
This module handles import compatibility issues between Python 2 and
Python 3.
"""
from .packages import chardet
import sys
# -------
# Pythons
# -------
# Syntax sugar.
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
try:
import simplejson as json
except (ImportError, SyntaxError):
# simplejson does not support Python 3.2, it throws a SyntaxError
# because of u'...' Unicode literals.
import json
# ---------
# Specifics
# ---------
if is_py2:
from urllib import quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass
from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
from urllib2 import parse_http_list
import cookielib
from Cookie import Morsel
from StringIO import StringIO
from .packages.urllib3.packages.ordered_dict import OrderedDict
builtin_str = str
bytes = str
str = unicode
basestring = basestring
numeric_types = (int, long, float)
elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
from urllib.request import parse_http_list, getproxies, proxy_bypass
from http import cookiejar as cookielib
from http.cookies import Morsel
from io import StringIO
from collections import OrderedDict
builtin_str = str
str = str
bytes = bytes
basestring = (str, bytes)
numeric_types = (int, float)
|
aoeu256/langmer | refs/heads/master | lib/werkzeug/http.py | 317 | # -*- coding: utf-8 -*-
"""
werkzeug.http
~~~~~~~~~~~~~
Werkzeug comes with a bunch of utilities that help Werkzeug to deal with
HTTP data. Most of the classes and functions provided by this module are
used by the wrappers, but they are useful on their own, too, especially if
the response and request objects are not used.
This covers some of the more HTTP centric features of WSGI, some other
utilities such as cookie handling are documented in the `werkzeug.utils`
module.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
from time import time, gmtime
try:
from email.utils import parsedate_tz
except ImportError: # pragma: no cover
from email.Utils import parsedate_tz
try:
from urllib2 import parse_http_list as _parse_list_header
except ImportError: # pragma: no cover
from urllib.request import parse_http_list as _parse_list_header
from datetime import datetime, timedelta
from hashlib import md5
import base64
from werkzeug._internal import _cookie_quote, _make_cookie_domain, \
_cookie_parse_impl
from werkzeug._compat import to_unicode, iteritems, text_type, \
string_types, try_coerce_native, to_bytes, PY2, \
integer_types
# technically incorrect for cookies, which are sent in the page encoding,
# but consistent with how PEP 3333 treats all headers
_cookie_charset = 'latin1'
_accept_re = re.compile(r'([^\s;,]+)(?:[^,]*?;\s*q=(\d*(?:\.\d+)?))?')
_token_chars = frozenset("!#$%&'*+-.0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
'^_`abcdefghijklmnopqrstuvwxyz|~')
_etag_re = re.compile(r'([Ww]/)?(?:"(.*?)"|(.*?))(?:\s*,\s*|$)')
_unsafe_header_chars = set('()<>@,;:\"/[]?={} \t')
_quoted_string_re = r'"[^"\\]*(?:\\.[^"\\]*)*"'
_option_header_piece_re = re.compile(r';\s*(%s|[^\s;=]+)\s*(?:=\s*(%s|[^;]+))?\s*' %
(_quoted_string_re, _quoted_string_re))
_entity_headers = frozenset([
'allow', 'content-encoding', 'content-language', 'content-length',
'content-location', 'content-md5', 'content-range', 'content-type',
'expires', 'last-modified'
])
_hop_by_hop_headers = frozenset([
'connection', 'keep-alive', 'proxy-authenticate',
'proxy-authorization', 'te', 'trailer', 'transfer-encoding',
'upgrade'
])
HTTP_STATUS_CODES = {
100: 'Continue',
101: 'Switching Protocols',
102: 'Processing',
200: 'OK',
201: 'Created',
202: 'Accepted',
203: 'Non Authoritative Information',
204: 'No Content',
205: 'Reset Content',
206: 'Partial Content',
207: 'Multi Status',
226: 'IM Used', # see RFC 3229
300: 'Multiple Choices',
301: 'Moved Permanently',
302: 'Found',
303: 'See Other',
304: 'Not Modified',
305: 'Use Proxy',
307: 'Temporary Redirect',
400: 'Bad Request',
401: 'Unauthorized',
402: 'Payment Required', # unused
403: 'Forbidden',
404: 'Not Found',
405: 'Method Not Allowed',
406: 'Not Acceptable',
407: 'Proxy Authentication Required',
408: 'Request Timeout',
409: 'Conflict',
410: 'Gone',
411: 'Length Required',
412: 'Precondition Failed',
413: 'Request Entity Too Large',
414: 'Request URI Too Long',
415: 'Unsupported Media Type',
416: 'Requested Range Not Satisfiable',
417: 'Expectation Failed',
418: 'I\'m a teapot', # see RFC 2324
422: 'Unprocessable Entity',
423: 'Locked',
424: 'Failed Dependency',
426: 'Upgrade Required',
428: 'Precondition Required', # see RFC 6585
429: 'Too Many Requests',
431: 'Request Header Fields Too Large',
449: 'Retry With', # proprietary MS extension
500: 'Internal Server Error',
501: 'Not Implemented',
502: 'Bad Gateway',
503: 'Service Unavailable',
504: 'Gateway Timeout',
505: 'HTTP Version Not Supported',
507: 'Insufficient Storage',
510: 'Not Extended'
}
def wsgi_to_bytes(data):
"""coerce wsgi unicode represented bytes to real ones
"""
if isinstance(data, bytes):
return data
return data.encode('latin1') #XXX: utf8 fallback?
def bytes_to_wsgi(data):
assert isinstance(data, bytes), 'data must be bytes'
if isinstance(data, str):
return data
else:
return data.decode('latin1')
def quote_header_value(value, extra_chars='', allow_token=True):
"""Quote a header value if necessary.
.. versionadded:: 0.5
:param value: the value to quote.
:param extra_chars: a list of extra characters to skip quoting.
:param allow_token: if this is enabled token values are returned
unchanged.
"""
if isinstance(value, bytes):
value = bytes_to_wsgi(value)
value = str(value)
if allow_token:
token_chars = _token_chars | set(extra_chars)
if set(value).issubset(token_chars):
return value
return '"%s"' % value.replace('\\', '\\\\').replace('"', '\\"')
def unquote_header_value(value, is_filename=False):
r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
This does not use the real unquoting but what browsers are actually
using for quoting.
.. versionadded:: 0.5
:param value: the header value to unquote.
"""
if value and value[0] == value[-1] == '"':
# this is not the real unquoting, but fixing this so that the
# RFC is met will result in bugs with internet explorer and
# probably some other browsers as well. IE for example is
# uploading files with "C:\foo\bar.txt" as filename
value = value[1:-1]
# if this is a filename and the starting characters look like
# a UNC path, then just return the value without quotes. Using the
# replace sequence below on a UNC path has the effect of turning
# the leading double slash into a single slash and then
# _fix_ie_filename() doesn't work correctly. See #458.
if not is_filename or value[:2] != '\\\\':
return value.replace('\\\\', '\\').replace('\\"', '"')
return value
def dump_options_header(header, options):
"""The reverse function to :func:`parse_options_header`.
:param header: the header to dump
:param options: a dict of options to append.
"""
segments = []
if header is not None:
segments.append(header)
for key, value in iteritems(options):
if value is None:
segments.append(key)
else:
segments.append('%s=%s' % (key, quote_header_value(value)))
return '; '.join(segments)
def dump_header(iterable, allow_token=True):
"""Dump an HTTP header again. This is the reversal of
:func:`parse_list_header`, :func:`parse_set_header` and
:func:`parse_dict_header`. This also quotes strings that include an
equals sign unless you pass it as dict of key, value pairs.
>>> dump_header({'foo': 'bar baz'})
'foo="bar baz"'
>>> dump_header(('foo', 'bar baz'))
'foo, "bar baz"'
:param iterable: the iterable or dict of values to quote.
:param allow_token: if set to `False` tokens as values are disallowed.
See :func:`quote_header_value` for more details.
"""
if isinstance(iterable, dict):
items = []
for key, value in iteritems(iterable):
if value is None:
items.append(key)
else:
items.append('%s=%s' % (
key,
quote_header_value(value, allow_token=allow_token)
))
else:
items = [quote_header_value(x, allow_token=allow_token)
for x in iterable]
return ', '.join(items)
def parse_list_header(value):
"""Parse lists as described by RFC 2068 Section 2.
In particular, parse comma-separated lists where the elements of
the list may include quoted-strings. A quoted-string could
contain a comma. A non-quoted string could have quotes in the
middle. Quotes are removed automatically after parsing.
It basically works like :func:`parse_set_header` just that items
may appear multiple times and case sensitivity is preserved.
The return value is a standard :class:`list`:
>>> parse_list_header('token, "quoted value"')
['token', 'quoted value']
To create a header from the :class:`list` again, use the
:func:`dump_header` function.
:param value: a string with a list header.
:return: :class:`list`
"""
result = []
for item in _parse_list_header(value):
if item[:1] == item[-1:] == '"':
item = unquote_header_value(item[1:-1])
result.append(item)
return result
def parse_dict_header(value, cls=dict):
"""Parse lists of key, value pairs as described by RFC 2068 Section 2 and
convert them into a python dict (or any other mapping object created from
    the type with a dict like interface provided by the `cls` argument):
>>> d = parse_dict_header('foo="is a fish", bar="as well"')
>>> type(d) is dict
True
>>> sorted(d.items())
[('bar', 'as well'), ('foo', 'is a fish')]
If there is no value for a key it will be `None`:
>>> parse_dict_header('key_without_value')
{'key_without_value': None}
To create a header from the :class:`dict` again, use the
:func:`dump_header` function.
.. versionchanged:: 0.9
Added support for `cls` argument.
:param value: a string with a dict header.
:param cls: callable to use for storage of parsed results.
:return: an instance of `cls`
"""
result = cls()
if not isinstance(value, text_type):
#XXX: validate
value = bytes_to_wsgi(value)
for item in _parse_list_header(value):
if '=' not in item:
result[item] = None
continue
name, value = item.split('=', 1)
if value[:1] == value[-1:] == '"':
value = unquote_header_value(value[1:-1])
result[name] = value
return result
def parse_options_header(value):
"""Parse a ``Content-Type`` like header into a tuple with the content
type and the options:
>>> parse_options_header('text/html; charset=utf8')
('text/html', {'charset': 'utf8'})
This should not be used to parse ``Cache-Control`` like headers that use
a slightly different format. For these headers use the
:func:`parse_dict_header` function.
.. versionadded:: 0.5
:param value: the header to parse.
:return: (str, options)
"""
def _tokenize(string):
for match in _option_header_piece_re.finditer(string):
key, value = match.groups()
key = unquote_header_value(key)
if value is not None:
value = unquote_header_value(value, key == 'filename')
yield key, value
if not value:
return '', {}
parts = _tokenize(';' + value)
name = next(parts)[0]
extra = dict(parts)
return name, extra
def parse_accept_header(value, cls=None):
"""Parses an HTTP Accept-* header. This does not implement a complete
valid algorithm but one that supports at least value and quality
extraction.
Returns a new :class:`Accept` object (basically a list of ``(value, quality)``
tuples sorted by the quality with some additional accessor methods).
The second parameter can be a subclass of :class:`Accept` that is created
with the parsed values and returned.
:param value: the accept header string to be parsed.
:param cls: the wrapper class for the return value (can be
:class:`Accept` or a subclass thereof)
:return: an instance of `cls`.
"""
if cls is None:
cls = Accept
if not value:
return cls(None)
result = []
for match in _accept_re.finditer(value):
quality = match.group(2)
if not quality:
quality = 1
else:
quality = max(min(float(quality), 1), 0)
result.append((match.group(1), quality))
return cls(result)
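# Illustrative behavior (quality defaults to 1 when no ``q`` parameter is
# given, and entries are sorted by quality):
#     >>> accept = parse_accept_header('text/html;q=0.8, text/plain')
#     >>> accept.best
#     'text/plain'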
def parse_cache_control_header(value, on_update=None, cls=None):
"""Parse a cache control header. The RFC differs between response and
request cache control, this method does not. It's your responsibility
to not use the wrong control statements.
.. versionadded:: 0.5
The `cls` was added. If not specified an immutable
:class:`~werkzeug.datastructures.RequestCacheControl` is returned.
:param value: a cache control header to be parsed.
:param on_update: an optional callable that is called every time a value
on the :class:`~werkzeug.datastructures.CacheControl`
object is changed.
:param cls: the class for the returned object. By default
:class:`~werkzeug.datastructures.RequestCacheControl` is used.
:return: a `cls` object.
"""
if cls is None:
cls = RequestCacheControl
if not value:
return cls(None, on_update)
return cls(parse_dict_header(value), on_update)
def parse_set_header(value, on_update=None):
"""Parse a set-like header and return a
:class:`~werkzeug.datastructures.HeaderSet` object:
>>> hs = parse_set_header('token, "quoted value"')
The return value is an object that treats the items case-insensitively
and keeps the order of the items:
>>> 'TOKEN' in hs
True
>>> hs.index('quoted value')
1
>>> hs
HeaderSet(['token', 'quoted value'])
To create a header from the :class:`HeaderSet` again, use the
:func:`dump_header` function.
:param value: a set header to be parsed.
:param on_update: an optional callable that is called every time a
value on the :class:`~werkzeug.datastructures.HeaderSet`
object is changed.
:return: a :class:`~werkzeug.datastructures.HeaderSet`
"""
if not value:
return HeaderSet(None, on_update)
return HeaderSet(parse_list_header(value), on_update)
def parse_authorization_header(value):
"""Parse an HTTP basic/digest authorization header transmitted by the web
browser. The return value is either `None` if the header was invalid or
not given, otherwise an :class:`~werkzeug.datastructures.Authorization`
object.
:param value: the authorization header to parse.
:return: a :class:`~werkzeug.datastructures.Authorization` object or `None`.
"""
if not value:
return
value = wsgi_to_bytes(value)
try:
auth_type, auth_info = value.split(None, 1)
auth_type = auth_type.lower()
except ValueError:
return
if auth_type == b'basic':
try:
username, password = base64.b64decode(auth_info).split(b':', 1)
        except Exception:
            return
return Authorization('basic', {'username': bytes_to_wsgi(username),
'password': bytes_to_wsgi(password)})
elif auth_type == b'digest':
auth_map = parse_dict_header(auth_info)
for key in 'username', 'realm', 'nonce', 'uri', 'response':
            if key not in auth_map:
return
if 'qop' in auth_map:
if not auth_map.get('nc') or not auth_map.get('cnonce'):
return
return Authorization('digest', auth_map)
def parse_www_authenticate_header(value, on_update=None):
"""Parse an HTTP WWW-Authenticate header into a
:class:`~werkzeug.datastructures.WWWAuthenticate` object.
:param value: a WWW-Authenticate header to parse.
:param on_update: an optional callable that is called every time a value
on the :class:`~werkzeug.datastructures.WWWAuthenticate`
object is changed.
:return: a :class:`~werkzeug.datastructures.WWWAuthenticate` object.
"""
if not value:
return WWWAuthenticate(on_update=on_update)
try:
auth_type, auth_info = value.split(None, 1)
auth_type = auth_type.lower()
except (ValueError, AttributeError):
return WWWAuthenticate(value.strip().lower(), on_update=on_update)
return WWWAuthenticate(auth_type, parse_dict_header(auth_info),
on_update)
def parse_if_range_header(value):
"""Parses an if-range header which can be an etag or a date. Returns
a :class:`~werkzeug.datastructures.IfRange` object.
.. versionadded:: 0.7
"""
if not value:
return IfRange()
date = parse_date(value)
if date is not None:
return IfRange(date=date)
# drop weakness information
return IfRange(unquote_etag(value)[0])
def parse_range_header(value, make_inclusive=True):
"""Parses a range header into a :class:`~werkzeug.datastructures.Range`
object. If the header is missing or malformed `None` is returned.
`ranges` is a list of ``(start, stop)`` tuples where the ranges are
non-inclusive.
.. versionadded:: 0.7
"""
if not value or '=' not in value:
return None
ranges = []
last_end = 0
units, rng = value.split('=', 1)
units = units.strip().lower()
for item in rng.split(','):
item = item.strip()
if '-' not in item:
return None
if item.startswith('-'):
if last_end < 0:
return None
begin = int(item)
end = None
last_end = -1
elif '-' in item:
begin, end = item.split('-', 1)
begin = int(begin)
if begin < last_end or last_end < 0:
return None
if end:
end = int(end) + 1
if begin >= end:
return None
else:
end = None
last_end = end
ranges.append((begin, end))
return Range(units, ranges)
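# Illustrative behavior (stops are made exclusive, and a suffix range keeps
# its negative start with an open end):
#     >>> rng = parse_range_header('bytes=0-499')
#     >>> rng.units, rng.ranges
#     ('bytes', [(0, 500)])
#     >>> parse_range_header('bytes=-500').ranges
#     [(-500, None)]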
def parse_content_range_header(value, on_update=None):
"""Parses a range header into a
:class:`~werkzeug.datastructures.ContentRange` object or `None` if
parsing is not possible.
.. versionadded:: 0.7
:param value: a content range header to be parsed.
:param on_update: an optional callable that is called every time a value
on the :class:`~werkzeug.datastructures.ContentRange`
object is changed.
"""
if value is None:
return None
try:
units, rangedef = (value or '').strip().split(None, 1)
except ValueError:
return None
if '/' not in rangedef:
return None
rng, length = rangedef.split('/', 1)
if length == '*':
length = None
elif length.isdigit():
length = int(length)
else:
return None
if rng == '*':
return ContentRange(units, None, None, length, on_update=on_update)
elif '-' not in rng:
return None
start, stop = rng.split('-', 1)
try:
start = int(start)
stop = int(stop) + 1
except ValueError:
return None
if is_byte_range_valid(start, stop, length):
return ContentRange(units, start, stop, length, on_update=on_update)
def quote_etag(etag, weak=False):
"""Quote an etag.
:param etag: the etag to quote.
:param weak: set to `True` to tag it "weak".
"""
if '"' in etag:
raise ValueError('invalid etag')
etag = '"%s"' % etag
if weak:
etag = 'w/' + etag
return etag
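# Illustrative behavior:
#     >>> quote_etag('abc')
#     '"abc"'
#     >>> quote_etag('abc', weak=True)
#     'w/"abc"'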
def unquote_etag(etag):
"""Unquote a single etag:
>>> unquote_etag('w/"bar"')
('bar', True)
>>> unquote_etag('"bar"')
('bar', False)
:param etag: the etag identifier to unquote.
:return: a ``(etag, weak)`` tuple.
"""
if not etag:
return None, None
etag = etag.strip()
weak = False
if etag[:2] in ('w/', 'W/'):
weak = True
etag = etag[2:]
if etag[:1] == etag[-1:] == '"':
etag = etag[1:-1]
return etag, weak
def parse_etags(value):
"""Parse an etag header.
:param value: the tag header to parse
:return: an :class:`~werkzeug.datastructures.ETags` object.
"""
if not value:
return ETags()
strong = []
weak = []
end = len(value)
pos = 0
while pos < end:
match = _etag_re.match(value, pos)
if match is None:
break
is_weak, quoted, raw = match.groups()
if raw == '*':
return ETags(star_tag=True)
elif quoted:
raw = quoted
if is_weak:
weak.append(raw)
else:
strong.append(raw)
pos = match.end()
return ETags(strong, weak)
def generate_etag(data):
"""Generate an etag for some data."""
return md5(data).hexdigest()
def parse_date(value):
"""Parse one of the following date formats into a datetime object:
.. sourcecode:: text
Sun, 06 Nov 1994 08:49:37 GMT ; RFC 822, updated by RFC 1123
Sunday, 06-Nov-94 08:49:37 GMT ; RFC 850, obsoleted by RFC 1036
Sun Nov 6 08:49:37 1994 ; ANSI C's asctime() format
If parsing fails the return value is `None`.
:param value: a string with a supported date format.
:return: a :class:`datetime.datetime` object.
"""
if value:
t = parsedate_tz(value.strip())
if t is not None:
try:
year = t[0]
# unfortunately that function does not tell us if two digit
# years were part of the string, or if they were prefixed
# with two zeroes. So what we do is to assume that 69-99
# refer to 1900, and everything below to 2000
if year >= 0 and year <= 68:
year += 2000
elif year >= 69 and year <= 99:
year += 1900
return datetime(*((year,) + t[1:7])) - \
timedelta(seconds=t[-1] or 0)
except (ValueError, OverflowError):
return None
def _dump_date(d, delim):
"""Used for `http_date` and `cookie_date`."""
if d is None:
d = gmtime()
elif isinstance(d, datetime):
d = d.utctimetuple()
elif isinstance(d, (integer_types, float)):
d = gmtime(d)
return '%s, %02d%s%s%s%s %02d:%02d:%02d GMT' % (
('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')[d.tm_wday],
d.tm_mday, delim,
('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep',
'Oct', 'Nov', 'Dec')[d.tm_mon - 1],
delim, str(d.tm_year), d.tm_hour, d.tm_min, d.tm_sec
)
def cookie_date(expires=None):
"""Formats the time to ensure compatibility with Netscape's cookie
standard.
    Accepts a floating point number expressed in seconds since the epoch, a
datetime object or a timetuple. All times in UTC. The :func:`parse_date`
function can be used to parse such a date.
Outputs a string in the format ``Wdy, DD-Mon-YYYY HH:MM:SS GMT``.
:param expires: If provided that date is used, otherwise the current.
"""
return _dump_date(expires, '-')
def http_date(timestamp=None):
"""Formats the time to match the RFC1123 date format.
    Accepts a floating point number expressed in seconds since the epoch, a
datetime object or a timetuple. All times in UTC. The :func:`parse_date`
function can be used to parse such a date.
Outputs a string in the format ``Wdy, DD Mon YYYY HH:MM:SS GMT``.
:param timestamp: If provided that date is used, otherwise the current.
"""
return _dump_date(timestamp, ' ')
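# Illustrative behavior (both helpers share _dump_date and differ only in the
# delimiter between day, month and year):
#     >>> http_date(0)
#     'Thu, 01 Jan 1970 00:00:00 GMT'
#     >>> cookie_date(0)
#     'Thu, 01-Jan-1970 00:00:00 GMT'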
def is_resource_modified(environ, etag=None, data=None, last_modified=None):
"""Convenience method for conditional requests.
:param environ: the WSGI environment of the request to be checked.
:param etag: the etag for the response for comparison.
:param data: or alternatively the data of the response to automatically
generate an etag using :func:`generate_etag`.
:param last_modified: an optional date of the last modification.
:return: `True` if the resource was modified, otherwise `False`.
"""
if etag is None and data is not None:
etag = generate_etag(data)
elif data is not None:
raise TypeError('both data and etag given')
if environ['REQUEST_METHOD'] not in ('GET', 'HEAD'):
return False
unmodified = False
if isinstance(last_modified, string_types):
last_modified = parse_date(last_modified)
# ensure that microsecond is zero because the HTTP spec does not transmit
# that either and we might have some false positives. See issue #39
if last_modified is not None:
last_modified = last_modified.replace(microsecond=0)
modified_since = parse_date(environ.get('HTTP_IF_MODIFIED_SINCE'))
if modified_since and last_modified and last_modified <= modified_since:
unmodified = True
if etag:
if_none_match = parse_etags(environ.get('HTTP_IF_NONE_MATCH'))
if if_none_match:
unmodified = if_none_match.contains_raw(etag)
return not unmodified
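# Illustrative behavior (a matching ``If-None-Match`` header marks the
# resource as unmodified):
#     >>> env = {'REQUEST_METHOD': 'GET', 'HTTP_IF_NONE_MATCH': '"abc"'}
#     >>> is_resource_modified(env, etag='abc')
#     False
#     >>> is_resource_modified({'REQUEST_METHOD': 'GET'}, etag='abc')
#     True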
def remove_entity_headers(headers, allowed=('expires', 'content-location')):
"""Remove all entity headers from a list or :class:`Headers` object. This
operation works in-place. `Expires` and `Content-Location` headers are
by default not removed. The reason for this is :rfc:`2616` section
10.3.5 which specifies some entity headers that should be sent.
.. versionchanged:: 0.5
added `allowed` parameter.
:param headers: a list or :class:`Headers` object.
:param allowed: a list of headers that should still be allowed even though
they are entity headers.
"""
allowed = set(x.lower() for x in allowed)
headers[:] = [(key, value) for key, value in headers if
not is_entity_header(key) or key.lower() in allowed]
def remove_hop_by_hop_headers(headers):
"""Remove all HTTP/1.1 "Hop-by-Hop" headers from a list or
:class:`Headers` object. This operation works in-place.
.. versionadded:: 0.5
:param headers: a list or :class:`Headers` object.
"""
headers[:] = [(key, value) for key, value in headers if
not is_hop_by_hop_header(key)]
def is_entity_header(header):
"""Check if a header is an entity header.
.. versionadded:: 0.5
:param header: the header to test.
:return: `True` if it's an entity header, `False` otherwise.
"""
return header.lower() in _entity_headers
def is_hop_by_hop_header(header):
"""Check if a header is an HTTP/1.1 "Hop-by-Hop" header.
.. versionadded:: 0.5
:param header: the header to test.
:return: `True` if it's an entity header, `False` otherwise.
"""
return header.lower() in _hop_by_hop_headers
def parse_cookie(header, charset='utf-8', errors='replace', cls=None):
"""Parse a cookie. Either from a string or WSGI environ.
    Per default encoding errors are replaced. If you want a different behavior
    you can set `errors` to ``'ignore'`` or ``'strict'``. In strict mode a
    :exc:`HTTPUnicodeError` is raised.
.. versionchanged:: 0.5
This function now returns a :class:`TypeConversionDict` instead of a
regular dict. The `cls` parameter was added.
:param header: the header to be used to parse the cookie. Alternatively
this can be a WSGI environment.
:param charset: the charset for the cookie values.
:param errors: the error behavior for the charset decoding.
:param cls: an optional dict class to use. If this is not specified
or `None` the default :class:`TypeConversionDict` is
used.
"""
if isinstance(header, dict):
header = header.get('HTTP_COOKIE', '')
elif header is None:
header = ''
    # If the value is a unicode string it's mangled through latin1. This
    # is done because per PEP 3333 on Python 3 all headers are assumed latin1
    # which however is incorrect for cookies, which are sent in page encoding.
    # As a result we encode the string back to latin1 bytes here and let the
    # parser below decode the values with the requested charset.
if isinstance(header, text_type):
header = header.encode('latin1', 'replace')
if cls is None:
cls = TypeConversionDict
def _parse_pairs():
for key, val in _cookie_parse_impl(header):
key = to_unicode(key, charset, errors, allow_none_charset=True)
val = to_unicode(val, charset, errors, allow_none_charset=True)
yield try_coerce_native(key), val
return cls(_parse_pairs())
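# Illustrative behavior (the result is a TypeConversionDict keyed by cookie
# name):
#     >>> cookies = parse_cookie('name=value; name2=val2')
#     >>> cookies['name2']
#     'val2'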
def dump_cookie(key, value='', max_age=None, expires=None, path='/',
domain=None, secure=False, httponly=False,
charset='utf-8', sync_expires=True):
"""Creates a new Set-Cookie header without the ``Set-Cookie`` prefix
The parameters are the same as in the cookie Morsel object in the
Python standard library but it accepts unicode data, too.
On Python 3 the return value of this function will be a unicode
string, on Python 2 it will be a native string. In both cases the
return value is usually restricted to ascii as the vast majority of
values are properly escaped, but that is no guarantee. If a unicode
string is returned it's tunneled through latin1 as required by
PEP 3333.
The return value is not ASCII safe if the key contains unicode
characters. This is technically against the specification but
happens in the wild. It's strongly recommended to not use
non-ASCII values for the keys.
:param max_age: should be a number of seconds, or `None` (default) if
the cookie should last only as long as the client's
browser session. Additionally `timedelta` objects
are accepted, too.
:param expires: should be a `datetime` object or unix timestamp.
:param path: limits the cookie to a given path, per default it will
span the whole domain.
:param domain: Use this if you want to set a cross-domain cookie. For
example, ``domain=".example.com"`` will set a cookie
that is readable by the domain ``www.example.com``,
``foo.example.com`` etc. Otherwise, a cookie will only
be readable by the domain that set it.
:param secure: The cookie will only be available via HTTPS
:param httponly: disallow JavaScript to access the cookie. This is an
extension to the cookie standard and probably not
supported by all browsers.
:param charset: the encoding for unicode values.
:param sync_expires: automatically set expires if max_age is defined
but expires not.
"""
key = to_bytes(key, charset)
value = to_bytes(value, charset)
if path is not None:
path = iri_to_uri(path, charset)
domain = _make_cookie_domain(domain)
if isinstance(max_age, timedelta):
max_age = (max_age.days * 60 * 60 * 24) + max_age.seconds
if expires is not None:
if not isinstance(expires, string_types):
expires = cookie_date(expires)
elif max_age is not None and sync_expires:
expires = to_bytes(cookie_date(time() + max_age))
buf = [key + b'=' + _cookie_quote(value)]
    # XXX: In theory all of these parameters that are not marked with `None`
    # should be quoted. Because the stdlib did not quote them before, I did
    # not want to introduce quoting now.
for k, v, q in ((b'Domain', domain, True),
(b'Expires', expires, False,),
(b'Max-Age', max_age, False),
(b'Secure', secure, None),
(b'HttpOnly', httponly, None),
(b'Path', path, False)):
if q is None:
if v:
buf.append(k)
continue
if v is None:
continue
tmp = bytearray(k)
if not isinstance(v, (bytes, bytearray)):
v = to_bytes(text_type(v), charset)
if q:
v = _cookie_quote(v)
tmp += b'=' + v
buf.append(bytes(tmp))
# The return value will be an incorrectly encoded latin1 header on
# Python 3 for consistency with the headers object and a bytestring
# on Python 2 because that's how the API makes more sense.
rv = b'; '.join(buf)
if not PY2:
rv = rv.decode('latin1')
return rv
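# Illustrative behavior (attributes render in a fixed order; boolean flags
# such as ``HttpOnly`` appear without a value):
#     >>> dump_cookie('key', 'value', max_age=3600, httponly=True,
#     ...             sync_expires=False)
#     'key=value; Max-Age=3600; HttpOnly; Path=/'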
def is_byte_range_valid(start, stop, length):
"""Checks if a given byte content range is valid for the given length.
.. versionadded:: 0.7
"""
if (start is None) != (stop is None):
return False
elif start is None:
return length is None or length >= 0
elif length is None:
return 0 <= start < stop
elif start >= stop:
return False
return 0 <= start < length
# circular dependency fun
from werkzeug.datastructures import Accept, HeaderSet, ETags, Authorization, \
WWWAuthenticate, TypeConversionDict, IfRange, Range, ContentRange, \
RequestCacheControl
# DEPRECATED
# backwards compatible imports
from werkzeug.datastructures import MIMEAccept, CharsetAccept, \
LanguageAccept, Headers
from werkzeug.urls import iri_to_uri
|
rajul/Pydev | refs/heads/development | plugins/org.python.pydev.jython/Lib/encodings/shift_jis.py | 816 | #
# shift_jis.py: Python Unicode Codec for SHIFT_JIS
#
# Written by Hye-Shik Chang <[email protected]>
#
import _codecs_jp, codecs
import _multibytecodec as mbc
codec = _codecs_jp.getcodec('shift_jis')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='shift_jis',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
|
Basis/pip | refs/heads/develop | tests/functional/test_install_vcs.py | 5 | from tests.lib import _create_test_package, _change_test_package_version
from tests.lib.local_repos import local_checkout
def test_install_editable_from_git_with_https(script, tmpdir):
"""
Test cloning from Git with https.
"""
result = script.pip('install', '-e',
'%s#egg=pip-test-package' %
local_checkout('git+https://github.com/pypa/pip-test-package.git', tmpdir.join("cache")),
expect_error=True)
result.assert_installed('pip-test-package', with_files=['.git'])
def test_git_with_sha1_revisions(script):
"""
Git backend should be able to install from SHA1 revisions
"""
version_pkg_path = _create_test_package(script)
_change_test_package_version(script, version_pkg_path)
sha1 = script.run('git', 'rev-parse', 'HEAD~1', cwd=version_pkg_path).stdout.strip()
script.pip('install', '-e', '%s@%s#egg=version_pkg' % ('git+file://' + version_pkg_path.abspath.replace('\\', '/'), sha1))
version = script.run('version_pkg')
assert '0.1' in version.stdout, version.stdout
def test_git_with_branch_name_as_revision(script):
"""
Git backend should be able to install from branch names
"""
version_pkg_path = _create_test_package(script)
script.run('git', 'checkout', '-b', 'test_branch', expect_stderr=True, cwd=version_pkg_path)
_change_test_package_version(script, version_pkg_path)
script.pip('install', '-e', '%s@test_branch#egg=version_pkg' % ('git+file://' + version_pkg_path.abspath.replace('\\', '/')))
version = script.run('version_pkg')
assert 'some different version' in version.stdout
def test_git_with_tag_name_as_revision(script):
"""
Git backend should be able to install from tag names
"""
version_pkg_path = _create_test_package(script)
script.run('git', 'tag', 'test_tag', expect_stderr=True, cwd=version_pkg_path)
_change_test_package_version(script, version_pkg_path)
script.pip('install', '-e', '%s@test_tag#egg=version_pkg' % ('git+file://' + version_pkg_path.abspath.replace('\\', '/')))
version = script.run('version_pkg')
assert '0.1' in version.stdout
def test_git_with_tag_name_and_update(script, tmpdir):
"""
Test cloning a git repository and updating to a different version.
"""
result = script.pip('install', '-e', '%s#egg=pip-test-package' %
local_checkout('git+http://github.com/pypa/pip-test-package.git', tmpdir.join("cache")),
expect_error=True)
result.assert_installed('pip-test-package', with_files=['.git'])
result = script.pip('install', '--global-option=--version', '-e',
'%[email protected]#egg=pip-test-package' %
local_checkout('git+http://github.com/pypa/pip-test-package.git', tmpdir.join("cache")),
expect_error=True)
assert '0.1.2' in result.stdout
def test_git_branch_should_not_be_changed(script, tmpdir):
"""
Editable installations should not change branch
related to issue #32 and #161
"""
script.pip('install', '-e', '%s#egg=pip-test-package' %
local_checkout('git+http://github.com/pypa/pip-test-package.git', tmpdir.join("cache")),
expect_error=True)
source_dir = script.venv_path/'src'/'pip-test-package'
result = script.run('git', 'branch', cwd=source_dir)
assert '* master' in result.stdout, result.stdout
def test_git_with_non_editable_unpacking(script, tmpdir):
"""
Test cloning a git repository from a non-editable URL with a given tag.
"""
result = script.pip('install', '--global-option=--version', local_checkout(
'git+http://github.com/pypa/[email protected]#egg=pip-test-package',
tmpdir.join("cache")
), expect_error=True)
assert '0.1.2' in result.stdout
def test_git_with_editable_where_egg_contains_dev_string(script, tmpdir):
"""
    Test cloning a git repository from an editable url which contains the "dev" string
"""
result = script.pip('install', '-e', '%s#egg=django-devserver' %
local_checkout('git+git://github.com/dcramer/django-devserver.git', tmpdir.join("cache")))
result.assert_installed('django-devserver', with_files=['.git'])
def test_git_with_non_editable_where_egg_contains_dev_string(script, tmpdir):
"""
    Test cloning a git repository from a non-editable url which contains the "dev" string
"""
result = script.pip('install', '%s#egg=django-devserver' %
local_checkout('git+git://github.com/dcramer/django-devserver.git', tmpdir.join("cache")))
devserver_folder = script.site_packages/'devserver'
assert devserver_folder in result.files_created, str(result)
def test_git_with_ambiguous_revs(script):
"""
Test git with two "names" (tag/branch) pointing to the same commit
"""
version_pkg_path = _create_test_package(script)
package_url = 'git+file://%[email protected]#egg=version_pkg' % (version_pkg_path.abspath.replace('\\', '/'))
script.run('git', 'tag', '0.1', cwd=version_pkg_path)
result = script.pip('install', '-e', package_url)
assert 'Could not find a tag or branch' not in result.stdout
# it is 'version-pkg' instead of 'version_pkg' because
# egg-link name is version-pkg.egg-link because it is a single .py module
result.assert_installed('version-pkg', with_files=['.git'])
def test_git_works_with_editable_non_origin_repo(script):
# set up, create a git repo and install it as editable from a local directory path
version_pkg_path = _create_test_package(script)
script.pip('install', '-e', version_pkg_path.abspath)
# 'freeze'ing this should not fall over, but should result in stderr output warning
result = script.pip('freeze', expect_stderr=True)
assert "Error when trying to get requirement" in result.stderr
assert "Could not determine repository location" in result.stdout
assert "version-pkg==0.1" in result.stdout
|
brummer-simon/RIOT | refs/heads/master | tests/pkg_semtech-loramac/tests-with-config/01-run.py | 11 | #!/usr/bin/env python3
# Copyright (C) 2019 Inria
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import os
import sys
import time
from testrunner import run
# It's assumed that the same APPEUI is used for abp and otaa
DEVEUI_ABP = os.getenv('DEVEUI_ABP')
DEVEUI_OTA = os.getenv('DEVEUI_OTA')
APPEUI = os.getenv('APPEUI')
APPKEY = os.getenv('APPKEY')
DEVADDR = os.getenv('DEVADDR')
NWKSKEY = os.getenv('NWKSKEY')
APPSKEY = os.getenv('APPSKEY')
# Default to things network RX2_DR
TEST_RX2_DR = os.getenv('RX2_DR', 3)
# Theoretical duty cycling timeoff for EU863-870
# https://www.semtech.com/uploads/documents/LoraDesignGuide_STD.pdf#page=7
TEST_DATA_RATES = {"0": 164.6, "3": 20.6, "5": 6.2}
# Dummy Message
MSG = "This is RIOT"
def _send_line_echo(child, line):
child.sendline(line)
child.expect_exact(line)
child.expect_exact(">")
def _send_line(child, line, expect_line):
child.sendline(line)
child.expect_exact(expect_line)
child.expect_exact(">")
def _reset_config(child):
# Start with a clean config
child.sendline("loramac erase")
child.expect("loramac erase")
child.expect_exact(">")
child.sendline("reboot")
child.expect_exact("reboot")
child.expect_exact("All up, running the shell now")
child.expect_exact(">")
def _check_eeprom(child):
# Check if eeprom is supported
child.sendline("loramac help")
child.expect(r'Usage: loramac \<([\w+\|?]+)\>')
return (len(child.match.group(1).split('|')) == 7)
def _reboot(child, join):
if join == "abp":
child.sendline("loramac get ul_cnt")
child.expect(r'Uplink Counter: (\d+)')
uplink_counter = int(child.match.group(1))
child.sendline("reboot")
child.expect_exact("All up, running the shell now")
child.expect_exact(">")
if join == "abp":
_send_line_echo(child, "loramac set ul_cnt {}".format(uplink_counter))
def _loramac_setup(child, join):
if join == "abp":
_send_line_echo(child, "loramac set deveui {}".format(DEVEUI_ABP))
_send_line_echo(child, "loramac set appeui {}".format(APPEUI))
_send_line_echo(child, "loramac set devaddr {}".format(DEVADDR))
_send_line_echo(child, "loramac set nwkskey {}".format(NWKSKEY))
_send_line_echo(child, "loramac set appskey {}".format(APPSKEY))
_send_line_echo(child, "loramac set rx2_dr {}".format(TEST_RX2_DR))
else:
_send_line_echo(child, "loramac set deveui {}".format(DEVEUI_OTA))
_send_line_echo(child, "loramac set appeui {}".format(APPEUI))
_send_line_echo(child, "loramac set appkey {}".format(APPKEY))
def loramac_tx_test(child, join):
_reset_config(child)
# test all data rates
for key, time_off in TEST_DATA_RATES.items():
# Setup keys and rx2_dr
_loramac_setup(child, join)
# Set DR and join
_send_line_echo(child, "loramac set dr {}".format(key))
child.sendline("loramac join {}".format(join))
child.expect_exact(["Join procedure succeeded!",
"Warning: already joined!"])
child.expect_exact(">")
# Transmit cnf message
child.sendline("loramac tx \"{}\" cnf 123".format(MSG))
child.expect_exact("Received ACK from network", timeout=30)
child.expect_exact("Message sent with success")
child.expect_exact(">")
        # Wait out the duty-cycle time-off period before sending again
time.sleep(time_off)
# Send uncnf message with success
child.sendline("loramac tx \"{}\" uncnf 42".format(MSG))
child.expect_exact("Message sent with success")
child.expect_exact(">")
# Reboot node
_reboot(child, join)
def test_task02(child):
loramac_tx_test(child, "otaa")
def test_task03(child):
loramac_tx_test(child, "abp")
def test_task04(child):
# Erase eeprom
_reset_config(child)
# Verify start from erased state
_send_line(child, "loramac get deveui", "DEVEUI: 0000000000000000")
_send_line(child, "loramac get appeui", "APPEUI: 0000000000000000")
_send_line(child, "loramac get appkey",
"APPKEY: 00000000000000000000000000000000")
_send_line(child, "loramac get devaddr", "DEVADDR: 00000000")
_send_line(child, "loramac get nwkskey",
"NWKSKEY: 00000000000000000000000000000000")
_send_line(child, "loramac get appskey",
"APPSKEY: 00000000000000000000000000000000")
# Save and verify otaa keys
_loramac_setup(child, "otaa")
_send_line_echo(child, "loramac save")
child.sendline("reboot")
child.expect_exact("All up, running the shell now")
child.expect_exact(">")
_send_line(child, "loramac get deveui", "DEVEUI: {}".format(DEVEUI_OTA))
_send_line(child, "loramac get appeui", "APPEUI: {}".format(APPEUI))
_send_line(child, "loramac get appkey", "APPKEY: {}".format(APPKEY))
_reset_config(child)
# Save and verify abp keys
_loramac_setup(child, "abp")
_send_line_echo(child, "loramac save")
child.sendline("reboot")
child.expect_exact("All up, running the shell now")
child.expect_exact(">")
_send_line(child, "loramac get devaddr", "DEVADDR: {}".format(DEVADDR))
_send_line(child, "loramac get nwkskey", "NWKSKEY: {}".format(NWKSKEY))
_send_line(child, "loramac get appskey", "APPSKEY: {}".format(APPSKEY))
def testfunc(child):
def run(func):
if child.logfile == sys.stdout:
func(child)
else:
try:
func(child)
print(".", end="", flush=True)
except Exception as e:
print("FAILED")
raise e
run(test_task02)
run(test_task03)
    if _check_eeprom(child):
run(test_task04)
print("TEST PASSED")
if __name__ == "__main__":
sys.exit(run(testfunc))
|
bfirsh/pspec | refs/heads/master | pspec/groups/root.py | 1 | import imp
import os
from .base import BaseGroup
class RootGroup(BaseGroup):
"""
A group that represents a spec module.
"""
# Name of magic group
magic_group_name = '_pspec_group'
def __init__(self, *args, **kwargs):
super(RootGroup, self).__init__(*args, **kwargs)
# Spec is the root collecting group
self.is_collecting = True
@classmethod
def from_frame(cls, frame):
"""
Returns the RootGroup for a given frame, creating a new one if it doesn't
exist.
"""
if cls.magic_group_name not in frame.f_locals:
filename = frame.f_locals['__file__']
if filename.endswith('.pyc'):
filename = filename[:-1]
spec = RootGroup(filename)
frame.f_locals[cls.magic_group_name] = spec
return spec
return frame.f_locals[cls.magic_group_name]
|
Twangist/log_calls | refs/heads/develop | tests/test_log_calls_v30_minor_features_fixes.py | 1 | __author__ = "Brian O'Neill"
__version__ = '0.3.0'
from log_calls import log_calls
import doctest
#-------------------------------------------------------------------
# test__omit_property_attr__repr_with_init_active
#-------------------------------------------------------------------
def test__omit_property_attr__repr_with_init_active():
"""
>>> @log_calls(omit='pair.deleter')
... class P():
... def __init__(self, x, y):
... self.x = x
... self.y = y
... @property
... def pair(self):
... return (self.x, self.y)
... @pair.setter
... def pair(self, pr):
... self.x, self.y = pr
... @pair.deleter
... def pair(self):
... print("pair.deleter called -- wouldn't know what to do.")
... def __repr__(self):
... return '<(%r, %r) at 0x%x>' % (self.x, self.y, id(self))
>>> p = P(0, 0) # doctest: +ELLIPSIS
P.__init__ <== called by <module>
arguments: self=<__main__.P object at 0x...>, x=0, y=0
P.__init__ ==> returning to <module>
>>> print(p.pair) # doctest: +ELLIPSIS
P.pair <== called by <module>
arguments: self=<(0, 0) at 0x...>
P.pair ==> returning to <module>
(0, 0)
>>> p.pair = (10, 11) # doctest: +ELLIPSIS
P.pair <== called by <module>
arguments: self=<(0, 0) at 0x...>, pr=(10, 11)
P.pair ==> returning to <module>
>>> print(p) # doctest: +ELLIPSIS
<(10, 11) at 0x...>
>>> del p.pair
pair.deleter called -- wouldn't know what to do.
"""
pass
# SURGERY:
test__omit_property_attr__repr_with_init_active.__doc__ = \
test__omit_property_attr__repr_with_init_active.__doc__.replace("__main__", __name__)
#-------------------------------------------------------------------
# test__repr_with_init_active_2
# __repr__ handling for objects still in construction;
# class inside a function
#-------------------------------------------------------------------
def test__repr_with_init_active_2():
"""
>>> @log_calls()
... def another_fn(y):
... 'called by X.__init__'
... return 2 * y
>>> def globfn():
... class X():
... def __init__(self):
... self.helper(0)
... another_fn(43)
...
... @log_calls()
... def helper(self, z):
... 'called by __init__ and by users of X objs'
... return z + 1
...
... def __repr__(self):
... return "<X() at 0x%x>" % id(self)
...
... return X()
`helper` called with __init__ active, so generic `object.__repr__` is used to display `self`,
rather than the class's `__repr__`.
>>> x = globfn() # doctest: +ELLIPSIS
globfn.<locals>.X.helper <== called by __init__
arguments: self=<__main__.globfn.<locals>.X object at 0x...>, z=0
globfn.<locals>.X.helper ==> returning to __init__
another_fn <== called by __init__
arguments: y=43
another_fn ==> returning to __init__
The class's `__repr__` (address `0x...` is the same as above):
>>> print(repr(x)) # doctest: +ELLIPSIS
<X() at 0x...>
The instance's __init__ is not active so the class's `__repr__` is used:
>>> _ = x.helper(100) # doctest: +ELLIPSIS
globfn.<locals>.X.helper <== called by <module>
arguments: self=<X() at 0x...>, z=100
globfn.<locals>.X.helper ==> returning to <module>
"""
pass
# SURGERY:
test__repr_with_init_active_2.__doc__ = \
test__repr_with_init_active_2.__doc__.replace("__main__", __name__)
#-------------------------------------------------------------------
# MORE __repr__ handling for objects still in construction
#-------------------------------------------------------------------
def test__repr_with_init_active_3():
"""
>>> @log_calls()
... def g(obj):
... pass
>>> class Y():
... def __init__(self, y):
... g(self)
... self.y = y
...
... def method(self):
... g(self)
...
... def __repr__(self):
... return "Y(%r)" % self.y
>>> Y('arg').method() # doctest: +ELLIPSIS
g <== called by __init__
arguments: obj=<__main__.Y object at 0x...>
g ==> returning to __init__
g <== called by method
arguments: obj=Y('arg')
g ==> returning to method
"""
pass
# SURGERY:
test__repr_with_init_active_3.__doc__ = \
test__repr_with_init_active_3.__doc__.replace("__main__", __name__)
#-------------------------------------------------------------------
# Tests/examples of `mute`
#-------------------------------------------------------------------
def test__mute():
"""
When a decorated function is not muted (`mute` is `log_calls.MUTE.NOTHING`, the default),
log_calls produces output as do `log_message` and `log_exprs`, which uses `log_message`:
>>> @log_calls()
... def f():
... f.log_message('Hello, world!')
>>> f()
f <== called by <module>
Hello, world!
f ==> returning to <module>
When `mute` is `log_calls.MUTE.CALLS` (== `True`), call/return lines are
suppressed: there is no extra indent level, and messages are prefixed with
the function's display name:
>>> f.log_calls_settings.mute = log_calls.MUTE.CALLS
>>> f()
f: Hello, world!
When `mute` is `log_calls.MUTE.ALL`, log_message produces no output:
>>> f.log_calls_settings.mute = log_calls.MUTE.ALL
>>> f() # (no output)
"""
pass
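# A quick sketch of cycling through the three mute levels at runtime,
# reusing the `f` from the doctest above (illustrative only):
#
#     for level in (log_calls.MUTE.NOTHING, log_calls.MUTE.CALLS, log_calls.MUTE.ALL):
#         f.log_calls_settings.mute = level
#         f()
#
# NOTHING prints the full call/return brackets plus the message, CALLS only
# the prefixed `f: Hello, world!` line, and ALL prints nothing.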
#-------------------------------------------------------------------
# Tests/examples of log_message writing only if enabled
#-------------------------------------------------------------------
def test__log_message_only_if_enabled():
"""
Basic test
`log_message` writes only if `enabled` is true:
>>> @log_calls()
... def f():
... f.log_message('Hello, cruel world!')
>>> f()
f <== called by <module>
Hello, cruel world!
f ==> returning to <module>
>>> f.log_calls_settings.enabled = False
>>> f() # (no output)
>>> f.log_calls_settings.enabled = -1
>>> f() # (no output)
Test with recursion and indirect `enabled` values -
each instance of the fn should behave properly,
and should not destroy the behavior of instances further up the call chain.
>>> @log_calls(enabled='_on=', indent=True)
... def rec(level, _on=True):
... if level < 0:
... rec.log_message("Hit bottom")
... return
... rec.log_message("About to call rec(%d, _on=%s)" % (level-1, not _on))
... rec(level-1, _on=(not _on))
... rec.log_message("Returned from rec(%d, _on=%s)" % (level-1, not _on))
>>> rec(3)
rec <== called by <module>
arguments: level=3
defaults: _on=True
About to call rec(2, _on=False)
rec <== called by rec <== rec
arguments: level=1, _on=True
About to call rec(0, _on=False)
rec <== called by rec <== rec
arguments: level=-1, _on=True
Hit bottom
rec ==> returning to rec ==> rec
Returned from rec(0, _on=False)
rec ==> returning to rec ==> rec
Returned from rec(2, _on=False)
rec ==> returning to <module>
NOTE: In the call chains `rec <== called by rec <== rec` (and
similarly for `rec ==> returning to rec ==> rec`), the nearest `rec`,
just to the left of "<==", is not enabled, so `log_calls` has searched
further back along the call chain until it found an *enabled* decorated
function (namely, another invocation of `rec`, with an odd value for `level`).
"""
pass
#-------------------------------------------------------------------
# Tests/examples of mute setting as an indirect value
#-------------------------------------------------------------------
def test__log_message__indirect_mute():
"""
settings = {'indent': True, 'mute': 'mute_'}
@log_calls(settings=settings)
def f(extra_mute_val=None, **kwargs):
f.log_message("before g", prefix_with_name=True)
g(extra_mute_val=extra_mute_val, **kwargs)
f.log_message("after g", prefix_with_name=True)
@log_calls(settings=settings)
def g(extra_mute_val=None, **kwargs):
g.log_message("before h", prefix_with_name=True)
if extra_mute_val is not None and 'mute_' in kwargs:
kwargs['mute_'] = extra_mute_val
h(**kwargs)
g.log_message("after h", prefix_with_name=True)
@log_calls(settings=settings)
def h(**kwargs):
h.log_message("greetings", prefix_with_name=True)
f(mute_=False)
'''
f <== called by <module>
arguments: [**]kwargs={'mute_': False}
defaults: extra_mute_val=None
f: before g
g <== called by f
arguments: extra_mute_val=None, [**]kwargs={'mute_': False}
g: before h
h <== called by g
arguments: [**]kwargs={'mute_': False}
h: greetings
h ==> returning to g
g: after h
g ==> returning to f
f: after g
f ==> returning to <module>
'''
print('-----------------------')
f(mute_=True) # True == log_calls.MUTE.CALLS
'''
f: before g
g: before h
h: greetings
g: after h
f: after g
'''
print('-----------------------')
f(mute_=True, extra_mute_val=log_calls.MUTE.ALL) # shut up h
'''
f: before g
g: before h
g: after h
f: after g
'''
print('-----------------------')
f(mute_=log_calls.MUTE.ALL)
# (no output)
"""
pass
#-------------------------------------------------------------------
# test__global_mute
#-------------------------------------------------------------------
def test__global_mute():
"""
The mute setting applied to a function's log_calls output, and to
`log_message` and `log_exprs` output within the function, is
`max(`*function's mute setting*`, `*global mute*`)`.
### Basic examples/tests
>>> @log_calls()
... def f(): f.log_message("Hi"); g()
>>> @log_calls()
... def g(): g.log_message("Hi")
>>> assert log_calls.mute == False # default
>>> f()
f <== called by <module>
Hi
g <== called by f
Hi
g ==> returning to f
f ==> returning to <module>
>>> log_calls.mute = log_calls.MUTE.CALLS
>>> f()
f: Hi
g: Hi
>>> log_calls.mute = log_calls.MUTE.ALL
>>> f() # (no output)
>>> log_calls.mute = False
>>> g.log_calls_settings.mute = log_calls.MUTE.CALLS
>>> f()
f <== called by <module>
Hi
g: Hi
f ==> returning to <module>
>>> log_calls.mute = log_calls.MUTE.CALLS
>>> g.log_calls_settings.mute = log_calls.MUTE.ALL
>>> f()
f: Hi
### Dynamic examples/tests
Global mute is always checked in real time
>>> @log_calls()
... def f(mute=False):
... f.log_message("before g")
... g(mute=mute)
... f.log_message("after g")
>>> @log_calls()
... def g(mute=False):
... g.log_message("entering g")
... log_calls.mute = mute
... g.log_message("leaving g")
>>> log_calls.mute = False
Calls to f(), with default arg False, in effect turn off global mute midway through g:
>>> f()
f <== called by <module>
arguments: <none>
defaults: mute=False
before g
g <== called by f
arguments: mute=False
entering g
leaving g
g ==> returning to f
after g
f ==> returning to <module>
>>> log_calls.mute = log_calls.MUTE.CALLS
>>> f()
f: before g
g: entering g
leaving g
g ==> returning to f
after g
f ==> returning to <module>
>>> log_calls.mute = log_calls.MUTE.ALL
>>> f()
leaving g
g ==> returning to f
after g
f ==> returning to <module>
>>> log_calls.mute = log_calls.MUTE.CALLS
>>> g.log_calls_settings.mute = log_calls.MUTE.ALL
>>> f()
f: before g
after g
f ==> returning to <module>
>>> log_calls.mute = False
`g` is still completely muted
>>> f(mute=log_calls.MUTE.CALLS)
f <== called by <module>
arguments: mute=True
before g
f: after g
`g` is still completely muted, and `log_calls.mute == log_calls.MUTE.CALLS`
>>> f(mute=log_calls.MUTE.ALL)
f: before g
>>> log_calls.mute = False # restore default!
"""
pass
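# The rule under test, restated as a sketch (illustrative pseudocode, not
# the library's internals; assumes NOTHING/CALLS/ALL compare as 0/1/2, with
# CALLS == True as the doctests above show):
#
#     def effective_mute(fn_mute, global_mute):
#         return max(fn_mute, global_mute)
#
# e.g. effective_mute(log_calls.MUTE.CALLS, log_calls.MUTE.ALL) mutes everything.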
#-------------------------------------------------------------------
# Tests/examples of log_exprs
#-------------------------------------------------------------------
def test__log_exprs():
"""
>>> import os
>>> import math
>>> @log_calls(mute=True)
... def fn(num_files):
... order_of_magnitude = round(math.log10(num_files), 2)
... fn.log_exprs('num_files', 'order_of_magnitude')
... files_per_CPU = math.ceil(num_files/8)
... username = "Joe Doe"
... fn.log_exprs('files_per_CPU', 'username')
... # ...
... # bad exprs:
... z = []
... fn.log_exprs('16-', 'no_such_variable', '"some bum string', 'z[0]',
... sep='\\n' + (' ' * len('fn: ')))
>>> fn(10000)
fn: num_files = 10000, order_of_magnitude = 4.0
fn: files_per_CPU = 1250, username = 'Joe Doe'
fn: 16- = '<** unexpected EOF while parsing (<string>, line 1) **>'
no_such_variable = "<** name 'no_such_variable' is not defined **>"
"some bum string = '<** EOL while scanning string literal (<string>, line 1) **>'
z[0] = '<** list index out of range **>'
Another:
>>> def f(i):
... return 2.5 * i**3 - 5 * i**2 + 17
>>> @log_calls() # mute=True
... def g(n):
... for i in range(n):
... g.log_exprs('i', 'f(i)')
...
>>> g(5)
g <== called by <module>
arguments: n=5
i = 0, f(i) = 17.0
i = 1, f(i) = 14.5
i = 2, f(i) = 17.0
i = 3, f(i) = 39.5
i = 4, f(i) = 97.0
g ==> returning to <module>
New in 0.3.1:
>>> @log_calls()
... def gcd(a, b):
... log_calls.print("At bottom of loop:")
... while b:
... a, b = b, (a % b)
... log_calls.print_exprs('a', 'b', prefix="\\t", suffix= '\\t<--')
... return a
>>> gcd(48, 246) # doctest: +NORMALIZE_WHITESPACE
gcd <== called by <module>
arguments: a=48, b=246
At bottom of loop:
a = 246, b = 48 <--
a = 48, b = 6 <--
a = 6, b = 0 <--
gcd ==> returning to <module>
6
>>> gcd.log_calls_settings.enabled = False
>>> gcd(48, 246)
6
"""
pass
##############################################################################
# end of tests.
##############################################################################
#-----------------------------------------------------------------------------
# For unittest integration
#-----------------------------------------------------------------------------
def load_tests(loader, tests, ignore):
tests.addTests(doctest.DocTestSuite())
return tests
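# `load_tests` is the stdlib unittest protocol hook: adding the module's
# doctest suite here lets `python -m unittest <this module>` (and pytest's
# unittest collection) run every docstring example above.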
if __name__ == '__main__':
doctest.testmod()
|
stdweird/aquilon | refs/heads/master | lib/python2.6/aquilon/worker/commands/del_interface.py | 2 | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the logic for `aq del interface`."""
from aquilon.exceptions_ import ArgumentError
from aquilon.aqdb.model import Chassis, Machine, Switch, Interface
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.dbwrappers.interface import assign_address
from aquilon.worker.templates.machine import PlenaryMachineInfo
class _Goto(Exception):
pass
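# `_Goto` exists purely for flow control: raising it inside the loop in
# render() below jumps straight past the "primary address" ArgumentError.
# The pattern in isolation (illustrative sketch only):
#
#     try:
#         for addr in assignments:
#             if handled(addr):
#                 raise _Goto        # skip the failure path below
#         raise ArgumentError("could not handle the primary address")
#     except _Goto:
#         pass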
class CommandDelInterface(BrokerCommand):
required_parameters = []
def render(self, session, logger, interface, machine, switch, chassis, mac,
user, **arguments):
if not (machine or switch or chassis or mac):
raise ArgumentError("Please specify at least one of --chassis, "
"--machine, --switch or --mac.")
if machine:
dbhw_ent = Machine.get_unique(session, machine, compel=True)
elif switch:
dbhw_ent = Switch.get_unique(session, switch, compel=True)
elif chassis:
dbhw_ent = Chassis.get_unique(session, chassis, compel=True)
else:
dbhw_ent = None
dbinterface = Interface.get_unique(session, hardware_entity=dbhw_ent,
name=interface, mac=mac, compel=True)
if not dbhw_ent:
dbhw_ent = dbinterface.hardware_entity
if dbinterface.vlans:
vlans = ", ".join([iface.name for iface in
dbinterface.vlans.values()])
raise ArgumentError("{0} is the parent of the following VLAN "
"interfaces, delete them first: "
"{1}.".format(dbinterface, vlans))
if dbinterface.slaves:
slaves = ", ".join([iface.name for iface in dbinterface.slaves])
raise ArgumentError("{0} is the master of the following slave "
"interfaces, delete them first: "
"{1}.".format(dbinterface, slaves))
try:
for addr in dbinterface.assignments:
if addr.ip != dbhw_ent.primary_ip:
continue
# Special handling: if this interface was created automatically,
# and there is exactly one other interface with no IP address,
# then re-assign the primary address to that interface
if not dbinterface.mac and dbinterface.comments is not None and \
dbinterface.comments.startswith("Created automatically") and \
len(dbhw_ent.interfaces) == 2:
if dbinterface == dbhw_ent.interfaces[0]:
other = dbhw_ent.interfaces[1]
else:
other = dbhw_ent.interfaces[0]
if len(other.assignments) == 0:
assign_address(other, dbhw_ent.primary_ip,
dbhw_ent.primary_name.network)
dbinterface.addresses.remove(dbhw_ent.primary_ip)
raise _Goto
# If this is a machine, it is possible to delete the host to get rid
# of the primary name
if dbhw_ent.hardware_type == "machine":
msg = " You should delete the host first."
else:
msg = ""
raise ArgumentError("{0} holds the primary address of the {1:cl}, "
"therefore it cannot be deleted."
"{2}".format(dbinterface, dbhw_ent, msg))
except _Goto:
pass
addrs = ", ".join(["%s: %s" % (addr.logical_name, addr.ip) for addr in
dbinterface.assignments])
if addrs:
raise ArgumentError("{0} still has the following addresses "
"configured, delete them first: "
"{1}.".format(dbinterface, addrs))
dbhw_ent.interfaces.remove(dbinterface)
session.flush()
if dbhw_ent.hardware_type == 'machine':
plenary_info = PlenaryMachineInfo(dbhw_ent, logger=logger)
plenary_info.write()
return
|
linktlh/Toontown-journey | refs/heads/master | toontown/minigame/TwoDStomper.py | 4 | from direct.showbase.DirectObject import DirectObject
from toontown.toonbase.ToontownGlobals import *
from direct.directnotify import DirectNotifyGlobal
from direct.interval.IntervalGlobal import *
from toontown.minigame import ToonBlitzGlobals
GOING_UP = 1
GOING_DOWN = 2
STUCK_DOWN = 3
class TwoDStomper(DirectObject):
notify = DirectNotifyGlobal.directNotify.newCategory('TwoDStomper')
def __init__(self, stomperMgr, index, stomperAttribs, model):
self.game = stomperMgr.section.sectionMgr.game
self.index = index
stomperName = 'stomper-' + str(self.index)
self.model = NodePath(stomperName)
self.nodePath = model.copyTo(self.model)
self.ival = None
self.stashCollisionsIval = None
self.removeHeadFloor = 0
self.stomperState = STUCK_DOWN
self.setupStomper(stomperAttribs)
return
def destroy(self):
self.game = None
self.ignoreAll()
if self.ival:
self.ival.pause()
del self.ival
self.ival = None
if self.smoke:
self.smoke.removeNode()
del self.smoke
self.smoke = None
if self.stashCollisionsIval:
self.stashCollisionsIval.finish()
del self.stashCollisionsIval
self.stashCollisionsIval = None
for collSolid in self.collSolids:
collSolid.stash()
self.nodePath.removeNode()
del self.nodePath
if self.model:
self.model.removeNode()
del self.model
self.model = None
return
def setupStomper(self, stomperAttribs):
stomperType = stomperAttribs[0]
self.pos = Point3(stomperAttribs[1][0], stomperAttribs[1][1], stomperAttribs[1][2])
self.period = stomperAttribs[2]
typeAttribs = ToonBlitzGlobals.StomperTypes[stomperType]
self.motionType = typeAttribs[0]
self.scale = typeAttribs[1]
self.headStartZ, self.headEndZ = typeAttribs[2]
self.shaftStartScaleZ, self.shaftEndScaleZ = typeAttribs[3]
self.numCollSolids = typeAttribs[4]
self.stompSound = loader.loadSfx('phase_4/audio/sfx/CHQ_FACT_stomper_small.ogg')
self.model.setPos(self.pos)
self.model.setScale(self.scale)
self.model.find('**/block').setScale(1.0 / self.scale)
self.head = self.model.find('**/head')
self.shaft = self.model.find('**/shaft')
self.collisions = self.model.find('**/stomper_collision')
originalColl = self.model.find('**/stomper_collision')
self.range = self.headEndZ - self.headStartZ
self.collSolids = []
self.collSolids.append(originalColl)
for i in xrange(self.numCollSolids - 1):
newColl = originalColl.copyTo(self.model)
self.collSolids.append(newColl)
self.collSolids[-1].reparentTo(self.head)
self.smoke = loader.loadModel('phase_4/models/props/test_clouds')
self.smoke.setZ(self.headEndZ - 1)
self.smoke.setColor(0.8, 0.7, 0.5, 1)
self.smoke.setBillboardPointEye()
self.smoke.setScale(1.0 / self.scale)
self.smoke.setDepthWrite(False)
def getMotionIval(self):
def motionFunc(t, self = self):
stickTime = 0.2
turnaround = 0.95
t = t % 1
if t < stickTime:
self.head.setFluidZ(0 + self.headEndZ)
if self.stomperState != STUCK_DOWN:
self.stomperState = STUCK_DOWN
elif t < turnaround:
self.head.setFluidZ((t - stickTime) * -self.range / (turnaround - stickTime) + self.headEndZ)
if self.stomperState != GOING_UP:
self.stomperState = GOING_UP
elif t > turnaround:
self.head.setFluidZ(-self.range + (t - turnaround) * self.range / (1 - turnaround) + self.headEndZ)
if self.stomperState != GOING_DOWN:
self.stomperState = GOING_DOWN
self.checkSquashedToon()
motionIval = Sequence(LerpFunctionInterval(motionFunc, duration=self.period))
return motionIval
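# The motion curve above splits each normalized period t in [0, 1) into
# three phases: [0, 0.2) stuck down, [0.2, 0.95) rising linearly through
# self.range, and the remainder slamming back down. The same shaping as a
# standalone function (illustrative sketch, not engine code):
#
#     def head_z(t, end_z, rng, stick=0.2, turn=0.95):
#         t %= 1
#         if t < stick:
#             return end_z
#         if t < turn:
#             return end_z - (t - stick) * rng / (turn - stick)
#         return end_z - rng + (t - turn) * rng / (1 - turn)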
def getSmokeTrack(self):
smokeTrack = Sequence(Parallel(LerpScaleInterval(self.smoke, 0.2, Point3(1, 1, 1.5)), LerpColorScaleInterval(self.smoke, 0.4, VBase4(1, 1, 1, 0), VBase4(1, 1, 1, 0.5))), Func(self.smoke.reparentTo, hidden), Func(self.smoke.clearColorScale))
return smokeTrack
def adjustShaftScale(self, t):
heightDiff = self.head.getZ() - self.headStartZ
self.shaft.setScale(1, 1, self.shaftStartScaleZ + heightDiff * (self.shaftEndScaleZ - self.shaftStartScaleZ) / self.range)
def adjustCollSolidHeight(self, t):
heightDiff = self.head.getZ() - self.headStartZ
for i in xrange(1, len(self.collSolids) - 1):
self.collSolids[i].setZ(heightDiff * i / (self.numCollSolids - 1))
def start(self, elapsedTime):
if self.ival:
self.ival.pause()
del self.ival
self.ival = None
self.ival = Parallel()
self.ival.append(Sequence(self.getMotionIval(), Func(base.playSfx, self.stompSound, node=self.model, volume=0.3), Func(self.smoke.reparentTo, self.model), self.getSmokeTrack()))
self.ival.append(LerpFunctionInterval(self.adjustShaftScale, duration=self.period))
self.ival.append(LerpFunctionInterval(self.adjustCollSolidHeight, duration=self.period))
self.ival.loop()
self.ival.setT(elapsedTime)
return
def enterPause(self):
if self.ival:
self.ival.pause()
def exitPause(self):
if self.ival:
self.ival.loop()
def checkSquashedToon(self):
toonXDiff = (base.localAvatar.getX(render) - self.model.getX(render)) / self.scale
toonZ = base.localAvatar.getZ(render)
headEndZAbs = self.model.getZ(render) + self.headEndZ * self.scale
if toonXDiff > -1.0 and toonXDiff < 1.0 and toonZ > headEndZAbs and toonZ < self.head.getZ(render):
if not base.localAvatar.isStunned:
def stashCollisions(self = self):
for collSolid in self.collSolids:
collSolid.stash()
def unstashCollisions(self = self):
for collSolid in self.collSolids:
collSolid.unstash()
self.stashCollisionsIval = Sequence(Func(stashCollisions), Wait(2.5), Func(unstashCollisions))
self.stashCollisionsIval.start()
self.game.localToonSquished()
|
TathagataChakraborti/resource-conflicts | refs/heads/master | PLANROB-2015/py2.5/lib/python2.5/idlelib/idle.py | 257 | try:
import idlelib.PyShell
except ImportError:
# IDLE is not installed, but maybe PyShell is on sys.path:
try:
import PyShell
except ImportError:
raise
else:
import os
idledir = os.path.dirname(os.path.abspath(PyShell.__file__))
if idledir != os.getcwd():
# We're not in the IDLE directory, help the subprocess find run.py
pypath = os.environ.get('PYTHONPATH', '')
if pypath:
os.environ['PYTHONPATH'] = pypath + ':' + idledir
else:
os.environ['PYTHONPATH'] = idledir
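# Note: ':' is the POSIX PYTHONPATH separator; os.pathsep would be the
# portable equivalent on Windows.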
PyShell.main()
else:
idlelib.PyShell.main()
|
cchurch/ansible | refs/heads/devel | test/units/module_utils/xenserver/test_get_object_ref.py | 23 | # -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from .FakeAnsibleModule import FailJsonException
from .common import fake_xenapi_ref
def test_get_object_ref_xenapi_failure(mocker, fake_ansible_module, XenAPI, xenserver):
"""Tests catching of XenAPI failures."""
mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi_request', side_effect=XenAPI.Failure('Fake XAPI method call error!'))
with pytest.raises(FailJsonException) as exc_info:
xenserver.get_object_ref(fake_ansible_module, "name")
assert exc_info.value.kwargs['msg'] == "XAPI ERROR: Fake XAPI method call error!"
def test_get_object_ref_bad_uuid_and_name(mocker, fake_ansible_module, XenAPI, xenserver):
"""Tests failure on bad object uuid and/or name."""
mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi_request')
with pytest.raises(FailJsonException) as exc_info:
xenserver.get_object_ref(fake_ansible_module, None, msg_prefix="Test: ")
mocked_xenapi.xenapi_request.assert_not_called()
assert exc_info.value.kwargs['msg'] == "Test: no valid name or UUID supplied for VM!"
def test_get_object_ref_uuid_not_found(mocker, fake_ansible_module, XenAPI, xenserver):
"""Tests when object is not found by uuid."""
mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi_request', side_effect=XenAPI.Failure('Fake XAPI not found error!'))
with pytest.raises(FailJsonException) as exc_info:
xenserver.get_object_ref(fake_ansible_module, "name", uuid="fake-uuid", msg_prefix="Test: ")
assert exc_info.value.kwargs['msg'] == "Test: VM with UUID 'fake-uuid' not found!"
assert xenserver.get_object_ref(fake_ansible_module, "name", uuid="fake-uuid", fail=False, msg_prefix="Test: ") is None
def test_get_object_ref_name_not_found(mocker, fake_ansible_module, XenAPI, xenserver):
"""Tests when object is not found by name."""
mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi_request', return_value=[])
with pytest.raises(FailJsonException) as exc_info:
xenserver.get_object_ref(fake_ansible_module, "name", msg_prefix="Test: ")
assert exc_info.value.kwargs['msg'] == "Test: VM with name 'name' not found!"
assert xenserver.get_object_ref(fake_ansible_module, "name", fail=False, msg_prefix="Test: ") is None
def test_get_object_ref_name_multiple_found(mocker, fake_ansible_module, XenAPI, xenserver):
"""Tests when multiple objects are found by name."""
mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi_request', return_value=[fake_xenapi_ref('VM'), fake_xenapi_ref('VM')])
error_msg = "Test: multiple VMs with name 'name' found! Please use UUID."
with pytest.raises(FailJsonException) as exc_info:
xenserver.get_object_ref(fake_ansible_module, "name", msg_prefix="Test: ")
assert exc_info.value.kwargs['msg'] == error_msg
with pytest.raises(FailJsonException) as exc_info:
xenserver.get_object_ref(fake_ansible_module, "name", fail=False, msg_prefix="Test: ")
assert exc_info.value.kwargs['msg'] == error_msg
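# Note: `mocker`, `fake_ansible_module`, `XenAPI` and `xenserver` are pytest
# fixtures, presumably provided by this package's conftest.py; run with e.g.
#
#     pytest test/units/module_utils/xenserver/test_get_object_ref.py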
|
ColOfAbRiX/ansible | refs/heads/devel | lib/ansible/modules/notification/slack.py | 8 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, René Moser <[email protected]>
# (c) 2015, Stefan Berggren <[email protected]>
# (c) 2014, Ramon de la Fuente <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = """
module: slack
short_description: Send Slack notifications
description:
- The M(slack) module sends notifications to U(http://slack.com) via the Incoming WebHook integration
version_added: "1.6"
author: "Ramon de la Fuente (@ramondelafuente)"
options:
domain:
description:
- Slack (sub)domain for your environment without protocol. (i.e.
C(example.slack.com)) In 1.8 and beyond, this is deprecated and may
be ignored. See token documentation for information.
required: false
default: None
token:
description:
- Slack integration token. This authenticates you to the slack service.
Prior to 1.8, a token looked like C(3Ffe373sfhRE6y42Fg3rvf4GlK). In
1.8 and above, ansible adapts to the new slack API where tokens look
like C(G922VJP24/D921DW937/3Ffe373sfhRE6y42Fg3rvf4GlK). If tokens
are in the new format then slack will ignore any value of domain. If
the token is in the old format the domain is required. Ansible has no
control of when slack will get rid of the old API. When slack does
that the old format will stop working. ** Please keep in mind the tokens
are not the API tokens but are the webhook tokens. In slack these are
found in the webhook URL which are obtained under the apps and integrations.
The incoming webhooks can be added in that area. In some cases this may
be locked by your Slack admin and you must request access. It is there
that the incoming webhooks can be added. The key is on the end of the
URL given to you in that section.
required: true
msg:
description:
- Message to send.
required: false
default: None
channel:
description:
- Channel to send the message to. If absent, the message goes to the channel selected for the I(token).
required: false
default: None
username:
description:
- This is the sender of the message.
required: false
default: "Ansible"
icon_url:
description:
- Url for the message sender's icon (default C(https://www.ansible.com/favicon.ico))
required: false
icon_emoji:
description:
- Emoji for the message sender. See Slack documentation for options.
(if I(icon_emoji) is set, I(icon_url) will not be used)
required: false
default: None
link_names:
description:
- Automatically create links for channels and usernames in I(msg).
required: false
default: 1
choices:
- 1
- 0
parse:
description:
- Setting for the message parser at Slack
required: false
default: None
choices:
- 'full'
- 'none'
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices:
- 'yes'
- 'no'
color:
version_added: "2.0"
description:
- Allow text to use default colors - use the default of 'normal' to not send a custom color bar at the start of the message
required: false
default: 'normal'
choices:
- 'normal'
- 'good'
- 'warning'
- 'danger'
attachments:
description:
- Define a list of attachments. This list mirrors the Slack JSON API. For more information, see https://api.slack.com/docs/attachments
required: false
default: None
"""
EXAMPLES = """
- name: Send notification message via Slack
slack:
token: thetoken/generatedby/slack
msg: '{{ inventory_hostname }} completed'
delegate_to: localhost
- name: Send notification message via Slack all options
slack:
token: thetoken/generatedby/slack
msg: '{{ inventory_hostname }} completed'
channel: '#ansible'
username: 'Ansible on {{ inventory_hostname }}'
icon_url: http://www.example.com/some-image-file.png
link_names: 0
parse: 'none'
delegate_to: localhost
- name: insert a color bar in front of the message for visibility purposes and use the default webhook icon and name configured in Slack
slack:
token: thetoken/generatedby/slack
msg: '{{ inventory_hostname }} is alive!'
color: good
username: ''
icon_url: ''
- name: Use the attachments API
slack:
token: thetoken/generatedby/slack
attachments:
- text: Display my system load on host A and B
color: '#ff00dd'
title: System load
fields:
- title: System A
value: 'load average: 0,74, 0,66, 0,63'
short: True
- title: System B
value: 'load average: 5,16, 4,64, 2,43'
short: True
"""
OLD_SLACK_INCOMING_WEBHOOK = 'https://%s/services/hooks/incoming-webhook?token=%s'
SLACK_INCOMING_WEBHOOK = 'https://hooks.slack.com/services/%s'
# See https://api.slack.com/docs/message-formatting#how_to_escape_characters
# Escaping quotes and apostrophe however is related to how Ansible handles them.
html_escape_table = {
'&': "&amp;",
'>': "&gt;",
'<': "&lt;",
'"': "&quot;",
"'": "&apos;",
}
def html_escape(text):
'''Produce entities within text.'''
return "".join(html_escape_table.get(c,c) for c in text)
def build_payload_for_slack(module, text, channel, username, icon_url, icon_emoji, link_names, parse, color, attachments):
payload = {}
if color == "normal" and text is not None:
payload = dict(text=html_escape(text))
elif text is not None:
# With a custom color we have to set the message as attachment, and explicitly turn markdown parsing on for it.
payload = dict(attachments=[dict(text=html_escape(text), color=color, mrkdwn_in=["text"])])
if channel is not None:
if (channel[0] == '#') or (channel[0] == '@'):
payload['channel'] = channel
else:
payload['channel'] = '#'+channel
if username is not None:
payload['username'] = username
if icon_emoji is not None:
payload['icon_emoji'] = icon_emoji
else:
payload['icon_url'] = icon_url
if link_names is not None:
payload['link_names'] = link_names
if parse is not None:
payload['parse'] = parse
if attachments is not None:
if 'attachments' not in payload:
payload['attachments'] = []
if attachments is not None:
keys_to_escape = [
'title',
'text',
'author_name',
'pretext',
'fallback',
]
for attachment in attachments:
for key in keys_to_escape:
if key in attachment:
attachment[key] = html_escape(attachment[key])
if 'fallback' not in attachment:
attachment['fallback'] = attachment['text']
payload['attachments'].append(attachment)
payload=module.jsonify(payload)
return payload
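# Shape of the result (illustrative): with color 'normal' the text lands at
# the top level, e.g. {"text": "hi", "channel": "#general", ...}; with any
# other color it moves into an attachments entry carrying that color and
# mrkdwn_in=["text"]. A bare channel name gets '#' prepended, while '@user'
# and '#chan' pass through unchanged.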
def do_notify_slack(module, domain, token, payload):
if token.count('/') >= 2:
# New style token
slack_incoming_webhook = SLACK_INCOMING_WEBHOOK % (token)
else:
if not domain:
module.fail_json(msg="Slack has updated its webhook API. You need to specify a token of the form XXXX/YYYY/ZZZZ in your playbook")
slack_incoming_webhook = OLD_SLACK_INCOMING_WEBHOOK % (domain, token)
headers = {
'Content-Type': 'application/json',
'Accept': 'application/json',
}
response, info = fetch_url(module=module, url=slack_incoming_webhook, headers=headers, method='POST', data=payload)
if info['status'] != 200:
obscured_incoming_webhook = SLACK_INCOMING_WEBHOOK % ('[obscured]')
module.fail_json(msg=" failed to send %s to %s: %s" % (payload, obscured_incoming_webhook, info['msg']))
def main():
module = AnsibleModule(
argument_spec = dict(
domain = dict(type='str', required=False, default=None),
token = dict(type='str', required=True, no_log=True),
msg = dict(type='str', required=False, default=None),
channel = dict(type='str', default=None),
username = dict(type='str', default='Ansible'),
icon_url = dict(type='str', default='https://www.ansible.com/favicon.ico'),
icon_emoji = dict(type='str', default=None),
link_names = dict(type='int', default=1, choices=[0,1]),
parse = dict(type='str', default=None, choices=['none', 'full']),
validate_certs = dict(default='yes', type='bool'),
color = dict(type='str', default='normal', choices=['normal', 'good', 'warning', 'danger']),
attachments = dict(type='list', required=False, default=None)
)
)
domain = module.params['domain']
token = module.params['token']
text = module.params['msg']
channel = module.params['channel']
username = module.params['username']
icon_url = module.params['icon_url']
icon_emoji = module.params['icon_emoji']
link_names = module.params['link_names']
parse = module.params['parse']
color = module.params['color']
attachments = module.params['attachments']
payload = build_payload_for_slack(module, text, channel, username, icon_url, icon_emoji, link_names, parse, color, attachments)
do_notify_slack(module, domain, token, payload)
module.exit_json(msg="OK")
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
|
dimacus/selenium | refs/heads/master | py/test/selenium/webdriver/common/click_tests.py | 65 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from selenium.webdriver.common.by import By
class ClickTest(unittest.TestCase):
def setUp(self):
self._loadPage("clicks")
def tearDown(self):
self.driver.delete_all_cookies()
def testAddingACookieThatExpiredInThePast(self):
self.driver.find_element(By.ID, "overflowLink").click();
self.assertEqual(self.driver.title, "XHTML Test Page")
def testClickingALinkMadeUpOfNumbersIsHandledCorrectly(self):
self.driver.find_element(By.LINK_TEXT, "333333").click();
self.assertEqual(self.driver.title, "XHTML Test Page")
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
def _pageURL(self, name):
return self.webserver.where_is(name + '.html')
|
jackkiej/SickRage | refs/heads/master | lib/rtorrent/err.py | 182 | # Copyright (c) 2013 Chris Lucas, <[email protected]>
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from rtorrent.common import convert_version_tuple_to_str
class RTorrentVersionError(Exception):
def __init__(self, min_version, cur_version):
self.min_version = min_version
self.cur_version = cur_version
self.msg = "Minimum version required: {0}".format(
convert_version_tuple_to_str(min_version))
def __str__(self):
return(self.msg)
class MethodError(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return(self.msg)
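# Illustrative use (names outside this module are hypothetical): guard a
# call that needs a minimum rtorrent version.
#
#     if client_version < (0, 9, 0):
#         raise RTorrentVersionError((0, 9, 0), client_version)
#
# str() of the raised error then reads "Minimum version required: 0.9.0",
# assuming convert_version_tuple_to_str joins the tuple with dots.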
|
Peddle/hue | refs/heads/master | desktop/core/ext-py/tablib-0.10.0/tablib/packages/yaml/dumper.py | 543 |
__all__ = ['BaseDumper', 'SafeDumper', 'Dumper']
from emitter import *
from serializer import *
from representer import *
from resolver import *
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
def __init__(self, stream,
default_style=None, default_flow_style=None,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, explicit_end=None,
version=None, tags=None):
Emitter.__init__(self, stream, canonical=canonical,
indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
Serializer.__init__(self, encoding=encoding,
explicit_start=explicit_start, explicit_end=explicit_end,
version=version, tags=tags)
Representer.__init__(self, default_style=default_style,
default_flow_style=default_flow_style)
Resolver.__init__(self)
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
def __init__(self, stream,
default_style=None, default_flow_style=None,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, explicit_end=None,
version=None, tags=None):
Emitter.__init__(self, stream, canonical=canonical,
indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
Serializer.__init__(self, encoding=encoding,
explicit_start=explicit_start, explicit_end=explicit_end,
version=version, tags=tags)
SafeRepresenter.__init__(self, default_style=default_style,
default_flow_style=default_flow_style)
Resolver.__init__(self)
class Dumper(Emitter, Serializer, Representer, Resolver):
def __init__(self, stream,
default_style=None, default_flow_style=None,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, explicit_end=None,
version=None, tags=None):
Emitter.__init__(self, stream, canonical=canonical,
indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
Serializer.__init__(self, encoding=encoding,
explicit_start=explicit_start, explicit_end=explicit_end,
version=version, tags=tags)
Representer.__init__(self, default_style=default_style,
default_flow_style=default_flow_style)
Resolver.__init__(self)
|
CourseTalk/edx-platform | refs/heads/master | common/lib/xmodule/xmodule/xml_module.py | 5 | import json
import copy
import logging
import os
import sys
from lxml import etree
from xblock.fields import Dict, Scope, ScopeIds
from xblock.runtime import KvsFieldData
from xmodule.x_module import XModuleDescriptor, DEPRECATION_VSCOMPAT_EVENT
from xmodule.modulestore.inheritance import own_metadata, InheritanceKeyValueStore
from xmodule.modulestore import EdxJSONEncoder
import dogstats_wrapper as dog_stats_api
from lxml.etree import (
Element, ElementTree, XMLParser,
)
log = logging.getLogger(__name__)
# assume all XML files are persisted as utf-8.
EDX_XML_PARSER = XMLParser(dtd_validation=False, load_dtd=False,
remove_comments=True, remove_blank_text=True,
encoding='utf-8')
def name_to_pathname(name):
"""
Convert a location name for use in a path: replace ':' with '/'.
This allows users of the xml format to organize content into directories
"""
return name.replace(':', '/')
def is_pointer_tag(xml_obj):
"""
Check if xml_obj is a pointer tag: <blah url_name="something" />.
No children, one attribute named url_name, no text.
Special case for course roots: the pointer is
<course url_name="something" org="myorg" course="course">
xml_obj: an etree Element
Returns a bool.
"""
if xml_obj.tag != "course":
expected_attr = set(['url_name'])
else:
expected_attr = set(['url_name', 'course', 'org'])
actual_attr = set(xml_obj.attrib.keys())
has_text = xml_obj.text is not None and len(xml_obj.text.strip()) > 0
return len(xml_obj) == 0 and actual_attr == expected_attr and not has_text
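# Examples (illustrative): these parse as pointer tags --
#
#     <video url_name="welcome"/>
#     <course url_name="2013_Spring" org="myorg" course="course"/>
#
# while <video url_name="welcome">intro</video> does not (it has text), and
# <video url_name="welcome" display_name="x"/> does not (extra attribute).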
def serialize_field(value):
"""
Return a string version of the value (where value is the JSON-formatted, internally stored value).
If the value is a string, then we simply return what was passed in.
Otherwise, we return json.dumps on the input value.
"""
if isinstance(value, basestring):
return value
return json.dumps(value, cls=EdxJSONEncoder)
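# e.g. serialize_field('html text') == 'html text' (strings pass through),
# while serialize_field(['a', 'b']) == '["a", "b"]' via json.dumps.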
def deserialize_field(field, value):
"""
Deserialize the string version to the value stored internally.
Note that this is not the same as the value returned by from_json, as model types typically store
their value internally as JSON. By default, this method will return the result of calling json.loads
on the supplied value, unless json.loads throws a TypeError, or the type of the value returned by json.loads
is not supported for this class (from_json throws an Error). In either of those cases, this method returns
the input value.
"""
try:
deserialized = json.loads(value)
if deserialized is None:
return deserialized
try:
field.from_json(deserialized)
return deserialized
except (ValueError, TypeError):
# Support older serialized version, which was just a string, not result of json.dumps.
# If the deserialized version cannot be converted to the type (via from_json),
# just return the original value. For example, if a string value of '3.4' was
# stored for a String field (before we started storing the result of json.dumps),
# then it would be deserialized as 3.4, but 3.4 is not supported for a String
# field. Therefore field.from_json(3.4) will throw an Error, and we should
# actually return the original value of '3.4'.
return value
except (ValueError, TypeError):
# Support older serialized version.
return value
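# Illustration (assuming xblock's Integer and String field types):
#
#     deserialize_field(Integer(), '3')     -> 3      (json.loads + from_json ok)
#     deserialize_field(String(), '"3.4"')  -> '3.4'  (normal round trip)
#     deserialize_field(String(), '3.4')    -> '3.4'  (3.4 rejected by from_json,
#                                                      so the raw string is kept)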
class XmlParserMixin(object):
"""
Class containing XML parsing functionality shared between XBlock and XModuleDescriptor.
"""
# Extension to append to filename paths
filename_extension = 'xml'
xml_attributes = Dict(help="Map of unhandled xml attributes, used only for storage between import and export",
default={}, scope=Scope.settings)
# VS[compat]. Backwards compatibility code that can go away after
# importing 2012 courses.
# A set of metadata key conversions that we want to make
metadata_translations = {
'slug': 'url_name',
'name': 'display_name',
}
@classmethod
def _translate(cls, key):
"""
VS[compat]
"""
return cls.metadata_translations.get(key, key)
# The attributes will be removed from the definition xml passed
# to definition_from_xml, and from the xml returned by definition_to_xml
# Note -- url_name isn't in this list because it's handled specially on
# import and export.
metadata_to_strip = ('data_dir',
'tabs', 'grading_policy',
'discussion_blackouts',
# VS[compat] -- remove the below attrs once everything is in the CMS
'course', 'org', 'url_name', 'filename',
# Used for storing xml attributes between import and export, for roundtrips
'xml_attributes')
metadata_to_export_to_policy = ('discussion_topics',)
@staticmethod
def _get_metadata_from_xml(xml_object, remove=True):
"""
Extract the metadata from the XML.
"""
meta = xml_object.find('meta')
if meta is None:
return ''
dmdata = meta.text
if remove:
xml_object.remove(meta)
return dmdata
@classmethod
def definition_from_xml(cls, xml_object, system):
"""
Return the definition to be passed to the newly created descriptor
during from_xml
xml_object: An etree Element
"""
raise NotImplementedError("%s does not implement definition_from_xml" % cls.__name__)
@classmethod
def clean_metadata_from_xml(cls, xml_object):
"""
Remove any attribute named for a field with scope Scope.settings from the supplied
xml_object
"""
for field_name, field in cls.fields.items():
if field.scope == Scope.settings and xml_object.get(field_name) is not None:
del xml_object.attrib[field_name]
@classmethod
def file_to_xml(cls, file_object):
"""
Used when this module wants to parse a file object to xml
that will be converted to the definition.
Returns an lxml Element
"""
return etree.parse(file_object, parser=EDX_XML_PARSER).getroot()
@classmethod
def load_file(cls, filepath, fs, def_id): # pylint: disable=invalid-name
"""
Open the specified file in fs, and call cls.file_to_xml on it,
returning the lxml object.
Add details and reraise on error.
"""
try:
with fs.open(filepath) as xml_file:
return cls.file_to_xml(xml_file)
except Exception as err:
# Add info about where we are, but keep the traceback
msg = 'Unable to load file contents at path %s for item %s: %s ' % (
filepath, def_id, err)
raise Exception, msg, sys.exc_info()[2]
@classmethod
def load_definition(cls, xml_object, system, def_id, id_generator):
"""
Load a descriptor definition from the specified xml_object.
Subclasses should not need to override this except in special
cases (e.g. html module)
Args:
xml_object: an lxml.etree._Element containing the definition to load
system: the modulestore system (aka, runtime) which accesses data and provides access to services
def_id: the definition id for the block--used to compute the usage id and asides ids
id_generator: used to generate the usage_id
"""
# VS[compat] -- the filename attr should go away once everything is
# converted. (note: make sure html files still work once this goes away)
filename = xml_object.get('filename')
if filename is None:
definition_xml = copy.deepcopy(xml_object)
filepath = ''
aside_children = []
else:
dog_stats_api.increment(
DEPRECATION_VSCOMPAT_EVENT,
tags=["location:xmlparser_util_mixin_load_definition_filename"]
)
filepath = cls._format_filepath(xml_object.tag, filename)
# VS[compat]
# TODO (cpennington): If the file doesn't exist at the right path,
# give the class a chance to fix it up. The file will be written out
# again in the correct format. This should go away once the CMS is
# online and has imported all current (fall 2012) courses from xml
if not system.resources_fs.exists(filepath) and hasattr(cls, 'backcompat_paths'):
dog_stats_api.increment(
DEPRECATION_VSCOMPAT_EVENT,
tags=["location:xmlparser_util_mixin_load_definition_backcompat"]
)
candidates = cls.backcompat_paths(filepath)
for candidate in candidates:
if system.resources_fs.exists(candidate):
filepath = candidate
break
definition_xml = cls.load_file(filepath, system.resources_fs, def_id)
usage_id = id_generator.create_usage(def_id)
aside_children = system.parse_asides(definition_xml, def_id, usage_id, id_generator)
# Add the attributes from the pointer node
definition_xml.attrib.update(xml_object.attrib)
definition_metadata = cls._get_metadata_from_xml(definition_xml)
cls.clean_metadata_from_xml(definition_xml)
definition, children = cls.definition_from_xml(definition_xml, system)
if definition_metadata:
definition['definition_metadata'] = definition_metadata
definition['filename'] = [filepath, filename]
if aside_children:
definition['aside_children'] = aside_children
return definition, children
@classmethod
def load_metadata(cls, xml_object):
"""
Read the metadata attributes from this xml_object.
Returns a dictionary {key: value}.
"""
metadata = {'xml_attributes': {}}
for attr, val in xml_object.attrib.iteritems():
# VS[compat]. Remove after all key translations done
attr = cls._translate(attr)
if attr in cls.metadata_to_strip:
if attr in ('course', 'org', 'url_name', 'filename'):
dog_stats_api.increment(
DEPRECATION_VSCOMPAT_EVENT,
tags=(
"location:xmlparser_util_mixin_load_metadata",
"metadata:{}".format(attr),
)
)
# don't load these
continue
if attr not in cls.fields:
metadata['xml_attributes'][attr] = val
else:
metadata[attr] = deserialize_field(cls.fields[attr], val)
return metadata
@classmethod
def apply_policy(cls, metadata, policy):
"""
Add the keys in policy to metadata, after processing them
through the attrmap. Updates the metadata dict in place.
"""
for attr, value in policy.iteritems():
attr = cls._translate(attr)
if attr not in cls.fields:
# Store unknown attributes coming from policy.json
# in such a way that they will export to xml unchanged
metadata['xml_attributes'][attr] = value
else:
metadata[attr] = value
@classmethod
def parse_xml(cls, node, runtime, keys, id_generator): # pylint: disable=unused-argument
"""
Use `node` to construct a new block.
Arguments:
node (etree.Element): The xml node to parse into an xblock.
runtime (:class:`.Runtime`): The runtime to use while parsing.
keys (:class:`.ScopeIds`): The keys identifying where this block
will store its data.
id_generator (:class:`.IdGenerator`): An object that will allow the
runtime to generate correct definition and usage ids for
children of this block.
Returns (XBlock): The newly parsed XBlock
"""
# VS[compat] -- just have the url_name lookup, once translation is done
url_name = node.get('url_name', node.get('slug'))
def_id = id_generator.create_definition(node.tag, url_name)
usage_id = id_generator.create_usage(def_id)
aside_children = []
# VS[compat] -- detect new-style each-in-a-file mode
if is_pointer_tag(node):
# new style:
# read the actual definition file--named using url_name.replace(':','/')
filepath = cls._format_filepath(node.tag, name_to_pathname(url_name))
definition_xml = cls.load_file(filepath, runtime.resources_fs, def_id)
aside_children = runtime.parse_asides(definition_xml, def_id, usage_id, id_generator)
else:
filepath = None
definition_xml = node
dog_stats_api.increment(
DEPRECATION_VSCOMPAT_EVENT,
tags=["location:xmlparser_util_mixin_parse_xml"]
)
# Note: removes metadata.
definition, children = cls.load_definition(definition_xml, runtime, def_id, id_generator)
# VS[compat] -- make Ike's github preview links work in both old and
# new file layouts
if is_pointer_tag(node):
# new style -- contents actually at filepath
definition['filename'] = [filepath, filepath]
metadata = cls.load_metadata(definition_xml)
# move definition metadata into dict
dmdata = definition.get('definition_metadata', '')
if dmdata:
metadata['definition_metadata_raw'] = dmdata
try:
metadata.update(json.loads(dmdata))
except Exception as err:
log.debug('Error in loading metadata %r', dmdata, exc_info=True)
metadata['definition_metadata_err'] = str(err)
definition_aside_children = definition.pop('aside_children', None)
if definition_aside_children:
aside_children.extend(definition_aside_children)
# Set/override any metadata specified by policy
cls.apply_policy(metadata, runtime.get_policy(usage_id))
field_data = {}
field_data.update(metadata)
field_data.update(definition)
field_data['children'] = children
field_data['xml_attributes']['filename'] = definition.get('filename', ['', None]) # for git link
kvs = InheritanceKeyValueStore(initial_values=field_data)
field_data = KvsFieldData(kvs)
xblock = runtime.construct_xblock_from_class(
cls,
# We're loading a descriptor, so student_id is meaningless
ScopeIds(None, node.tag, def_id, usage_id),
field_data,
)
if aside_children:
asides_tags = [x.tag for x in aside_children]
asides = runtime.get_asides(xblock)
for asd in asides:
if asd.scope_ids.block_type in asides_tags:
xblock.add_aside(asd)
return xblock
@classmethod
def _format_filepath(cls, category, name):
return u'{category}/{name}.{ext}'.format(category=category,
name=name,
ext=cls.filename_extension)
def export_to_file(self):
"""If this returns True, write the definition of this descriptor to a separate
file.
NOTE: Do not override this without a good reason. It is here
specifically for customtag...
"""
return True
def add_xml_to_node(self, node):
"""
For exporting, set data on `node` from ourselves.
"""
# Get the definition
xml_object = self.definition_to_xml(self.runtime.export_fs)
self.clean_metadata_from_xml(xml_object)
# Set the tag on both nodes so we get the file path right.
xml_object.tag = self.category
node.tag = self.category
# Add the non-inherited metadata
for attr in sorted(own_metadata(self)):
# don't want e.g. data_dir
if attr not in self.metadata_to_strip and attr not in self.metadata_to_export_to_policy:
val = serialize_field(self._field_data.get(self, attr))
try:
xml_object.set(attr, val)
except Exception:
logging.exception(
u'Failed to serialize metadata attribute %s with value %s in module %s. This could mean data loss!!!',
attr, val, self.url_name
)
for key, value in self.xml_attributes.items():
if key not in self.metadata_to_strip:
xml_object.set(key, serialize_field(value))
if self.export_to_file():
# Write the definition to a file
url_path = name_to_pathname(self.url_name)
filepath = self._format_filepath(self.category, url_path)
self.runtime.export_fs.makedir(os.path.dirname(filepath), recursive=True, allow_recreate=True)
with self.runtime.export_fs.open(filepath, 'w') as fileobj:
ElementTree(xml_object).write(fileobj, pretty_print=True, encoding='utf-8')
else:
# Write all attributes from xml_object onto node
node.clear()
node.tag = xml_object.tag
node.text = xml_object.text
node.tail = xml_object.tail
node.attrib.update(xml_object.attrib)
node.extend(xml_object)
node.set('url_name', self.url_name)
# Special case for course pointers:
if self.category == 'course':
# add org and course attributes on the pointer tag
node.set('org', self.location.org)
node.set('course', self.location.course)
def definition_to_xml(self, resource_fs):
"""
Return a new etree Element object created from this modules definition.
"""
raise NotImplementedError(
"%s does not implement definition_to_xml" % self.__class__.__name__)
@property
def non_editable_metadata_fields(self):
"""
Return a list of all metadata fields that cannot be edited.
"""
non_editable_fields = super(XmlParserMixin, self).non_editable_metadata_fields
non_editable_fields.append(XmlParserMixin.xml_attributes)
return non_editable_fields
class XmlDescriptor(XmlParserMixin, XModuleDescriptor): # pylint: disable=abstract-method
"""
Mixin class for standardized parsing of XModule xml.
"""
@classmethod
def from_xml(cls, xml_data, system, id_generator):
"""
Creates an instance of this descriptor from the supplied xml_data.
This may be overridden by subclasses.
Args:
xml_data (str): A string of xml that will be translated into data and children
for this module
system (:class:`.XMLParsingSystem):
id_generator (:class:`xblock.runtime.IdGenerator`): Used to generate the
usage_ids and definition_ids when loading this xml
"""
# Shim from from_xml to the parse_xml defined in XmlParserMixin.
# This only exists to satisfy subclasses that both:
# a) define from_xml themselves
# b) call super(..).from_xml(..)
return super(XmlDescriptor, cls).parse_xml(
etree.fromstring(xml_data),
system,
None, # This is ignored by XmlParserMixin
id_generator,
)
@classmethod
def parse_xml(cls, node, runtime, keys, id_generator):
"""
Interpret the parsed XML in `node`, creating an XModuleDescriptor.
"""
if cls.from_xml != XmlDescriptor.from_xml:
# Skip the parse_xml from XmlParserMixin to get the shim parse_xml
# from XModuleDescriptor, which actually calls `from_xml`.
return super(XmlParserMixin, cls).parse_xml(node, runtime, keys, id_generator) # pylint: disable=bad-super-call
else:
return super(XmlDescriptor, cls).parse_xml(node, runtime, keys, id_generator)
def export_to_xml(self, resource_fs):
"""
Returns an xml string representing this module, and all modules
underneath it. May also write required resources out to resource_fs.
Assumes that modules have single parentage (that no module appears twice
in the same course), and that it is thus safe to nest modules as xml
children as appropriate.
The returned XML should be able to be parsed back into an identical
XModuleDescriptor using the from_xml method with the same system, org,
and course
"""
# Shim from export_to_xml to the add_xml_to_node defined in XmlParserMixin.
# This only exists to satisfy subclasses that both:
# a) define export_to_xml themselves
# b) call super(..).export_to_xml(..)
node = Element(self.category)
super(XmlDescriptor, self).add_xml_to_node(node)
return etree.tostring(node)
def add_xml_to_node(self, node):
"""
Export this :class:`XModuleDescriptor` as XML, by setting attributes on the provided
`node`.
"""
if self.export_to_xml != XmlDescriptor.export_to_xml:
# Skip the add_xml_to_node from XmlParserMixin to get the shim add_xml_to_node
# from XModuleDescriptor, which actually calls `export_to_xml`.
super(XmlParserMixin, self).add_xml_to_node(node) # pylint: disable=bad-super-call
else:
super(XmlDescriptor, self).add_xml_to_node(node)
|
zrhans/pythonanywhere | refs/heads/master | .virtualenvs/django19/lib/python3.4/site-packages/pip/_vendor/requests/sessions.py | 439 | # -*- coding: utf-8 -*-
"""
requests.session
~~~~~~~~~~~~~~~~
This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).
"""
import os
from collections import Mapping
from datetime import datetime
from .auth import _basic_auth_str
from .compat import cookielib, OrderedDict, urljoin, urlparse
from .cookies import (
cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
from .hooks import default_hooks, dispatch_hook
from .utils import to_key_val_list, default_headers, to_native_string
from .exceptions import (
TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
from .packages.urllib3._collections import RecentlyUsedContainer
from .structures import CaseInsensitiveDict
from .adapters import HTTPAdapter
from .utils import (
requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
get_auth_from_url
)
from .status_codes import codes
# formerly defined here, reexposed here for backward compatibility
from .models import REDIRECT_STATI
REDIRECT_CACHE_SIZE = 1000
def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
"""
Determines appropriate setting for a given request, taking into account the
explicit setting on that request, and the setting in the session. If a
setting is a dictionary, they will be merged together using `dict_class`
"""
if session_setting is None:
return request_setting
if request_setting is None:
return session_setting
# Bypass if not a dictionary (e.g. verify)
if not (
isinstance(session_setting, Mapping) and
isinstance(request_setting, Mapping)
):
return request_setting
merged_setting = dict_class(to_key_val_list(session_setting))
merged_setting.update(to_key_val_list(request_setting))
    # Remove keys that are set to None. Extract the keys first to avoid
    # altering the dictionary during iteration, and keep dict_class (e.g.
    # CaseInsensitiveDict for headers) instead of downgrading to a plain dict.
    none_keys = [k for (k, v) in merged_setting.items() if v is None]
    for key in none_keys:
        del merged_setting[key]
return merged_setting
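# --- Illustrative example (not part of the original module) ---
# A minimal sketch of merge_setting semantics: request-level values win over
# session-level ones, and a None value removes the key entirely. The header
# names below are invented for the demonstration.
def _demo_merge_setting():
    session_headers = {'Accept': '*/*', 'X-Token': 'abc'}
    request_headers = {'X-Token': None, 'X-Extra': '1'}
    merged = merge_setting(request_headers, session_headers)
    # X-Token is dropped (None), X-Extra is added, Accept is inherited.
    assert merged == {'Accept': '*/*', 'X-Extra': '1'}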
def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
"""
Properly merges both requests and session hooks.
This is necessary because when request_hooks == {'response': []}, the
merge breaks Session hooks entirely.
"""
if session_hooks is None or session_hooks.get('response') == []:
return request_hooks
if request_hooks is None or request_hooks.get('response') == []:
return session_hooks
return merge_setting(request_hooks, session_hooks, dict_class)
class SessionRedirectMixin(object):
def resolve_redirects(self, resp, req, stream=False, timeout=None,
verify=True, cert=None, proxies=None, **adapter_kwargs):
"""Receives a Response. Returns a generator of Responses."""
i = 0
hist = [] # keep track of history
while resp.is_redirect:
prepared_request = req.copy()
if i > 0:
# Update history and keep track of redirects.
hist.append(resp)
new_hist = list(hist)
resp.history = new_hist
try:
resp.content # Consume socket so it can be released
except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
resp.raw.read(decode_content=False)
if i >= self.max_redirects:
raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects)
# Release the connection back into the pool.
resp.close()
url = resp.headers['location']
method = req.method
# Handle redirection without scheme (see: RFC 1808 Section 4)
if url.startswith('//'):
parsed_rurl = urlparse(resp.url)
url = '%s:%s' % (parsed_rurl.scheme, url)
# The scheme should be lower case...
parsed = urlparse(url)
url = parsed.geturl()
# Facilitate relative 'location' headers, as allowed by RFC 7231.
# (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
# Compliant with RFC3986, we percent encode the url.
if not parsed.netloc:
url = urljoin(resp.url, requote_uri(url))
else:
url = requote_uri(url)
prepared_request.url = to_native_string(url)
# Cache the url, unless it redirects to itself.
if resp.is_permanent_redirect and req.url != prepared_request.url:
self.redirect_cache[req.url] = prepared_request.url
# http://tools.ietf.org/html/rfc7231#section-6.4.4
if (resp.status_code == codes.see_other and
method != 'HEAD'):
method = 'GET'
# Do what the browsers do, despite standards...
# First, turn 302s into GETs.
if resp.status_code == codes.found and method != 'HEAD':
method = 'GET'
# Second, if a POST is responded to with a 301, turn it into a GET.
# This bizarre behaviour is explained in Issue 1704.
if resp.status_code == codes.moved and method == 'POST':
method = 'GET'
prepared_request.method = method
# https://github.com/kennethreitz/requests/issues/1084
if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
if 'Content-Length' in prepared_request.headers:
del prepared_request.headers['Content-Length']
prepared_request.body = None
headers = prepared_request.headers
try:
del headers['Cookie']
except KeyError:
pass
# Extract any cookies sent on the response to the cookiejar
# in the new request. Because we've mutated our copied prepared
# request, use the old one that we haven't yet touched.
extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
prepared_request._cookies.update(self.cookies)
prepared_request.prepare_cookies(prepared_request._cookies)
# Rebuild auth and proxy information.
proxies = self.rebuild_proxies(prepared_request, proxies)
self.rebuild_auth(prepared_request, resp)
# Override the original request.
req = prepared_request
resp = self.send(
req,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
allow_redirects=False,
**adapter_kwargs
)
extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
i += 1
yield resp
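    # --- Illustrative note (not part of the original module) ---
    # resolve_redirects is a generator: Session.send() drains it with
    #     history = [resp for resp in gen]
    # and each hop is re-sent with allow_redirects=False so that redirect
    # handling stays in this single loop rather than recursing.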
def rebuild_auth(self, prepared_request, response):
"""
When being redirected we may want to strip authentication from the
request to avoid leaking credentials. This method intelligently removes
and reapplies authentication where possible to avoid credential loss.
"""
headers = prepared_request.headers
url = prepared_request.url
if 'Authorization' in headers:
# If we get redirected to a new host, we should strip out any
# authentication headers.
original_parsed = urlparse(response.request.url)
redirect_parsed = urlparse(url)
if (original_parsed.hostname != redirect_parsed.hostname):
del headers['Authorization']
# .netrc might have more auth for us on our new host.
new_auth = get_netrc_auth(url) if self.trust_env else None
if new_auth is not None:
prepared_request.prepare_auth(new_auth)
return
def rebuild_proxies(self, prepared_request, proxies):
"""
This method re-evaluates the proxy configuration by considering the
environment variables. If we are redirected to a URL covered by
NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
proxy keys for this URL (in case they were stripped by a previous
redirect).
This method also replaces the Proxy-Authorization header where
necessary.
"""
headers = prepared_request.headers
url = prepared_request.url
scheme = urlparse(url).scheme
new_proxies = proxies.copy() if proxies is not None else {}
if self.trust_env and not should_bypass_proxies(url):
environ_proxies = get_environ_proxies(url)
proxy = environ_proxies.get(scheme)
if proxy:
new_proxies.setdefault(scheme, environ_proxies[scheme])
if 'Proxy-Authorization' in headers:
del headers['Proxy-Authorization']
try:
username, password = get_auth_from_url(new_proxies[scheme])
except KeyError:
username, password = None, None
if username and password:
headers['Proxy-Authorization'] = _basic_auth_str(username, password)
return new_proxies
class Session(SessionRedirectMixin):
"""A Requests session.
Provides cookie persistence, connection-pooling, and configuration.
Basic Usage::
>>> import requests
>>> s = requests.Session()
>>> s.get('http://httpbin.org/get')
      <Response [200]>
"""
__attrs__ = [
'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
'cert', 'prefetch', 'adapters', 'stream', 'trust_env',
'max_redirects',
]
def __init__(self):
#: A case-insensitive dictionary of headers to be sent on each
#: :class:`Request <Request>` sent from this
#: :class:`Session <Session>`.
self.headers = default_headers()
#: Default Authentication tuple or object to attach to
#: :class:`Request <Request>`.
self.auth = None
#: Dictionary mapping protocol to the URL of the proxy (e.g.
#: {'http': 'foo.bar:3128'}) to be used on each
#: :class:`Request <Request>`.
self.proxies = {}
#: Event-handling hooks.
self.hooks = default_hooks()
#: Dictionary of querystring data to attach to each
#: :class:`Request <Request>`. The dictionary values may be lists for
#: representing multivalued query parameters.
self.params = {}
#: Stream response content default.
self.stream = False
#: SSL Verification default.
self.verify = True
#: SSL certificate default.
self.cert = None
#: Maximum number of redirects allowed. If the request exceeds this
#: limit, a :class:`TooManyRedirects` exception is raised.
self.max_redirects = DEFAULT_REDIRECT_LIMIT
#: Should we trust the environment?
self.trust_env = True
#: A CookieJar containing all currently outstanding cookies set on this
#: session. By default it is a
#: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
#: may be any other ``cookielib.CookieJar`` compatible object.
self.cookies = cookiejar_from_dict({})
# Default connection adapters.
self.adapters = OrderedDict()
self.mount('https://', HTTPAdapter())
self.mount('http://', HTTPAdapter())
# Only store 1000 redirects to prevent using infinite memory
self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
def prepare_request(self, request):
"""Constructs a :class:`PreparedRequest <PreparedRequest>` for
transmission and returns it. The :class:`PreparedRequest` has settings
merged from the :class:`Request <Request>` instance and those of the
:class:`Session`.
:param request: :class:`Request` instance to prepare with this
session's settings.
"""
cookies = request.cookies or {}
# Bootstrap CookieJar.
if not isinstance(cookies, cookielib.CookieJar):
cookies = cookiejar_from_dict(cookies)
# Merge with session cookies
merged_cookies = merge_cookies(
merge_cookies(RequestsCookieJar(), self.cookies), cookies)
# Set environment's basic authentication if not explicitly set.
auth = request.auth
if self.trust_env and not auth and not self.auth:
auth = get_netrc_auth(request.url)
p = PreparedRequest()
p.prepare(
method=request.method.upper(),
url=request.url,
files=request.files,
data=request.data,
json=request.json,
headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
params=merge_setting(request.params, self.params),
auth=merge_setting(auth, self.auth),
cookies=merged_cookies,
hooks=merge_hooks(request.hooks, self.hooks),
)
return p
def request(self, method, url,
params=None,
data=None,
headers=None,
cookies=None,
files=None,
auth=None,
timeout=None,
allow_redirects=True,
proxies=None,
hooks=None,
stream=None,
verify=None,
cert=None,
json=None):
"""Constructs a :class:`Request <Request>`, prepares it and sends it.
Returns :class:`Response <Response>` object.
:param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query
string for the :class:`Request`.
:param data: (optional) Dictionary or bytes to send in the body of the
:class:`Request`.
:param json: (optional) json to send in the body of the
:class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the
:class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the
:class:`Request`.
:param files: (optional) Dictionary of ``'filename': file-like-objects``
for multipart encoding upload.
:param auth: (optional) Auth tuple or callable to enable
Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a (`connect timeout, read
timeout <user/advanced.html#timeouts>`_) tuple.
:type timeout: float or tuple
:param allow_redirects: (optional) Set to True by default.
:type allow_redirects: bool
:param proxies: (optional) Dictionary mapping protocol to the URL of
the proxy.
:param stream: (optional) whether to immediately download the response
content. Defaults to ``False``.
:param verify: (optional) if ``True``, the SSL cert will be verified.
A CA_BUNDLE path can also be provided.
:param cert: (optional) if String, path to ssl client cert file (.pem).
If Tuple, ('cert', 'key') pair.
"""
method = to_native_string(method)
# Create the Request.
req = Request(
method = method.upper(),
url = url,
headers = headers,
files = files,
data = data or {},
json = json,
params = params or {},
auth = auth,
cookies = cookies,
hooks = hooks,
)
prep = self.prepare_request(req)
proxies = proxies or {}
settings = self.merge_environment_settings(
prep.url, proxies, stream, verify, cert
)
# Send the request.
send_kwargs = {
'timeout': timeout,
'allow_redirects': allow_redirects,
}
send_kwargs.update(settings)
resp = self.send(prep, **send_kwargs)
return resp
def get(self, url, **kwargs):
"""Sends a GET request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', True)
return self.request('GET', url, **kwargs)
def options(self, url, **kwargs):
"""Sends a OPTIONS request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', True)
return self.request('OPTIONS', url, **kwargs)
def head(self, url, **kwargs):
"""Sends a HEAD request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', False)
return self.request('HEAD', url, **kwargs)
def post(self, url, data=None, json=None, **kwargs):
"""Sends a POST request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) json to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('POST', url, data=data, json=json, **kwargs)
def put(self, url, data=None, **kwargs):
"""Sends a PUT request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('PUT', url, data=data, **kwargs)
def patch(self, url, data=None, **kwargs):
"""Sends a PATCH request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('PATCH', url, data=data, **kwargs)
def delete(self, url, **kwargs):
"""Sends a DELETE request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('DELETE', url, **kwargs)
def send(self, request, **kwargs):
"""Send a given PreparedRequest."""
# Set defaults that the hooks can utilize to ensure they always have
# the correct parameters to reproduce the previous request.
kwargs.setdefault('stream', self.stream)
kwargs.setdefault('verify', self.verify)
kwargs.setdefault('cert', self.cert)
kwargs.setdefault('proxies', self.proxies)
# It's possible that users might accidentally send a Request object.
# Guard against that specific failure case.
if not isinstance(request, PreparedRequest):
raise ValueError('You can only send PreparedRequests.')
checked_urls = set()
while request.url in self.redirect_cache:
checked_urls.add(request.url)
new_url = self.redirect_cache.get(request.url)
if new_url in checked_urls:
break
request.url = new_url
# Set up variables needed for resolve_redirects and dispatching of hooks
allow_redirects = kwargs.pop('allow_redirects', True)
stream = kwargs.get('stream')
hooks = request.hooks
# Get the appropriate adapter to use
adapter = self.get_adapter(url=request.url)
# Start time (approximately) of the request
start = datetime.utcnow()
# Send the request
r = adapter.send(request, **kwargs)
# Total elapsed time of the request (approximately)
r.elapsed = datetime.utcnow() - start
# Response manipulation hooks
r = dispatch_hook('response', hooks, r, **kwargs)
# Persist cookies
if r.history:
# If the hooks create history then we want those cookies too
for resp in r.history:
extract_cookies_to_jar(self.cookies, resp.request, resp.raw)
extract_cookies_to_jar(self.cookies, request, r.raw)
# Redirect resolving generator.
gen = self.resolve_redirects(r, request, **kwargs)
# Resolve redirects if allowed.
history = [resp for resp in gen] if allow_redirects else []
# Shuffle things around if there's history.
if history:
# Insert the first (original) request at the start
history.insert(0, r)
# Get the last request made
r = history.pop()
r.history = history
if not stream:
r.content
return r
def merge_environment_settings(self, url, proxies, stream, verify, cert):
"""Check the environment and merge it with some settings."""
# Gather clues from the surrounding environment.
if self.trust_env:
# Set environment's proxies.
env_proxies = get_environ_proxies(url) or {}
for (k, v) in env_proxies.items():
proxies.setdefault(k, v)
# Look for requests environment configuration and be compatible
# with cURL.
if verify is True or verify is None:
verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
os.environ.get('CURL_CA_BUNDLE'))
# Merge all the kwargs.
proxies = merge_setting(proxies, self.proxies)
stream = merge_setting(stream, self.stream)
verify = merge_setting(verify, self.verify)
cert = merge_setting(cert, self.cert)
return {'verify': verify, 'proxies': proxies, 'stream': stream,
'cert': cert}
def get_adapter(self, url):
"""Returns the appropriate connnection adapter for the given URL."""
for (prefix, adapter) in self.adapters.items():
if url.lower().startswith(prefix):
return adapter
# Nothing matches :-/
raise InvalidSchema("No connection adapters were found for '%s'" % url)
def close(self):
"""Closes all adapters and as such the session"""
for v in self.adapters.values():
v.close()
def mount(self, prefix, adapter):
"""Registers a connection adapter to a prefix.
Adapters are sorted in descending order by key length."""
self.adapters[prefix] = adapter
keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]
for key in keys_to_move:
self.adapters[key] = self.adapters.pop(key)
def __getstate__(self):
state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
state['redirect_cache'] = dict(self.redirect_cache)
return state
def __setstate__(self, state):
redirect_cache = state.pop('redirect_cache', {})
for attr, value in state.items():
setattr(self, attr, value)
self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
for redirect, to in redirect_cache.items():
self.redirect_cache[redirect] = to
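# --- Illustrative example (not part of the original module) ---
# A sketch of how mount() keeps adapters ordered by prefix length so that
# get_adapter() matches the most specific prefix first. The host name is an
# assumption made up for the demonstration.
def _demo_adapter_ordering():
    s = Session()
    special = HTTPAdapter(pool_connections=1)
    s.mount('https://api.example.com', special)
    assert s.get_adapter('https://api.example.com/v1') is special
    assert s.get_adapter('https://other.example.com/') is not special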
def session():
"""Returns a :class:`Session` for context-management."""
return Session()
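# --- Illustrative usage (not part of the original module) ---
# A minimal sketch of typical Session usage; the URL is an assumption and
# running this requires network access.
def _demo_session_usage():
    with Session() as s:
        s.headers.update({'User-Agent': 'demo/1.0'})
        resp = s.get('http://httpbin.org/get')
        return resp.status_code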
|
kingvuplus/gui_test3 | refs/heads/master | lib/python/Plugins/Extensions/MediaPlayer/settings.py | 23 | from Screens.Screen import Screen
from Screens.HelpMenu import HelpableScreen
from Components.FileList import FileList
from Components.Sources.StaticText import StaticText
from Components.config import config, getConfigListEntry, ConfigSubsection, ConfigText, ConfigYesNo, ConfigDirectory
from Components.ConfigList import ConfigListScreen
from Components.ActionMap import ActionMap
from Components.Pixmap import Pixmap
from Components.Sources.Boolean import Boolean
config.mediaplayer = ConfigSubsection()
config.mediaplayer.repeat = ConfigYesNo(default=False)
config.mediaplayer.savePlaylistOnExit = ConfigYesNo(default=True)
config.mediaplayer.saveDirOnExit = ConfigYesNo(default=False)
config.mediaplayer.defaultDir = ConfigDirectory()
config.mediaplayer.sortPlaylists = ConfigYesNo(default=False)
config.mediaplayer.alwaysHideInfoBar = ConfigYesNo(default=True)
config.mediaplayer.onMainMenu = ConfigYesNo(default=False)
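# --- Illustrative note (not part of the original plugin) ---
# A sketch of how one of the settings above is read and persisted elsewhere
# in enigma2 code, assuming the standard ConfigYesNo API:
#     if config.mediaplayer.repeat.value:
#         ...  # restart the playlist
#     config.mediaplayer.repeat.value = True
#     config.mediaplayer.repeat.save()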
class DirectoryBrowser(Screen, HelpableScreen):
def __init__(self, session, currDir):
Screen.__init__(self, session)
# for the skin: first try MediaPlayerDirectoryBrowser, then FileBrowser, this allows individual skinning
self.skinName = ["MediaPlayerDirectoryBrowser", "FileBrowser" ]
HelpableScreen.__init__(self)
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("Use"))
self.filelist = FileList(currDir, matchingPattern="")
self["filelist"] = self.filelist
self["FilelistActions"] = ActionMap(["SetupActions", "ColorActions"],
{
"green": self.use,
"red": self.exit,
"ok": self.ok,
"cancel": self.exit
})
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
self.setTitle(_("Directory browser"))
def ok(self):
if self.filelist.canDescent():
self.filelist.descent()
def use(self):
if self["filelist"].getCurrentDirectory() is not None:
if self.filelist.canDescent() and self["filelist"].getFilename() and len(self["filelist"].getFilename()) > len(self["filelist"].getCurrentDirectory()):
self.filelist.descent()
self.close(self["filelist"].getCurrentDirectory())
else:
self.close(self["filelist"].getFilename())
def exit(self):
self.close(False)
class MediaPlayerSettings(Screen,ConfigListScreen):
def __init__(self, session, parent):
Screen.__init__(self, session)
# for the skin: first try MediaPlayerSettings, then Setup, this allows individual skinning
self.skinName = ["MediaPlayerSettings", "Setup" ]
self.setup_title = _("Edit settings")
self.onChangedEntry = [ ]
self["HelpWindow"] = Pixmap()
self["HelpWindow"].hide()
self["VKeyIcon"] = Boolean(False)
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("Save"))
ConfigListScreen.__init__(self, [], session = session, on_change = self.changedEntry)
self.parent = parent
self.initConfigList()
config.mediaplayer.saveDirOnExit.addNotifier(self.initConfigList)
self["setupActions"] = ActionMap(["SetupActions", "ColorActions"],
{
"green": self.save,
"red": self.cancel,
"cancel": self.cancel,
"ok": self.ok,
}, -2)
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
self.setTitle(self.setup_title)
def initConfigList(self, element=None):
print "[initConfigList]", element
try:
self.list = []
self.list.append(getConfigListEntry(_("repeat playlist"), config.mediaplayer.repeat))
self.list.append(getConfigListEntry(_("save playlist on exit"), config.mediaplayer.savePlaylistOnExit))
self.list.append(getConfigListEntry(_("save last directory on exit"), config.mediaplayer.saveDirOnExit))
if not config.mediaplayer.saveDirOnExit.value:
self.list.append(getConfigListEntry(_("start directory"), config.mediaplayer.defaultDir))
self.list.append(getConfigListEntry(_("sorting of playlists"), config.mediaplayer.sortPlaylists))
self.list.append(getConfigListEntry(_("Always hide infobar"), config.mediaplayer.alwaysHideInfoBar))
self.list.append(getConfigListEntry(_("show mediaplayer on mainmenu"), config.mediaplayer.onMainMenu))
self["config"].setList(self.list)
except KeyError:
print "keyError"
def changedConfigList(self):
self.initConfigList()
def ok(self):
if self["config"].getCurrent()[1] == config.mediaplayer.defaultDir:
self.session.openWithCallback(self.DirectoryBrowserClosed, DirectoryBrowser, self.parent.filelist.getCurrentDirectory())
def DirectoryBrowserClosed(self, path):
print "PathBrowserClosed:" + str(path)
if path:
config.mediaplayer.defaultDir.setValue(path)
def save(self):
for x in self["config"].list:
x[1].save()
self.close()
def cancel(self):
self.close()
# for summary:
def changedEntry(self):
for x in self.onChangedEntry:
x()
def getCurrentEntry(self):
return self["config"].getCurrent()[0]
def getCurrentValue(self):
return str(self["config"].getCurrent()[1].getText())
def createSummary(self):
from Screens.Setup import SetupSummary
return SetupSummary
|
nburn42/tensorflow | refs/heads/master | tensorflow/contrib/distributions/python/kernel_tests/bijectors/gumbel_test.py | 14 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from scipy import stats
from tensorflow.contrib.distributions.python.ops.bijectors.gumbel import Gumbel
from tensorflow.python.ops.distributions.bijector_test_util import assert_bijective_and_finite
from tensorflow.python.ops.distributions.bijector_test_util import assert_scalar_congruency
from tensorflow.python.platform import test
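# --- Illustrative helper (not part of the original test) ---
# The Gumbel bijector's forward transform is the Gumbel (right-skewed) CDF.
# A minimal NumPy sketch of the same mapping, for comparison with the
# scipy.stats reference used below:
def _gumbel_cdf(x, loc=0.3, scale=5.):
    return np.exp(-np.exp(-(x - loc) / scale))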
class GumbelBijectorTest(test.TestCase):
"""Tests correctness of the Gumbel bijector."""
def testBijector(self):
with self.test_session():
loc = 0.3
scale = 5.
bijector = Gumbel(loc=loc, scale=scale, validate_args=True)
self.assertEqual("gumbel", bijector.name)
x = np.array([[[-3.], [0.], [0.5], [4.2], [12.]]], dtype=np.float32)
# Gumbel distribution
gumbel_dist = stats.gumbel_r(loc=loc, scale=scale)
y = gumbel_dist.cdf(x).astype(np.float32)
self.assertAllClose(y, bijector.forward(x).eval())
self.assertAllClose(x, bijector.inverse(y).eval())
self.assertAllClose(
np.squeeze(gumbel_dist.logpdf(x), axis=-1),
bijector.forward_log_det_jacobian(x, event_ndims=1).eval())
self.assertAllClose(
-bijector.inverse_log_det_jacobian(y, event_ndims=1).eval(),
bijector.forward_log_det_jacobian(x, event_ndims=1).eval(),
rtol=1e-4,
atol=0.)
def testScalarCongruency(self):
with self.test_session():
assert_scalar_congruency(
Gumbel(loc=0.3, scale=20.), lower_x=1., upper_x=100., rtol=0.02)
def testBijectiveAndFinite(self):
with self.test_session():
bijector = Gumbel(loc=0., scale=3.0, validate_args=True)
x = np.linspace(-10., 10., num=10).astype(np.float32)
y = np.linspace(0.01, 0.99, num=10).astype(np.float32)
assert_bijective_and_finite(bijector, x, y, event_ndims=0, rtol=1e-3)
if __name__ == "__main__":
test.main()
|
victorywang80/Maintenance | refs/heads/master | saltstack/src/salt/modules/apache.py | 1 | # -*- coding: utf-8 -*-
'''
Support for Apache
'''
# Import python libs
import os
import re
import logging
import urllib2
# Import salt libs
import salt.utils
log = logging.getLogger(__name__)
def __virtual__():
'''
Only load the module if apache is installed
'''
cmd = _detect_os()
if salt.utils.which(cmd):
return 'apache'
return False
def _detect_os():
'''
Apache commands and paths differ depending on packaging
'''
# TODO: Add pillar support for the apachectl location
if __grains__['os_family'] == 'RedHat':
return 'apachectl'
elif __grains__['os_family'] == 'Debian':
return 'apache2ctl'
else:
return 'apachectl'
def version():
'''
Return server version from apachectl -v
CLI Example:
.. code-block:: bash
salt '*' apache.version
'''
cmd = '{0} -v'.format(_detect_os())
out = __salt__['cmd.run'](cmd).splitlines()
ret = out[0].split(': ')
return ret[1]
def fullversion():
'''
Return server version from apachectl -V
CLI Example:
.. code-block:: bash
salt '*' apache.fullversion
'''
cmd = '{0} -V'.format(_detect_os())
ret = {}
ret['compiled_with'] = []
out = __salt__['cmd.run'](cmd).splitlines()
# Example
# -D APR_HAS_MMAP
define_re = re.compile(r'^\s+-D\s+')
for line in out:
if ': ' in line:
comps = line.split(': ')
if not comps:
continue
ret[comps[0].strip().lower().replace(' ', '_')] = comps[1].strip()
elif ' -D' in line:
cwith = define_re.sub('', line)
ret['compiled_with'].append(cwith)
return ret
def modules():
'''
Return list of static and shared modules from apachectl -M
CLI Example:
.. code-block:: bash
salt '*' apache.modules
'''
cmd = '{0} -M'.format(_detect_os())
ret = {}
ret['static'] = []
ret['shared'] = []
out = __salt__['cmd.run'](cmd).splitlines()
for line in out:
comps = line.split()
if not comps:
continue
if '(static)' in line:
ret['static'].append(comps[0])
if '(shared)' in line:
ret['shared'].append(comps[0])
return ret
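# --- Illustrative example (not part of the original module) ---
# A sketch of the parsing performed by modules() on typical `apachectl -M`
# output; the module names are invented for the demonstration.
def _demo_modules_parsing():
    sample = ' core_module (static)\n ssl_module (shared)'
    ret = {'static': [], 'shared': []}
    for line in sample.splitlines():
        comps = line.split()
        if '(static)' in line:
            ret['static'].append(comps[0])
        if '(shared)' in line:
            ret['shared'].append(comps[0])
    assert ret == {'static': ['core_module'], 'shared': ['ssl_module']}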
def servermods():
'''
Return list of modules compiled into the server (apachectl -l)
CLI Example:
.. code-block:: bash
salt '*' apache.servermods
'''
cmd = '{0} -l'.format(_detect_os())
ret = []
out = __salt__['cmd.run'](cmd).splitlines()
for line in out:
if not line:
continue
if '.c' in line:
ret.append(line.strip())
return ret
def directives():
'''
Return list of directives together with expected arguments
and places where the directive is valid (``apachectl -L``)
CLI Example:
.. code-block:: bash
salt '*' apache.directives
'''
cmd = '{0} -L'.format(_detect_os())
ret = {}
out = __salt__['cmd.run'](cmd)
out = out.replace('\n\t', '\t')
for line in out.splitlines():
if not line:
continue
comps = line.split('\t')
desc = '\n'.join(comps[1:])
ret[comps[0]] = desc
return ret
def vhosts():
'''
Show the settings as parsed from the config file (currently
only shows the virtualhost settings). (``apachectl -S``)
Because each additional virtual host adds to the execution
time, this command may require a long timeout be specified.
CLI Example:
.. code-block:: bash
salt -t 10 '*' apache.vhosts
'''
cmd = '{0} -S'.format(_detect_os())
ret = {}
namevhost = ''
out = __salt__['cmd.run'](cmd)
for line in out.splitlines():
if not line:
continue
comps = line.split()
if 'is a NameVirtualHost' in line:
namevhost = comps[0]
ret[namevhost] = {}
else:
if comps[0] == 'default':
ret[namevhost]['default'] = {}
ret[namevhost]['default']['vhost'] = comps[2]
ret[namevhost]['default']['conf'] = re.sub(r'\(|\)', '', comps[3])
if comps[0] == 'port':
ret[namevhost][comps[3]] = {}
ret[namevhost][comps[3]]['vhost'] = comps[3]
ret[namevhost][comps[3]]['conf'] = re.sub(r'\(|\)', '', comps[4])
ret[namevhost][comps[3]]['port'] = comps[1]
return ret
def signal(signal=None):
'''
Signals httpd to start, restart, or stop.
CLI Example:
.. code-block:: bash
salt '*' apache.signal restart
'''
no_extra_args = ('configtest', 'status', 'fullstatus')
valid_signals = ('start', 'stop', 'restart', 'graceful', 'graceful-stop')
if signal not in valid_signals and signal not in no_extra_args:
return
# Make sure you use the right arguments
if signal in valid_signals:
arguments = ' -k {0}'.format(signal)
else:
arguments = ' {0}'.format(signal)
cmd = _detect_os() + arguments
out = __salt__['cmd.run_all'](cmd)
# A non-zero return code means fail
if out['retcode'] and out['stderr']:
ret = out['stderr'].strip()
# 'apachectl configtest' returns 'Syntax OK' to stderr
elif out['stderr']:
ret = out['stderr'].strip()
elif out['stdout']:
ret = out['stdout'].strip()
# No output for something like: apachectl graceful
else:
ret = 'Command: "{0}" completed successfully!'.format(cmd)
return ret
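# --- Illustrative note (not part of the original module) ---
# For example, on a Debian-family system signal('graceful') runs roughly
#     apache2ctl -k graceful
# while signal('configtest') (a no_extra_args signal) runs
#     apache2ctl configtest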
def useradd(pwfile, user, password, opts=''):
'''
Add an HTTP user using the htpasswd command. If the htpasswd file does not
exist, it will be created. Valid options that can be passed are:
n Don't update file; display results on stdout.
m Force MD5 encryption of the password (default).
d Force CRYPT encryption of the password.
p Do not encrypt the password (plaintext).
s Force SHA encryption of the password.
CLI Examples:
.. code-block:: bash
salt '*' apache.useradd /etc/httpd/htpasswd larry badpassword
salt '*' apache.useradd /etc/httpd/htpasswd larry badpass opts=ns
'''
return __salt__['webutil.useradd'](pwfile, user, password, opts)
def userdel(pwfile, user):
'''
Delete an HTTP user from the specified htpasswd file.
CLI Examples:
.. code-block:: bash
salt '*' apache.userdel /etc/httpd/htpasswd larry
'''
return __salt__['webutil.userdel'](pwfile, user)
def check_site_enabled(site):
'''
Checks to see if the specific Site symlink is in /etc/apache2/sites-enabled.
This will only be functional on Debian-based operating systems (Ubuntu,
Mint, etc).
CLI Examples:
.. code-block:: bash
salt '*' apache.check_site_enabled example.com
'''
if os.path.islink('/etc/apache2/sites-enabled/{0}'.format(site)):
return True
elif (site == 'default' and os.path.islink('/etc/apache2/sites-enabled/000-{0}'.format(site))):
return True
else:
return False
def a2ensite(site):
'''
Runs a2ensite for the given site.
This will only be functional on Debian-based operating systems (Ubuntu,
Mint, etc).
CLI Examples:
.. code-block:: bash
salt '*' apache.a2ensite example.com
'''
ret = {}
command = ['a2ensite', site]
try:
status = __salt__['cmd.retcode'](command, python_shell=False)
except Exception as e:
return e
ret['Name'] = 'Apache2 Enable Site'
ret['Site'] = site
if status == 1:
ret['Status'] = 'Site {0} Not found'.format(site)
elif status == 0:
ret['Status'] = 'Site {0} enabled'.format(site)
else:
ret['Status'] = status
return ret
def a2dissite(site):
'''
Runs a2dissite for the given site.
This will only be functional on Debian-based operating systems (Ubuntu,
Mint, etc).
CLI Examples:
.. code-block:: bash
salt '*' apache.a2dissite example.com
'''
ret = {}
command = ['a2dissite', site]
try:
status = __salt__['cmd.retcode'](command, python_shell=False)
except Exception as e:
return e
ret['Name'] = 'Apache2 Disable Site'
ret['Site'] = site
if status == 256:
ret['Status'] = 'Site {0} Not found'.format(site)
elif status == 0:
ret['Status'] = 'Site {0} disabled'.format(site)
else:
ret['Status'] = status
return ret
def server_status(profile='default'):
'''
    Get information from the Apache server-status handler.
    NOTE:
    The server-status handler is disabled by default;
    it must be enabled for this function to work.
http://httpd.apache.org/docs/2.2/mod/mod_status.html
    The following configuration needs to exist in pillar/grains.
    Each entry nested in apache.server-status is a profile of a vhost/server;
    this gives support for multiple Apache servers/vhosts.
apache.server-status:
'default':
'url': http://localhost/server-status
'user': someuser
'pass': password
'realm': 'authentication realm for digest passwords'
'timeout': 5
CLI Examples:
.. code-block:: bash
salt '*' apache.server_status
salt '*' apache.server_status other-profile
'''
ret = {
'Scoreboard': {
'_': 0,
'S': 0,
'R': 0,
'W': 0,
'K': 0,
'D': 0,
'C': 0,
'L': 0,
'G': 0,
'I': 0,
'.': 0,
},
}
# Get configuration from pillar
url = __salt__['config.get']('apache.server-status:{0}:url'.format(profile), 'http://localhost/server-status')
user = __salt__['config.get']('apache.server-status:{0}:user'.format(profile), '')
passwd = __salt__['config.get']('apache.server-status:{0}:pass'.format(profile), '')
realm = __salt__['config.get']('apache.server-status:{0}:realm'.format(profile), '')
timeout = __salt__['config.get']('apache.server-status:{0}:timeout'.format(profile), 5)
# create authentication handler if configuration exists
if user and passwd:
basic = urllib2.HTTPBasicAuthHandler()
basic.add_password(realm=realm, uri=url, user=user, passwd=passwd)
digest = urllib2.HTTPDigestAuthHandler()
digest.add_password(realm=realm, uri=url, user=user, passwd=passwd)
urllib2.install_opener(urllib2.build_opener(basic, digest))
# get http data
url += '?auto'
try:
response = urllib2.urlopen(url, timeout=timeout).read().splitlines()
except urllib2.URLError:
return 'error'
# parse the data
for line in response:
splt = line.split(':', 1)
splt[0] = splt[0].strip()
splt[1] = splt[1].strip()
if splt[0] == 'Scoreboard':
for c in splt[1]:
ret['Scoreboard'][c] += 1
else:
if splt[1].isdigit():
ret[splt[0]] = int(splt[1])
else:
ret[splt[0]] = float(splt[1])
# return the good stuff
return ret
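# --- Illustrative note (not part of the original module) ---
# A sketch of a parsed server_status() result; all values are invented:
#     {'Total Accesses': 100, 'BusyWorkers': 2, 'CPULoad': 0.01,
#      'Scoreboard': {'_': 8, 'W': 1, 'K': 0, ...}}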
|
jswope00/griffinx | refs/heads/master | lms/djangoapps/django_comment_client/tests/factories.py | 149 | from factory.django import DjangoModelFactory
from django_comment_common.models import Role, Permission
class RoleFactory(DjangoModelFactory):
FACTORY_FOR = Role
name = 'Student'
course_id = 'edX/toy/2012_Fall'
class PermissionFactory(DjangoModelFactory):
FACTORY_FOR = Permission
name = 'create_comment'
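# --- Illustrative usage (not part of the original module) ---
# A sketch of how these factories are typically combined in tests; the
# `permissions` relation is an assumption about the Role model.
#     role = RoleFactory(name='Moderator', course_id='edX/toy/2012_Fall')
#     perm = PermissionFactory(name='delete_comment')
#     role.permissions.add(perm)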
|
lchl7890987/WebGL | refs/heads/master | utils/exporters/blender/addons/io_three/exporter/api/object.py | 124 | import math
import mathutils
import bpy
from bpy import data, context, types
from bpy_extras.io_utils import axis_conversion
from .. import constants, logger, utilities, exceptions
from .constants import (
MESH,
EMPTY,
ARMATURE,
LAMP,
SPOT,
SUN,
POINT,
HEMI,
AREA,
CAMERA,
PERSP,
ORTHO,
RENDER,
NO_SHADOW,
ZYX
)
# Blender doesn't seem to have a good way to link a mesh back to the
# objects that are instancing it, or it is bloody obvious and I haven't
# discovered it yet. This manifest serves as a way for me to map a mesh
# node to the object nodes that are using it.
_MESH_MAP = {}
def _object(func):
"""
:param func:
"""
def inner(arg, *args, **kwargs):
"""
:param arg:
:param *args:
:param **kwargs:
"""
if isinstance(arg, types.Object):
obj = arg
else:
obj = data.objects[arg]
return func(obj, *args, **kwargs)
return inner
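# --- Illustrative note (not part of the original module) ---
# The _object decorator lets every function below accept either a bpy
# object or its name, e.g. (assuming an object named 'Cube' exists):
#     position(data.objects['Cube'], options)  # by object
#     position('Cube', options)                # by name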
def clear_mesh_map():
"""Clears the mesh map, required on initialization"""
_MESH_MAP.clear()
def assemblies(valid_types, options):
"""
:param valid_types:
:param options:
"""
logger.debug('object.assemblies(%s)', valid_types)
for obj in data.objects:
# rigged assets are parented under armature nodes
if obj.parent and obj.parent.type != ARMATURE:
continue
if obj.parent and obj.parent.type == ARMATURE:
logger.info('Has armature parent %s', obj.name)
if _valid_node(obj, valid_types, options):
yield obj.name
@_object
def cast_shadow(obj):
"""
:param obj:
"""
logger.debug('object.cast_shadow(%s)', obj)
if obj.type == LAMP:
if obj.data.type in (SPOT, SUN):
ret = obj.data.shadow_method != NO_SHADOW
else:
logger.info('%s is a lamp but this lamp type does not '\
'have supported shadows in ThreeJS', obj.name)
ret = None
return ret
elif obj.type == MESH:
mat = material(obj)
if mat:
return data.materials[mat].use_cast_shadows
else:
return False
@_object
def children(obj, valid_types):
"""
:param obj:
:param valid_types:
"""
logger.debug('object.children(%s, %s)', obj, valid_types)
for child in obj.children:
if child.type in valid_types:
yield child.name
@_object
def material(obj):
"""
:param obj:
"""
logger.debug('object.material(%s)', obj)
try:
return obj.material_slots[0].name
except IndexError:
pass
@_object
def mesh(obj, options):
"""
:param obj:
:param options:
"""
logger.debug('object.mesh(%s, %s)', obj, options)
if obj.type != MESH:
return
for mesh_, objects in _MESH_MAP.items():
if obj in objects:
return mesh_
else:
logger.debug('Could not map object, updating manifest')
mesh_ = extract_mesh(obj, options)
        if len(mesh_.tessfaces) != 0:
manifest = _MESH_MAP.setdefault(mesh_.name, [])
manifest.append(obj)
mesh_name = mesh_.name
else:
# possibly just being used as a controller
logger.info('Object %s has no faces', obj.name)
mesh_name = None
return mesh_name
@_object
def name(obj):
"""
:param obj:
"""
return obj.name
@_object
def node_type(obj):
"""
:param obj:
"""
logger.debug('object.node_type(%s)', obj)
# standard transformation nodes are inferred
if obj.type == MESH:
return constants.MESH.title()
elif obj.type == EMPTY:
return constants.OBJECT.title()
dispatch = {
LAMP: {
POINT: constants.POINT_LIGHT,
SUN: constants.DIRECTIONAL_LIGHT,
SPOT: constants.SPOT_LIGHT,
HEMI: constants.HEMISPHERE_LIGHT,
AREA: constants.AREA_LIGHT,
},
CAMERA: {
PERSP: constants.PERSPECTIVE_CAMERA,
ORTHO: constants.ORTHOGRAPHIC_CAMERA
}
}
try:
return dispatch[obj.type][obj.data.type]
except AttributeError:
msg = 'Invalid type: %s' % obj.type
raise exceptions.UnsupportedObjectType(msg)
def nodes(valid_types, options):
"""
:param valid_types:
:param options:
"""
for obj in data.objects:
if _valid_node(obj, valid_types, options):
yield obj.name
@_object
def position(obj, options):
"""
:param obj:
:param options:
"""
logger.debug('object.position(%s)', obj)
vector = matrix(obj, options).to_translation()
return (vector.x, vector.y, vector.z)
@_object
def receive_shadow(obj):
"""
:param obj:
"""
if obj.type == MESH:
mat = material(obj)
if mat:
return data.materials[mat].use_shadows
else:
return False
AXIS_CONVERSION = axis_conversion(to_forward='Z', to_up='Y').to_4x4()
@_object
def matrix(obj, options):
"""
:param obj:
:param options:
"""
logger.debug('object.matrix(%s)', obj)
if options.get(constants.HIERARCHY, False) and obj.parent:
parent_inverted = obj.parent.matrix_world.inverted(mathutils.Matrix())
return parent_inverted * obj.matrix_world
else:
return AXIS_CONVERSION * obj.matrix_world
@_object
def rotation(obj, options):
"""
:param obj:
:param options:
"""
logger.debug('object.rotation(%s)', obj)
vector = matrix(obj, options).to_euler(ZYX)
return (vector.x, vector.y, vector.z)
@_object
def scale(obj, options):
"""
:param obj:
:param options:
"""
logger.debug('object.scale(%s)', obj)
vector = matrix(obj, options).to_scale()
return (vector.x, vector.y, vector.z)
@_object
def select(obj):
"""
:param obj:
"""
obj.select = True
@_object
def unselect(obj):
"""
:param obj:
"""
obj.select = False
@_object
def visible(obj):
"""
:param obj:
"""
logger.debug('object.visible(%s)', obj)
return obj.is_visible(context.scene)
def extract_mesh(obj, options, recalculate=False):
"""
:param obj:
:param options:
:param recalculate: (Default value = False)
"""
logger.debug('object.extract_mesh(%s, %s)', obj, options)
mesh_node = obj.to_mesh(context.scene, True, RENDER)
# transfer the geometry type to the extracted mesh
mesh_node.THREE_geometry_type = obj.data.THREE_geometry_type
# now determine whether or not to export using the geometry type
# set globally from the exporter's options or to use the local
# override on the mesh node itself
opt_buffer = options.get(constants.GEOMETRY_TYPE)
opt_buffer = opt_buffer == constants.BUFFER_GEOMETRY
prop_buffer = mesh_node.THREE_geometry_type == constants.BUFFER_GEOMETRY
# if doing buffer geometry it is imperative to triangulate the mesh
if opt_buffer or prop_buffer:
original_mesh = obj.data
obj.data = mesh_node
logger.debug('swapped %s for %s',
original_mesh.name,
mesh_node.name)
obj.select = True
bpy.context.scene.objects.active = obj
logger.info('Applying triangulation to %s', obj.data.name)
bpy.ops.object.modifier_add(type='TRIANGULATE')
bpy.ops.object.modifier_apply(apply_as='DATA',
modifier='Triangulate')
obj.data = original_mesh
obj.select = False
    # recalculate the normals to face outwards, this is usually
    # best after applying modifiers, especially for something
    # like the Mirror modifier
if recalculate:
logger.info('Recalculating normals')
original_mesh = obj.data
obj.data = mesh_node
bpy.context.scene.objects.active = obj
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.select_all(action='SELECT')
bpy.ops.mesh.normals_make_consistent()
bpy.ops.object.editmode_toggle()
obj.data = original_mesh
if not options.get(constants.SCENE):
xrot = mathutils.Matrix.Rotation(-math.pi/2, 4, 'X')
mesh_node.transform(xrot * obj.matrix_world)
# now generate a unique name
index = 0
while True:
        if index == 0:
mesh_name = '%sGeometry' % obj.data.name
else:
mesh_name = '%sGeometry.%d' % (obj.data.name, index)
try:
data.meshes[mesh_name]
index += 1
except KeyError:
break
mesh_node.name = mesh_name
mesh_node.update(calc_tessface=True)
mesh_node.calc_normals()
mesh_node.calc_tessface()
scale_ = options.get(constants.SCALE, 1)
mesh_node.transform(mathutils.Matrix.Scale(scale_, 4))
return mesh_node
def objects_using_mesh(mesh_node):
"""
:param mesh_node:
:return: list of object names
"""
logger.debug('object.objects_using_mesh(%s)', mesh_node)
for mesh_name, objects in _MESH_MAP.items():
if mesh_name == mesh_node.name:
return objects
else:
logger.warning('Could not find mesh mapping')
def prep_meshes(options):
"""Prep the mesh nodes. Preperation includes identifying:
- nodes that are on visible layers
- nodes that have export disabled
- nodes that have modifiers that need to be applied
:param options:
"""
logger.debug('object.prep_meshes(%s)', options)
mapping = {}
visible_layers = _visible_scene_layers()
for obj in data.objects:
if obj.type != MESH:
continue
# this is ideal for skipping controller or proxy nodes
        # that may apply to a Blender scene but not a three.js scene
if not _on_visible_layer(obj, visible_layers):
logger.info('%s is not on a visible layer', obj.name)
continue
# if someone really insists on a visible node not being exportable
if not obj.THREE_export:
logger.info('%s export is disabled', obj.name)
continue
# need to apply modifiers before moving on, and before
# handling instancing. it is possible for 2 or more objects
# instance the same mesh but to not all use the same modifiers
# this logic identifies the object with modifiers and extracts
# the mesh making the mesh unique to this particular object
if len(obj.modifiers):
            logger.info('%s has modifiers', obj.name)
mesh_node = extract_mesh(obj, options, recalculate=True)
_MESH_MAP[mesh_node.name] = [obj]
continue
logger.info('adding mesh %s.%s to prep',
obj.name, obj.data.name)
manifest = mapping.setdefault(obj.data.name, [])
manifest.append(obj)
# now associate the extracted mesh node with all the objects
# that are instancing it
for objects in mapping.values():
mesh_node = extract_mesh(objects[0], options)
_MESH_MAP[mesh_node.name] = objects
def extracted_meshes():
"""
:return: names of extracted mesh nodes
"""
logger.debug('object.extracted_meshes()')
return [key for key in _MESH_MAP.keys()]
def _on_visible_layer(obj, visible_layers):
"""
:param obj:
:param visible_layers:
"""
is_visible = False
for index, layer in enumerate(obj.layers):
if layer and index in visible_layers:
is_visible = True
break
if not is_visible:
logger.info('%s is on a hidden layer', obj.name)
return is_visible
def _visible_scene_layers():
"""
:return: list of visiible layer indices
"""
visible_layers = []
for index, layer in enumerate(context.scene.layers):
if layer:
visible_layers.append(index)
return visible_layers
def _valid_node(obj, valid_types, options):
"""
:param obj:
:param valid_types:
:param options:
"""
if obj.type not in valid_types:
return False
# skip objects that are not on visible layers
visible_layers = _visible_scene_layers()
if not _on_visible_layer(obj, visible_layers):
return False
try:
export = obj.THREE_export
except AttributeError:
export = True
if not export:
return False
mesh_node = mesh(obj, options)
is_mesh = obj.type == MESH
# skip objects that a mesh could not be resolved
if is_mesh and not mesh_node:
return False
# secondary test; if a mesh node was resolved but no
# faces are detected then bow out
if is_mesh:
mesh_node = data.meshes[mesh_node]
        if len(mesh_node.tessfaces) == 0:
return False
# if we get this far assume that the mesh is valid
return True
|