repo_name | path | copies | size | content | license
---|---|---|---|---|---
uselessfire/altaire | libs/xmpp/commands.py | 2 | 14199 | ## $Id: commands.py,v 1.17 2007/08/28 09:54:15 normanr Exp $
## Ad-Hoc Command manager
## Mike Albon (c) 5th January 2005
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2, or (at your option)
## any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
"""This module is a ad-hoc command processor for xmpppy. It uses the plug-in mechanism like most of the core library. It depends on a DISCO browser manager.
There are 3 classes here, a command processor Commands like the Browser, and a command template plugin Command, and an example command.
To use this module:
Instantiate the module with the parent transport and disco browser manager as parameters.
'Plug in' commands using the command template.
The command feature must be added to existing disco replies where necessary.
What it supplies:
Automatic command registration with the disco browser manager.
Automatic listing of commands in the public command list.
A means of handling requests, by redirection through the command manager.
"""
from protocol import *
from client import PlugIn
class Commands(PlugIn):
"""Commands is an ancestor of PlugIn and can be attached to any session.
The commands class provides a lookup and browse mechnism. It follows the same priciple of the Browser class, for Service Discovery to provide the list of commands, it adds the 'list' disco type to your existing disco handler function.
How it works:
The commands are added into the existing Browser on the correct nodes. When the command list is built the supplied discovery handler function needs to have a 'list' option in type. This then gets enumerated, all results returned as None are ignored.
The command executed is then called using it's Execute method. All session management is handled by the command itself.
"""
def __init__(self, browser):
"""Initialises class and sets up local variables"""
PlugIn.__init__(self)
DBG_LINE='commands'
self._exported_methods=[]
self._handlers={'':{}}
self._browser = browser
def plugin(self, owner):
"""Makes handlers within the session"""
# Plug into the session and the disco manager
# We only need get and set, results are not needed by a service provider, only a service user.
owner.RegisterHandler('iq',self._CommandHandler,typ='set',ns=NS_COMMANDS)
owner.RegisterHandler('iq',self._CommandHandler,typ='get',ns=NS_COMMANDS)
self._browser.setDiscoHandler(self._DiscoHandler,node=NS_COMMANDS,jid='')
def plugout(self):
"""Removes handlers from the session"""
# unPlug from the session and the disco manager
self._owner.UnregisterHandler('iq',self._CommandHandler,ns=NS_COMMANDS)
for jid in self._handlers:
self._browser.delDiscoHandler(self._DiscoHandler,node=NS_COMMANDS,jid=jid)
def _CommandHandler(self,conn,request):
"""The internal method to process the routing of command execution requests"""
# This is the command handler itself.
# We must:
# Pass on command execution to command handler
# (Do we need to keep session details here, or can that be done in the command?)
jid = str(request.getTo())
try:
node = request.getTagAttr('command','node')
except:
conn.send(Error(request,ERR_BAD_REQUEST))
raise NodeProcessed
if self._handlers.has_key(jid):
if self._handlers[jid].has_key(node):
self._handlers[jid][node]['execute'](conn,request)
else:
conn.send(Error(request,ERR_ITEM_NOT_FOUND))
raise NodeProcessed
elif self._handlers[''].has_key(node):
self._handlers[''][node]['execute'](conn,request)
else:
conn.send(Error(request,ERR_ITEM_NOT_FOUND))
raise NodeProcessed
def _DiscoHandler(self,conn,request,typ):
"""The internal method to process service discovery requests"""
# This is the disco manager handler.
if typ == 'items':
# We must:
# Generate a list of commands and return the list
# * This handler does not handle individual commands disco requests.
# Pseudo:
# Enumerate the 'item' disco of each command for the specified jid
# Build response and send
# To make this code easy to write we add a 'list' disco type; it returns a tuple, or None if not advertised
nodes = []
items = []
jid = str(request.getTo())
# Get specific jid based results
if self._handlers.has_key(jid):
for each in self._handlers[jid].keys():
items.append((jid,each))
else:
# Get generic results
for each in self._handlers[''].keys():
items.append(('',each))
if items != []:
for each in items:
i = self._handlers[each[0]][each[1]]['disco'](conn,request,'list')
if i is not None:
nodes.append(Node(tag='item',attrs={'jid':i[0],'node':i[1],'name':i[2]}))
iq = request.buildReply('result')
if request.getQuerynode(): iq.setQuerynode(request.getQuerynode())
iq.setQueryPayload(nodes)
conn.send(iq)
else:
conn.send(Error(request,ERR_ITEM_NOT_FOUND))
raise NodeProcessed
elif typ == 'info':
return {'ids':[{'category':'automation','type':'command-list'}],'features':[]}
def addCommand(self,name,cmddisco,cmdexecute,jid=''):
"""The method to call if adding a new command to the session, the requred parameters of cmddisco and cmdexecute are the methods to enable that command to be executed"""
# This command takes a command object and the name of the command for registration
# We must:
# Add item into disco
# Add item into command list
if not self._handlers.has_key(jid):
self._handlers[jid]={}
self._browser.setDiscoHandler(self._DiscoHandler,node=NS_COMMANDS,jid=jid)
if self._handlers[jid].has_key(name):
raise NameError,'Command Exists'
else:
self._handlers[jid][name]={'disco':cmddisco,'execute':cmdexecute}
# Need to add disco stuff here
self._browser.setDiscoHandler(cmddisco,node=name,jid=jid)
def delCommand(self,name,jid=''):
"""Removed command from the session"""
# This command takes a command object and the name used for registration
# We must:
# Remove item from disco
# Remove item from command list
if not self._handlers.has_key(jid):
raise NameError,'Jid not found'
if not self._handlers[jid].has_key(name):
raise NameError, 'Command not found'
else:
#Do disco removal here
command = self.getCommand(name,jid)['disco']
del self._handlers[jid][name]
self._browser.delDiscoHandler(command,node=name,jid=jid)
def getCommand(self,name,jid=''):
"""Returns the command tuple"""
# This gets the command object with name
# We must:
# Return item that matches this name
if not self._handlers.has_key(jid):
raise NameError,'Jid not found'
elif not self._handlers[jid].has_key(name):
raise NameError,'Command not found'
else:
return self._handlers[jid][name]
class Command_Handler_Prototype(PlugIn):
"""This is a prototype command handler, as each command uses a disco method
and execute method you can implement it any way you like, however this is
my first attempt at making a generic handler that you can hang process
stages on too. There is an example command below.
The parameters are as follows:
name : the name of the command within the jabber environment
description : the natural language description
discofeatures : the features supported by the command
initial : the initial command in the form of {'execute':commandname}
All stages set the 'actions' dictionary for each session to represent the possible options available.
"""
name = 'examplecommand'
count = 0
description = 'an example command'
discofeatures = [NS_COMMANDS,NS_DATA]
# This is the command template
def __init__(self,jid=''):
"""Set up the class"""
PlugIn.__init__(self)
DBG_LINE='command'
self.sessioncount = 0
self.sessions = {}
# Disco information for command list pre-formatted as a tuple
self.discoinfo = {'ids':[{'category':'automation','type':'command-node','name':self.description}],'features': self.discofeatures}
self._jid = jid
def plugin(self,owner):
"""Plug command into the commands class"""
# The owner in this instance is the Command Processor
self._commands = owner
self._owner = owner._owner
self._commands.addCommand(self.name,self._DiscoHandler,self.Execute,jid=self._jid)
def plugout(self):
"""Remove command from the commands class"""
self._commands.delCommand(self.name,self._jid)
def getSessionID(self):
"""Returns an id for the command session"""
self.count = self.count+1
return 'cmd-%s-%d'%(self.name,self.count)
def Execute(self,conn,request):
"""The method that handles all the commands, and routes them to the correct method for that stage."""
# New request or old?
try:
session = request.getTagAttr('command','sessionid')
except:
session = None
try:
action = request.getTagAttr('command','action')
except:
action = None
if action == None: action = 'execute'
# Check session is in session list
if self.sessions.has_key(session):
if self.sessions[session]['jid']==request.getFrom():
# Check action is valid
if self.sessions[session]['actions'].has_key(action):
# Execute next action
self.sessions[session]['actions'][action](conn,request)
else:
# Stage not presented as an option
self._owner.send(Error(request,ERR_BAD_REQUEST))
raise NodeProcessed
else:
# Jid and session don't match. Go away imposter
self._owner.send(Error(request,ERR_BAD_REQUEST))
raise NodeProcessed
elif session != None:
# Not on this sessionid you won't.
self._owner.send(Error(request,ERR_BAD_REQUEST))
raise NodeProcessed
else:
# New session
self.initial[action](conn,request)
def _DiscoHandler(self,conn,request,type):
"""The handler for discovery events"""
if type == 'list':
return (request.getTo(),self.name,self.description)
elif type == 'items':
return []
elif type == 'info':
return self.discoinfo
class TestCommand(Command_Handler_Prototype):
""" Example class. You should read source if you wish to understate how it works.
Generally, it presents a "master" that giudes user through to calculate something.
"""
name = 'testcommand'
description = 'a noddy example command'
def __init__(self,jid=''):
""" Init internal constants. """
Command_Handler_Prototype.__init__(self,jid)
self.initial = {'execute':self.cmdFirstStage}
def cmdFirstStage(self,conn,request):
""" Determine """
# This is the only place this should be repeated as all other stages should have SessionIDs
try:
session = request.getTagAttr('command','sessionid')
except:
session = None
if session == None:
session = self.getSessionID()
self.sessions[session]={'jid':request.getFrom(),'actions':{'cancel':self.cmdCancel,'next':self.cmdSecondStage,'execute':self.cmdSecondStage},'data':{'type':None}}
# As this is the first stage we only send a form
reply = request.buildReply('result')
form = DataForm(title='Select type of operation',data=['Use the combobox to select the type of calculation you would like to do, then click Next',DataField(name='calctype',desc='Calculation Type',value=self.sessions[session]['data']['type'],options=[['circlediameter','Calculate the Diameter of a circle'],['circlearea','Calculate the area of a circle']],typ='list-single',required=1)])
replypayload = [Node('actions',attrs={'execute':'next'},payload=[Node('next')]),form]
reply.addChild(name='command',namespace=NS_COMMANDS,attrs={'node':request.getTagAttr('command','node'),'sessionid':session,'status':'executing'},payload=replypayload)
self._owner.send(reply)
raise NodeProcessed
def cmdSecondStage(self,conn,request):
form = DataForm(node = request.getTag(name='command').getTag(name='x',namespace=NS_DATA))
self.sessions[request.getTagAttr('command','sessionid')]['data']['type']=form.getField('calctype').getValue()
self.sessions[request.getTagAttr('command','sessionid')]['actions']={'cancel':self.cmdCancel,None:self.cmdThirdStage,'previous':self.cmdFirstStage,'execute':self.cmdThirdStage,'next':self.cmdThirdStage}
# The form generation is split out to another method as it may be called by cmdThirdStage
self.cmdSecondStageReply(conn,request)
def cmdSecondStageReply(self,conn,request):
reply = request.buildReply('result')
form = DataForm(title = 'Enter the radius', data=['Enter the radius of the circle (numbers only)',DataField(desc='Radius',name='radius',typ='text-single')])
replypayload = [Node('actions',attrs={'execute':'complete'},payload=[Node('complete'),Node('prev')]),form]
reply.addChild(name='command',namespace=NS_COMMANDS,attrs={'node':request.getTagAttr('command','node'),'sessionid':request.getTagAttr('command','sessionid'),'status':'executing'},payload=replypayload)
self._owner.send(reply)
raise NodeProcessed
def cmdThirdStage(self,conn,request):
form = DataForm(node = request.getTag(name='command').getTag(name='x',namespace=NS_DATA))
try:
num = float(form.getField('radius').getValue())
except:
self.cmdSecondStageReply(conn,request)
from math import pi
if self.sessions[request.getTagAttr('command','sessionid')]['data']['type'] == 'circlearea':
result = (num**2)*pi
else:
result = num*2*pi
reply = request.buildReply('result')
form = DataForm(typ='result',data=[DataField(desc='result',name='result',value=result)])
reply.addChild(name='command',namespace=NS_COMMANDS,attrs={'node':request.getTagAttr('command','node'),'sessionid':request.getTagAttr('command','sessionid'),'status':'completed'},payload=[form])
self._owner.send(reply)
raise NodeProcessed
def cmdCancel(self,conn,request):
reply = request.buildReply('result')
reply.addChild(name='command',namespace=NS_COMMANDS,attrs={'node':request.getTagAttr('command','node'),'sessionid':request.getTagAttr('command','sessionid'),'status':'cancelled'})
self._owner.send(reply)
del self.sessions[request.getTagAttr('command','sessionid')]
| apache-2.0 |
egbertbouman/tribler-g | Tribler/Main/Build/Win32/setuptribler.py | 1 | 1721 | # Written by ABC authors and Arno Bakker
# see LICENSE.txt for license information
import sys
import os
try:
import py2exe.mf as modulefinder
import win32com
for p in win32com.__path__[1:]:
modulefinder.AddPackagePath("win32com", p)
for extra in ["win32com.shell"]:
__import__(extra)
m = sys.modules[extra]
for p in m.__path__[1:]:
modulefinder.AddPackagePath(extra, p)
except ImportError:
pass
from distutils.core import setup
import py2exe
################################################################
#
# Setup script used for py2exe
#
# *** Important note: ***
# Setting Python's optimize flag when building disables
# "assert" statements, which are used throughout the
# BitTornado core for error-handling.
#
################################################################
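# Typical invocation (an assumption based on standard py2exe usage, not part
# of the original file):
#   python setuptribler.py py2exe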
mainfile = os.path.join('Tribler','Main','tribler.py')
progicofile = os.path.join('Tribler','Images','tribler.ico')
target = {
"script": mainfile,
"icon_resources": [(1, progicofile)],
}
# gui panels to include (=those not found by py2exe from imports)
includePanels=[
"TopSearchPanel", "home", "games", "list", "settingsDialog"
]
#packages = ["Tribler.Core","encodings"] + ["Tribler.Main.vwxGUI.%s" % x for x in includePanels]
packages = ["encodings"] + ["Tribler.Main.vwxGUI.%s" % x for x in includePanels]
setup(
# (Disabling bundle_files for now -- apparently causes some issues with Win98)
# options = {"py2exe": {"bundle_files": 1}},
# zipfile = None,
options = {"py2exe": {"packages": packages,"optimize": 2}},
data_files = [("installdir",[])],
windows = [target],
)
| lgpl-2.1 |
BenMotz/cubetoolkit | toolkit/diary/templatetags/noprefix_url.py | 1 | 1089 | import logging
import urllib
from django import template
from django.template.defaulttags import url, URLNode
from django.urls import get_script_prefix
register = template.Library()
logger = logging.getLogger(__name__)
class NoPrefixURLNode(URLNode):
def __init__(self, url_node):
super(NoPrefixURLNode, self).__init__(
url_node.view_name, url_node.args, url_node.kwargs, url_node.asvar)
def render(self, context):
text = super(NoPrefixURLNode, self).render(context)
prefix = get_script_prefix()
parts = urllib.parse.urlsplit(text)
if not parts.path.startswith(prefix):
logger.error("Path %s doesn't start with prefix %s", text, prefix)
new_parts = list(parts)
new_parts[2] = parts.path[len(prefix) - 1:]
return urllib.parse.urlunsplit(new_parts)
@register.tag
def noprefix_url(parser, token):
"""
Returns an absolute URL matching given view with its parameters, with any
path prefix from the WSGI request stripped.
"""
return NoPrefixURLNode(url(parser, token))
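# Example template usage (a sketch; assumes the app's templates load this tag
# library and that a URL pattern named 'diary-home' exists):
#   {% load noprefix_url %}
#   <a href="{% noprefix_url 'diary-home' %}">Home</a>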
| agpl-3.0 |
mozilla/captain | vendor/lib/python/django/core/servers/fastcgi.py | 241 | 6638 | """
FastCGI (or SCGI, or AJP1.3 ...) server that implements the WSGI protocol.
Uses the flup python package: http://www.saddi.com/software/flup/
This is an adaptation of the flup package to add FastCGI server support
to run Django apps from Web servers that support the FastCGI protocol.
This module can be run standalone or from the django-admin / manage.py
scripts using the "runfcgi" directive.
Run with the extra option "help" for a list of additional options you can
pass to this server.
"""
import os
import sys
from django.utils import importlib
__version__ = "0.1"
__all__ = ["runfastcgi"]
FASTCGI_OPTIONS = {
'protocol': 'fcgi',
'host': None,
'port': None,
'socket': None,
'method': 'fork',
'daemonize': None,
'workdir': '/',
'pidfile': None,
'maxspare': 5,
'minspare': 2,
'maxchildren': 50,
'maxrequests': 0,
'debug': None,
'outlog': None,
'errlog': None,
'umask': None,
}
FASTCGI_HELP = r"""
Run this project as a fastcgi (or some other protocol supported
by flup) application. To do this, the flup package from
http://www.saddi.com/software/flup/ is required.
runfcgi [options] [fcgi settings]
Optional Fcgi settings: (setting=value)
protocol=PROTOCOL fcgi, scgi, ajp, ... (default %(protocol)s)
host=HOSTNAME hostname to listen on.
port=PORTNUM port to listen on.
socket=FILE UNIX socket to listen on.
method=IMPL prefork or threaded (default %(method)s).
maxrequests=NUMBER number of requests a child handles before it is
killed and a new child is forked (0 = no limit).
maxspare=NUMBER max number of spare processes / threads (default %(maxspare)s).
minspare=NUMBER min number of spare processes / threads (default %(minspare)s).
maxchildren=NUMBER hard limit number of processes / threads (default %(maxchildren)s).
daemonize=BOOL whether to detach from terminal.
pidfile=FILE write the spawned process-id to this file.
workdir=DIRECTORY change to this directory when daemonizing (default %(workdir)s).
debug=BOOL set to true to enable flup tracebacks.
outlog=FILE write stdout to this file.
errlog=FILE write stderr to this file.
umask=UMASK umask to use when daemonizing, in octal notation (default 022).
Examples:
Run a "standard" fastcgi process on a file-descriptor
(for Web servers which spawn your processes for you)
$ manage.py runfcgi method=threaded
Run a scgi server on a TCP host/port
$ manage.py runfcgi protocol=scgi method=prefork host=127.0.0.1 port=8025
Run a fastcgi server on a UNIX domain socket (posix platforms only)
$ manage.py runfcgi method=prefork socket=/tmp/fcgi.sock
Run a fastCGI as a daemon and write the spawned PID in a file
$ manage.py runfcgi socket=/tmp/fcgi.sock method=prefork \
daemonize=true pidfile=/var/run/django-fcgi.pid
""" % FASTCGI_OPTIONS
def fastcgi_help(message=None):
print(FASTCGI_HELP)
if message:
print(message)
return False
def runfastcgi(argset=[], **kwargs):
options = FASTCGI_OPTIONS.copy()
options.update(kwargs)
for x in argset:
if "=" in x:
k, v = x.split('=', 1)
else:
k, v = x, True
options[k.lower()] = v
if "help" in options:
return fastcgi_help()
try:
import flup
except ImportError as e:
sys.stderr.write("ERROR: %s\n" % e)
sys.stderr.write(" Unable to load the flup package. In order to run django\n")
sys.stderr.write(" as a FastCGI application, you will need to get flup from\n")
sys.stderr.write(" http://www.saddi.com/software/flup/ If you've already\n")
sys.stderr.write(" installed flup, then make sure you have it in your PYTHONPATH.\n")
return False
flup_module = 'server.' + options['protocol']
if options['method'] in ('prefork', 'fork'):
wsgi_opts = {
'maxSpare': int(options["maxspare"]),
'minSpare': int(options["minspare"]),
'maxChildren': int(options["maxchildren"]),
'maxRequests': int(options["maxrequests"]),
}
flup_module += '_fork'
elif options['method'] in ('thread', 'threaded'):
wsgi_opts = {
'maxSpare': int(options["maxspare"]),
'minSpare': int(options["minspare"]),
'maxThreads': int(options["maxchildren"]),
}
else:
return fastcgi_help("ERROR: Implementation must be one of prefork or "
"thread.")
wsgi_opts['debug'] = options['debug'] is not None
try:
module = importlib.import_module('.%s' % flup_module, 'flup')
WSGIServer = module.WSGIServer
except Exception:
print("Can't import flup." + flup_module)
return False
# Prep up and go
from django.core.servers.basehttp import get_internal_wsgi_application
if options["host"] and options["port"] and not options["socket"]:
wsgi_opts['bindAddress'] = (options["host"], int(options["port"]))
elif options["socket"] and not options["host"] and not options["port"]:
wsgi_opts['bindAddress'] = options["socket"]
elif not options["socket"] and not options["host"] and not options["port"]:
wsgi_opts['bindAddress'] = None
else:
return fastcgi_help("Invalid combination of host, port, socket.")
if options["daemonize"] is None:
# Default to daemonizing if we're running on a socket/named pipe.
daemonize = (wsgi_opts['bindAddress'] is not None)
else:
if options["daemonize"].lower() in ('true', 'yes', 't'):
daemonize = True
elif options["daemonize"].lower() in ('false', 'no', 'f'):
daemonize = False
else:
return fastcgi_help("ERROR: Invalid option for daemonize "
"parameter.")
daemon_kwargs = {}
if options['outlog']:
daemon_kwargs['out_log'] = options['outlog']
if options['errlog']:
daemon_kwargs['err_log'] = options['errlog']
if options['umask']:
daemon_kwargs['umask'] = int(options['umask'], 8)
if daemonize:
from django.utils.daemonize import become_daemon
become_daemon(our_home_dir=options["workdir"], **daemon_kwargs)
if options["pidfile"]:
with open(options["pidfile"], "w") as fp:
fp.write("%d\n" % os.getpid())
WSGIServer(get_internal_wsgi_application(), **wsgi_opts).run()
if __name__ == '__main__':
runfastcgi(sys.argv[1:])
| mpl-2.0 |
rileymcdowell/genomic-neuralnet | genomic_neuralnet/methods/generic_keras_net.py | 1 | 6998 | from __future__ import print_function
import os
import time
import json
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Lambda
from keras.optimizers import Nadam as Trainer
#from keras.optimizers import Adam as Trainer
from keras.regularizers import WeightRegularizer
from keras.callbacks import EarlyStopping, Callback, LearningRateScheduler
from sklearn.preprocessing import MinMaxScaler
from genomic_neuralnet.util import get_is_time_stats, get_should_plot
TIMING_EPOCHS = 12000
class LossHistory(Callback):
def on_train_begin(self, logs={}):
self.losses = []
def on_epoch_end(self, epoch, logs={}):
self.losses.append(logs.get('loss'))
class NeuralNetContainer(object):
def __init__(self):
self.model = None
self.learning_rate = None
self.weight_decay = 0.0
self.dropout_prob = 0.0
self.epochs = 25
self.hidden_layers = (10,)
self.verbose = False
self.plot = False
def clone(self):
if self.model is not None:
raise NotImplementedError('Cannot clone container after building model')
clone = NeuralNetContainer()
clone.learning_rate = self.learning_rate
clone.weight_decay = self.weight_decay
clone.dropout_prob = self.dropout_prob
clone.epochs = self.epochs
clone.hidden_layers = self.hidden_layers
clone.verbose = self.verbose
clone.plot = self.plot
return clone
def _build_nn(net_container, n_features):
model = Sequential()
# Change scale from (-1, 1) to (0, 1)
model.add(Lambda(lambda x: (x + 1) / 2, input_shape=(n_features,), output_shape=(n_features,)))
if net_container.weight_decay > 0.0:
weight_regularizer = WeightRegularizer(net_container.weight_decay)
else:
weight_regularizer = None
last_dim = n_features
for lidx, n_nodes in enumerate(net_container.hidden_layers):
# Layer, activation, and dropout, in that order.
model.add(Dense(output_dim=n_nodes, input_dim=last_dim, W_regularizer=weight_regularizer))
model.add(Activation('sigmoid'))
if net_container.dropout_prob > 0.0:
model.add(Dropout(net_container.dropout_prob))
last_dim = n_nodes
model.add(Dense(output_dim=1, input_dim=last_dim, bias=False))
model.add(Activation('linear'))
if not net_container.learning_rate is None:
optimizer = Trainer(lr=net_container.learning_rate)
else:
#optimizer = Trainer(lr=0.0001)
optimizer = Trainer()
model.compile( optimizer=optimizer
, loss='mean_squared_error'
)
net_container.model = model
def _train_net(container, X, y, override_epochs=None, is_check_train=False):
"""
Given a container, X (inputs), and y (outputs) train the network in the container.
* If override_epochs is an integer, just run that many epochs.
* The is_check_train parameter signifies that this training is a quick check to make
sure that the network is properly initialized and that the output error
is decreasing. The best "check trained" network will be passed in again
for an additional full set of training epochs.
"""
model = container.model
epochs = override_epochs if (not override_epochs is None) else container.epochs
verbose = int(container.verbose)
def rate_func(epoch):
if epochs - epoch == 2000:
# Settle down during last 2000 epochs.
model.optimizer.lr.set_value(model.optimizer.lr.get_value()/4.0)
if epochs - epoch == 500:
# Go a bit further in last 500 epochs.
model.optimizer.lr.set_value(model.optimizer.lr.get_value()/4.0)
return float(model.optimizer.lr.get_value())
lr_scheduler = LearningRateScheduler(rate_func)
loss_history = LossHistory()
callbacks = [loss_history, lr_scheduler]
model.fit( X,
y,
nb_epoch=epochs,
batch_size=X.shape[0] / 4,
verbose=verbose,
callbacks=callbacks
)
if (isinstance(override_epochs, int)) and (not is_check_train) and container.plot:
# Plot, but only if this is not overriden epochs.
import matplotlib.pyplot as plt
plt.plot(range(len(loss_history.losses)), loss_history.losses)
plt.show()
return loss_history.losses[-1]
def _predict(container, X):
model = container.model
return model.predict(X)
_NET_TRIES = 2
def _get_initial_net(container, n_features, X, y):
"""
Create a few networks. Start the training process for a few epochs, then take
the best one to continue training. This eliminates networks that are poorly
initialized and will not converge.
"""
candidates = []
for _ in range(_NET_TRIES):
cont = container.clone()
_build_nn(cont, n_features)
candidates.append(cont)
losses = []
for candidate in candidates:
# Train each candidate for 100 epochs.
loss = _train_net(candidate, X, y, override_epochs=100, is_check_train=True)
losses.append(loss)
best_idx = np.argmin(losses)
return candidates[best_idx]
def get_net_prediction( train_data, train_truth, test_data, test_truth
, hidden=(5,), weight_decay=0.0, dropout_prob=0.0
, learning_rate=None, epochs=25, verbose=False
, iter_id=None
):
container = NeuralNetContainer()
container.learning_rate = learning_rate
container.dropout_prob = dropout_prob
container.weight_decay = weight_decay
container.epochs = epochs
container.hidden_layers = hidden
container.verbose = verbose
container.plot = get_should_plot()
mms = MinMaxScaler(feature_range= (-1, 1)) # Scale output from -1 to 1.
train_y = mms.fit_transform(train_truth[:,np.newaxis])
n_features = train_data.shape[1]
collect_time_stats = get_is_time_stats()
if collect_time_stats:
start = time.time()
# Find and return an effectively initialized network to start.
container = _get_initial_net(container, n_features, train_data, train_y)
# Train the network.
if collect_time_stats:
# Train a specific time, never terminating early.
_train_net(container, train_data, train_y, override_epochs=TIMING_EPOCHS, is_check_train=False)
else:
# Normal training, enable all heuristics.
_train_net(container, train_data, train_y)
if collect_time_stats:
end = time.time()
print('Fitting took {} seconds'.format(end - start))
print(json.dumps({'seconds': end - start, 'hidden': container.hidden_layers}))
# Unsupervised (test) dataset.
predicted = _predict(container, test_data)
predicted = mms.inverse_transform(predicted)
return predicted.ravel()
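# Minimal usage sketch (an illustration, not part of the original module; the
# array shapes and hyperparameters are arbitrary assumptions):
#   import numpy as np
#   train_x = np.random.uniform(-1, 1, (100, 200))  # 100 lines x 200 markers
#   train_y = np.random.uniform(0, 1, 100)          # phenotypes
#   test_x = np.random.uniform(-1, 1, (20, 200))
#   preds = get_net_prediction(train_x, train_y, test_x, None,
#                              hidden=(10,), epochs=100)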
| mit |
eliksir/mailmojo-python-sdk | mailmojo_sdk/models/category.py | 1 | 2937 | # coding: utf-8
"""
MailMojo API
v1 of the MailMojo API # noqa: E501
OpenAPI spec version: 1.1.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class Category(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'name': 'str'
}
attribute_map = {
'name': 'name'
}
def __init__(self, name=None): # noqa: E501
"""Category - a model defined in Swagger""" # noqa: E501
self._name = None
self.discriminator = None
if name is not None:
self.name = name
@property
def name(self):
"""Gets the name of this Category. # noqa: E501
:return: The name of this Category. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this Category.
:param name: The name of this Category. # noqa: E501
:type: str
"""
self._name = name
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Category, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Category):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| apache-2.0 |
Mozhuowen/brython | www/src/Lib/test/test_quopri.py | 171 | 7715 | from test import support
import unittest
import sys, os, io, subprocess
import quopri
ENCSAMPLE = b"""\
Here's a bunch of special=20
=A1=A2=A3=A4=A5=A6=A7=A8=A9
=AA=AB=AC=AD=AE=AF=B0=B1=B2=B3
=B4=B5=B6=B7=B8=B9=BA=BB=BC=BD=BE
=BF=C0=C1=C2=C3=C4=C5=C6
=C7=C8=C9=CA=CB=CC=CD=CE=CF
=D0=D1=D2=D3=D4=D5=D6=D7
=D8=D9=DA=DB=DC=DD=DE=DF
=E0=E1=E2=E3=E4=E5=E6=E7
=E8=E9=EA=EB=EC=ED=EE=EF
=F0=F1=F2=F3=F4=F5=F6=F7
=F8=F9=FA=FB=FC=FD=FE=FF
characters... have fun!
"""
# First line ends with a space
DECSAMPLE = b"Here's a bunch of special \n" + \
b"""\
\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9
\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3
\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe
\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6
\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf
\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7
\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf
\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7
\xe8\xe9\xea\xeb\xec\xed\xee\xef
\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7
\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff
characters... have fun!
"""
def withpythonimplementation(testfunc):
def newtest(self):
# Test default implementation
testfunc(self)
# Test Python implementation
if quopri.b2a_qp is not None or quopri.a2b_qp is not None:
oldencode = quopri.b2a_qp
olddecode = quopri.a2b_qp
try:
quopri.b2a_qp = None
quopri.a2b_qp = None
testfunc(self)
finally:
quopri.b2a_qp = oldencode
quopri.a2b_qp = olddecode
newtest.__name__ = testfunc.__name__
return newtest
class QuopriTestCase(unittest.TestCase):
# Each entry is a tuple of (plaintext, encoded string). These strings are
# used in the "quotetabs=0" tests.
STRINGS = (
# Some normal strings
(b'hello', b'hello'),
(b'''hello
there
world''', b'''hello
there
world'''),
(b'''hello
there
world
''', b'''hello
there
world
'''),
(b'\201\202\203', b'=81=82=83'),
# Add some trailing MUST QUOTE strings
(b'hello ', b'hello=20'),
(b'hello\t', b'hello=09'),
# Some long lines. First, a single line of 108 characters
(b'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\xd8\xd9\xda\xdb\xdc\xdd\xde\xdfxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
b'''xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx=D8=D9=DA=DB=DC=DD=DE=DFx=
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'''),
# A line of exactly 76 characters, no soft line break should be needed
(b'yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy',
b'yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy'),
# A line of 77 characters, forcing a soft line break at position 75,
# and a second line of exactly 2 characters (because the soft line
# break `=' sign counts against the line length limit).
(b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz',
b'''zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=
zz'''),
# A line of 151 characters, forcing a soft line break at position 75,
# with a second line of exactly 76 characters and no trailing =
(b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz',
b'''zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=
zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'''),
# A string containing a hard line break, but which the first line is
# 151 characters and the second line is exactly 76 characters. This
# should leave us with three lines, the first which has a soft line
# break, and which the second and third do not.
(b'''yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy
zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz''',
b'''yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy=
yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy
zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'''),
# Now some really complex stuff ;)
(DECSAMPLE, ENCSAMPLE),
)
# These are used in the "quotetabs=1" tests.
ESTRINGS = (
(b'hello world', b'hello=20world'),
(b'hello\tworld', b'hello=09world'),
)
# These are used in the "header=1" tests.
HSTRINGS = (
(b'hello world', b'hello_world'),
(b'hello_world', b'hello=5Fworld'),
)
@withpythonimplementation
def test_encodestring(self):
for p, e in self.STRINGS:
self.assertEqual(quopri.encodestring(p), e)
@withpythonimplementation
def test_decodestring(self):
for p, e in self.STRINGS:
self.assertEqual(quopri.decodestring(e), p)
@withpythonimplementation
def test_idempotent_string(self):
for p, e in self.STRINGS:
self.assertEqual(quopri.decodestring(quopri.encodestring(e)), e)
@withpythonimplementation
def test_encode(self):
for p, e in self.STRINGS:
infp = io.BytesIO(p)
outfp = io.BytesIO()
quopri.encode(infp, outfp, quotetabs=False)
self.assertEqual(outfp.getvalue(), e)
@withpythonimplementation
def test_decode(self):
for p, e in self.STRINGS:
infp = io.BytesIO(e)
outfp = io.BytesIO()
quopri.decode(infp, outfp)
self.assertEqual(outfp.getvalue(), p)
@withpythonimplementation
def test_embedded_ws(self):
for p, e in self.ESTRINGS:
self.assertEqual(quopri.encodestring(p, quotetabs=True), e)
self.assertEqual(quopri.decodestring(e), p)
@withpythonimplementation
def test_encode_header(self):
for p, e in self.HSTRINGS:
self.assertEqual(quopri.encodestring(p, header=True), e)
@withpythonimplementation
def test_decode_header(self):
for p, e in self.HSTRINGS:
self.assertEqual(quopri.decodestring(e, header=True), p)
def test_scriptencode(self):
(p, e) = self.STRINGS[-1]
process = subprocess.Popen([sys.executable, "-mquopri"],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
self.addCleanup(process.stdout.close)
cout, cerr = process.communicate(p)
# On Windows, Python will output the result to stdout using
# CRLF, as the mode of stdout is text mode. To compare this
# with the expected result, we need to do a line-by-line comparison.
cout = cout.decode('latin-1').splitlines()
e = e.decode('latin-1').splitlines()
assert len(cout)==len(e)
for i in range(len(cout)):
self.assertEqual(cout[i], e[i])
self.assertEqual(cout, e)
def test_scriptdecode(self):
(p, e) = self.STRINGS[-1]
process = subprocess.Popen([sys.executable, "-mquopri", "-d"],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
self.addCleanup(process.stdout.close)
cout, cerr = process.communicate(e)
cout = cout.decode('latin-1')
p = p.decode('latin-1')
self.assertEqual(cout.splitlines(), p.splitlines())
def test_main():
support.run_unittest(QuopriTestCase)
if __name__ == "__main__":
test_main()
| bsd-3-clause |
linuxdeepin/deepin-media-player | src/widget/window.py | 1 | 5101 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Deepin, Inc.
# 2013 Hailong Qiu
#
# Author: Hailong Qiu <[email protected]>
# Maintainer: Hailong Qiu <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from utils import propagate_expose
import math
import cairo
import gtk
class MenuWindow(gtk.Window):
def __init__(self):
gtk.Window.__init__(self, gtk.WINDOW_POPUP)
self.__init_values()
self.__init_settings()
self.__init_events()
def __init_values(self):
self.on_paint_expose_event = None
self.alpha = 1.0
# Shadow settings.
self.__sahow_check = True
self.__sahow_value = 2
self.__sahow_color = ("#FFFFFF", 0.5)
#
self.__surface = None
#
self.__old_w, self.__old_h = 0, 0
def get_sahow_value(self):
return self.__sahow_value
def __init_settings(self):
self.set_colormap(gtk.gdk.Screen().get_rgba_colormap())
self.set_decorated(False)
self.set_app_paintable(True)
self.set_skip_pager_hint(True)
self.set_skip_taskbar_hint(True)
self.set_position(gtk.WIN_POS_NONE)
self.set_type_hint(gtk.gdk.WINDOW_TYPE_HINT_MENU)
self.set_opacity(self.alpha)
def __init_events(self):
self.add_events(gtk.gdk.ALL_EVENTS_MASK)
self.connect("size-allocate", self.__on_size_allocate)
self.connect("expose-event", self.__expose_event)
self.connect("destroy", lambda w : gtk.main_quit())
def __on_size_allocate(self, widget, alloc):
x, y, w, h = self.allocation
# Avoid redundant rebuilds when the size is unchanged.
if (self.__old_w == w and self.__old_h == h):
return False
self.__surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, w, h)
self.__surface_context = cairo.Context(self.__surface)
self.__compute_shadow(w, h)
#
self.__old_w = w
self.__old_h = h
def __compute_shadow(self, w, h):
#cr = self.__surface_context
x, y = 0, 0
'''
self.on_draw_rectangle(x, y, w, h)
cr.set_source_rgba(*alpha_color_hex_to_cairo((self.__sahow_color)))
cr.fill_preserve()
gaussian_blur(self.__surface, self.__sahow_value)
# Draw the outer border.
cr.clip()
self.on_draw_rectangle(x, y, w, h)
self.__border_out_color = ("#000000", 1.0)
cr.set_source_rgba(*alpha_color_hex_to_cairo((self.__border_out_color)))
cr.fill_preserve()
# Draw the inner border.
cr.clip()
self.on_draw_rectangle(x + 0.5, y + 0.5, w, h)
self.__border_out_color = ("#FFFFFF", 0.9)
cr.set_source_rgba(*alpha_color_hex_to_cairo((self.__border_out_color)))
cr.fill_preserve()
'''
def on_draw_rectangle(self, x, y, w, h):
cr = self.__surface_context
radius = 5
x += radius
y += radius
w = w - x * 2
h = h - y * 2
# The set of corner arcs.
arc_set = [
(x + radius, y + radius, radius, 1 * math.pi, 1.5 * math.pi),
(x + w - radius, y + radius, radius, math.pi * 1.5, math.pi * 2.0),
(x + w - radius, y + h - radius, radius, 0, math.pi * 0.5),
(x + radius, y + h - radius, radius, math.pi * 0.5, math.pi)
]
#
for x, y, r, start, end in arc_set:
cr.arc(x, y, r, start, end)
cr.close_path()
def __expose_event(self, widget, event):
cr = widget.window.cairo_create()
rect = widget.allocation
#
cr.rectangle(*rect)
cr.set_source_rgba(1, 1, 1, 0)
cr.set_operator(cairo.OPERATOR_SOURCE)
cr.paint()
#
cr = widget.window.cairo_create()
#
'''
if self.__sahow_check: # Whether to draw the shadow.
self.draw_surface_expose_event(cr)
else: # If the shadow is disabled.
cr.set_source_rgba(1, 1, 1, 1.0)
cr.paint()
#
'''
if self.on_paint_expose_event:
self.on_paint_expose_event(widget, event)
#
propagate_expose(widget, event)
return True
def draw_surface_expose_event(self, cr):
if self.__surface:
cr.set_source_surface(self.__surface, 0, 0)
cr.paint()
if __name__ == "__main__":
win = MenuWindow()
win.move(300, 300)
win.set_size_request(300, 300)
win.show_all()
gtk.main()
| gpl-3.0 |
grenade/password-store-fork | contrib/importers/kwallet2pass.py | 37 | 3626 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Juhamatti Niemelä <[email protected]>. All Rights Reserved.
# Copyright (C) 2014 Diggory Hardy <[email protected]>. All Rights Reserved.
# This file is licensed under the GPLv2+. Please see COPYING for more information.
import sys
import re
from subprocess import Popen, PIPE
from xml.etree import ElementTree
HEAD = '/passwords/'
def insert_data(path,text):
""" Insert data into the password store.
(1) removes HEAD from path
(2) ensures text ends with a new line and encodes in UTF-8
(3) inserts
"""
global HEAD
if path.startswith(HEAD):
path = path[len(HEAD):]
if not text.endswith('\n'):
text = text + '\n'
text = text.encode('utf8')
#print "Import: " + path + ": " + text
proc = Popen(['pass', 'insert', '--multiline', '--force', path],
stdin=PIPE, stdout=PIPE)
proc.communicate(text)
proc.wait()
def space_to_camelcase(value):
output = ""
first_word_passed = False
for word in value.split(" "):
if not word:
output += "_"
continue
if first_word_passed:
output += word.capitalize()
else:
output += word.lower()
first_word_passed = True
return output
def cleanTitle(title):
# make the title more command line friendly
title = re.sub("(\\|\||\(|\)|/)", "-", title)
title = re.sub("-$", "", title)
title = re.sub("\@", "At", title)
title = re.sub("'", "", title)
return title
def path_for(element, path=''):
""" Generate path name from elements title and current path """
title = cleanTitle(space_to_camelcase(element.attrib['name']))
return '/'.join([path, title])
def unexpected(element, path):
print "Unexpected element: " + path + '/' + element.tag + "\tAttributes: " + str(element.attrib)
def import_map(element, path):
npath = path_for(element, path)
nEntries = 0
text = 'Map'
for child in element:
if child.tag == 'mapentry':
name = child.attrib['name']
text = text + '\n\n' + name + '\n' + child.text
nEntries += 1
for child2 in child:
unexpected(child, path_for(child, npath))
else:
unexpected(child, npath)
insert_data(npath, text)
print "Map " + npath + " [" + str(nEntries) + " entries]"
def import_password(element, path=''):
""" Import new password entry to password-store using pass insert
command """
npath = path_for(element, path)
text = element.text
if text == None:
print "Password " + npath + ": no text"
text = ""
insert_data(npath, text)
for child in element:
unexpected(child, npath)
def import_folder(element, path=''):
""" Import all entries and folders from given folder """
npath = path_for(element, path)
print "Importing folder " + npath
nPasswords = 0
for child in element:
if child.tag == 'folder':
import_folder(child, npath)
elif child.tag == 'password':
import_password(child, npath)
nPasswords += 1
elif child.tag == 'map':
import_map(child, npath)
else:
unexpected(child, npath)
if nPasswords > 0:
print "[" + str(nPasswords) + " passwords]"
def main(xml_file):
""" Parse XML entries from a KWallet """
element = ElementTree.parse(xml_file).getroot()
assert element.tag == 'wallet'
import_folder(element)
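# Typical invocation (an assumption; wallet.xml is a wallet exported via
# KWallet Manager's XML export):
#   python kwallet2pass.py wallet.xml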
if __name__ == '__main__':
main(sys.argv[1])
| gpl-2.0 |
Pistachitos/Sick-Beard | lib/hachoir_parser/image/gif.py | 90 | 8192 | """
GIF picture parser.
Author: Victor Stinner
"""
from lib.hachoir_parser import Parser
from lib.hachoir_core.field import (FieldSet, ParserError,
Enum, UInt8, UInt16,
Bit, Bits, NullBytes,
String, PascalString8, Character,
NullBits, RawBytes)
from lib.hachoir_parser.image.common import PaletteRGB
from lib.hachoir_core.endian import LITTLE_ENDIAN
from lib.hachoir_core.tools import humanDuration
from lib.hachoir_core.text_handler import textHandler, displayHandler, hexadecimal
# Maximum image dimension (in pixel)
MAX_WIDTH = 6000
MAX_HEIGHT = MAX_WIDTH
MAX_FILE_SIZE = 100 * 1024 * 1024
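# Minimal usage sketch (an illustration, not part of the original parser;
# createParser is hachoir's generic entry point and "sample.gif" is an assumed
# local file):
#   from lib.hachoir_parser import createParser
#   parser = createParser(u"sample.gif")
#   print "%ux%u pixels" % (parser["screen/width"].value, parser["screen/height"].value)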
class Image(FieldSet):
def createFields(self):
yield UInt16(self, "left", "Left")
yield UInt16(self, "top", "Top")
yield UInt16(self, "width", "Width")
yield UInt16(self, "height", "Height")
yield Bits(self, "bpp", 3, "Bits / pixel minus one")
yield NullBits(self, "nul", 2)
yield Bit(self, "sorted", "Sorted??")
yield Bit(self, "interlaced", "Interlaced?")
yield Bit(self, "has_local_map", "Use local color map?")
if self["has_local_map"].value:
nb_color = 1 << (1 + self["bpp"].value)
yield PaletteRGB(self, "local_map", nb_color, "Local color map")
yield UInt8(self, "code_size", "LZW Minimum Code Size")
while True:
blen = UInt8(self, "block_len[]", "Block Length")
yield blen
if blen.value != 0:
yield RawBytes(self, "data[]", blen.value, "Image Data")
else:
break
def createDescription(self):
return "Image: %ux%u pixels at (%u,%u)" % (
self["width"].value, self["height"].value,
self["left"].value, self["top"].value)
DISPOSAL_METHOD = {
0: "No disposal specified",
1: "Do not dispose",
2: "Restore to background color",
3: "Restore to previous",
}
NETSCAPE_CODE = {
1: "Loop count",
}
def parseApplicationExtension(parent):
yield PascalString8(parent, "app_name", "Application name")
yield UInt8(parent, "size")
size = parent["size"].value
if parent["app_name"].value == "NETSCAPE2.0" and size == 3:
yield Enum(UInt8(parent, "netscape_code"), NETSCAPE_CODE)
if parent["netscape_code"].value == 1:
yield UInt16(parent, "loop_count")
else:
yield RawBytes(parent, "raw", 2)
else:
yield RawBytes(parent, "raw", size)
yield NullBytes(parent, "terminator", 1, "Terminator (0)")
def parseGraphicControl(parent):
yield UInt8(parent, "size", "Block size (4)")
yield Bit(parent, "has_transp", "Has transparency")
yield Bit(parent, "user_input", "User input")
yield Enum(Bits(parent, "disposal_method", 3), DISPOSAL_METHOD)
yield NullBits(parent, "reserved[]", 3)
if parent["size"].value != 4:
raise ParserError("Invalid graphic control size")
yield displayHandler(UInt16(parent, "delay", "Delay time in millisecond"), humanDuration)
yield UInt8(parent, "transp", "Transparent color index")
yield NullBytes(parent, "terminator", 1, "Terminator (0)")
def parseComments(parent):
while True:
field = PascalString8(parent, "comment[]", strip=" \0\r\n\t")
yield field
if field.length == 0:
break
def parseTextExtension(parent):
yield UInt8(parent, "block_size", "Block Size")
yield UInt16(parent, "left", "Text Grid Left")
yield UInt16(parent, "top", "Text Grid Top")
yield UInt16(parent, "width", "Text Grid Width")
yield UInt16(parent, "height", "Text Grid Height")
yield UInt8(parent, "cell_width", "Character Cell Width")
yield UInt8(parent, "cell_height", "Character Cell Height")
yield UInt8(parent, "fg_color", "Foreground Color Index")
yield UInt8(parent, "bg_color", "Background Color Index")
while True:
field = PascalString8(parent, "comment[]", strip=" \0\r\n\t")
yield field
if field.length == 0:
break
def defaultExtensionParser(parent):
while True:
size = UInt8(parent, "size[]", "Size (in bytes)")
yield size
if 0 < size.value:
yield RawBytes(parent, "content[]", size.value)
else:
break
class Extension(FieldSet):
ext_code = {
0xf9: ("graphic_ctl[]", parseGraphicControl, "Graphic control"),
0xfe: ("comments[]", parseComments, "Comments"),
0xff: ("app_ext[]", parseApplicationExtension, "Application extension"),
0x01: ("text_ext[]", parseTextExtension, "Plain text extension")
}
def __init__(self, *args):
FieldSet.__init__(self, *args)
code = self["code"].value
if code in self.ext_code:
self._name, self.parser, self._description = self.ext_code[code]
else:
self.parser = defaultExtensionParser
def createFields(self):
yield textHandler(UInt8(self, "code", "Extension code"), hexadecimal)
for field in self.parser(self):
yield field
def createDescription(self):
return "Extension: function %s" % self["func"].display
class ScreenDescriptor(FieldSet):
def createFields(self):
yield UInt16(self, "width", "Width")
yield UInt16(self, "height", "Height")
yield Bits(self, "bpp", 3, "Bits per pixel minus one")
yield Bit(self, "reserved", "(reserved)")
yield Bits(self, "color_res", 3, "Color resolution minus one")
yield Bit(self, "global_map", "Has global map?")
yield UInt8(self, "background", "Background color")
yield UInt8(self, "pixel_aspect_ratio", "Pixel Aspect Ratio")
def createDescription(self):
colors = 1 << (self["bpp"].value+1)
return "Screen descriptor: %ux%u pixels %u colors" \
% (self["width"].value, self["height"].value, colors)
class GifFile(Parser):
endian = LITTLE_ENDIAN
separator_name = {
"!": "Extension",
",": "Image",
";": "Terminator"
}
PARSER_TAGS = {
"id": "gif",
"category": "image",
"file_ext": ("gif",),
"mime": (u"image/gif",),
"min_size": (6 + 7 + 1 + 9)*8, # signature + screen + separator + image
"magic": (("GIF87a", 0), ("GIF89a", 0)),
"description": "GIF picture"
}
def validate(self):
if self.stream.readBytes(0, 6) not in ("GIF87a", "GIF89a"):
return "Wrong header"
if self["screen/width"].value == 0 or self["screen/height"].value == 0:
return "Invalid image size"
if MAX_WIDTH < self["screen/width"].value:
return "Image width too big (%u)" % self["screen/width"].value
if MAX_HEIGHT < self["screen/height"].value:
return "Image height too big (%u)" % self["screen/height"].value
return True
def createFields(self):
# Header
yield String(self, "magic", 3, "File magic code", charset="ASCII")
yield String(self, "version", 3, "GIF version", charset="ASCII")
yield ScreenDescriptor(self, "screen")
if self["screen/global_map"].value:
bpp = (self["screen/bpp"].value+1)
yield PaletteRGB(self, "color_map", 1 << bpp, "Color map")
self.color_map = self["color_map"]
else:
self.color_map = None
self.images = []
while True:
code = Enum(Character(self, "separator[]", "Separator code"), self.separator_name)
yield code
code = code.value
if code == "!":
yield Extension(self, "extensions[]")
elif code == ",":
yield Image(self, "image[]")
elif code == ";":
# GIF Terminator
break
else:
raise ParserError("Wrong GIF image separator: 0x%02X" % ord(code))
def createContentSize(self):
field = self["image[0]"]
start = field.absolute_address + field.size
end = start + MAX_FILE_SIZE*8
pos = self.stream.searchBytes("\0;", start, end)
if pos:
return pos + 16
return None
| gpl-3.0 |
floydhub/dockerfiles | dl/tensorflow/tests/1.5.0/mnist_eager.py | 1 | 9530 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A deep MNIST classifier using convolutional layers.
Sample usage:
python mnist.py --help
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import functools
import os
import sys
import time
import tensorflow as tf
import tensorflow.contrib.eager as tfe
from tensorflow.examples.tutorials.mnist import input_data
FLAGS = None
class MNISTModel(tfe.Network):
"""MNIST Network.
Network structure is equivalent to:
https://github.com/tensorflow/tensorflow/blob/r1.5/tensorflow/examples/tutorials/mnist/mnist_deep.py
and
https://github.com/tensorflow/models/blob/master/tutorials/image/mnist/convolutional.py
But written using the tf.layers API.
"""
def __init__(self, data_format):
"""Creates a model for classifying a hand-written digit.
Args:
data_format: Either 'channels_first' or 'channels_last'.
'channels_first' is typically faster on GPUs while 'channels_last' is
typically faster on CPUs. See
https://www.tensorflow.org/performance/performance_guide#data_formats
"""
super(MNISTModel, self).__init__(name='')
if data_format == 'channels_first':
self._input_shape = [-1, 1, 28, 28]
else:
assert data_format == 'channels_last'
self._input_shape = [-1, 28, 28, 1]
self.conv1 = self.track_layer(
tf.layers.Conv2D(32, 5, data_format=data_format, activation=tf.nn.relu))
self.conv2 = self.track_layer(
tf.layers.Conv2D(64, 5, data_format=data_format, activation=tf.nn.relu))
self.fc1 = self.track_layer(tf.layers.Dense(1024, activation=tf.nn.relu))
self.fc2 = self.track_layer(tf.layers.Dense(10))
self.dropout = self.track_layer(tf.layers.Dropout(0.5))
self.max_pool2d = self.track_layer(
tf.layers.MaxPooling2D(
(2, 2), (2, 2), padding='SAME', data_format=data_format))
def call(self, inputs, training):
"""Computes labels from inputs.
Users should invoke __call__ to run the network, which delegates to this
method (and not call this method directly).
Args:
inputs: A batch of images as a Tensor with shape [batch_size, 784].
training: True if invoked in the context of training (causing dropout to
be applied). False otherwise.
Returns:
A Tensor with shape [batch_size, 10] containing the predicted logits
for each image in the batch, for each of the 10 classes.
"""
x = tf.reshape(inputs, self._input_shape)
x = self.conv1(x)
x = self.max_pool2d(x)
x = self.conv2(x)
x = self.max_pool2d(x)
x = tf.layers.flatten(x)
x = self.fc1(x)
if training:
x = self.dropout(x)
x = self.fc2(x)
return x
def loss(predictions, labels):
return tf.reduce_mean(
tf.nn.softmax_cross_entropy_with_logits(
logits=predictions, labels=labels))
def compute_accuracy(predictions, labels):
return tf.reduce_sum(
tf.cast(
tf.equal(
tf.argmax(predictions, axis=1,
output_type=tf.int64),
tf.argmax(labels, axis=1,
output_type=tf.int64)),
dtype=tf.float32)) / float(predictions.shape[0].value)
def train_one_epoch(model, optimizer, dataset, log_interval=None):
"""Trains model on `dataset` using `optimizer`."""
tf.train.get_or_create_global_step()
def model_loss(labels, images):
prediction = model(images, training=True)
loss_value = loss(prediction, labels)
tf.contrib.summary.scalar('loss', loss_value)
tf.contrib.summary.scalar('accuracy',
compute_accuracy(prediction, labels))
return loss_value
for (batch, (images, labels)) in enumerate(tfe.Iterator(dataset)):
with tf.contrib.summary.record_summaries_every_n_global_steps(10):
batch_model_loss = functools.partial(model_loss, labels, images)
optimizer.minimize(
batch_model_loss, global_step=tf.train.get_global_step())
if log_interval and batch % log_interval == 0:
print('Batch #%d\tLoss: %.6f' % (batch, batch_model_loss()))
def test(model, dataset):
"""Perform an evaluation of `model` on the examples from `dataset`."""
avg_loss = tfe.metrics.Mean('loss')
accuracy = tfe.metrics.Accuracy('accuracy')
for (images, labels) in tfe.Iterator(dataset):
predictions = model(images, training=False)
avg_loss(loss(predictions, labels))
accuracy(tf.argmax(predictions, axis=1, output_type=tf.int64),
tf.argmax(labels, axis=1, output_type=tf.int64))
print('Test set: Average loss: %.4f, Accuracy: %4f%%\n' %
(avg_loss.result(), 100 * accuracy.result()))
with tf.contrib.summary.always_record_summaries():
tf.contrib.summary.scalar('loss', avg_loss.result())
tf.contrib.summary.scalar('accuracy', accuracy.result())
def load_data(data_dir):
"""Returns training and test tf.data.Dataset objects."""
data = input_data.read_data_sets(data_dir, one_hot=True)
train_ds = tf.data.Dataset.from_tensor_slices((data.train.images,
data.train.labels))
test_ds = tf.data.Dataset.from_tensors((data.test.images, data.test.labels))
return (train_ds, test_ds)
def main(_):
tfe.enable_eager_execution()
# Log Info
print("-" * 64)
print("TEST INFO - EAGER")
print("-" * 64)
print("TF version:\t {}".format(tf.__version__))
print("Dataset:\t MNIST")
print("Model:\t CNN")
(device, data_format) = ('/gpu:0', 'channels_first')
if FLAGS.no_gpu or tfe.num_gpus() <= 0:
(device, data_format) = ('/cpu:0', 'channels_last')
print('Device:\t {}'.format(device))
if data_format == 'channels_first':
print("Data format:\t NCHW (channel first)")
else:
print("Data format:\t NHWC (channel last)")
print("=" * 64)
# Load the datasets
(train_ds, test_ds) = load_data(FLAGS.data_dir)
train_ds = train_ds.shuffle(60000).batch(FLAGS.batch_size)
# Create the model and optimizer
model = MNISTModel(data_format)
optimizer = tf.train.MomentumOptimizer(FLAGS.lr, FLAGS.momentum)
if FLAGS.output_dir:
train_dir = os.path.join(FLAGS.output_dir, 'train')
test_dir = os.path.join(FLAGS.output_dir, 'eval')
tf.gfile.MakeDirs(FLAGS.output_dir)
else:
train_dir = None
test_dir = None
summary_writer = tf.contrib.summary.create_file_writer(
train_dir, flush_millis=10000)
test_summary_writer = tf.contrib.summary.create_file_writer(
test_dir, flush_millis=10000, name='test')
checkpoint_prefix = os.path.join(FLAGS.checkpoint_dir, 'ckpt')
with tf.device(device):
for epoch in range(1, 6):
with tfe.restore_variables_on_create(
tf.train.latest_checkpoint(FLAGS.checkpoint_dir)):
global_step = tf.train.get_or_create_global_step()
start = time.time()
with summary_writer.as_default():
train_one_epoch(model, optimizer, train_ds, FLAGS.log_interval)
end = time.time()
print('\nTrain time for epoch #%d (global step %d): %f' % (
epoch, global_step.numpy(), end - start))
with test_summary_writer.as_default():
test(model, test_ds)
all_variables = (
model.variables
+ optimizer.variables()
+ [global_step])
tfe.Saver(all_variables).save(
checkpoint_prefix, global_step=global_step)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--data-dir',
type=str,
default='/tmp/tensorflow/mnist/input_data',
help='Directory for storing input data')
parser.add_argument(
'--batch-size',
type=int,
default=100,
metavar='N',
help='input batch size for training (default: 100)')
parser.add_argument(
'--log-interval',
type=int,
default=10,
metavar='N',
help='how many batches to wait before logging training status')
parser.add_argument(
'--output_dir',
type=str,
default=None,
metavar='N',
help='Directory to write TensorBoard summaries')
parser.add_argument(
'--checkpoint_dir',
type=str,
default='/tmp/tensorflow/mnist/checkpoints/',
metavar='N',
help='Directory to save checkpoints in (once per epoch)')
parser.add_argument(
'--lr',
type=float,
default=0.01,
metavar='LR',
help='learning rate (default: 0.01)')
parser.add_argument(
'--momentum',
type=float,
default=0.5,
metavar='M',
help='SGD momentum (default: 0.5)')
parser.add_argument(
'--no-gpu',
action='store_true',
default=False,
help='disables GPU usage even if a GPU is available')
FLAGS, unparsed = parser.parse_known_args()
tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
| apache-2.0 |
vefimova/rally | tests/unit/plugins/openstack/scenarios/nova/test_utils.py | 1 | 41162 | # Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from rally import exceptions as rally_exceptions
from rally.plugins.openstack.scenarios.nova import utils
from tests.unit import fakes
from tests.unit import test
BM_UTILS = "rally.task.utils"
NOVA_UTILS = "rally.plugins.openstack.scenarios.nova.utils"
CONF = cfg.CONF
class NovaScenarioTestCase(test.ScenarioTestCase):
def setUp(self):
super(NovaScenarioTestCase, self).setUp()
self.server = mock.Mock()
self.server1 = mock.Mock()
self.volume = mock.Mock()
self.floating_ip = mock.Mock()
self.image = mock.Mock()
self.keypair = mock.Mock()
def test__list_servers(self):
servers_list = []
self.clients("nova").servers.list.return_value = servers_list
nova_scenario = utils.NovaScenario()
return_servers_list = nova_scenario._list_servers(True)
self.assertEqual(servers_list, return_servers_list)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.list_servers")
@mock.patch(NOVA_UTILS + ".NovaScenario._generate_random_name",
return_value="foo_server_name")
def test__boot_server(self, mock__generate_random_name):
self.clients("nova").servers.create.return_value = self.server
nova_scenario = utils.NovaScenario(context={})
return_server = nova_scenario._boot_server("image_id",
"flavor_id")
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_boot_poll_interval,
timeout=CONF.benchmark.nova_server_boot_timeout)
self.mock_resource_is.mock.assert_called_once_with("ACTIVE")
self.mock_get_from_manager.mock.assert_called_once_with()
self.assertEqual(self.mock_wait_for.mock.return_value, return_server)
self.clients("nova").servers.create.assert_called_once_with(
"foo_server_name", "image_id", "flavor_id")
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.boot_server")
@mock.patch(NOVA_UTILS + ".NovaScenario._generate_random_name",
return_value="foo_server_name")
def test__boot_server_with_network(self, mock__generate_random_name):
self.clients("nova").servers.create.return_value = self.server
networks = [{"id": "foo_id", "external": False},
{"id": "bar_id", "external": False}]
self.clients("nova").networks.list.return_value = networks
nova_scenario = utils.NovaScenario(context={
"iteration": 3,
"config": {"users": {"tenants": 2}},
"tenant": {"networks": networks}})
return_server = nova_scenario._boot_server("image_id",
"flavor_id",
auto_assign_nic=True)
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_boot_poll_interval,
timeout=CONF.benchmark.nova_server_boot_timeout)
self.mock_resource_is.mock.assert_called_once_with("ACTIVE")
self.mock_get_from_manager.mock.assert_called_once_with()
self.clients("nova").servers.create.assert_called_once_with(
"foo_server_name", "image_id", "flavor_id",
nics=[{"net-id": "bar_id"}])
self.assertEqual(self.mock_wait_for.mock.return_value, return_server)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.boot_server")
def test__boot_server_with_network_exception(self):
self.clients("nova").servers.create.return_value = self.server
nova_scenario = utils.NovaScenario(
context={"tenant": {"networks": None}})
self.assertRaises(TypeError, nova_scenario._boot_server,
"image_id", "flavor_id",
auto_assign_nic=True)
@mock.patch(NOVA_UTILS + ".NovaScenario._generate_random_name",
return_value="foo_server_name")
def test__boot_server_with_ssh(self, mock__generate_random_name):
self.clients("nova").servers.create.return_value = self.server
nova_scenario = utils.NovaScenario(context={
"user": {
"secgroup": {"name": "test"},
"endpoint": mock.MagicMock()
}}
)
return_server = nova_scenario._boot_server("image_id", "flavor_id")
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_boot_poll_interval,
timeout=CONF.benchmark.nova_server_boot_timeout)
self.mock_resource_is.mock.assert_called_once_with("ACTIVE")
self.mock_get_from_manager.mock.assert_called_once_with()
self.assertEqual(self.mock_wait_for.mock.return_value, return_server)
self.clients("nova").servers.create.assert_called_once_with(
"foo_server_name", "image_id", "flavor_id",
security_groups=["test"])
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.boot_server")
@mock.patch(NOVA_UTILS + ".NovaScenario._generate_random_name",
return_value="foo_server_name")
def test__boot_server_with_sec_group(self, mock__generate_random_name):
self.clients("nova").servers.create.return_value = self.server
nova_scenario = utils.NovaScenario(context={
"user": {
"secgroup": {"name": "new"},
"endpoint": mock.MagicMock()
}
})
return_server = nova_scenario._boot_server(
"image_id", "flavor_id",
security_groups=["test"])
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_boot_poll_interval,
timeout=CONF.benchmark.nova_server_boot_timeout)
self.mock_resource_is.mock.assert_called_once_with("ACTIVE")
self.mock_get_from_manager.mock.assert_called_once_with()
self.assertEqual(self.mock_wait_for.mock.return_value, return_server)
self.clients("nova").servers.create.assert_called_once_with(
"foo_server_name", "image_id", "flavor_id",
security_groups=["test", "new"])
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.boot_server")
@mock.patch(NOVA_UTILS + ".NovaScenario._generate_random_name",
return_value="foo_server_name")
def test__boot_server_with_similar_sec_group(self,
mock__generate_random_name):
self.clients("nova").servers.create.return_value = self.server
nova_scenario = utils.NovaScenario(context={
"user": {
"secgroup": {"name": "test1"},
"endpoint": mock.MagicMock()
}}
)
return_server = nova_scenario._boot_server(
"image_id", "flavor_id",
security_groups=["test1"])
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_boot_poll_interval,
timeout=CONF.benchmark.nova_server_boot_timeout)
self.mock_resource_is.mock.assert_called_once_with("ACTIVE")
self.mock_get_from_manager.mock.assert_called_once_with()
self.assertEqual(self.mock_wait_for.mock.return_value, return_server)
self.clients("nova").servers.create.assert_called_once_with(
"foo_server_name", "image_id", "flavor_id",
security_groups=["test1"])
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.boot_server")
def test__suspend_server(self):
nova_scenario = utils.NovaScenario()
nova_scenario._suspend_server(self.server)
self.server.suspend.assert_called_once_with()
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_suspend_poll_interval,
timeout=CONF.benchmark.nova_server_suspend_timeout)
self.mock_resource_is.mock.assert_called_once_with("SUSPENDED")
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.suspend_server")
def test__resume_server(self):
nova_scenario = utils.NovaScenario()
nova_scenario._resume_server(self.server)
self.server.resume.assert_called_once_with()
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_resume_poll_interval,
timeout=CONF.benchmark.nova_server_resume_timeout)
self.mock_resource_is.mock.assert_called_once_with("ACTIVE")
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.resume_server")
def test__pause_server(self):
nova_scenario = utils.NovaScenario()
nova_scenario._pause_server(self.server)
self.server.pause.assert_called_once_with()
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_pause_poll_interval,
timeout=CONF.benchmark.nova_server_pause_timeout)
self.mock_resource_is.mock.assert_called_once_with("PAUSED")
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.pause_server")
def test__unpause_server(self):
nova_scenario = utils.NovaScenario()
nova_scenario._unpause_server(self.server)
self.server.unpause.assert_called_once_with()
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_unpause_poll_interval,
timeout=CONF.benchmark.nova_server_unpause_timeout)
self.mock_resource_is.mock.assert_called_once_with("ACTIVE")
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.unpause_server")
def test__shelve_server(self):
nova_scenario = utils.NovaScenario()
nova_scenario._shelve_server(self.server)
self.server.shelve.assert_called_once_with()
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_shelve_poll_interval,
timeout=CONF.benchmark.nova_server_shelve_timeout)
self.mock_resource_is.mock.assert_called_once_with("SHELVED_OFFLOADED")
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.shelve_server")
def test__unshelve_server(self):
nova_scenario = utils.NovaScenario()
nova_scenario._unshelve_server(self.server)
self.server.unshelve.assert_called_once_with()
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_unshelve_poll_interval,
timeout=CONF.benchmark.nova_server_unshelve_timeout)
self.mock_resource_is.mock.assert_called_once_with("ACTIVE")
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.unshelve_server")
def test__create_image(self):
self.clients("nova").images.get.return_value = self.image
nova_scenario = utils.NovaScenario()
return_image = nova_scenario._create_image(self.server)
self.mock_wait_for.mock.assert_called_once_with(
self.image,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.
nova_server_image_create_poll_interval,
timeout=CONF.benchmark.nova_server_image_create_timeout)
self.mock_resource_is.mock.assert_called_once_with("ACTIVE")
self.mock_get_from_manager.mock.assert_called_once_with()
self.assertEqual(self.mock_wait_for.mock.return_value, return_image)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.create_image")
def test__default_delete_server(self):
nova_scenario = utils.NovaScenario()
nova_scenario._delete_server(self.server)
self.server.delete.assert_called_once_with()
self.mock_wait_for_delete.mock.assert_called_once_with(
self.server,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_delete_poll_interval,
timeout=CONF.benchmark.nova_server_delete_timeout)
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.delete_server")
def test__force_delete_server(self):
nova_scenario = utils.NovaScenario()
nova_scenario._delete_server(self.server, force=True)
self.server.force_delete.assert_called_once_with()
self.mock_wait_for_delete.mock.assert_called_once_with(
self.server,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_delete_poll_interval,
timeout=CONF.benchmark.nova_server_delete_timeout)
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.force_delete_server")
def test__reboot_server(self):
nova_scenario = utils.NovaScenario()
nova_scenario._reboot_server(self.server)
self.server.reboot.assert_called_once_with(reboot_type="HARD")
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_reboot_poll_interval,
timeout=CONF.benchmark.nova_server_reboot_timeout)
self.mock_resource_is.mock.assert_called_once_with("ACTIVE")
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.reboot_server")
def test__soft_reboot_server(self):
nova_scenario = utils.NovaScenario()
nova_scenario._soft_reboot_server(self.server)
self.server.reboot.assert_called_once_with(reboot_type="SOFT")
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_reboot_poll_interval,
timeout=CONF.benchmark.nova_server_reboot_timeout)
self.mock_resource_is.mock.assert_called_once_with("ACTIVE")
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.soft_reboot_server")
def test__rebuild_server(self):
nova_scenario = utils.NovaScenario()
nova_scenario._rebuild_server(self.server, "img", fakearg="fakearg")
self.server.rebuild.assert_called_once_with("img", fakearg="fakearg")
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_rebuild_poll_interval,
timeout=CONF.benchmark.nova_server_rebuild_timeout)
self.mock_resource_is.mock.assert_called_once_with("ACTIVE")
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.rebuild_server")
def test__start_server(self):
nova_scenario = utils.NovaScenario()
nova_scenario._start_server(self.server)
self.server.start.assert_called_once_with()
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_start_poll_interval,
timeout=CONF.benchmark.nova_server_start_timeout)
self.mock_resource_is.mock.assert_called_once_with("ACTIVE")
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.start_server")
def test__stop_server(self):
nova_scenario = utils.NovaScenario()
nova_scenario._stop_server(self.server)
self.server.stop.assert_called_once_with()
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_stop_poll_interval,
timeout=CONF.benchmark.nova_server_stop_timeout)
self.mock_resource_is.mock.assert_called_once_with("SHUTOFF")
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.stop_server")
def test__rescue_server(self):
nova_scenario = utils.NovaScenario()
nova_scenario._rescue_server(self.server)
self.server.rescue.assert_called_once_with()
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_rescue_poll_interval,
timeout=CONF.benchmark.nova_server_rescue_timeout)
self.mock_resource_is.mock.assert_called_once_with("RESCUE")
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.rescue_server")
def test__unrescue_server(self):
nova_scenario = utils.NovaScenario()
nova_scenario._unrescue_server(self.server)
self.server.unrescue.assert_called_once_with()
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_unrescue_poll_interval,
timeout=CONF.benchmark.nova_server_unrescue_timeout)
self.mock_resource_is.mock.assert_called_once_with("ACTIVE")
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.unrescue_server")
def _test_delete_servers(self, force=False):
servers = [self.server, self.server1]
nova_scenario = utils.NovaScenario()
nova_scenario._delete_servers(servers, force=force)
check_interval = CONF.benchmark.nova_server_delete_poll_interval
expected = []
for server in servers:
expected.append(mock.call(
server,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=check_interval,
timeout=CONF.benchmark.nova_server_delete_timeout))
if force:
server.force_delete.assert_called_once_with()
self.assertFalse(server.delete.called)
else:
server.delete.assert_called_once_with()
self.assertFalse(server.force_delete.called)
self.mock_wait_for_delete.mock.assert_has_calls(expected)
timer_name = "nova.%sdelete_servers" % ("force_" if force else "")
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
timer_name)
def test__default_delete_servers(self):
self._test_delete_servers()
def test__force_delete_servers(self):
self._test_delete_servers(force=True)
def test__delete_image(self):
nova_scenario = utils.NovaScenario()
nova_scenario._delete_image(self.image)
self.image.delete.assert_called_once_with()
self.mock_wait_for_delete.mock.assert_called_once_with(
self.image,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.
nova_server_image_delete_poll_interval,
timeout=CONF.benchmark.nova_server_image_delete_timeout)
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.delete_image")
def test__boot_servers(self):
self.clients("nova").servers.list.return_value = [self.server,
self.server1]
nova_scenario = utils.NovaScenario()
nova_scenario._boot_servers("image", "flavor", 2)
expected = [
mock.call(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_boot_poll_interval,
timeout=CONF.benchmark.nova_server_boot_timeout
),
mock.call(
self.server1,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_boot_poll_interval,
timeout=CONF.benchmark.nova_server_boot_timeout
)
]
self.mock_wait_for.mock.assert_has_calls(expected)
self.mock_resource_is.mock.assert_has_calls([mock.call("ACTIVE"),
mock.call("ACTIVE")])
self.mock_get_from_manager.mock.assert_has_calls([mock.call(),
mock.call()])
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.boot_servers")
def test__associate_floating_ip(self):
nova_scenario = utils.NovaScenario()
nova_scenario._associate_floating_ip(self.server, self.floating_ip)
self.server.add_floating_ip.assert_called_once_with(self.floating_ip,
fixed_address=None)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.associate_floating_ip")
def test__dissociate_floating_ip(self):
nova_scenario = utils.NovaScenario()
nova_scenario._dissociate_floating_ip(self.server, self.floating_ip)
self.server.remove_floating_ip.assert_called_once_with(
self.floating_ip)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.dissociate_floating_ip")
def test__check_ip_address(self):
nova_scenario = utils.NovaScenario()
fake_server = fakes.FakeServerManager().create("test_server",
"image_id_01",
"flavor_id_01")
fake_server.addresses = {
"private": [
{"version": 4, "addr": "1.2.3.4"},
]}
floating_ip = fakes.FakeFloatingIP()
floating_ip.ip = "10.20.30.40"
# Also test that check_ip_address accepts a plain IP string
self.assertFalse(
nova_scenario.check_ip_address(floating_ip.ip)(fake_server))
self.assertTrue(
nova_scenario.check_ip_address(floating_ip.ip, must_exist=False)
(fake_server))
fake_server.addresses["private"].append(
{"version": 4, "addr": floating_ip.ip}
)
# Also test that check_ip_address accepts an object with an "ip" attribute
self.assertTrue(
nova_scenario.check_ip_address(floating_ip)
(fake_server))
self.assertFalse(
nova_scenario.check_ip_address(floating_ip, must_exist=False)
(fake_server))
def test__list_networks(self):
network_list = []
self.clients("nova").networks.list.return_value = network_list
nova_scenario = utils.NovaScenario()
return_network_list = nova_scenario._list_networks()
self.assertEqual(network_list, return_network_list)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.list_networks")
def test__resize(self):
nova_scenario = utils.NovaScenario()
to_flavor = mock.Mock()
nova_scenario._resize(self.server, to_flavor)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.resize")
def test__resize_confirm(self):
nova_scenario = utils.NovaScenario()
nova_scenario._resize_confirm(self.server)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.resize_confirm")
def test__resize_revert(self):
nova_scenario = utils.NovaScenario()
nova_scenario._resize_revert(self.server)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.resize_revert")
def test__attach_volume(self):
self.clients("nova").volumes.create_server_volume.return_value = None
nova_scenario = utils.NovaScenario()
nova_scenario._attach_volume(self.server, self.volume)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.attach_volume")
def test__detach_volume(self):
self.clients("nova").volumes.delete_server_volume.return_value = None
nova_scenario = utils.NovaScenario()
nova_scenario._detach_volume(self.server, self.volume)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.detach_volume")
def test__live_migrate_server(self):
fake_host = mock.MagicMock()
self.admin_clients("nova").servers.get(return_value=self.server)
nova_scenario = utils.NovaScenario()
nova_scenario._live_migrate(self.server,
fake_host,
block_migration=False,
disk_over_commit=False,
skip_host_check=True)
self.mock_wait_for.mock.assert_called_once_with(
self.server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.
nova_server_live_migrate_poll_interval,
timeout=CONF.benchmark.nova_server_live_migrate_timeout)
self.mock_resource_is.mock.assert_called_once_with("ACTIVE")
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.live_migrate")
def test__find_host_to_migrate(self):
fake_server = self.server
fake_host = {"nova-compute": {"available": True}}
self.admin_clients("nova").servers.get.return_value = fake_server
self.admin_clients("nova").availability_zones.list.return_value = [
mock.MagicMock(zoneName="a",
hosts={"a1": fake_host, "a2": fake_host,
"a3": fake_host}),
mock.MagicMock(zoneName="b",
hosts={"b1": fake_host, "b2": fake_host,
"b3": fake_host}),
mock.MagicMock(zoneName="c",
hosts={"c1": fake_host,
"c2": fake_host, "c3": fake_host})
]
setattr(fake_server, "OS-EXT-SRV-ATTR:host", "b2")
setattr(fake_server, "OS-EXT-AZ:availability_zone", "b")
nova_scenario = utils.NovaScenario()
self.assertIn(
nova_scenario._find_host_to_migrate(fake_server), ["b1", "b3"])
def test__migrate_server(self):
fake_server = self.server
setattr(fake_server, "OS-EXT-SRV-ATTR:host", "a1")
self.clients("nova").servers.get(return_value=fake_server)
nova_scenario = utils.NovaScenario()
nova_scenario._migrate(fake_server, skip_host_check=True)
self.mock_wait_for.mock.assert_called_once_with(
fake_server,
is_ready=self.mock_resource_is.mock.return_value,
update_resource=self.mock_get_from_manager.mock.return_value,
check_interval=CONF.benchmark.nova_server_migrate_poll_interval,
timeout=CONF.benchmark.nova_server_migrate_timeout)
self.mock_resource_is.mock.assert_called_once_with("VERIFY_RESIZE")
self.mock_get_from_manager.mock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.migrate")
self.assertRaises(rally_exceptions.MigrateException,
nova_scenario._migrate,
fake_server, skip_host_check=False)
def test__create_security_groups(self):
nova_scenario = utils.NovaScenario()
nova_scenario._generate_random_name = mock.MagicMock()
security_group_count = 5
sec_groups = nova_scenario._create_security_groups(
security_group_count)
self.assertEqual(security_group_count, len(sec_groups))
self.assertEqual(security_group_count,
nova_scenario._generate_random_name.call_count)
self.assertEqual(
security_group_count,
self.clients("nova").security_groups.create.call_count)
self._test_atomic_action_timer(
nova_scenario.atomic_actions(),
"nova.create_%s_security_groups" % security_group_count)
def test__create_rules_for_security_group(self):
nova_scenario = utils.NovaScenario()
fake_secgroups = [fakes.FakeSecurityGroup(None, None, 1, "uuid1"),
fakes.FakeSecurityGroup(None, None, 2, "uuid2")]
rules_per_security_group = 10
nova_scenario._create_rules_for_security_group(
fake_secgroups, rules_per_security_group)
self.assertEqual(
len(fake_secgroups) * rules_per_security_group,
self.clients("nova").security_group_rules.create.call_count)
self._test_atomic_action_timer(
nova_scenario.atomic_actions(),
"nova.create_%s_rules" %
(rules_per_security_group * len(fake_secgroups)))
def test__delete_security_groups(self):
nova_scenario = utils.NovaScenario()
fake_secgroups = [fakes.FakeSecurityGroup(None, None, 1, "uuid1"),
fakes.FakeSecurityGroup(None, None, 2, "uuid2")]
nova_scenario._delete_security_groups(fake_secgroups)
self.assertSequenceEqual(
map(lambda x: mock.call(x.id), fake_secgroups),
self.clients("nova").security_groups.delete.call_args_list)
self._test_atomic_action_timer(
nova_scenario.atomic_actions(),
"nova.delete_%s_security_groups" % len(fake_secgroups))
def test__list_security_groups(self):
nova_scenario = utils.NovaScenario()
nova_scenario._list_security_groups()
self.clients("nova").security_groups.list.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.list_security_groups")
def test__list_keypairs(self):
keypairs_list = ["foo_keypair"]
self.clients("nova").keypairs.list.return_value = keypairs_list
nova_scenario = utils.NovaScenario()
return_keypairs_list = nova_scenario._list_keypairs()
self.assertEqual(keypairs_list, return_keypairs_list)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.list_keypairs")
def test__create_keypair(self):
self.clients("nova").keypairs.create.return_value.name = self.keypair
nova_scenario = utils.NovaScenario()
return_keypair = nova_scenario._create_keypair()
self.assertEqual(self.keypair, return_keypair)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.create_keypair")
def test__delete_keypair(self):
nova_scenario = utils.NovaScenario()
nova_scenario._delete_keypair(self.keypair)
self.clients("nova").keypairs.delete.assert_called_once_with(
self.keypair)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.delete_keypair")
def test__list_floating_ips_bulk(self):
floating_ips_bulk_list = ["foo_floating_ips_bulk"]
self.admin_clients("nova").floating_ips_bulk.list.return_value = (
floating_ips_bulk_list)
nova_scenario = utils.NovaScenario()
return_floating_ips_bulk_list = nova_scenario._list_floating_ips_bulk()
self.assertEqual(floating_ips_bulk_list, return_floating_ips_bulk_list)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.list_floating_ips_bulk")
@mock.patch(NOVA_UTILS + ".network_wrapper.generate_cidr")
def test__create_floating_ips_bulk(self, mock_generate_cidr):
fake_cidr = "10.2.0.0/24"
fake_pool = "test1"
fake_floating_ips_bulk = mock.MagicMock()
fake_floating_ips_bulk.ip_range = fake_cidr
fake_floating_ips_bulk.pool = fake_pool
self.admin_clients("nova").floating_ips_bulk.create.return_value = (
fake_floating_ips_bulk)
nova_scenario = utils.NovaScenario()
return_iprange = nova_scenario._create_floating_ips_bulk(fake_cidr)
mock_generate_cidr.assert_called_once_with(start_cidr=fake_cidr)
self.assertEqual(return_iprange, fake_floating_ips_bulk)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.create_floating_ips_bulk")
def test__delete_floating_ips_bulk(self):
fake_cidr = "10.2.0.0/24"
nova_scenario = utils.NovaScenario()
nova_scenario._delete_floating_ips_bulk(fake_cidr)
self.admin_clients(
"nova").floating_ips_bulk.delete.assert_called_once_with(fake_cidr)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.delete_floating_ips_bulk")
def test__list_hypervisors(self):
nova_scenario = utils.NovaScenario()
nova_scenario._list_hypervisors(detailed=False)
self.admin_clients("nova").hypervisors.list.assert_called_once_with(
False)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.list_hypervisors")
def test__lock_server(self):
server = mock.Mock()
nova_scenario = utils.NovaScenario()
nova_scenario._lock_server(server)
server.lock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.lock_server")
def test__unlock_server(self):
server = mock.Mock()
nova_scenario = utils.NovaScenario()
nova_scenario._unlock_server(server)
server.unlock.assert_called_once_with()
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.unlock_server")
def test__delete_network(self):
fake_netlabel = "test1"
nova_scenario = utils.NovaScenario()
nova_scenario._delete_network(fake_netlabel)
self.admin_clients("nova").networks.delete.assert_called_once_with(
fake_netlabel)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.delete_network")
@mock.patch(NOVA_UTILS + ".network_wrapper.generate_cidr")
def test__create_network(self, mock_generate_cidr):
fake_cidr = "10.2.0.0/24"
fake_net = mock.MagicMock()
fake_net.cidr = fake_cidr
self.admin_clients("nova").networks.create.return_value = (fake_net)
nova_scenario = utils.NovaScenario()
nova_scenario._generate_random_name = mock.Mock(
return_value="rally_novanet_fake")
return_netlabel = nova_scenario._create_network(fake_cidr,
fakearg="fakearg")
mock_generate_cidr.assert_called_once_with(start_cidr=fake_cidr)
self.admin_clients("nova").networks.create.assert_called_once_with(
label="rally_novanet_fake", cidr=mock_generate_cidr.return_value,
fakearg="fakearg")
self.assertEqual(return_netlabel, fake_net)
self._test_atomic_action_timer(nova_scenario.atomic_actions(),
"nova.create_network")
| apache-2.0 |
vitaliykomarov/NEUCOGAR | nest/noradrenaline/nest-2.10.0/examples/nest/music/msgtest.py | 13 | 1200 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# msgtest.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
import nest
nest.sli_run("statusdict/have_music ::")
if not nest.spp():
import sys
print("NEST was not compiled with support for MUSIC, not running.")
sys.exit()
mmip = nest.Create('music_message_in_proxy')
nest.SetStatus(mmip, {'port_name' : 'msgdata'})
# Simulate and get message data with a granularity of 10 ms:
time = 0
while time < 1000:
nest.Simulate(10)
data = nest.GetStatus(mmip, 'data')
print(data)
time += 10
| gpl-2.0 |
seaotterman/tensorflow | tensorflow/python/saved_model/signature_def_utils.py | 89 | 1583 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""SignatureDef utility functions.
Utility functions for constructing SignatureDef protos.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from tensorflow.python.saved_model.signature_def_utils_impl import build_signature_def
from tensorflow.python.saved_model.signature_def_utils_impl import classification_signature_def
from tensorflow.python.saved_model.signature_def_utils_impl import predict_signature_def
from tensorflow.python.saved_model.signature_def_utils_impl import regression_signature_def
# pylint: enable=unused-import
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = [
"build_signature_def",
"classification_signature_def",
"predict_signature_def",
"regression_signature_def",
]
remove_undocumented(__name__, _allowed_symbols)
| apache-2.0 |
kevin8909/xjerp | openerp/addons/account/project/wizard/__init__.py | 427 | 1337 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_analytic_journal_report
import account_analytic_balance_report
import account_analytic_inverted_balance_report
import account_analytic_cost_ledger_report
import account_analytic_cost_ledger_for_journal_report
import project_account_analytic_line
import account_analytic_chart
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Cube777/dotgit | tests/test_plugins_plain.py | 1 | 2081 | import os
from dotgit.plugins.plain import PlainPlugin
class TestPlainPlugin:
def test_apply(self, tmp_path):
plugin = PlainPlugin(str(tmp_path / 'data'))
data = 'test data'
with open(tmp_path / 'file', 'w') as f:
f.write(data)
plugin.apply(tmp_path / 'file', tmp_path / 'file2')
assert (tmp_path / 'file').exists()
assert (tmp_path / 'file2').exists()
assert not (tmp_path / 'file').is_symlink()
assert not (tmp_path / 'file2').is_symlink()
with open(tmp_path / 'file2', 'r') as f:
assert f.read() == data
def test_remove(self, tmp_path):
plugin = PlainPlugin(str(tmp_path / 'data'))
open(tmp_path / 'file', 'w').close()
plugin.remove(tmp_path / 'file', tmp_path / 'file2')
assert (tmp_path / 'file').exists()
assert (tmp_path / 'file2').exists()
assert not (tmp_path / 'file').is_symlink()
assert (tmp_path / 'file2').is_symlink()
assert (tmp_path / 'file').samefile(tmp_path / 'file2')
def test_samefile_link(self, tmp_path):
plugin = PlainPlugin(str(tmp_path / 'data'))
open(tmp_path / 'file', 'w').close()
os.symlink(tmp_path / 'file', tmp_path / 'file2')
assert plugin.samefile(tmp_path / 'file', tmp_path / 'file2')
def test_samefile_copy(self, tmp_path):
plugin = PlainPlugin(str(tmp_path / 'data'))
open(tmp_path / 'file', 'w').close()
open(tmp_path / 'file2', 'w').close()
assert not plugin.samefile(tmp_path / 'file', tmp_path / 'file2')
def test_hard_mode(self, tmp_path):
plugin = PlainPlugin(str(tmp_path / 'data'), hard=True)
open(tmp_path / 'file', 'w').close()
plugin.remove(tmp_path / 'file', tmp_path / 'file2')
assert (tmp_path / 'file').exists()
assert (tmp_path / 'file2').exists()
assert not (tmp_path / 'file').is_symlink()
assert not (tmp_path / 'file2').is_symlink()
assert not (tmp_path / 'file').samefile(tmp_path / 'file2')
| gpl-2.0 |
hal0x2328/neo-python | neo/Core/Utils.py | 1 | 1665 | import base58
from neo.Settings import settings
from neo.Core.Fixed8 import Fixed8
from typing import Tuple
def isValidPublicAddress(address: str) -> bool:
"""Check if address is a valid NEO address"""
valid = False
if len(address) == 34 and address[0] == 'A':
try:
base58.b58decode_check(address.encode())
valid = True
except ValueError:
# checksum mismatch
valid = False
return valid
def validate_simple_policy(tx) -> Tuple[bool, str]:
"""
Validate transaction policies
Args:
tx: Transaction object
Returns:
tuple:
result: True if it passes the policy checks. False otherwise.
error_msg: empty str if policy passes, otherwise reason for failure.
"""
# verify the maximum tx size is not exceeded
if tx.Size() > tx.MAX_TX_SIZE:
return False, f"Transaction cancelled. The tx size ({tx.Size()}) exceeds the maximum tx size ({tx.MAX_TX_SIZE})."
# calculate and verify the required network fee for the tx
fee = tx.NetworkFee()
if tx.Size() > settings.MAX_FREE_TX_SIZE and not tx.Type == b'\x02': # Claim Transactions are High Priority
req_fee = Fixed8.FromDecimal(settings.FEE_PER_EXTRA_BYTE * (tx.Size() - settings.MAX_FREE_TX_SIZE))
if req_fee < settings.LOW_PRIORITY_THRESHOLD:
req_fee = settings.LOW_PRIORITY_THRESHOLD
if fee < req_fee:
return False, f'Transaction cancelled. The tx size ({tx.Size()}) exceeds the max free tx size ({settings.MAX_FREE_TX_SIZE}).\nA network fee of {req_fee.ToString()} GAS is required.'
return True, ""
| mit |
sunlianqiang/kbengine | kbe/src/lib/python/Lib/encodings/utf_16_le.py | 860 | 1037 | """ Python 'utf-16-le' Codec
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
encode = codecs.utf_16_le_encode
def decode(input, errors='strict'):
return codecs.utf_16_le_decode(input, errors, True)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.utf_16_le_encode(input, self.errors)[0]
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
_buffer_decode = codecs.utf_16_le_decode
class StreamWriter(codecs.StreamWriter):
encode = codecs.utf_16_le_encode
class StreamReader(codecs.StreamReader):
decode = codecs.utf_16_le_decode
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='utf-16-le',
encode=encode,
decode=decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
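# Round-trip sketch using this codec through Python's codec registry (the
# 'utf-16-le' name registered above):
#   'abc'.encode('utf-16-le') # -> b'a\x00b\x00c\x00'
#   b'a\x00b\x00c\x00'.decode('utf-16-le') # -> 'abc'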
| lgpl-3.0 |
BubuLK/sfepy | examples/linear_elasticity/shell10x_cantilever.py | 5 | 3878 | r"""
Bending of a long thin cantilever beam computed using the
:class:`dw_shell10x <sfepy.terms.terms_shells.Shell10XTerm>` term.
Find displacements of the central plane :math:`\ul{u}`, and rotations
:math:`\ul{\alpha}` such that:
.. math::
\int_{\Omega} D_{ijkl}\ e_{ij}(\ul{v}, \ul{\beta})
e_{kl}(\ul{u}, \ul{\alpha})
= - \int_{\Gamma_{right}} \ul{v} \cdot \ul{f}
\;, \quad \forall \ul{v} \;,
where :math:`D_{ijkl}` is the isotropic elastic tensor, given using the Young's
modulus :math:`E` and the Poisson's ratio :math:`\nu`.
The variable ``u`` below holds both :math:`\ul{u}` and :math:`\ul{\alpha}`
DOFs. For visualization, it is saved as two fields ``u_disp`` and ``u_rot``,
corresponding to :math:`\ul{u}` and :math:`\ul{\alpha}`, respectively.
See also :ref:`linear_elasticity-shell10x_cantilever_interactive` example.
View the results using::
python postproc.py shell10x.vtk -d 'u_disp,plot_displacements,rel_scaling=1.0' --opacity='wireframe=0.5' -b --wireframe
"""
from __future__ import absolute_import
from sfepy.base.base import output
from sfepy.discrete.fem.meshio import UserMeshIO
from sfepy.discrete import Integral
import sfepy.mechanics.shell10x as sh
import examples.linear_elasticity.shell10x_cantilever_interactive as sci
# Beam dimensions.
dims = [0.2, 0.01, 0.001]
thickness = dims[2]
transform = 'bend' # None, 'bend' or 'twist'
# Mesh resolution: increase to improve accuracy.
shape = [11, 2]
# Material parameters.
young = 210e9
poisson = 0.3
# Loading force.
force = -1.0
def mesh_hook(mesh, mode):
"""
Generate the beam mesh.
"""
if mode == 'read':
mesh = sci.make_mesh(dims[:2], shape, transform=transform)
return mesh
def post_process(out, problem, state, extend=False):
u = problem.get_variables()['u']
gamma2 = problem.domain.regions['Gamma2']
dofs = u.get_state_in_region(gamma2)
output('DOFs along the loaded edge:')
output('\n%s' % dofs)
if transform != 'twist':
label, ii = {None : ('u_3', 2), 'bend' : ('u_1', 0)}[transform]
u_exact = sci.get_analytical_displacement(dims, young, force,
transform=transform)
output('max. %s displacement:' % label, dofs[0, ii])
output('analytical value:', u_exact)
return out
filename_mesh = UserMeshIO(mesh_hook)
options = {
'nls' : 'newton',
'ls' : 'ls',
'post_process_hook' : 'post_process',
}
if transform is None:
pload = [[0.0, 0.0, force / shape[1], 0.0, 0.0, 0.0]] * shape[1]
elif transform == 'bend':
pload = [[force / shape[1], 0.0, 0.0, 0.0, 0.0, 0.0]] * shape[1]
elif transform == 'twist':
pload = [[0.0, force / shape[1], 0.0, 0.0, 0.0, 0.0]] * shape[1]
materials = {
'm' : ({
'D' : sh.create_elastic_tensor(young=young, poisson=poisson),
'.drill' : 1e-7,
},),
'load' : ({
'.val' : pload,
},)
}
xmin = (-0.5 + 1e-12) * dims[0]
xmax = (0.5 - 1e-12) * dims[0]
regions = {
'Omega' : 'all',
'Gamma1' : ('vertices in (x < %.14f)' % xmin, 'facet'),
'Gamma2' : ('vertices in (x > %.14f)' % xmax, 'facet'),
}
fields = {
'fu': ('real', 6, 'Omega', 1, 'H1', 'shell10x'),
}
variables = {
'u' : ('unknown field', 'fu', 0),
'v' : ('test field', 'fu', 'u'),
}
ebcs = {
'fix' : ('Gamma1', {'u.all' : 0.0}),
}
# Custom integral.
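# The third reference coordinate of the hexahedral quadrature points is mapped
# from [0, 1] to [-thickness/2, thickness/2] and the weights are scaled by the
# thickness, so the integration spans the physical shell thickness.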
aux = Integral('i', order=3)
qp_coors, qp_weights = aux.get_qp('3_8')
qp_coors[:, 2] = thickness * (qp_coors[:, 2] - 0.5)
qp_weights *= thickness
integrals = {
'i' : ('custom', qp_coors, qp_weights),
}
equations = {
'elasticity' :
"""dw_shell10x.i.Omega(m.D, m.drill, v, u)
= dw_point_load.i.Gamma2(load.val, v)""",
}
solvers = {
'ls' : ('ls.scipy_direct', {}),
'newton' : ('nls.newton', {
'i_max' : 1,
'eps_a' : 1e-7,
}),
}
| bsd-3-clause |
Zhongqilong/mykbengineer | kbe/res/scripts/common/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py | 1730 | 3405 | """A collection of modules for building different kinds of tree from
HTML documents.
To create a treebuilder for a new type of tree, you need to
implement several things:
1) A set of classes for various types of elements: Document, Doctype,
Comment, Element. These must implement the interface of
_base.treebuilders.Node (although comment nodes have a different
signature for their constructor, see treebuilders.etree.Comment)
Textual content may also be implemented as another node type, or not, as
your tree implementation requires.
2) A treebuilder object (called TreeBuilder by convention) that
inherits from treebuilders._base.TreeBuilder. This has 4 required attributes:
documentClass - the class to use for the bottommost node of a document
elementClass - the class to use for HTML Elements
commentClass - the class to use for comments
doctypeClass - the class to use for doctypes
It also has one required method:
getDocument - Returns the root node of the complete document tree
3) If you wish to run the unit tests, you must also create a
testSerializer method on your treebuilder which accepts a node and
returns a string containing Node and its children serialized according
to the format used in the unittests
"""
from __future__ import absolute_import, division, unicode_literals
from ..utils import default_etree
treeBuilderCache = {}
def getTreeBuilder(treeType, implementation=None, **kwargs):
"""Get a TreeBuilder class for various types of tree with built-in support
treeType - the name of the tree type required (case-insensitive). Supported
values are:
"dom" - A generic builder for DOM implementations, defaulting to
a xml.dom.minidom based implementation.
"etree" - A generic builder for tree implementations exposing an
ElementTree-like interface, defaulting to
xml.etree.cElementTree if available and
xml.etree.ElementTree if not.
"lxml" - A etree-based builder for lxml.etree, handling
limitations of lxml's implementation.
implementation - (Currently applies to the "etree" and "dom" tree types). A
module implementing the tree type e.g.
xml.etree.ElementTree or xml.etree.cElementTree."""
treeType = treeType.lower()
if treeType not in treeBuilderCache:
if treeType == "dom":
from . import dom
# Come up with a sane default (pref. from the stdlib)
if implementation is None:
from xml.dom import minidom
implementation = minidom
# NEVER cache here, caching is done in the dom submodule
return dom.getDomModule(implementation, **kwargs).TreeBuilder
elif treeType == "lxml":
from . import etree_lxml
treeBuilderCache[treeType] = etree_lxml.TreeBuilder
elif treeType == "etree":
from . import etree
if implementation is None:
implementation = default_etree
# NEVER cache here, caching is done in the etree submodule
return etree.getETreeModule(implementation, **kwargs).TreeBuilder
else:
raise ValueError("""Unrecognised treebuilder "%s" """ % treeType)
return treeBuilderCache.get(treeType)
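# Minimal usage sketch (hedged; assumes this package is importable as
# ``html5lib``): the returned class is passed to the parser's ``tree`` argument.
#   import html5lib
#   builder = getTreeBuilder("etree")
#   parser = html5lib.HTMLParser(tree=builder)
#   document = parser.parse("<p>Hello world!</p>")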
| lgpl-3.0 |
DDelon/youtube-dl | youtube_dl/extractor/blinkx.py | 199 | 3217 | from __future__ import unicode_literals
import json
from .common import InfoExtractor
from ..utils import (
remove_start,
int_or_none,
)
class BlinkxIE(InfoExtractor):
_VALID_URL = r'(?:https?://(?:www\.)blinkx\.com/#?ce/|blinkx:)(?P<id>[^?]+)'
IE_NAME = 'blinkx'
_TEST = {
'url': 'http://www.blinkx.com/ce/Da0Gw3xc5ucpNduzLuDDlv4WC9PuI4fDi1-t6Y3LyfdY2SZS5Urbvn-UPJvrvbo8LTKTc67Wu2rPKSQDJyZeeORCR8bYkhs8lI7eqddznH2ofh5WEEdjYXnoRtj7ByQwt7atMErmXIeYKPsSDuMAAqJDlQZ-3Ff4HJVeH_s3Gh8oQ',
'md5': '337cf7a344663ec79bf93a526a2e06c7',
'info_dict': {
'id': 'Da0Gw3xc',
'ext': 'mp4',
'title': 'No Daily Show for John Oliver; HBO Show Renewed - IGN News',
'uploader': 'IGN News',
'upload_date': '20150217',
'timestamp': 1424215740,
'description': 'HBO has renewed Last Week Tonight With John Oliver for two more seasons.',
'duration': 47.743333,
},
}
def _real_extract(self, url):
video_id = self._match_id(url)
display_id = video_id[:8]
api_url = ('https://apib4.blinkx.com/api.php?action=play_video&' +
'video=%s' % video_id)
data_json = self._download_webpage(api_url, display_id)
data = json.loads(data_json)['api']['results'][0]
duration = None
thumbnails = []
formats = []
for m in data['media']:
if m['type'] == 'jpg':
thumbnails.append({
'url': m['link'],
'width': int(m['w']),
'height': int(m['h']),
})
elif m['type'] == 'original':
duration = float(m['d'])
elif m['type'] == 'youtube':
yt_id = m['link']
self.to_screen('Youtube video detected: %s' % yt_id)
return self.url_result(yt_id, 'Youtube', video_id=yt_id)
elif m['type'] in ('flv', 'mp4'):
vcodec = remove_start(m['vcodec'], 'ff')
acodec = remove_start(m['acodec'], 'ff')
vbr = int_or_none(m.get('vbr') or m.get('vbitrate'), 1000)
abr = int_or_none(m.get('abr') or m.get('abitrate'), 1000)
tbr = vbr + abr if vbr and abr else None
format_id = '%s-%sk-%s' % (vcodec, tbr, m['w'])
formats.append({
'format_id': format_id,
'url': m['link'],
'vcodec': vcodec,
'acodec': acodec,
'abr': abr,
'vbr': vbr,
'tbr': tbr,
'width': int_or_none(m.get('w')),
'height': int_or_none(m.get('h')),
})
self._sort_formats(formats)
return {
'id': display_id,
'fullid': video_id,
'title': data['title'],
'formats': formats,
'uploader': data['channel_name'],
'timestamp': data['pubdate_epoch'],
'description': data.get('description'),
'thumbnails': thumbnails,
'duration': duration,
}
| unlicense |
kfeiWang/pythonUtils | estimator.py | 1 | 1971 | # -*- coding:utf8 -*-
from __future__ import print_function
import codecs
def caluPAndRAndF1(file1, file2):
'''
Compute precision p, recall r and the F1 score.
file1: reference (gold) segmentation file
file2: system output (segmented) file
'''
with codecs.open(file1, 'r', 'utf8') as fin1:
with codecs.open(file2, 'r', 'utf8') as fin2:
line1 = fin1.readline()
line2 = fin2.readline()
totalWordCount = 0
rightWordCount = 0
splitWordCount = 0
while line1 and line2:
line1Arr = line1.strip().split(' ')
line2Arr = line2.strip().split(' ')
if len(line1Arr) != len(line2Arr):
raise Exception('word counts of the two sentences do not match')
for w1, w2 in zip(line1Arr, line2Arr): # iterate over corresponding words in the two sentences
set1 = packSet(w1) # split the word on '/' and collect the pieces into a set
set2 = packSet(w2) # split the word on '/' and collect the pieces into a set
#print('w1:', w1, len(set1), 'set1:', set1, 'w2', w2, len(set2), 'set2:', set2)
totalWordCount += len(set1) # number of word pieces in the reference file
splitWordCount += len(set2) # number of word pieces in the segmented output
rightWordCount += len(set1.intersection(set2)) # pieces shared by the reference and the segmented output
line1 = fin1.readline()
line2 = fin2.readline()
p = rightWordCount*1.0/totalWordCount # compute precision
r = rightWordCount*1.0/splitWordCount # compute recall
f1 = p*r*2/(p+r) # compute the F1 score
return p,r,f1
def packSet(word):
'''
word: a token whose pieces are separated by '/'
'''
setR = set()
wArr = word.split('/')
for w in wArr:
setR.add(w)
return setR
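# Example: packSet('ab/cd') -> {'ab', 'cd'}; a plain token such as 'ab'
# yields the single-element set {'ab'}.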
def testCaseCaluPAndRAndF1():
p,r,f1 = caluPAndRAndF1('testData/srcFile', 'testData/splitFile')
print('p:', p, 'r:', r, 'f1:', f1)
if __name__=='__main__':
testCaseCaluPAndRAndF1()
| mit |
radicalbit/ambari | ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/package/scripts/params_linux.py | 2 | 22595 | #!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import re
import ambari_simplejson as json # simplejson is much faster compared to the Python 2.6 json module and provides the same set of functions.
import status_params
from ambari_commons.constants import AMBARI_SUDO_BINARY
from ambari_commons import yaml_utils
from resource_management.libraries.functions import format
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.get_bare_principal import get_bare_principal
from resource_management.libraries.script import Script
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.stack_features import get_stack_feature_version
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.expect import expect
from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster
from resource_management.libraries.functions import is_empty
from ambari_commons.ambari_metrics_helper import select_metric_collector_hosts_from_hostnames
from resource_management.libraries.functions.setup_ranger_plugin_xml import get_audit_configs, generate_ranger_service_config
# server configurations
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
stack_root = status_params.stack_root
sudo = AMBARI_SUDO_BINARY
limits_conf_dir = "/etc/security/limits.d"
# Needed since this is an Atlas Hook service.
cluster_name = config['clusterName']
stack_name = status_params.stack_name
upgrade_direction = default("/commandParams/upgrade_direction", None)
version = default("/commandParams/version", None)
agent_stack_retry_on_unavailability = config['hostLevelParams']['agent_stack_retry_on_unavailability']
agent_stack_retry_count = expect("/hostLevelParams/agent_stack_retry_count", int)
storm_component_home_dir = status_params.storm_component_home_dir
conf_dir = status_params.conf_dir
stack_version_unformatted = status_params.stack_version_unformatted
stack_version_formatted = status_params.stack_version_formatted
stack_supports_ru = stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted)
stack_supports_storm_kerberos = stack_version_formatted and check_stack_feature(StackFeature.STORM_KERBEROS, stack_version_formatted)
stack_supports_storm_ams = stack_version_formatted and check_stack_feature(StackFeature.STORM_AMS, stack_version_formatted)
stack_supports_core_site_for_ranger_plugin = check_stack_feature(StackFeature.CORE_SITE_FOR_RANGER_PLUGINS_SUPPORT, stack_version_formatted)
# get the correct version to use for checking stack features
version_for_stack_feature_checks = get_stack_feature_version(config)
stack_supports_ranger_kerberos = check_stack_feature(StackFeature.RANGER_KERBEROS_SUPPORT, version_for_stack_feature_checks)
stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, version_for_stack_feature_checks)
# default hadoop params
rest_lib_dir = "/usr/lib/storm/contrib/storm-rest"
storm_bin_dir = "/usr/bin"
storm_lib_dir = "/usr/lib/storm/lib/"
# hadoop parameters for 2.2+
if stack_supports_ru:
rest_lib_dir = format("{storm_component_home_dir}/contrib/storm-rest")
storm_bin_dir = format("{storm_component_home_dir}/bin")
storm_lib_dir = format("{storm_component_home_dir}/lib")
log4j_dir = format("{storm_component_home_dir}/log4j2")
storm_user = config['configurations']['storm-env']['storm_user']
log_dir = config['configurations']['storm-env']['storm_log_dir']
pid_dir = status_params.pid_dir
local_dir = config['configurations']['storm-site']['storm.local.dir']
user_group = config['configurations']['cluster-env']['user_group']
java64_home = config['hostLevelParams']['java_home']
jps_binary = format("{java64_home}/bin/jps")
nimbus_port = config['configurations']['storm-site']['nimbus.thrift.port']
storm_zookeeper_root_dir = default('/configurations/storm-site/storm.zookeeper.root', None)
storm_zookeeper_servers = config['configurations']['storm-site']['storm.zookeeper.servers']
storm_zookeeper_port = config['configurations']['storm-site']['storm.zookeeper.port']
storm_logs_supported = config['configurations']['storm-env']['storm_logs_supported']
# nimbus.seeds is supported in HDP 2.3.0.0 and higher
nimbus_seeds_supported = default('/configurations/storm-env/nimbus_seeds_supported', False)
nimbus_host = default('/configurations/storm-site/nimbus.host', None)
nimbus_seeds = default('/configurations/storm-site/nimbus.seeds', None)
default_topology_max_replication_wait_time_sec = default('/configurations/storm-site/topology.max.replication.wait.time.sec.default', -1)
nimbus_hosts = default("/clusterHostInfo/nimbus_hosts", [])
default_topology_min_replication_count = default('/configurations/storm-site/topology.min.replication.count.default', 1)
#Calculate topology.max.replication.wait.time.sec and topology.min.replication.count
if len(nimbus_hosts) > 1:
# for HA Nimbus
actual_topology_max_replication_wait_time_sec = -1
actual_topology_min_replication_count = len(nimbus_hosts) / 2 + 1
else:
# for non-HA Nimbus
actual_topology_max_replication_wait_time_sec = default_topology_max_replication_wait_time_sec
actual_topology_min_replication_count = default_topology_min_replication_count
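# Illustrative example (hypothetical host list, not from a real cluster):
# with nimbus_hosts = ['nb1', 'nb2', 'nb3'], len(nimbus_hosts) / 2 + 1 is 2
# under Python 2 integer division, i.e. a strict majority of the three
# Nimbus nodes must confirm replication of a submitted topology.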
if 'topology.max.replication.wait.time.sec.default' in config['configurations']['storm-site']:
del config['configurations']['storm-site']['topology.max.replication.wait.time.sec.default']
if 'topology.min.replication.count.default' in config['configurations']['storm-site']:
del config['configurations']['storm-site']['topology.min.replication.count.default']
rest_api_port = "8745"
rest_api_admin_port = "8746"
rest_api_conf_file = format("{conf_dir}/config.yaml")
storm_env_sh_template = config['configurations']['storm-env']['content']
jmxremote_port = config['configurations']['storm-env']['jmxremote_port']
if 'ganglia_server_host' in config['clusterHostInfo'] and len(config['clusterHostInfo']['ganglia_server_host'])>0:
ganglia_installed = True
ganglia_server = config['clusterHostInfo']['ganglia_server_host'][0]
ganglia_report_interval = 60
else:
ganglia_installed = False
security_enabled = config['configurations']['cluster-env']['security_enabled']
storm_ui_host = default("/clusterHostInfo/storm_ui_server_hosts", [])
storm_user_nofile_limit = default('/configurations/storm-env/storm_user_nofile_limit', 128000)
storm_user_nproc_limit = default('/configurations/storm-env/storm_user_noproc_limit', 65536)
if security_enabled:
_hostname_lowercase = config['hostname'].lower()
_storm_principal_name = config['configurations']['storm-env']['storm_principal_name']
storm_jaas_principal = _storm_principal_name.replace('_HOST',_hostname_lowercase)
_ambari_principal_name = default('/configurations/cluster-env/ambari_principal_name', None)
storm_keytab_path = config['configurations']['storm-env']['storm_keytab']
if stack_supports_storm_kerberos:
storm_ui_keytab_path = config['configurations']['storm-env']['storm_ui_keytab']
_storm_ui_jaas_principal_name = config['configurations']['storm-env']['storm_ui_principal_name']
storm_ui_jaas_principal = _storm_ui_jaas_principal_name.replace('_HOST',_hostname_lowercase)
storm_bare_jaas_principal = get_bare_principal(_storm_principal_name)
if _ambari_principal_name:
ambari_bare_jaas_principal = get_bare_principal(_ambari_principal_name)
_nimbus_principal_name = config['configurations']['storm-env']['nimbus_principal_name']
nimbus_jaas_principal = _nimbus_principal_name.replace('_HOST', _hostname_lowercase)
nimbus_bare_jaas_principal = get_bare_principal(_nimbus_principal_name)
nimbus_keytab_path = config['configurations']['storm-env']['nimbus_keytab']
kafka_bare_jaas_principal = None
if stack_supports_storm_kerberos:
if security_enabled:
storm_thrift_transport = config['configurations']['storm-site']['_storm.thrift.secure.transport']
# generate KafkaClient jaas config if kafka is kerberized
_kafka_principal_name = default("/configurations/kafka-env/kafka_principal_name", None)
kafka_bare_jaas_principal = get_bare_principal(_kafka_principal_name)
else:
storm_thrift_transport = config['configurations']['storm-site']['_storm.thrift.nonsecure.transport']
set_instanceId = "false"
if 'cluster-env' in config['configurations'] and \
'metrics_collector_external_hosts' in config['configurations']['cluster-env']:
ams_collector_hosts = config['configurations']['cluster-env']['metrics_collector_external_hosts']
set_instanceId = "true"
else:
ams_collector_hosts = ",".join(default("/clusterHostInfo/metrics_collector_hosts", []))
has_metric_collector = not len(ams_collector_hosts) == 0
metric_collector_port = None
if has_metric_collector:
if 'cluster-env' in config['configurations'] and \
'metrics_collector_external_port' in config['configurations']['cluster-env']:
metric_collector_port = config['configurations']['cluster-env']['metrics_collector_external_port']
else:
metric_collector_web_address = default("/configurations/ams-site/timeline.metrics.service.webapp.address", "0.0.0.0:6188")
if metric_collector_web_address.find(':') != -1:
metric_collector_port = metric_collector_web_address.split(':')[1]
else:
metric_collector_port = '6188'
metric_collector_report_interval = 60
metric_collector_app_id = "nimbus"
if default("/configurations/ams-site/timeline.metrics.service.http.policy", "HTTP_ONLY") == "HTTPS_ONLY":
metric_collector_protocol = 'https'
else:
metric_collector_protocol = 'http'
metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
pass
metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
metrics_collection_period = default("/configurations/ams-site/timeline.metrics.sink.collection.period", 10)
metric_collector_sink_jar = "/usr/lib/storm/lib/ambari-metrics-storm-sink-with-common-*.jar"
metric_collector_legacy_sink_jar = "/usr/lib/storm/lib/ambari-metrics-storm-sink-legacy-with-common-*.jar"
host_in_memory_aggregation = default("/configurations/ams-site/timeline.metrics.host.inmemory.aggregation", True)
host_in_memory_aggregation_port = default("/configurations/ams-site/timeline.metrics.host.inmemory.aggregation.port", 61888)
# Cluster Zookeeper quorum
zookeeper_quorum = ""
if storm_zookeeper_servers:
storm_zookeeper_servers_list = yaml_utils.get_values_from_yaml_array(storm_zookeeper_servers)
zookeeper_quorum = (":" + storm_zookeeper_port + ",").join(storm_zookeeper_servers_list)
zookeeper_quorum += ":" + storm_zookeeper_port
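# Illustrative result (hypothetical servers): for storm.zookeeper.servers
# ['zk1', 'zk2'] and storm.zookeeper.port '2181', the code above yields
# zookeeper_quorum == 'zk1:2181,zk2:2181'.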
jar_jvm_opts = ''
########################################################
############# Atlas related params #####################
########################################################
#region Atlas Hooks
storm_atlas_application_properties = default('/configurations/storm-atlas-application.properties', {})
enable_atlas_hook = default('/configurations/storm-env/storm.atlas.hook', False)
atlas_hook_filename = default('/configurations/atlas-env/metadata_conf_file', 'atlas-application.properties')
if enable_atlas_hook:
# Only append /etc/atlas/conf to classpath if on HDP 2.4.*
if check_stack_feature(StackFeature.ATLAS_CONF_DIR_IN_PATH, stack_version_formatted):
atlas_conf_dir = format('{stack_root}/current/atlas-server/conf')
jar_jvm_opts += '-Datlas.conf=' + atlas_conf_dir
#endregion
storm_ui_port = config['configurations']['storm-site']['ui.port']
#Storm log4j properties
storm_a1_maxfilesize = default('/configurations/storm-cluster-log4j/storm_a1_maxfilesize', 100)
storm_a1_maxbackupindex = default('/configurations/storm-cluster-log4j/storm_a1_maxbackupindex', 9)
storm_wrkr_a1_maxfilesize = default('/configurations/storm-worker-log4j/storm_wrkr_a1_maxfilesize', 100)
storm_wrkr_a1_maxbackupindex = default('/configurations/storm-worker-log4j/storm_wrkr_a1_maxbackupindex', 9)
storm_wrkr_out_maxfilesize = default('/configurations/storm-worker-log4j/storm_wrkr_out_maxfilesize', 100)
storm_wrkr_out_maxbackupindex = default('/configurations/storm-worker-log4j/storm_wrkr_out_maxbackupindex', 4)
storm_wrkr_err_maxfilesize = default('/configurations/storm-worker-log4j/storm_wrkr_err_maxfilesize', 100)
storm_wrkr_err_maxbackupindex = default('/configurations/storm-worker-log4j/storm_wrkr_err_maxbackupindex', 4)
storm_cluster_log4j_content = config['configurations']['storm-cluster-log4j']['content']
storm_worker_log4j_content = config['configurations']['storm-worker-log4j']['content']
# some commands may need to supply the JAAS location when running as storm
storm_jaas_file = format("{conf_dir}/storm_jaas.conf")
# for curl command in ranger plugin to get db connector
jdk_location = config['hostLevelParams']['jdk_location']
# ranger storm plugin start section
# ranger host
ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
has_ranger_admin = not len(ranger_admin_hosts) == 0
# ranger xml_configuration support flag: determined via stack feature instead of depending on xml_configurations_supported from ranger-env
xml_configurations_supported = check_stack_feature(StackFeature.RANGER_XML_CONFIGURATION, version_for_stack_feature_checks)
# ambari-server hostname
ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
# ranger storm plugin enabled property
enable_ranger_storm = default("/configurations/ranger-storm-plugin-properties/ranger-storm-plugin-enabled", "No")
enable_ranger_storm = True if enable_ranger_storm.lower() == 'yes' else False
# ranger storm properties
if enable_ranger_storm:
# get ranger policy url
policymgr_mgr_url = config['configurations']['admin-properties']['policymgr_external_url']
if xml_configurations_supported:
policymgr_mgr_url = config['configurations']['ranger-storm-security']['ranger.plugin.storm.policy.rest.url']
if not is_empty(policymgr_mgr_url) and policymgr_mgr_url.endswith('/'):
policymgr_mgr_url = policymgr_mgr_url.rstrip('/')
# ranger audit db user
xa_audit_db_user = default('/configurations/admin-properties/audit_db_user', 'rangerlogger')
# ranger storm service name
repo_name = str(config['clusterName']) + '_storm'
repo_name_value = config['configurations']['ranger-storm-security']['ranger.plugin.storm.service.name']
if not is_empty(repo_name_value) and repo_name_value != "{{repo_name}}":
repo_name = repo_name_value
common_name_for_certificate = config['configurations']['ranger-storm-plugin-properties']['common.name.for.certificate']
repo_config_username = config['configurations']['ranger-storm-plugin-properties']['REPOSITORY_CONFIG_USERNAME']
# ranger-env config
ranger_env = config['configurations']['ranger-env']
# create ranger-env config having external ranger credential properties
if not has_ranger_admin and enable_ranger_storm:
external_admin_username = default('/configurations/ranger-storm-plugin-properties/external_admin_username', 'admin')
external_admin_password = default('/configurations/ranger-storm-plugin-properties/external_admin_password', 'admin')
external_ranger_admin_username = default('/configurations/ranger-storm-plugin-properties/external_ranger_admin_username', 'amb_ranger_admin')
external_ranger_admin_password = default('/configurations/ranger-storm-plugin-properties/external_ranger_admin_password', 'amb_ranger_admin')
ranger_env = {}
ranger_env['admin_username'] = external_admin_username
ranger_env['admin_password'] = external_admin_password
ranger_env['ranger_admin_username'] = external_ranger_admin_username
ranger_env['ranger_admin_password'] = external_ranger_admin_password
ranger_plugin_properties = config['configurations']['ranger-storm-plugin-properties']
policy_user = storm_user
repo_config_password = config['configurations']['ranger-storm-plugin-properties']['REPOSITORY_CONFIG_PASSWORD']
xa_audit_db_password = ''
if not is_empty(config['configurations']['admin-properties']['audit_db_password']) and stack_supports_ranger_audit_db and has_ranger_admin:
xa_audit_db_password = config['configurations']['admin-properties']['audit_db_password']
repo_config_password = config['configurations']['ranger-storm-plugin-properties']['REPOSITORY_CONFIG_PASSWORD']
downloaded_custom_connector = None
previous_jdbc_jar_name = None
driver_curl_source = None
driver_curl_target = None
previous_jdbc_jar = None
if has_ranger_admin and stack_supports_ranger_audit_db:
xa_audit_db_flavor = config['configurations']['admin-properties']['DB_FLAVOR']
jdbc_jar_name, previous_jdbc_jar_name, audit_jdbc_url, jdbc_driver = get_audit_configs(config)
downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}") if stack_supports_ranger_audit_db else None
driver_curl_source = format("{jdk_location}/{jdbc_jar_name}") if stack_supports_ranger_audit_db else None
driver_curl_target = format("{storm_component_home_dir}/lib/{jdbc_jar_name}") if stack_supports_ranger_audit_db else None
previous_jdbc_jar = format("{storm_component_home_dir}/lib/{previous_jdbc_jar_name}") if stack_supports_ranger_audit_db else None
sql_connector_jar = ''
storm_ranger_plugin_config = {
'username': repo_config_username,
'password': repo_config_password,
'nimbus.url': 'http://' + storm_ui_host[0].lower() + ':' + str(storm_ui_port),
'commonNameForCertificate': common_name_for_certificate
}
storm_ranger_plugin_repo = {
'isActive': 'true',
'config': json.dumps(storm_ranger_plugin_config),
'description': 'storm repo',
'name': repo_name,
'repositoryType': 'storm',
'assetType': '6'
}
custom_ranger_service_config = generate_ranger_service_config(ranger_plugin_properties)
if len(custom_ranger_service_config) > 0:
storm_ranger_plugin_config.update(custom_ranger_service_config)
if stack_supports_ranger_kerberos and security_enabled:
policy_user = format('{storm_user},{storm_bare_jaas_principal}')
storm_ranger_plugin_config['policy.download.auth.users'] = policy_user
storm_ranger_plugin_config['tag.download.auth.users'] = policy_user
storm_ranger_plugin_config['ambari.service.check.user'] = policy_user
storm_ranger_plugin_repo = {
'isEnabled': 'true',
'configs': storm_ranger_plugin_config,
'description': 'storm repo',
'name': repo_name,
'type': 'storm'
}
ranger_storm_principal = None
ranger_storm_keytab = None
if stack_supports_ranger_kerberos and security_enabled:
ranger_storm_principal = storm_jaas_principal
ranger_storm_keytab = storm_keytab_path
xa_audit_db_is_enabled = False
if xml_configurations_supported and stack_supports_ranger_audit_db:
xa_audit_db_is_enabled = config['configurations']['ranger-storm-audit']['xasecure.audit.destination.db']
xa_audit_hdfs_is_enabled = default('/configurations/ranger-storm-audit/xasecure.audit.destination.hdfs', False)
ssl_keystore_password = config['configurations']['ranger-storm-policymgr-ssl']['xasecure.policymgr.clientssl.keystore.password'] if xml_configurations_supported else None
ssl_truststore_password = config['configurations']['ranger-storm-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password'] if xml_configurations_supported else None
credential_file = format('/etc/ranger/{repo_name}/cred.jceks')
# for SQLA explicitly disable audit to DB for Ranger
if has_ranger_admin and stack_supports_ranger_audit_db and xa_audit_db_flavor.lower() == 'sqla':
xa_audit_db_is_enabled = False
# ranger storm plugin end section
namenode_hosts = default("/clusterHostInfo/namenode_host", [])
has_namenode = not len(namenode_hosts) == 0
hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] if has_namenode else None
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] if has_namenode else None
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] if has_namenode else None
hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_namenode else None
hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
import functools
#create partial functions with common arguments for every HdfsResource call
#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
hadoop_bin_dir = hadoop_bin_dir,
hadoop_conf_dir = hadoop_conf_dir,
principal_name = hdfs_principal_name,
hdfs_site = hdfs_site,
default_fs = default_fs,
immutable_paths = get_not_managed_resources()
)
| apache-2.0 |
hacksterio/pygments.rb | vendor/pygments-main/pygments/lexers/r.py | 47 | 23755 | # -*- coding: utf-8 -*-
"""
pygments.lexers.r
~~~~~~~~~~~~~~~~~
Lexers for the R/S languages.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, include, words, do_insertions
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic
__all__ = ['RConsoleLexer', 'SLexer', 'RdLexer']
line_re = re.compile('.*?\n')
class RConsoleLexer(Lexer):
"""
For R console transcripts or R CMD BATCH output files.
"""
name = 'RConsole'
aliases = ['rconsole', 'rout']
filenames = ['*.Rout']
def get_tokens_unprocessed(self, text):
slexer = SLexer(**self.options)
current_code_block = ''
insertions = []
for match in line_re.finditer(text):
line = match.group()
if line.startswith('>') or line.startswith('+'):
# Colorize the prompt as such,
# then put rest of line into current_code_block
insertions.append((len(current_code_block),
[(0, Generic.Prompt, line[:2])]))
current_code_block += line[2:]
else:
# We have reached a non-prompt line!
# If we have stored prompt lines, need to process them first.
if current_code_block:
# Weave together the prompts and highlight code.
for item in do_insertions(
insertions, slexer.get_tokens_unprocessed(current_code_block)):
yield item
# Reset vars for next code block.
current_code_block = ''
insertions = []
# Now process the actual line itself, this is output from R.
yield match.start(), Generic.Output, line
# If we happen to end on a code block with nothing after it, need to
# process the last code block. This is neither elegant nor DRY so
# should be changed.
if current_code_block:
for item in do_insertions(
insertions, slexer.get_tokens_unprocessed(current_code_block)):
yield item
class SLexer(RegexLexer):
"""
For S, S-plus, and R source code.
.. versionadded:: 0.10
"""
name = 'S'
aliases = ['splus', 's', 'r']
filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron']
mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
'text/x-R', 'text/x-r-history', 'text/x-r-profile']
builtins_base = (
'Arg', 'Conj', 'Cstack_info', 'Encoding', 'FALSE',
'Filter', 'Find', 'I', 'ISOdate', 'ISOdatetime', 'Im', 'Inf',
'La.svd', 'Map', 'Math.Date', 'Math.POSIXt', 'Math.data.frame',
'Math.difftime', 'Math.factor', 'Mod', 'NA_character_',
'NA_complex_', 'NA_real_', 'NCOL', 'NROW', 'NULL', 'NA_integer_', 'NaN',
'Negate', 'NextMethod', 'Ops.Date', 'Ops.POSIXt', 'Ops.data.frame',
'Ops.difftime', 'Ops.factor', 'Ops.numeric_version', 'Ops.ordered',
'Position', 'R.Version', 'R.home', 'R.version', 'R.version.string',
'RNGkind', 'RNGversion', 'R_system_version', 'Re', 'Recall',
'Reduce', 'Summary.Date', 'Summary.POSIXct', 'Summary.POSIXlt',
'Summary.data.frame', 'Summary.difftime', 'Summary.factor',
'Summary.numeric_version', 'Summary.ordered', 'Sys.Date',
'Sys.chmod', 'Sys.getenv', 'Sys.getlocale', 'Sys.getpid',
'Sys.glob', 'Sys.info', 'Sys.localeconv', 'Sys.readlink',
'Sys.setFileTime', 'Sys.setenv', 'Sys.setlocale', 'Sys.sleep',
'Sys.time', 'Sys.timezone', 'Sys.umask', 'Sys.unsetenv',
'Sys.which', 'TRUE', 'UseMethod', 'Vectorize', 'abbreviate', 'abs',
'acos', 'acosh', 'addNA', 'addTaskCallback', 'agrep', 'alist',
'all', 'all.equal', 'all.equal.POSIXct', 'all.equal.character',
'all.equal.default', 'all.equal.factor', 'all.equal.formula',
'all.equal.language', 'all.equal.list', 'all.equal.numeric',
'all.equal.raw', 'all.names', 'all.vars', 'any', 'anyDuplicated',
'anyDuplicated.array', 'anyDuplicated.data.frame',
'anyDuplicated.default', 'anyDuplicated.matrix', 'aperm',
'aperm.default', 'aperm.table', 'append', 'apply', 'args',
'arrayInd', 'as.Date', 'as.Date.POSIXct', 'as.Date.POSIXlt',
'as.Date.character', 'as.Date.date', 'as.Date.dates',
'as.Date.default', 'as.Date.factor', 'as.Date.numeric',
'as.POSIXct', 'as.POSIXct.Date', 'as.POSIXct.POSIXlt',
'as.POSIXct.date', 'as.POSIXct.dates', 'as.POSIXct.default',
'as.POSIXct.numeric', 'as.POSIXlt', 'as.POSIXlt.Date',
'as.POSIXlt.POSIXct', 'as.POSIXlt.character', 'as.POSIXlt.date',
'as.POSIXlt.dates', 'as.POSIXlt.default', 'as.POSIXlt.factor',
'as.POSIXlt.numeric', 'as.array', 'as.array.default', 'as.call',
'as.character', 'as.character.Date', 'as.character.POSIXt',
'as.character.condition', 'as.character.default',
'as.character.error', 'as.character.factor', 'as.character.hexmode',
'as.character.numeric_version', 'as.character.octmode',
'as.character.srcref', 'as.complex', 'as.data.frame',
'as.data.frame.AsIs', 'as.data.frame.Date', 'as.data.frame.POSIXct',
'as.data.frame.POSIXlt', 'as.data.frame.array',
'as.data.frame.character', 'as.data.frame.complex',
'as.data.frame.data.frame', 'as.data.frame.default',
'as.data.frame.difftime', 'as.data.frame.factor',
'as.data.frame.integer', 'as.data.frame.list',
'as.data.frame.logical', 'as.data.frame.matrix',
'as.data.frame.model.matrix', 'as.data.frame.numeric',
'as.data.frame.numeric_version', 'as.data.frame.ordered',
'as.data.frame.raw', 'as.data.frame.table', 'as.data.frame.ts',
'as.data.frame.vector', 'as.difftime', 'as.double',
'as.double.POSIXlt', 'as.double.difftime', 'as.environment',
'as.expression', 'as.expression.default', 'as.factor',
'as.function', 'as.function.default', 'as.hexmode', 'as.integer',
'as.list', 'as.list.Date', 'as.list.POSIXct', 'as.list.data.frame',
'as.list.default', 'as.list.environment', 'as.list.factor',
'as.list.function', 'as.list.numeric_version', 'as.logical',
'as.logical.factor', 'as.matrix', 'as.matrix.POSIXlt',
'as.matrix.data.frame', 'as.matrix.default', 'as.matrix.noquote',
'as.name', 'as.null', 'as.null.default', 'as.numeric',
'as.numeric_version', 'as.octmode', 'as.ordered',
'as.package_version', 'as.pairlist', 'as.qr', 'as.raw', 'as.single',
'as.single.default', 'as.symbol', 'as.table', 'as.table.default',
'as.vector', 'as.vector.factor', 'asNamespace', 'asS3', 'asS4',
'asin', 'asinh', 'assign', 'atan', 'atan2', 'atanh',
'attachNamespace', 'attr', 'attr.all.equal', 'attributes',
'autoload', 'autoloader', 'backsolve', 'baseenv', 'basename',
'besselI', 'besselJ', 'besselK', 'besselY', 'beta',
'bindingIsActive', 'bindingIsLocked', 'bindtextdomain', 'bitwAnd',
'bitwNot', 'bitwOr', 'bitwShiftL', 'bitwShiftR', 'bitwXor', 'body',
'bquote', 'browser', 'browserCondition', 'browserSetDebug',
'browserText', 'builtins', 'by', 'by.data.frame', 'by.default',
'bzfile', 'c.Date', 'c.POSIXct', 'c.POSIXlt', 'c.noquote',
'c.numeric_version', 'call', 'callCC', 'capabilities', 'casefold',
'cat', 'category', 'cbind', 'cbind.data.frame', 'ceiling',
'char.expand', 'charToRaw', 'charmatch', 'chartr', 'check_tzones',
'chol', 'chol.default', 'chol2inv', 'choose', 'class',
'clearPushBack', 'close', 'close.connection', 'close.srcfile',
'close.srcfilealias', 'closeAllConnections', 'col', 'colMeans',
'colSums', 'colnames', 'commandArgs', 'comment', 'computeRestarts',
'conditionCall', 'conditionCall.condition', 'conditionMessage',
'conditionMessage.condition', 'conflicts', 'contributors', 'cos',
'cosh', 'crossprod', 'cummax', 'cummin', 'cumprod', 'cumsum', 'cut',
'cut.Date', 'cut.POSIXt', 'cut.default', 'dQuote', 'data.class',
'data.matrix', 'date', 'debug', 'debugonce',
'default.stringsAsFactors', 'delayedAssign', 'deparse', 'det',
'determinant', 'determinant.matrix', 'dget', 'diag', 'diff',
'diff.Date', 'diff.POSIXt', 'diff.default', 'difftime', 'digamma',
'dim', 'dim.data.frame', 'dimnames', 'dimnames.data.frame', 'dir',
'dir.create', 'dirname', 'do.call', 'dput', 'drop', 'droplevels',
'droplevels.data.frame', 'droplevels.factor', 'dump', 'duplicated',
'duplicated.POSIXlt', 'duplicated.array', 'duplicated.data.frame',
'duplicated.default', 'duplicated.matrix',
'duplicated.numeric_version', 'dyn.load', 'dyn.unload', 'eapply',
'eigen', 'else', 'emptyenv', 'enc2native', 'enc2utf8',
'encodeString', 'enquote', 'env.profile', 'environment',
'environmentIsLocked', 'environmentName', 'eval', 'eval.parent',
'evalq', 'exists', 'exp', 'expand.grid', 'expm1', 'expression',
'factor', 'factorial', 'fifo', 'file', 'file.access', 'file.append',
'file.choose', 'file.copy', 'file.create', 'file.exists',
'file.info', 'file.link', 'file.path', 'file.remove', 'file.rename',
'file.show', 'file.symlink', 'find.package', 'findInterval',
'findPackageEnv', 'findRestart', 'floor', 'flush',
'flush.connection', 'force', 'formals', 'format',
'format.AsIs', 'format.Date', 'format.POSIXct', 'format.POSIXlt',
'format.data.frame', 'format.default', 'format.difftime',
'format.factor', 'format.hexmode', 'format.info',
'format.libraryIQR', 'format.numeric_version', 'format.octmode',
'format.packageInfo', 'format.pval', 'format.summaryDefault',
'formatC', 'formatDL', 'forwardsolve', 'gamma', 'gc', 'gc.time',
'gcinfo', 'gctorture', 'gctorture2', 'get', 'getAllConnections',
'getCallingDLL', 'getCallingDLLe', 'getConnection',
'getDLLRegisteredRoutines', 'getDLLRegisteredRoutines.DLLInfo',
'getDLLRegisteredRoutines.character', 'getElement',
'getExportedValue', 'getHook', 'getLoadedDLLs', 'getNamespace',
'getNamespaceExports', 'getNamespaceImports', 'getNamespaceInfo',
'getNamespaceName', 'getNamespaceUsers', 'getNamespaceVersion',
'getNativeSymbolInfo', 'getOption', 'getRversion', 'getSrcLines',
'getTaskCallbackNames', 'geterrmessage', 'gettext', 'gettextf',
'getwd', 'gl', 'globalenv', 'gregexpr', 'grep', 'grepRaw', 'grepl',
'gsub', 'gzcon', 'gzfile', 'head', 'iconv', 'iconvlist',
'icuSetCollate', 'identical', 'identity', 'ifelse', 'importIntoEnv',
'in', 'inherits', 'intToBits', 'intToUtf8', 'interaction', 'interactive',
'intersect', 'inverse.rle', 'invisible', 'invokeRestart',
'invokeRestartInteractively', 'is.R', 'is.array', 'is.atomic',
'is.call', 'is.character', 'is.complex', 'is.data.frame',
'is.double', 'is.element', 'is.environment', 'is.expression',
'is.factor', 'is.finite', 'is.function', 'is.infinite',
'is.integer', 'is.language', 'is.list', 'is.loaded', 'is.logical',
'is.matrix', 'is.na', 'is.na.POSIXlt', 'is.na.data.frame',
'is.na.numeric_version', 'is.name', 'is.nan', 'is.null',
'is.numeric', 'is.numeric.Date', 'is.numeric.POSIXt',
'is.numeric.difftime', 'is.numeric_version', 'is.object',
'is.ordered', 'is.package_version', 'is.pairlist', 'is.primitive',
'is.qr', 'is.raw', 'is.recursive', 'is.single', 'is.symbol',
'is.table', 'is.unsorted', 'is.vector', 'isBaseNamespace',
'isIncomplete', 'isNamespace', 'isOpen', 'isRestart', 'isS4',
'isSeekable', 'isSymmetric', 'isSymmetric.matrix', 'isTRUE',
'isatty', 'isdebugged', 'jitter', 'julian', 'julian.Date',
'julian.POSIXt', 'kappa', 'kappa.default', 'kappa.lm', 'kappa.qr',
'kronecker', 'l10n_info', 'labels', 'labels.default', 'lapply',
'lazyLoad', 'lazyLoadDBexec', 'lazyLoadDBfetch', 'lbeta', 'lchoose',
'length', 'length.POSIXlt', 'letters', 'levels', 'levels.default',
'lfactorial', 'lgamma', 'library.dynam', 'library.dynam.unload',
'licence', 'license', 'list.dirs', 'list.files', 'list2env', 'load',
'loadNamespace', 'loadedNamespaces', 'loadingNamespaceInfo',
'local', 'lockBinding', 'lockEnvironment', 'log', 'log10', 'log1p',
'log2', 'logb', 'lower.tri', 'ls', 'make.names', 'make.unique',
'makeActiveBinding', 'mapply', 'margin.table', 'mat.or.vec',
'match', 'match.arg', 'match.call', 'match.fun', 'max', 'max.col',
'mean', 'mean.Date', 'mean.POSIXct', 'mean.POSIXlt', 'mean.default',
'mean.difftime', 'mem.limits', 'memCompress', 'memDecompress',
'memory.profile', 'merge', 'merge.data.frame', 'merge.default',
'message', 'mget', 'min', 'missing', 'mode', 'month.abb',
'month.name', 'months', 'months.Date', 'months.POSIXt',
'months.abb', 'months.nameletters', 'names', 'names.POSIXlt',
'namespaceExport', 'namespaceImport', 'namespaceImportClasses',
'namespaceImportFrom', 'namespaceImportMethods', 'nargs', 'nchar',
'ncol', 'new.env', 'ngettext', 'nlevels', 'noquote', 'norm',
'normalizePath', 'nrow', 'numeric_version', 'nzchar', 'objects',
'oldClass', 'on.exit', 'open', 'open.connection', 'open.srcfile',
'open.srcfilealias', 'open.srcfilecopy', 'options', 'order',
'ordered', 'outer', 'packBits', 'packageEvent',
'packageHasNamespace', 'packageStartupMessage', 'package_version',
'pairlist', 'parent.env', 'parent.frame', 'parse',
'parseNamespaceFile', 'paste', 'paste0', 'path.expand',
'path.package', 'pipe', 'pmatch', 'pmax', 'pmax.int', 'pmin',
'pmin.int', 'polyroot', 'pos.to.env', 'pretty', 'pretty.default',
'prettyNum', 'print', 'print.AsIs', 'print.DLLInfo',
'print.DLLInfoList', 'print.DLLRegisteredRoutines', 'print.Date',
'print.NativeRoutineList', 'print.POSIXct', 'print.POSIXlt',
'print.by', 'print.condition', 'print.connection',
'print.data.frame', 'print.default', 'print.difftime',
'print.factor', 'print.function', 'print.hexmode',
'print.libraryIQR', 'print.listof', 'print.noquote',
'print.numeric_version', 'print.octmode', 'print.packageInfo',
'print.proc_time', 'print.restart', 'print.rle',
'print.simple.list', 'print.srcfile', 'print.srcref',
'print.summary.table', 'print.summaryDefault', 'print.table',
'print.warnings', 'prmatrix', 'proc.time', 'prod', 'prop.table',
'provideDimnames', 'psigamma', 'pushBack', 'pushBackLength', 'q',
'qr', 'qr.Q', 'qr.R', 'qr.X', 'qr.coef', 'qr.default', 'qr.fitted',
'qr.qty', 'qr.qy', 'qr.resid', 'qr.solve', 'quarters',
'quarters.Date', 'quarters.POSIXt', 'quit', 'quote', 'range',
'range.default', 'rank', 'rapply', 'raw', 'rawConnection',
'rawConnectionValue', 'rawShift', 'rawToBits', 'rawToChar', 'rbind',
'rbind.data.frame', 'rcond', 'read.dcf', 'readBin', 'readChar',
'readLines', 'readRDS', 'readRenviron', 'readline', 'reg.finalizer',
'regexec', 'regexpr', 'registerS3method', 'registerS3methods',
'regmatches', 'remove', 'removeTaskCallback', 'rep', 'rep.Date',
'rep.POSIXct', 'rep.POSIXlt', 'rep.factor', 'rep.int',
'rep.numeric_version', 'rep_len', 'replace', 'replicate',
'requireNamespace', 'restartDescription', 'restartFormals',
'retracemem', 'rev', 'rev.default', 'rle', 'rm', 'round',
'round.Date', 'round.POSIXt', 'row', 'row.names',
'row.names.data.frame', 'row.names.default', 'rowMeans', 'rowSums',
'rownames', 'rowsum', 'rowsum.data.frame', 'rowsum.default',
'sQuote', 'sample', 'sample.int', 'sapply', 'save', 'save.image',
'saveRDS', 'scale', 'scale.default', 'scan', 'search',
'searchpaths', 'seek', 'seek.connection', 'seq', 'seq.Date',
'seq.POSIXt', 'seq.default', 'seq.int', 'seq_along', 'seq_len',
'sequence', 'serialize', 'set.seed', 'setHook', 'setNamespaceInfo',
'setSessionTimeLimit', 'setTimeLimit', 'setdiff', 'setequal',
'setwd', 'shQuote', 'showConnections', 'sign', 'signalCondition',
'signif', 'simpleCondition', 'simpleError', 'simpleMessage',
'simpleWarning', 'simplify2array', 'sin', 'single',
'sinh', 'sink', 'sink.number', 'slice.index', 'socketConnection',
'socketSelect', 'solve', 'solve.default', 'solve.qr', 'sort',
'sort.POSIXlt', 'sort.default', 'sort.int', 'sort.list', 'split',
'split.Date', 'split.POSIXct', 'split.data.frame', 'split.default',
'sprintf', 'sqrt', 'srcfile', 'srcfilealias', 'srcfilecopy',
'srcref', 'standardGeneric', 'stderr', 'stdin', 'stdout', 'stop',
'stopifnot', 'storage.mode', 'strftime', 'strptime', 'strsplit',
'strtoi', 'strtrim', 'structure', 'strwrap', 'sub', 'subset',
'subset.data.frame', 'subset.default', 'subset.matrix',
'substitute', 'substr', 'substring', 'sum', 'summary',
'summary.Date', 'summary.POSIXct', 'summary.POSIXlt',
'summary.connection', 'summary.data.frame', 'summary.default',
'summary.factor', 'summary.matrix', 'summary.proc_time',
'summary.srcfile', 'summary.srcref', 'summary.table',
'suppressMessages', 'suppressPackageStartupMessages',
'suppressWarnings', 'svd', 'sweep', 'sys.call', 'sys.calls',
'sys.frame', 'sys.frames', 'sys.function', 'sys.load.image',
'sys.nframe', 'sys.on.exit', 'sys.parent', 'sys.parents',
'sys.save.image', 'sys.source', 'sys.status', 'system',
'system.file', 'system.time', 'system2', 't', 't.data.frame',
't.default', 'table', 'tabulate', 'tail', 'tan', 'tanh', 'tapply',
'taskCallbackManager', 'tcrossprod', 'tempdir', 'tempfile',
'testPlatformEquivalence', 'textConnection', 'textConnectionValue',
'toString', 'toString.default', 'tolower', 'topenv', 'toupper',
'trace', 'traceback', 'tracemem', 'tracingState', 'transform',
'transform.data.frame', 'transform.default', 'trigamma', 'trunc',
'trunc.Date', 'trunc.POSIXt', 'truncate', 'truncate.connection',
'try', 'tryCatch', 'typeof', 'unclass', 'undebug', 'union',
'unique', 'unique.POSIXlt', 'unique.array', 'unique.data.frame',
'unique.default', 'unique.matrix', 'unique.numeric_version',
'units', 'units.difftime', 'unix.time', 'unlink', 'unlist',
'unloadNamespace', 'unlockBinding', 'unname', 'unserialize',
'unsplit', 'untrace', 'untracemem', 'unz', 'upper.tri', 'url',
'utf8ToInt', 'vapply', 'version', 'warning', 'warnings', 'weekdays',
'weekdays.Date', 'weekdays.POSIXt', 'which', 'which.max',
'which.min', 'with', 'with.default', 'withCallingHandlers',
'withRestarts', 'withVisible', 'within', 'within.data.frame',
'within.list', 'write', 'write.dcf', 'writeBin', 'writeChar',
'writeLines', 'xor', 'xor.hexmode', 'xor.octmode',
'xpdrows.data.frame', 'xtfrm', 'xtfrm.AsIs', 'xtfrm.Date',
'xtfrm.POSIXct', 'xtfrm.POSIXlt', 'xtfrm.Surv', 'xtfrm.default',
'xtfrm.difftime', 'xtfrm.factor', 'xtfrm.numeric_version', 'xzfile',
'zapsmall'
)
tokens = {
'comments': [
(r'#.*$', Comment.Single),
],
'valid_name': [
(r'[a-zA-Z][\w.]*', Text),
# can begin with ., but not if that is followed by a digit
(r'\.[a-zA-Z_][\w.]*', Text),
],
'punctuation': [
(r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
],
'keywords': [
(words(builtins_base, suffix=r'(?![\w. =])'),
Keyword.Pseudo),
(r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
r'(?![\w.])',
Keyword.Reserved),
(r'(array|category|character|complex|double|function|integer|list|'
r'logical|matrix|numeric|vector|data.frame|c)'
r'(?![\w.])',
Keyword.Type),
(r'(library|require|attach|detach|source)'
r'(?![\w.])',
Keyword.Namespace)
],
'operators': [
(r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
(r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator)
],
'builtin_symbols': [
(r'(NULL|NA(_(integer|real|complex|character)_)?|'
r'letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))'
r'(?![\w.])',
Keyword.Constant),
(r'(T|F)\b', Name.Builtin.Pseudo),
],
'numbers': [
# hex number
(r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
# decimal number
(r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?',
Number),
],
'statements': [
include('comments'),
# whitespaces
(r'\s+', Text),
(r'`.*?`', String.Backtick),
(r'\'', String, 'string_squote'),
(r'\"', String, 'string_dquote'),
include('builtin_symbols'),
include('numbers'),
include('keywords'),
include('punctuation'),
include('operators'),
include('valid_name'),
],
'root': [
include('statements'),
# blocks:
(r'\{|\}', Punctuation),
# (r'\{', Punctuation, 'block'),
(r'.', Text),
],
# 'block': [
# include('statements'),
# ('\{', Punctuation, '#push'),
# ('\}', Punctuation, '#pop')
# ],
'string_squote': [
(r'([^\'\\]|\\.)*\'', String, '#pop'),
],
'string_dquote': [
(r'([^"\\]|\\.)*"', String, '#pop'),
],
}
def analyse_text(text):
if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
return 0.11
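# Illustrative inputs for the heuristic above (assumed, not part of the
# lexer's test suite): 'x <- 1' and 'names(df) <- value' both contain an
# assignment arrow preceded by an identifier, bracket or whitespace, so
# they score 0.11; '<--' alone does not match because of the (?!-) lookahead.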
class RdLexer(RegexLexer):
"""
Pygments Lexer for R documentation (Rd) files
This is a very minimal implementation, highlighting little more
than the macros. A description of Rd syntax is found in `Writing R
Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
and `Parsing Rd files <developer.r-project.org/parseRd.pdf>`_.
.. versionadded:: 1.6
"""
name = 'Rd'
aliases = ['rd']
filenames = ['*.Rd']
mimetypes = ['text/x-r-doc']
# To account for verbatim / LaTeX-like / and R-like areas
# would require parsing.
tokens = {
'root': [
# catch escaped brackets and percent sign
(r'\\[\\{}%]', String.Escape),
# comments
(r'%.*$', Comment),
# special macros with no arguments
(r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
# macros
(r'\\[a-zA-Z]+\b', Keyword),
# special preprocessor macros
(r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
# non-escaped brackets
(r'[{}]', Name.Builtin),
# everything else
(r'[^\\%\n{}]+', Text),
(r'.', Text),
]
}
| mit |
danakj/chromium | tools/perf/page_sets/tough_layout_cases.py | 5 | 1694 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import cache_temperature as cache_temperature_module
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
from telemetry import story
class ToughLayoutCasesPage(page_module.Page):
def __init__(self, url, page_set, cache_temperature=None):
super(ToughLayoutCasesPage, self).__init__(
url=url, page_set=page_set, credentials_path = 'data/credentials.json',
shared_page_state_class=shared_page_state.SharedDesktopPageState,
cache_temperature=cache_temperature)
self.archive_data_file = 'data/tough_layout_cases.json'
class ToughLayoutCasesPageSet(story.StorySet):
"""
The slowest layouts observed in the Alexa top 1 million sites in July 2013.
"""
def __init__(self, cache_temperatures=None):
super(ToughLayoutCasesPageSet, self).__init__(
archive_data_file='data/tough_layout_cases.json',
cloud_storage_bucket=story.PARTNER_BUCKET)
if cache_temperatures is None:
cache_temperatures = [cache_temperature_module.ANY]
urls_list = [
'http://oilevent.com',
'http://www.muzoboss.ru',
'http://natunkantha.com',
'http://www.mossiella.com',
'http://bookish.com',
'http://mydiyclub.com',
'http://amarchoti.blogspot.com',
'http://picarisimo.es',
'http://chinaapache.com',
'http://indoritel.com'
]
for url in urls_list:
for temp in cache_temperatures:
self.AddStory(ToughLayoutCasesPage(url, self, cache_temperature=temp))
| bsd-3-clause |
victorzhao/miniblink49 | v8_5_1/tools/testrunner/local/pool_unittest.py | 27 | 1252 | #!/usr/bin/env python
# Copyright 2014 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from pool import Pool
def Run(x):
if x == 10:
raise Exception("Expected exception triggered by test.")
return x
class PoolTest(unittest.TestCase):
def testNormal(self):
results = set()
pool = Pool(3)
for result in pool.imap_unordered(Run, [[x] for x in range(0, 10)]):
results.add(result.value)
self.assertEquals(set(range(0, 10)), results)
def testException(self):
results = set()
pool = Pool(3)
for result in pool.imap_unordered(Run, [[x] for x in range(0, 12)]):
# Item 10 will not appear in results due to an internal exception.
results.add(result.value)
expect = set(range(0, 12))
expect.remove(10)
self.assertEquals(expect, results)
def testAdd(self):
results = set()
pool = Pool(3)
for result in pool.imap_unordered(Run, [[x] for x in range(0, 10)]):
results.add(result.value)
if result.value < 30:
pool.add([result.value + 20])
self.assertEquals(set(range(0, 10) + range(20, 30) + range(40, 50)),
results)
| gpl-3.0 |
stevenmizuno/QGIS | tests/src/python/test_layer_dependencies.py | 22 | 11160 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsSnappingUtils (complement to C++-based tests)
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Hugo Mercier'
__date__ = '12/07/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import qgis # NOQA
import os
from qgis.core import (QgsProject,
QgsVectorLayer,
QgsMapSettings,
QgsSnappingUtils,
QgsSnappingConfig,
QgsTolerance,
QgsRectangle,
QgsPointXY,
QgsFeature,
QgsGeometry,
QgsLayerDefinition,
QgsMapLayerDependency
)
from qgis.testing import start_app, unittest
from qgis.PyQt.QtCore import QSize, QPoint
import tempfile
from qgis.utils import spatialite_connect
# Convenience instances in case you may need them
start_app()
class TestLayerDependencies(unittest.TestCase):
@classmethod
def setUpClass(cls):
"""Run before all tests"""
# create a temp SpatiaLite db with a trigger
fo = tempfile.NamedTemporaryFile()
fn = fo.name
fo.close()
cls.fn = fn
con = spatialite_connect(fn)
cur = con.cursor()
cur.execute("SELECT InitSpatialMetadata(1)")
cur.execute("create table node(id integer primary key autoincrement);")
cur.execute("select AddGeometryColumn('node', 'geom', 4326, 'POINT');")
cur.execute("create table section(id integer primary key autoincrement, node1 integer, node2 integer);")
cur.execute("select AddGeometryColumn('section', 'geom', 4326, 'LINESTRING');")
cur.execute("create trigger add_nodes after insert on section begin insert into node (geom) values (st_startpoint(NEW.geom)); insert into node (geom) values (st_endpoint(NEW.geom)); end;")
cur.execute("insert into node (geom) values (geomfromtext('point(0 0)', 4326));")
cur.execute("insert into node (geom) values (geomfromtext('point(1 0)', 4326));")
cur.execute("create table node2(id integer primary key autoincrement);")
cur.execute("select AddGeometryColumn('node2', 'geom', 4326, 'POINT');")
cur.execute("create trigger add_nodes2 after insert on node begin insert into node2 (geom) values (st_translate(NEW.geom, 0.2, 0, 0)); end;")
con.commit()
con.close()
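# Note on the schema built above: inserting a row into "section" fires
# add_nodes, which creates the line's two endpoint rows in "node"; each
# insert into "node" in turn fires add_nodes2, which creates a copy shifted
# by +0.2 in x in "node2". The dependency tests below rely on this
# section -> node -> node2 chain.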
cls.pointsLayer = QgsVectorLayer("dbname='%s' table=\"node\" (geom) sql=" % fn, "points", "spatialite")
assert (cls.pointsLayer.isValid())
cls.linesLayer = QgsVectorLayer("dbname='%s' table=\"section\" (geom) sql=" % fn, "lines", "spatialite")
assert (cls.linesLayer.isValid())
cls.pointsLayer2 = QgsVectorLayer("dbname='%s' table=\"node2\" (geom) sql=" % fn, "_points2", "spatialite")
assert (cls.pointsLayer2.isValid())
QgsProject.instance().addMapLayers([cls.pointsLayer, cls.linesLayer, cls.pointsLayer2])
# save the project file
fo = tempfile.NamedTemporaryFile()
fn = fo.name
fo.close()
cls.projectFile = fn
QgsProject.instance().setFileName(cls.projectFile)
QgsProject.instance().write()
@classmethod
def tearDownClass(cls):
"""Run after all tests"""
pass
def setUp(self):
"""Run before each test."""
pass
def tearDown(self):
"""Run after each test."""
pass
def test_resetSnappingIndex(self):
self.pointsLayer.setDependencies([])
self.linesLayer.setDependencies([])
self.pointsLayer2.setDependencies([])
ms = QgsMapSettings()
ms.setOutputSize(QSize(100, 100))
ms.setExtent(QgsRectangle(0, 0, 1, 1))
self.assertTrue(ms.hasValidSettings())
u = QgsSnappingUtils()
u.setMapSettings(ms)
cfg = u.config()
cfg.setEnabled(True)
cfg.setMode(QgsSnappingConfig.AdvancedConfiguration)
cfg.setIndividualLayerSettings(self.pointsLayer,
QgsSnappingConfig.IndividualLayerSettings(True,
QgsSnappingConfig.Vertex, 20, QgsTolerance.Pixels))
u.setConfig(cfg)
m = u.snapToMap(QPoint(95, 100))
self.assertTrue(m.isValid())
self.assertTrue(m.hasVertex())
self.assertEqual(m.point(), QgsPointXY(1, 0))
f = QgsFeature(self.linesLayer.fields())
f.setId(1)
geom = QgsGeometry.fromWkt("LINESTRING(0 0,1 1)")
f.setGeometry(geom)
self.linesLayer.startEditing()
self.linesLayer.addFeatures([f])
self.linesLayer.commitChanges()
l1 = len([f for f in self.pointsLayer.getFeatures()])
self.assertEqual(l1, 4)
m = u.snapToMap(QPoint(95, 0))
# snapping not updated
self.pointsLayer.setDependencies([])
self.assertEqual(m.isValid(), False)
# set layer dependencies
self.pointsLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())])
# add another line
f = QgsFeature(self.linesLayer.fields())
f.setId(2)
geom = QgsGeometry.fromWkt("LINESTRING(0 0,0.5 0.5)")
f.setGeometry(geom)
self.linesLayer.startEditing()
self.linesLayer.addFeatures([f])
self.linesLayer.commitChanges()
# check the snapped point is OK
m = u.snapToMap(QPoint(45, 50))
self.assertTrue(m.isValid())
self.assertTrue(m.hasVertex())
self.assertEqual(m.point(), QgsPointXY(0.5, 0.5))
self.pointsLayer.setDependencies([])
# test chained layer dependencies A -> B -> C
cfg.setIndividualLayerSettings(self.pointsLayer2,
QgsSnappingConfig.IndividualLayerSettings(True,
QgsSnappingConfig.Vertex, 20, QgsTolerance.Pixels))
u.setConfig(cfg)
self.pointsLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())])
self.pointsLayer2.setDependencies([QgsMapLayerDependency(self.pointsLayer.id())])
# add another line
f = QgsFeature(self.linesLayer.fields())
f.setId(3)
geom = QgsGeometry.fromWkt("LINESTRING(0 0.2,0.5 0.8)")
f.setGeometry(geom)
self.linesLayer.startEditing()
self.linesLayer.addFeatures([f])
self.linesLayer.commitChanges()
# check the second snapped point is OK
m = u.snapToMap(QPoint(75, 100 - 80))
self.assertTrue(m.isValid())
self.assertTrue(m.hasVertex())
self.assertEqual(m.point(), QgsPointXY(0.7, 0.8))
self.pointsLayer.setDependencies([])
self.pointsLayer2.setDependencies([])
def test_cycleDetection(self):
self.assertTrue(self.pointsLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())]))
self.assertFalse(self.linesLayer.setDependencies([QgsMapLayerDependency(self.pointsLayer.id())]))
self.pointsLayer.setDependencies([])
self.linesLayer.setDependencies([])
def test_layerDefinitionRewriteId(self):
tmpfile = os.path.join(tempfile.tempdir, "test.qlr")
ltr = QgsProject.instance().layerTreeRoot()
self.pointsLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())])
QgsLayerDefinition.exportLayerDefinition(tmpfile, [ltr])
grp = ltr.addGroup("imported")
QgsLayerDefinition.loadLayerDefinition(tmpfile, QgsProject.instance(), grp)
newPointsLayer = None
newLinesLayer = None
for l in grp.findLayers():
if l.layerId().startswith('points'):
newPointsLayer = l.layer()
elif l.layerId().startswith('lines'):
newLinesLayer = l.layer()
self.assertIsNotNone(newPointsLayer)
self.assertIsNotNone(newLinesLayer)
self.assertTrue(newLinesLayer.id() in [dep.layerId() for dep in newPointsLayer.dependencies()])
self.pointsLayer.setDependencies([])
def test_signalConnection(self):
# remove all layers
QgsProject.instance().removeAllMapLayers()
# set dependencies and add back layers
self.pointsLayer = QgsVectorLayer("dbname='%s' table=\"node\" (geom) sql=" % self.fn, "points", "spatialite")
assert (self.pointsLayer.isValid())
self.linesLayer = QgsVectorLayer("dbname='%s' table=\"section\" (geom) sql=" % self.fn, "lines", "spatialite")
assert (self.linesLayer.isValid())
self.pointsLayer2 = QgsVectorLayer("dbname='%s' table=\"node2\" (geom) sql=" % self.fn, "_points2", "spatialite")
assert (self.pointsLayer2.isValid())
self.pointsLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())])
self.pointsLayer2.setDependencies([QgsMapLayerDependency(self.pointsLayer.id())])
# this should update connections between layers
QgsProject.instance().addMapLayers([self.pointsLayer])
QgsProject.instance().addMapLayers([self.linesLayer])
QgsProject.instance().addMapLayers([self.pointsLayer2])
ms = QgsMapSettings()
ms.setOutputSize(QSize(100, 100))
ms.setExtent(QgsRectangle(0, 0, 1, 1))
self.assertTrue(ms.hasValidSettings())
u = QgsSnappingUtils()
u.setMapSettings(ms)
cfg = u.config()
cfg.setEnabled(True)
cfg.setMode(QgsSnappingConfig.AdvancedConfiguration)
cfg.setIndividualLayerSettings(self.pointsLayer,
QgsSnappingConfig.IndividualLayerSettings(True,
QgsSnappingConfig.Vertex, 20, QgsTolerance.Pixels))
cfg.setIndividualLayerSettings(self.pointsLayer2,
QgsSnappingConfig.IndividualLayerSettings(True,
QgsSnappingConfig.Vertex, 20, QgsTolerance.Pixels))
u.setConfig(cfg)
# add another line
f = QgsFeature(self.linesLayer.fields())
f.setId(4)
geom = QgsGeometry.fromWkt("LINESTRING(0.5 0.2,0.6 0)")
f.setGeometry(geom)
self.linesLayer.startEditing()
self.linesLayer.addFeatures([f])
self.linesLayer.commitChanges()
# check the second snapped point is OK
m = u.snapToMap(QPoint(75, 100 - 0))
self.assertTrue(m.isValid())
self.assertTrue(m.hasVertex())
self.assertEqual(m.point(), QgsPointXY(0.8, 0.0))
self.pointsLayer.setDependencies([])
self.pointsLayer2.setDependencies([])
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
sunze/py_flask | venv/lib/python3.4/site-packages/celery/concurrency/asynpool.py | 6 | 46754 | # -*- coding: utf-8 -*-
"""
celery.concurrency.asynpool
~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. note::
This module will be moved soon, so don't use it directly.
Non-blocking version of :class:`multiprocessing.Pool`.
This code deals with three major challenges:
1) Starting up child processes and keeping them running.
2) Sending jobs to the processes and receiving results back.
3) Safely shutting down this system.
"""
from __future__ import absolute_import
import errno
import os
import select
import socket
import struct
import sys
import time
from collections import deque, namedtuple
from io import BytesIO
from pickle import HIGHEST_PROTOCOL
from time import sleep
from weakref import WeakValueDictionary, ref
from amqp.utils import promise
from billiard.pool import RUN, TERMINATE, ACK, NACK, WorkersJoined
from billiard import pool as _pool
from billiard.compat import buf_t, setblocking, isblocking
from billiard.einfo import ExceptionInfo
from billiard.queues import _SimpleQueue
from kombu.async import READ, WRITE, ERR
from kombu.serialization import pickle as _pickle
from kombu.utils import fxrange
from kombu.utils.compat import get_errno
from kombu.utils.eventio import SELECT_BAD_FD
from celery.five import Counter, items, string_t, text_t, values
from celery.utils.log import get_logger
from celery.utils.text import truncate
from celery.worker import state as worker_state
try:
from _billiard import read as __read__
from struct import unpack_from as _unpack_from
memoryview = memoryview
readcanbuf = True
if sys.version_info[0] == 2 and sys.version_info < (2, 7, 6):
def unpack_from(fmt, view, _unpack_from=_unpack_from): # noqa
return _unpack_from(fmt, view.tobytes()) # <- memoryview
else:
# unpack_from supports memoryview in 2.7.6 and 3.3+
unpack_from = _unpack_from # noqa
except (ImportError, NameError): # pragma: no cover
def __read__(fd, buf, size, read=os.read): # noqa
chunk = read(fd, size)
n = len(chunk)
if n != 0:
buf.write(chunk)
return n
readcanbuf = False # noqa
def unpack_from(fmt, iobuf, unpack=struct.unpack): # noqa
return unpack(fmt, iobuf.getvalue()) # <-- BytesIO
logger = get_logger(__name__)
error, debug = logger.error, logger.debug
UNAVAIL = frozenset([errno.EAGAIN, errno.EINTR])
#: Constant sent by child process when started (ready to accept work)
WORKER_UP = 15
#: A process must have started before this timeout (in secs.) expires.
PROC_ALIVE_TIMEOUT = 4.0
SCHED_STRATEGY_PREFETCH = 1
SCHED_STRATEGY_FAIR = 4
SCHED_STRATEGIES = {
None: SCHED_STRATEGY_PREFETCH,
'fair': SCHED_STRATEGY_FAIR,
}
RESULT_MAXLEN = 128
Ack = namedtuple('Ack', ('id', 'fd', 'payload'))
def gen_not_started(gen):
# gi_frame is None when generator stopped.
return gen.gi_frame and gen.gi_frame.f_lasti == -1
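# Illustrative behaviour (assumed, not an excerpt from the test suite):
#   g = (x for x in (1,))   # gen_not_started(g) -> True (not yet advanced)
#   next(g)                 # gen_not_started(g) -> False afterwards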
def _get_job_writer(job):
try:
writer = job._writer
except AttributeError:
pass
else:
return writer() # is a weakref
def _select(readers=None, writers=None, err=None, timeout=0):
"""Simple wrapper to :class:`~select.select`.
:param readers: Set of reader fds to test if readable.
:param writers: Set of writer fds to test if writable.
:param err: Set of fds to test for error condition.
All fd sets passed must be mutable, as this function
removes non-working fds from them; this also means the
caller must make sure there are still fds in the sets
before calling us again.
:returns: tuple of ``(readable, writable, again)``, where
``readable`` is a set of fds that have data available for read,
``writable`` is a set of fds that are ready to be written to
and ``again`` is a flag that, if set, means the caller must
throw away the result and call us again.
"""
readers = set() if readers is None else readers
writers = set() if writers is None else writers
err = set() if err is None else err
try:
r, w, e = select.select(readers, writers, err, timeout)
if e:
r = list(set(r) | set(e))
return r, w, 0
except (select.error, socket.error) as exc:
if get_errno(exc) == errno.EINTR:
return [], [], 1
elif get_errno(exc) in SELECT_BAD_FD:
for fd in readers | writers | err:
try:
select.select([fd], [], [], 0)
except (select.error, socket.error) as exc:
if get_errno(exc) not in SELECT_BAD_FD:
raise
readers.discard(fd)
writers.discard(fd)
err.discard(fd)
return [], [], 1
else:
raise
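# Illustrative call (hypothetical fds, not code used by the pool itself):
#   readable, writable, again = _select({r_fd}, {w_fd}, timeout=0.1)
# a non-zero 'again' means the call was interrupted or bad fds were pruned
# from the sets, and the caller should discard the result and retry.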
def _repr_result(obj):
try:
return repr(obj)
except Exception as orig_exc:
try:
return text_t(obj)
except UnicodeDecodeError:
if isinstance(obj, string_t):
try:
return obj.decode('utf-8', errors='replace')
except Exception:
pass
return '<Unrepresentable: {0!r} (o.__repr__ returns unicode?)>'.format(
orig_exc,
)
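# Fallback chain of _repr_result, summarised (descriptive note, not new
# behaviour): repr() is tried first; if it raises, text_t() is tried; a
# byte string that cannot be decoded is re-decoded with errors='replace';
# anything still failing yields the '<Unrepresentable: ...>' marker naming
# the exception raised by __repr__.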
class Worker(_pool.Worker):
"""Pool worker process."""
dead = False
def on_loop_start(self, pid):
# our version sends a WORKER_UP message when the process is ready
# to accept work, this will tell the parent that the inqueue fd
# is writable.
self.outq.put((WORKER_UP, (pid, )))
def prepare_result(self, result, maxlen=RESULT_MAXLEN, truncate=truncate):
if not isinstance(result, ExceptionInfo):
return truncate(_repr_result(result), maxlen)
return result
class ResultHandler(_pool.ResultHandler):
"""Handles messages from the pool processes."""
def __init__(self, *args, **kwargs):
self.fileno_to_outq = kwargs.pop('fileno_to_outq')
self.on_process_alive = kwargs.pop('on_process_alive')
super(ResultHandler, self).__init__(*args, **kwargs)
# add our custom message handler
self.state_handlers[WORKER_UP] = self.on_process_alive
def _recv_message(self, add_reader, fd, callback,
__read__=__read__, readcanbuf=readcanbuf,
BytesIO=BytesIO, unpack_from=unpack_from,
load=_pickle.load):
Hr = Br = 0
if readcanbuf:
buf = bytearray(4)
bufv = memoryview(buf)
else:
buf = bufv = BytesIO()
# header
while Hr < 4:
try:
n = __read__(
fd, bufv[Hr:] if readcanbuf else bufv, 4 - Hr,
)
except OSError as exc:
if get_errno(exc) not in UNAVAIL:
raise
yield
else:
if n == 0:
raise (OSError('End of file during message') if Hr
else EOFError())
Hr += n
body_size, = unpack_from('>i', bufv)
if readcanbuf:
buf = bytearray(body_size)
bufv = memoryview(buf)
else:
buf = bufv = BytesIO()
while Br < body_size:
try:
n = __read__(
fd, bufv[Br:] if readcanbuf else bufv, body_size - Br,
)
except OSError as exc:
if get_errno(exc) not in UNAVAIL:
raise
yield
else:
if n == 0:
raise (OSError('End of file during message') if Br
else EOFError())
Br += n
add_reader(fd, self.handle_event, fd)
if readcanbuf:
message = load(BytesIO(bufv))
else:
bufv.seek(0)
message = load(bufv)
if message:
callback(message)
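    # Wire-format sketch (illustrative): each message read above is a
    # 4-byte big-endian length header followed by a pickled body, the
    # same framing produced by the parent's ``pack('>I', body_size)``:
    #
    #     body = pickle.dumps(payload)
    #     frame = struct.pack('>I', len(body)) + body   # what gets written
    #     size, = struct.unpack_from('>i', frame)       # header read here
    #     message = pickle.loads(frame[4:])             # body read here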
def _make_process_result(self, hub):
"""Coroutine that reads messages from the pool processes
and calls the appropriate handler."""
fileno_to_outq = self.fileno_to_outq
on_state_change = self.on_state_change
add_reader = hub.add_reader
remove_reader = hub.remove_reader
recv_message = self._recv_message
def on_result_readable(fileno):
try:
fileno_to_outq[fileno]
except KeyError: # process gone
return remove_reader(fileno)
it = recv_message(add_reader, fileno, on_state_change)
try:
next(it)
except StopIteration:
pass
except (IOError, OSError, EOFError):
remove_reader(fileno)
else:
add_reader(fileno, it)
return on_result_readable
def register_with_event_loop(self, hub):
self.handle_event = self._make_process_result(hub)
def handle_event(self, fileno):
raise RuntimeError('Not registered with event loop')
def on_stop_not_started(self):
"""This method is always used to stop when the helper thread is not
started."""
cache = self.cache
check_timeouts = self.check_timeouts
fileno_to_outq = self.fileno_to_outq
on_state_change = self.on_state_change
join_exited_workers = self.join_exited_workers
# flush the processes outqueues until they have all terminated.
outqueues = set(fileno_to_outq)
while cache and outqueues and self._state != TERMINATE:
if check_timeouts is not None:
# make sure tasks with a time limit will time out.
check_timeouts()
# cannot iterate and remove at the same time
pending_remove_fd = set()
for fd in outqueues:
self._flush_outqueue(
fd, pending_remove_fd.discard, fileno_to_outq,
on_state_change,
)
try:
join_exited_workers(shutdown=True)
except WorkersJoined:
return debug('result handler: all workers terminated')
outqueues.difference_update(pending_remove_fd)
def _flush_outqueue(self, fd, remove, process_index, on_state_change):
try:
proc = process_index[fd]
except KeyError:
# process already found terminated
# which means its outqueue has already been processed
# by the worker lost handler.
return remove(fd)
reader = proc.outq._reader
try:
setblocking(reader, 1)
except (OSError, IOError):
return remove(fd)
try:
if reader.poll(0):
task = reader.recv()
else:
task = None
sleep(0.5)
except (IOError, EOFError):
return remove(fd)
else:
if task:
on_state_change(task)
finally:
try:
setblocking(reader, 0)
except (OSError, IOError):
return remove(fd)
class AsynPool(_pool.Pool):
"""Pool version that uses AIO instead of helper threads."""
ResultHandler = ResultHandler
Worker = Worker
def __init__(self, processes=None, synack=False,
sched_strategy=None, *args, **kwargs):
self.sched_strategy = SCHED_STRATEGIES.get(sched_strategy,
sched_strategy)
processes = self.cpu_count() if processes is None else processes
self.synack = synack
# create queue-pairs for all our processes in advance.
self._queues = dict((self.create_process_queues(), None)
for _ in range(processes))
# inqueue fileno -> process mapping
self._fileno_to_inq = {}
# outqueue fileno -> process mapping
self._fileno_to_outq = {}
# synqueue fileno -> process mapping
self._fileno_to_synq = {}
# We keep track of processes that have not yet
# sent a WORKER_UP message. If a process fails to send
# this message within proc_up_timeout we terminate it
# and hope the next process will recover.
self._proc_alive_timeout = PROC_ALIVE_TIMEOUT
self._waiting_to_start = set()
# denormalized set of all inqueues.
self._all_inqueues = set()
# Set of fds being written to (busy)
self._active_writes = set()
# Set of active co-routines currently writing jobs.
self._active_writers = set()
# Set of fds that are busy (executing task)
self._busy_workers = set()
self._mark_worker_as_available = self._busy_workers.discard
# Holds jobs waiting to be written to child processes.
self.outbound_buffer = deque()
self.write_stats = Counter()
super(AsynPool, self).__init__(processes, *args, **kwargs)
for proc in self._pool:
# create initial mappings, these will be updated
# as processes are recycled, or found lost elsewhere.
self._fileno_to_outq[proc.outqR_fd] = proc
self._fileno_to_synq[proc.synqW_fd] = proc
self.on_soft_timeout = self._timeout_handler.on_soft_timeout
self.on_hard_timeout = self._timeout_handler.on_hard_timeout
def _event_process_exit(self, hub, fd):
# This method is called whenever the process sentinel is readable.
hub.remove(fd)
self.maintain_pool()
def register_with_event_loop(self, hub):
"""Registers the async pool with the current event loop."""
self._result_handler.register_with_event_loop(hub)
self.handle_result_event = self._result_handler.handle_event
self._create_timelimit_handlers(hub)
self._create_process_handlers(hub)
self._create_write_handlers(hub)
# Add handler for when a process exits (calls maintain_pool)
[hub.add_reader(fd, self._event_process_exit, hub, fd)
for fd in self.process_sentinels]
# Handle_result_event is called whenever one of the
# result queues are readable.
[hub.add_reader(fd, self.handle_result_event, fd)
for fd in self._fileno_to_outq]
# Timers include calling maintain_pool at a regular interval
# to be certain processes are restarted.
for handler, interval in items(self.timers):
hub.call_repeatedly(interval, handler)
hub.on_tick.add(self.on_poll_start)
def _create_timelimit_handlers(self, hub, now=time.time):
"""For async pool this sets up the handlers used
to implement time limits."""
call_later = hub.call_later
trefs = self._tref_for_id = WeakValueDictionary()
def on_timeout_set(R, soft, hard):
if soft:
trefs[R._job] = call_later(
soft, self._on_soft_timeout, R._job, soft, hard, hub,
)
elif hard:
trefs[R._job] = call_later(
hard, self._on_hard_timeout, R._job,
)
self.on_timeout_set = on_timeout_set
def _discard_tref(job):
try:
tref = trefs.pop(job)
tref.cancel()
del(tref)
except (KeyError, AttributeError):
pass # out of scope
self._discard_tref = _discard_tref
def on_timeout_cancel(R):
_discard_tref(R._job)
self.on_timeout_cancel = on_timeout_cancel
def _on_soft_timeout(self, job, soft, hard, hub, now=time.time):
# only used by async pool.
if hard:
self._tref_for_id[job] = hub.call_at(
now() + (hard - soft), self._on_hard_timeout, job,
)
try:
result = self._cache[job]
except KeyError:
pass # job ready
else:
self.on_soft_timeout(result)
finally:
if not hard:
# remove tref
self._discard_tref(job)
def _on_hard_timeout(self, job):
# only used by async pool.
try:
result = self._cache[job]
except KeyError:
pass # job ready
else:
self.on_hard_timeout(result)
finally:
# remove tref
self._discard_tref(job)
def on_job_ready(self, job, i, obj, inqW_fd):
self._mark_worker_as_available(inqW_fd)
def _create_process_handlers(self, hub, READ=READ, ERR=ERR):
"""For async pool this will create the handlers called
when a process is up/down and etc."""
add_reader, remove_reader, remove_writer = (
hub.add_reader, hub.remove_reader, hub.remove_writer,
)
cache = self._cache
all_inqueues = self._all_inqueues
fileno_to_inq = self._fileno_to_inq
fileno_to_outq = self._fileno_to_outq
fileno_to_synq = self._fileno_to_synq
busy_workers = self._busy_workers
event_process_exit = self._event_process_exit
handle_result_event = self.handle_result_event
process_flush_queues = self.process_flush_queues
waiting_to_start = self._waiting_to_start
def verify_process_alive(proc):
if proc._is_alive() and proc in waiting_to_start:
assert proc.outqR_fd in fileno_to_outq
assert fileno_to_outq[proc.outqR_fd] is proc
assert proc.outqR_fd in hub.readers
error('Timed out waiting for UP message from %r', proc)
os.kill(proc.pid, 9)
def on_process_up(proc):
"""Called when a process has started."""
# If we got the same fd as a previous process then we will also
# receive jobs in the old buffer, so we need to reset the
# job._write_to and job._scheduled_for attributes used to recover
# message boundaries when processes exit.
infd = proc.inqW_fd
for job in values(cache):
if job._write_to and job._write_to.inqW_fd == infd:
job._write_to = proc
if job._scheduled_for and job._scheduled_for.inqW_fd == infd:
job._scheduled_for = proc
fileno_to_outq[proc.outqR_fd] = proc
# maintain_pool is called whenever a process exits.
add_reader(
proc.sentinel, event_process_exit, hub, proc.sentinel,
)
assert not isblocking(proc.outq._reader)
# handle_result_event is called when the processes outqueue is
# readable.
add_reader(proc.outqR_fd, handle_result_event, proc.outqR_fd)
waiting_to_start.add(proc)
hub.call_later(
self._proc_alive_timeout, verify_process_alive, proc,
)
self.on_process_up = on_process_up
def _remove_from_index(obj, proc, index, remove_fun, callback=None):
            # this removes the file descriptors for a process from
            # the indices. We have to make sure we don't touch
            # another process's fds, as fds may be reused.
try:
fd = obj.fileno()
except (IOError, OSError):
return
try:
if index[fd] is proc:
# fd has not been reused so we can remove it from index.
index.pop(fd, None)
except KeyError:
pass
else:
remove_fun(fd)
if callback is not None:
callback(fd)
return fd
def on_process_down(proc):
"""Called when a worker process exits."""
if proc.dead:
return
process_flush_queues(proc)
_remove_from_index(
proc.outq._reader, proc, fileno_to_outq, remove_reader,
)
if proc.synq:
_remove_from_index(
proc.synq._writer, proc, fileno_to_synq, remove_writer,
)
inq = _remove_from_index(
proc.inq._writer, proc, fileno_to_inq, remove_writer,
callback=all_inqueues.discard,
)
if inq:
busy_workers.discard(inq)
remove_reader(proc.sentinel)
waiting_to_start.discard(proc)
self._active_writes.discard(proc.inqW_fd)
remove_writer(proc.inqW_fd)
remove_reader(proc.outqR_fd)
if proc.synqR_fd:
remove_reader(proc.synqR_fd)
if proc.synqW_fd:
self._active_writes.discard(proc.synqW_fd)
remove_reader(proc.synqW_fd)
self.on_process_down = on_process_down
def _create_write_handlers(self, hub,
pack=struct.pack, dumps=_pickle.dumps,
protocol=HIGHEST_PROTOCOL):
"""For async pool this creates the handlers used to write data to
child processes."""
fileno_to_inq = self._fileno_to_inq
fileno_to_synq = self._fileno_to_synq
outbound = self.outbound_buffer
pop_message = outbound.popleft
append_message = outbound.append
put_back_message = outbound.appendleft
all_inqueues = self._all_inqueues
active_writes = self._active_writes
active_writers = self._active_writers
busy_workers = self._busy_workers
diff = all_inqueues.difference
add_writer = hub.add_writer
hub_add, hub_remove = hub.add, hub.remove
mark_write_fd_as_active = active_writes.add
mark_write_gen_as_active = active_writers.add
mark_worker_as_busy = busy_workers.add
write_generator_done = active_writers.discard
get_job = self._cache.__getitem__
write_stats = self.write_stats
is_fair_strategy = self.sched_strategy == SCHED_STRATEGY_FAIR
revoked_tasks = worker_state.revoked
getpid = os.getpid
precalc = {ACK: self._create_payload(ACK, (0, )),
NACK: self._create_payload(NACK, (0, ))}
def _put_back(job, _time=time.time):
# puts back at the end of the queue
if job._terminated is not None or \
job.correlation_id in revoked_tasks:
if not job._accepted:
job._ack(None, _time(), getpid(), None)
job._set_terminated(job._terminated)
else:
# XXX linear lookup, should find a better way,
# but this happens rarely and is here to protect against races.
if job not in outbound:
outbound.appendleft(job)
self._put_back = _put_back
# called for every event loop iteration, and if there
# are messages pending this will schedule writing one message
# by registering the 'schedule_writes' function for all currently
        # inactive inqueues (not already being written to).
        # 'consolidate' means the event loop will merge them
        # and call the callback once with the list of writable fds as
        # argument. Using this means we minimize the risk of having
# the same fd receive every task if the pipe read buffer is not
# full.
if is_fair_strategy:
def on_poll_start():
if outbound and len(busy_workers) < len(all_inqueues):
inactive = diff(active_writes)
[hub_add(fd, None, WRITE | ERR, consolidate=True)
for fd in inactive]
else:
[hub_remove(fd) for fd in diff(active_writes)]
else:
def on_poll_start(): # noqa
if outbound:
[hub_add(fd, None, WRITE | ERR, consolidate=True)
for fd in diff(active_writes)]
else:
[hub_remove(fd) for fd in diff(active_writes)]
self.on_poll_start = on_poll_start
def on_inqueue_close(fd, proc):
            # Makes sure the fd is removed from tracking when
            # the connection is closed; this is essential as fds may be reused.
busy_workers.discard(fd)
try:
if fileno_to_inq[fd] is proc:
fileno_to_inq.pop(fd, None)
active_writes.discard(fd)
all_inqueues.discard(fd)
hub_remove(fd)
except KeyError:
pass
self.on_inqueue_close = on_inqueue_close
def schedule_writes(ready_fds, curindex=[0]):
# Schedule write operation to ready file descriptor.
# The file descriptor is writeable, but that does not
# mean the process is currently reading from the socket.
# The socket is buffered so writeable simply means that
# the buffer can accept at least 1 byte of data.
# This means we have to cycle between the ready fds.
# the first version used shuffle, but using i % total
# is about 30% faster with many processes. The latter
# also shows more fairness in write stats when used with
# many processes [XXX On OS X, this may vary depending
            # on event loop implementation (i.e. select vs epoll), so
# have to test further]
total = len(ready_fds)
for i in range(total):
ready_fd = ready_fds[curindex[0] % total]
if ready_fd in active_writes:
# already writing to this fd
curindex[0] += 1
continue
if is_fair_strategy and ready_fd in busy_workers:
# worker is already busy with another task
curindex[0] += 1
continue
if ready_fd not in all_inqueues:
hub_remove(ready_fd)
curindex[0] += 1
continue
try:
job = pop_message()
except IndexError:
# no more messages, remove all inactive fds from the hub.
# this is important since the fds are always writeable
# as long as there's 1 byte left in the buffer, and so
# this may create a spinloop where the event loop
# always wakes up.
for inqfd in diff(active_writes):
hub_remove(inqfd)
break
else:
if not job._accepted: # job not accepted by another worker
try:
# keep track of what process the write operation
# was scheduled for.
proc = job._scheduled_for = fileno_to_inq[ready_fd]
except KeyError:
# write was scheduled for this fd but the process
# has since exited and the message must be sent to
# another process.
put_back_message(job)
curindex[0] += 1
continue
cor = _write_job(proc, ready_fd, job)
job._writer = ref(cor)
mark_write_gen_as_active(cor)
mark_write_fd_as_active(ready_fd)
mark_worker_as_busy(ready_fd)
# Try to write immediately, in case there's an error.
try:
next(cor)
except StopIteration:
pass
except OSError as exc:
if get_errno(exc) != errno.EBADF:
raise
else:
add_writer(ready_fd, cor)
curindex[0] += 1
hub.consolidate_callback = schedule_writes
def send_job(tup):
# Schedule writing job request for when one of the process
# inqueues are writable.
body = dumps(tup, protocol=protocol)
body_size = len(body)
header = pack('>I', body_size)
# index 1,0 is the job ID.
job = get_job(tup[1][0])
job._payload = buf_t(header), buf_t(body), body_size
append_message(job)
self._quick_put = send_job
def on_not_recovering(proc, fd, job):
error('Process inqueue damaged: %r %r' % (proc, proc.exitcode))
if proc._is_alive():
proc.terminate()
hub.remove(fd)
self._put_back(job)
def _write_job(proc, fd, job):
# writes job to the worker process.
# Operation must complete if more than one byte of data
# was written. If the broker connection is lost
            # and no data was written, the operation shall be cancelled.
header, body, body_size = job._payload
errors = 0
try:
# job result keeps track of what process the job is sent to.
job._write_to = proc
send = proc.send_job_offset
Hw = Bw = 0
# write header
while Hw < 4:
try:
Hw += send(header, Hw)
except Exception as exc:
if get_errno(exc) not in UNAVAIL:
raise
# suspend until more data
errors += 1
if errors > 100:
on_not_recovering(proc, fd, job)
raise StopIteration()
yield
else:
errors = 0
# write body
while Bw < body_size:
try:
Bw += send(body, Bw)
except Exception as exc:
if get_errno(exc) not in UNAVAIL:
raise
# suspend until more data
errors += 1
if errors > 100:
on_not_recovering(proc, fd, job)
raise StopIteration()
yield
else:
errors = 0
finally:
hub_remove(fd)
write_stats[proc.index] += 1
# message written, so this fd is now available
active_writes.discard(fd)
write_generator_done(job._writer()) # is a weakref
def send_ack(response, pid, job, fd, WRITE=WRITE, ERR=ERR):
# Only used when synack is enabled.
# Schedule writing ack response for when the fd is writeable.
msg = Ack(job, fd, precalc[response])
callback = promise(write_generator_done)
cor = _write_ack(fd, msg, callback=callback)
mark_write_gen_as_active(cor)
mark_write_fd_as_active(fd)
callback.args = (cor, )
add_writer(fd, cor)
self.send_ack = send_ack
def _write_ack(fd, ack, callback=None):
# writes ack back to the worker if synack enabled.
# this operation *MUST* complete, otherwise
# the worker process will hang waiting for the ack.
header, body, body_size = ack[2]
try:
try:
proc = fileno_to_synq[fd]
except KeyError:
# process died, we can safely discard the ack at this
# point.
raise StopIteration()
send = proc.send_syn_offset
Hw = Bw = 0
# write header
while Hw < 4:
try:
Hw += send(header, Hw)
except Exception as exc:
if get_errno(exc) not in UNAVAIL:
raise
yield
# write body
while Bw < body_size:
try:
Bw += send(body, Bw)
except Exception as exc:
if get_errno(exc) not in UNAVAIL:
raise
# suspend until more data
yield
finally:
if callback:
callback()
# message written, so this fd is now available
active_writes.discard(fd)
def flush(self):
if self._state == TERMINATE:
return
# cancel all tasks that have not been accepted so that NACK is sent.
for job in values(self._cache):
if not job._accepted:
job._cancel()
# clear the outgoing buffer as the tasks will be redelivered by
# the broker anyway.
if self.outbound_buffer:
self.outbound_buffer.clear()
self.maintain_pool()
try:
# ...but we must continue writing the payloads we already started
# to keep message boundaries.
# The messages may be NACK'ed later if synack is enabled.
if self._state == RUN:
# flush outgoing buffers
intervals = fxrange(0.01, 0.1, 0.01, repeatlast=True)
owned_by = {}
for job in values(self._cache):
writer = _get_job_writer(job)
if writer is not None:
owned_by[writer] = job
while self._active_writers:
writers = list(self._active_writers)
for gen in writers:
if (gen.__name__ == '_write_job' and
gen_not_started(gen)):
# has not started writing the job so can
# discard the task, but we must also remove
# it from the Pool._cache.
try:
job = owned_by[gen]
except KeyError:
pass
else:
# removes from Pool._cache
job.discard()
self._active_writers.discard(gen)
else:
try:
job = owned_by[gen]
except KeyError:
pass
else:
job_proc = job._write_to
if job_proc._is_alive():
self._flush_writer(job_proc, gen)
# workers may have exited in the meantime.
self.maintain_pool()
sleep(next(intervals)) # don't busyloop
finally:
self.outbound_buffer.clear()
self._active_writers.clear()
self._active_writes.clear()
self._busy_workers.clear()
def _flush_writer(self, proc, writer):
fds = set([proc.inq._writer])
try:
while fds:
if not proc._is_alive():
break # process exited
readable, writable, again = _select(
writers=fds, err=fds, timeout=0.5,
)
if not again and (writable or readable):
try:
next(writer)
except (StopIteration, OSError, IOError, EOFError):
break
finally:
self._active_writers.discard(writer)
def get_process_queues(self):
"""Get queues for a new process.
Here we will find an unused slot, as there should always
be one available when we start a new process.
"""
return next(q for q, owner in items(self._queues)
if owner is None)
def on_grow(self, n):
"""Grow the pool by ``n`` proceses."""
diff = max(self._processes - len(self._queues), 0)
if diff:
self._queues.update(
dict((self.create_process_queues(), None) for _ in range(diff))
)
def on_shrink(self, n):
"""Shrink the pool by ``n`` processes."""
pass
def create_process_queues(self):
"""Creates new in, out (and optionally syn) queues,
returned as a tuple."""
# NOTE: Pipes must be set O_NONBLOCK at creation time (the original
# fd), otherwise it will not be possible to change the flags until
# there is an actual reader/writer on the other side.
inq = _SimpleQueue(wnonblock=True)
outq = _SimpleQueue(rnonblock=True)
synq = None
assert isblocking(inq._reader)
assert not isblocking(inq._writer)
assert not isblocking(outq._reader)
assert isblocking(outq._writer)
if self.synack:
synq = _SimpleQueue(wnonblock=True)
assert isblocking(synq._reader)
assert not isblocking(synq._writer)
return inq, outq, synq
def on_process_alive(self, pid):
"""Handler called when the :const:`WORKER_UP` message is received
from a child process, which marks the process as ready
to receive work."""
try:
proc = next(w for w in self._pool if w.pid == pid)
except StopIteration:
return logger.warning('process with pid=%s already exited', pid)
assert proc.inqW_fd not in self._fileno_to_inq
assert proc.inqW_fd not in self._all_inqueues
self._waiting_to_start.discard(proc)
self._fileno_to_inq[proc.inqW_fd] = proc
self._fileno_to_synq[proc.synqW_fd] = proc
self._all_inqueues.add(proc.inqW_fd)
def on_job_process_down(self, job, pid_gone):
"""Handler called for each job when the process it was assigned to
exits."""
if job._write_to and not job._write_to._is_alive():
# job was partially written
self.on_partial_read(job, job._write_to)
elif job._scheduled_for and not job._scheduled_for._is_alive():
# job was only scheduled to be written to this process,
# but no data was sent so put it back on the outbound_buffer.
self._put_back(job)
def on_job_process_lost(self, job, pid, exitcode):
"""Handler called for each *started* job when the process it
was assigned to exited by mysterious means (error exitcodes and
signals)"""
self.mark_as_worker_lost(job, exitcode)
def human_write_stats(self):
if self.write_stats is None:
return 'N/A'
vals = list(values(self.write_stats))
total = sum(vals)
def per(v, total):
return '{0:.2f}%'.format((float(v) / total) * 100.0 if v else 0)
return {
'total': total,
'avg': per(total / len(self.write_stats) if total else 0, total),
'all': ', '.join(per(v, total) for v in vals),
'raw': ', '.join(map(str, vals)),
'inqueues': {
'total': len(self._all_inqueues),
'active': len(self._active_writes),
}
}
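    # Example of the returned mapping (illustrative numbers for a pool of
    # 8 processes; 'avg' is each process's average share of all writes):
    #
    #     {'total': 1842, 'avg': '12.50%',
    #      'all': '12.61%, 12.39%, ...', 'raw': '232, 228, ...',
    #      'inqueues': {'total': 8, 'active': 3}}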
def _process_cleanup_queues(self, proc):
"""Handler called to clean up a processes queues after process
exit."""
if not proc.dead:
try:
self._queues[self._find_worker_queues(proc)] = None
except (KeyError, ValueError):
pass
@staticmethod
def _stop_task_handler(task_handler):
"""Called at shutdown to tell processes that we are shutting down."""
for proc in task_handler.pool:
try:
setblocking(proc.inq._writer, 1)
except (OSError, IOError):
pass
else:
try:
proc.inq.put(None)
except OSError as exc:
if get_errno(exc) != errno.EBADF:
raise
def create_result_handler(self):
return super(AsynPool, self).create_result_handler(
fileno_to_outq=self._fileno_to_outq,
on_process_alive=self.on_process_alive,
)
def _process_register_queues(self, proc, queues):
"""Marks new ownership for ``queues`` so that the fileno indices are
updated."""
assert queues in self._queues
b = len(self._queues)
self._queues[queues] = proc
assert b == len(self._queues)
def _find_worker_queues(self, proc):
"""Find the queues owned by ``proc``."""
try:
return next(q for q, owner in items(self._queues)
if owner == proc)
except StopIteration:
raise ValueError(proc)
def _setup_queues(self):
# this is only used by the original pool which uses a shared
# queue for all processes.
        # these attributes make no sense for us, but we will still
# have to initialize them.
self._inqueue = self._outqueue = \
self._quick_put = self._quick_get = self._poll_result = None
def process_flush_queues(self, proc):
"""Flushes all queues, including the outbound buffer, so that
all tasks that have not been started will be discarded.
In Celery this is called whenever the transport connection is lost
(consumer restart).
"""
resq = proc.outq._reader
on_state_change = self._result_handler.on_state_change
fds = set([resq])
while fds and not resq.closed and self._state != TERMINATE:
readable, _, again = _select(fds, None, fds, timeout=0.01)
if readable:
try:
task = resq.recv()
except (OSError, IOError, EOFError) as exc:
if get_errno(exc) == errno.EINTR:
continue
elif get_errno(exc) == errno.EAGAIN:
break
else:
debug('got %r while flushing process %r',
exc, proc, exc_info=1)
if get_errno(exc) not in UNAVAIL:
debug('got %r while flushing process %r',
exc, proc, exc_info=1)
break
else:
if task is None:
debug('got sentinel while flushing process %r', proc)
break
else:
on_state_change(task)
else:
break
def on_partial_read(self, job, proc):
"""Called when a job was only partially written to a child process
and it exited."""
# worker terminated by signal:
# we cannot reuse the sockets again, because we don't know if
        # the process wrote/read anything from them, and if so we cannot
# restore the message boundaries.
if not job._accepted:
# job was not acked, so find another worker to send it to.
self._put_back(job)
writer = _get_job_writer(job)
if writer:
self._active_writers.discard(writer)
del(writer)
if not proc.dead:
proc.dead = True
# Replace queues to avoid reuse
before = len(self._queues)
try:
queues = self._find_worker_queues(proc)
if self.destroy_queues(queues, proc):
self._queues[self.create_process_queues()] = None
except ValueError:
pass
assert len(self._queues) == before
def destroy_queues(self, queues, proc):
"""Destroy queues that can no longer be used, so that they
be replaced by new sockets."""
assert not proc._is_alive()
self._waiting_to_start.discard(proc)
removed = 1
try:
self._queues.pop(queues)
except KeyError:
removed = 0
try:
self.on_inqueue_close(queues[0]._writer.fileno(), proc)
except IOError:
pass
for queue in queues:
if queue:
for sock in (queue._reader, queue._writer):
if not sock.closed:
try:
sock.close()
except (IOError, OSError):
pass
return removed
def _create_payload(self, type_, args,
dumps=_pickle.dumps, pack=struct.pack,
protocol=HIGHEST_PROTOCOL):
body = dumps((type_, args), protocol=protocol)
size = len(body)
header = pack('>I', size)
return header, body, size
@classmethod
def _set_result_sentinel(cls, _outqueue, _pool):
# unused
pass
def _help_stuff_finish_args(self):
# Pool._help_stuff_finished is a classmethod so we have to use this
# trick to modify the arguments passed to it.
return (self._pool, )
@classmethod
def _help_stuff_finish(cls, pool):
debug(
'removing tasks from inqueue until task handler finished',
)
fileno_to_proc = {}
inqR = set()
for w in pool:
try:
fd = w.inq._reader.fileno()
inqR.add(fd)
fileno_to_proc[fd] = w
except IOError:
pass
while inqR:
readable, _, again = _select(inqR, timeout=0.5)
if again:
continue
if not readable:
break
for fd in readable:
fileno_to_proc[fd].inq._reader.recv()
sleep(0)
@property
def timers(self):
return {self.maintain_pool: 5.0}
| mit |
JieweiWei/googletest | test/gtest_catch_exceptions_test.py | 2139 | 9901 | #!/usr/bin/env python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests Google Test's exception catching behavior.
This script invokes gtest_catch_exceptions_test_ and
gtest_catch_exceptions_ex_test_ (programs written with
Google Test) and verifies their output.
"""
__author__ = '[email protected] (Vlad Losev)'
import os
import gtest_test_utils
# Constants.
FLAG_PREFIX = '--gtest_'
LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'
NO_CATCH_EXCEPTIONS_FLAG = FLAG_PREFIX + 'catch_exceptions=0'
FILTER_FLAG = FLAG_PREFIX + 'filter'
# Path to the gtest_catch_exceptions_ex_test_ binary, compiled with
# exceptions enabled.
EX_EXE_PATH = gtest_test_utils.GetTestExecutablePath(
'gtest_catch_exceptions_ex_test_')
# Path to the gtest_catch_exceptions_test_ binary, compiled with
# exceptions disabled.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
'gtest_catch_exceptions_no_ex_test_')
environ = gtest_test_utils.environ
SetEnvVar = gtest_test_utils.SetEnvVar
# Tests in this file run a Google-Test-based test program and expect it
# to terminate prematurely. Therefore they are incompatible with
# the premature-exit-file protocol by design. Unset the
# premature-exit filepath to prevent Google Test from creating
# the file.
SetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)
TEST_LIST = gtest_test_utils.Subprocess(
[EXE_PATH, LIST_TESTS_FLAG], env=environ).output
SUPPORTS_SEH_EXCEPTIONS = 'ThrowsSehException' in TEST_LIST
if SUPPORTS_SEH_EXCEPTIONS:
BINARY_OUTPUT = gtest_test_utils.Subprocess([EXE_PATH], env=environ).output
EX_BINARY_OUTPUT = gtest_test_utils.Subprocess(
[EX_EXE_PATH], env=environ).output
# The tests.
if SUPPORTS_SEH_EXCEPTIONS:
# pylint:disable-msg=C6302
class CatchSehExceptionsTest(gtest_test_utils.TestCase):
"""Tests exception-catching behavior."""
def TestSehExceptions(self, test_output):
self.assert_('SEH exception with code 0x2a thrown '
'in the test fixture\'s constructor'
in test_output)
self.assert_('SEH exception with code 0x2a thrown '
'in the test fixture\'s destructor'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in SetUpTestCase()'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in TearDownTestCase()'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in SetUp()'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in TearDown()'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in the test body'
in test_output)
def testCatchesSehExceptionsWithCxxExceptionsEnabled(self):
self.TestSehExceptions(EX_BINARY_OUTPUT)
def testCatchesSehExceptionsWithCxxExceptionsDisabled(self):
self.TestSehExceptions(BINARY_OUTPUT)
class CatchCxxExceptionsTest(gtest_test_utils.TestCase):
"""Tests C++ exception-catching behavior.
Tests in this test case verify that:
* C++ exceptions are caught and logged as C++ (not SEH) exceptions
  * Exceptions thrown affect the remainder of the test workflow in the
    expected manner.
"""
def testCatchesCxxExceptionsInFixtureConstructor(self):
self.assert_('C++ exception with description '
'"Standard C++ exception" thrown '
'in the test fixture\'s constructor'
in EX_BINARY_OUTPUT)
self.assert_('unexpected' not in EX_BINARY_OUTPUT,
'This failure belongs in this test only if '
'"CxxExceptionInConstructorTest" (no quotes) '
'appears on the same line as words "called unexpectedly"')
if ('CxxExceptionInDestructorTest.ThrowsExceptionInDestructor' in
EX_BINARY_OUTPUT):
def testCatchesCxxExceptionsInFixtureDestructor(self):
self.assert_('C++ exception with description '
'"Standard C++ exception" thrown '
'in the test fixture\'s destructor'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInDestructorTest::TearDownTestCase() '
'called as expected.'
in EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInSetUpTestCase(self):
self.assert_('C++ exception with description "Standard C++ exception"'
' thrown in SetUpTestCase()'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInConstructorTest::TearDownTestCase() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTestCaseTest constructor '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTestCaseTest destructor '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTestCaseTest::SetUp() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTestCaseTest::TearDown() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTestCaseTest test body '
'called as expected.'
in EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInTearDownTestCase(self):
self.assert_('C++ exception with description "Standard C++ exception"'
' thrown in TearDownTestCase()'
in EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInSetUp(self):
self.assert_('C++ exception with description "Standard C++ exception"'
' thrown in SetUp()'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTest::TearDownTestCase() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTest destructor '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTest::TearDown() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('unexpected' not in EX_BINARY_OUTPUT,
'This failure belongs in this test only if '
'"CxxExceptionInSetUpTest" (no quotes) '
'appears on the same line as words "called unexpectedly"')
def testCatchesCxxExceptionsInTearDown(self):
self.assert_('C++ exception with description "Standard C++ exception"'
' thrown in TearDown()'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInTearDownTest::TearDownTestCase() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInTearDownTest destructor '
'called as expected.'
in EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInTestBody(self):
self.assert_('C++ exception with description "Standard C++ exception"'
' thrown in the test body'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInTestBodyTest::TearDownTestCase() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInTestBodyTest destructor '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInTestBodyTest::TearDown() '
'called as expected.'
in EX_BINARY_OUTPUT)
def testCatchesNonStdCxxExceptions(self):
self.assert_('Unknown C++ exception thrown in the test body'
in EX_BINARY_OUTPUT)
def testUnhandledCxxExceptionsAbortTheProgram(self):
# Filters out SEH exception tests on Windows. Unhandled SEH exceptions
# cause tests to show pop-up windows there.
    FILTER_OUT_SEH_TESTS_FLAG = FILTER_FLAG + '=-*Seh*'
# By default, Google Test doesn't catch the exceptions.
uncaught_exceptions_ex_binary_output = gtest_test_utils.Subprocess(
[EX_EXE_PATH,
NO_CATCH_EXCEPTIONS_FLAG,
         FILTER_OUT_SEH_TESTS_FLAG],
env=environ).output
self.assert_('Unhandled C++ exception terminating the program'
in uncaught_exceptions_ex_binary_output)
self.assert_('unexpected' not in uncaught_exceptions_ex_binary_output)
if __name__ == '__main__':
gtest_test_utils.Main()
| bsd-3-clause |
vmturbo/nova | nova/tests/functional/regressions/test_bug_1522536.py | 4 | 2538 | # Copyright 2016 HPE, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import nova.scheduler.utils
import nova.servicegroup
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional.api import client
from nova.tests.unit import cast_as_call
import nova.tests.unit.image.fake
from nova.tests.unit import policy_fixture
class TestServerGet(test.TestCase):
REQUIRES_LOCKING = True
def setUp(self):
super(TestServerGet, self).setUp()
self.useFixture(policy_fixture.RealPolicyFixture())
self.useFixture(nova_fixtures.NeutronFixture(self))
api_fixture = self.useFixture(nova_fixtures.OSAPIFixture(
api_version='v2.1'))
self.api = api_fixture.api
# the image fake backend needed for image discovery
nova.tests.unit.image.fake.stub_out_image_service(self)
self.start_service('conductor')
self.flags(driver='chance_scheduler', group='scheduler')
self.start_service('scheduler')
self.compute = self.start_service('compute')
self.useFixture(cast_as_call.CastAsCall(self.stubs))
self.addCleanup(nova.tests.unit.image.fake.FakeImageService_reset)
self.image_id = self.api.get_images()[0]['id']
self.flavor_id = self.api.get_flavors()[0]['id']
def test_id_overlap(self):
"""Regression test for bug #1522536.
Before fixing this bug, getting a numeric id caused a 500
error because it treated the numeric value as the db index,
fetched the server, but then processing of extensions blew up.
        Since we have fixed this bug, it returns a 404, which is
expected. In future a 400 might be more appropriate.
"""
server = dict(name='server1',
imageRef=self.image_id,
flavorRef=self.flavor_id)
self.api.post_server({'server': server})
self.assertRaises(client.OpenStackApiNotFoundException,
self.api.get_server, 1)
| apache-2.0 |
akretion/odoo | addons/website_hr_recruitment/models/hr_recruitment.py | 10 | 2093 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from werkzeug import urls
from odoo import api, fields, models
from odoo.tools.translate import html_translate
class RecruitmentSource(models.Model):
_inherit = 'hr.recruitment.source'
url = fields.Char(compute='_compute_url', string='Url Parameters')
@api.one
@api.depends('source_id', 'source_id.name', 'job_id')
def _compute_url(self):
base_url = self.env['ir.config_parameter'].sudo().get_param('web.base.url')
for source in self:
source.url = urls.url_join(base_url, "%s?%s" % (source.job_id.website_url,
urls.url_encode({
'utm_campaign': self.env.ref('hr_recruitment.utm_campaign_job').name,
'utm_medium': self.env.ref('utm.utm_medium_website').name,
'utm_source': source.source_id.name
})
))
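# Illustrative result (hypothetical source and campaign names): for a job
# published at /jobs/detail/3 with a source named "LinkedIn", the computed
# url resembles
# https://example.com/jobs/detail/3?utm_campaign=Job+Campaign&utm_medium=Website&utm_source=LinkedIn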
class Applicant(models.Model):
_inherit = 'hr.applicant'
def website_form_input_filter(self, request, values):
if 'partner_name' in values:
values.setdefault('name', '%s\'s Application' % values['partner_name'])
return values
class Job(models.Model):
_name = 'hr.job'
_inherit = ['hr.job', 'website.seo.metadata', 'website.published.multi.mixin']
def _get_default_website_description(self):
default_description = self.env["ir.model.data"].xmlid_to_object("website_hr_recruitment.default_website_description")
return (default_description.render() if default_description else "")
website_description = fields.Html('Website description', translate=html_translate, sanitize_attributes=False, default=_get_default_website_description, prefetch=False)
@api.multi
def _compute_website_url(self):
super(Job, self)._compute_website_url()
for job in self:
job.website_url = "/jobs/detail/%s" % job.id
@api.multi
def set_open(self):
self.write({'website_published': False})
return super(Job, self).set_open()
| agpl-3.0 |
gregbuehler/ansible-modules-extras | packaging/pacman.py | 22 | 7083 | #!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# (c) 2012, Afterburn <http://github.com/afterburn>
# (c) 2013, Aaron Bull Schaefer <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: pacman
short_description: Manage packages with I(pacman)
description:
- Manage packages with the I(pacman) package manager, which is used by
Arch Linux and its variants.
version_added: "1.0"
author: Afterburn
notes: []
requirements: []
options:
name:
description:
- Name of the package to install, upgrade, or remove.
required: false
default: null
state:
description:
- Desired state of the package.
required: false
default: "present"
choices: ["present", "absent"]
recurse:
description:
- When removing a package, also remove its dependencies, provided
that they are not required by other packages and were not
explicitly installed by a user.
required: false
default: "no"
choices: ["yes", "no"]
version_added: "1.3"
update_cache:
description:
- Whether or not to refresh the master package lists. This can be
run as part of a package installation or as a separate step.
required: false
default: "no"
choices: ["yes", "no"]
'''
EXAMPLES = '''
# Install package foo
- pacman: name=foo state=present
# Remove packages foo and bar
- pacman: name=foo,bar state=absent
# Recursively remove package baz
- pacman: name=baz state=absent recurse=yes
# Run the equivalent of "pacman -Syy" as a separate step
- pacman: update_cache=yes
'''
import json
import shlex
import os
import re
import sys
PACMAN_PATH = "/usr/bin/pacman"
def query_package(module, name, state="present"):
# pacman -Q returns 0 if the package is installed,
# 1 if it is not installed
if state == "present":
cmd = "pacman -Q %s" % (name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
return True
return False
def update_package_db(module):
cmd = "pacman -Syy"
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
return True
else:
module.fail_json(msg="could not update package db")
def remove_packages(module, packages):
if module.params["recurse"]:
args = "Rs"
else:
args = "R"
remove_c = 0
    # Using a for loop in case of error, so we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
if not query_package(module, package):
continue
cmd = "pacman -%s %s --noconfirm" % (args, package)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to remove %s" % (package))
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, packages, package_files):
install_c = 0
for i, package in enumerate(packages):
if query_package(module, package):
continue
if package_files[i]:
params = '-U %s' % package_files[i]
else:
params = '-S %s' % package
cmd = "pacman %s --noconfirm" % (params)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to install %s" % (package))
install_c += 1
if install_c > 0:
module.exit_json(changed=True, msg="installed %s package(s)" % (install_c))
module.exit_json(changed=False, msg="package(s) already installed")
def check_packages(module, packages, state):
would_be_changed = []
for package in packages:
installed = query_package(module, package)
if ((state == "present" and not installed) or
(state == "absent" and installed)):
would_be_changed.append(package)
if would_be_changed:
if state == "absent":
state = "removed"
module.exit_json(changed=True, msg="%s package(s) would be %s" % (
len(would_be_changed), state))
else:
        module.exit_json(changed=False, msg="package(s) already %s" % state)
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(aliases=['pkg']),
state = dict(default='present', choices=['present', 'installed', 'absent', 'removed']),
recurse = dict(default='no', choices=BOOLEANS, type='bool'),
update_cache = dict(default='no', aliases=['update-cache'], choices=BOOLEANS, type='bool')),
required_one_of = [['name', 'update_cache']],
supports_check_mode = True)
if not os.path.exists(PACMAN_PATH):
module.fail_json(msg="cannot find pacman, looking for %s" % (PACMAN_PATH))
p = module.params
# normalize the state parameter
if p['state'] in ['present', 'installed']:
p['state'] = 'present'
elif p['state'] in ['absent', 'removed']:
p['state'] = 'absent'
if p["update_cache"] and not module.check_mode:
update_package_db(module)
if not p['name']:
module.exit_json(changed=True, msg='updated the package master lists')
if p['update_cache'] and module.check_mode and not p['name']:
module.exit_json(changed=True, msg='Would have updated the package cache')
if p['name']:
pkgs = p['name'].split(',')
pkg_files = []
for i, pkg in enumerate(pkgs):
if pkg.endswith('.pkg.tar.xz'):
# The package given is a filename, extract the raw pkg name from
# it and store the filename
pkg_files.append(pkg)
pkgs[i] = re.sub('-[0-9].*$', '', pkgs[i].split('/')[-1])
else:
pkg_files.append(None)
if module.check_mode:
check_packages(module, pkgs, p['state'])
if p['state'] == 'present':
install_packages(module, pkgs, pkg_files)
elif p['state'] == 'absent':
remove_packages(module, pkgs)
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
JazzeYoung/VeryDeepAutoEncoder | pylearn2/train_extensions/window_flip.py | 41 | 7218 | """ TrainExtensions for doing random spatial windowing and flipping of an
image dataset on every epoch. TODO: fill out properly."""
import warnings
import numpy
from . import TrainExtension
from pylearn2.datasets.preprocessing import CentralWindow
from pylearn2.utils.exc import reraise_as
from pylearn2.utils.rng import make_np_rng
from pylearn2.utils import py_integer_types
try:
from ..utils._window_flip import random_window_and_flip_c01b
from ..utils._window_flip import random_window_and_flip_b01c
except ImportError:
reraise_as(ImportError("Import of Cython module failed. Please make sure "
"you have run 'python setup.py develop' in the "
"pylearn2 directory"))
__authors__ = "David Warde-Farley"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["David Warde-Farley"]
__license__ = "3-clause BSD"
__maintainer__ = "David Warde-Farley"
__email__ = "wardefar@iro"
def _zero_pad(array, amount, axes=(1, 2)):
"""
Returns a copy of <array> with zero-filled padding around the margins.
The new array has the same dimensions as the input array, except for
the dimensions given by <axes>, which are increased by 2*<amount>.
Parameters
----------
array: numpy.ndarray
The array to zero-pad.
amount: int
The number of zeros to append to the beginning and end of each dimension
    in <axes>. (Each such axis will grow by 2*<amount>.)
axes: tuple
The dimensions to pad. These are indices, not axis names like the 0, 1
in ('b', 0, 1, 'c').
"""
if amount == 0:
return array
new_shape = []
slices = []
for i, s in enumerate(array.shape):
if i in axes:
new_shape.append(s + 2 * amount)
slices.append(slice(amount, -amount))
else:
new_shape.append(s)
slices.append(slice(None))
new_shape = tuple(new_shape)
slices = tuple(slices)
new_array = numpy.zeros(new_shape, dtype=array.dtype)
new_array[slices] = array
return new_array
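# Illustrative example (assumed ('c', 0, 1, 'b') batch layout): padding a
# 32x32 image batch by 2 pixels on each spatial side grows axes 1 and 2
# from 32 to 36 while leaving the channel and batch axes untouched.
#
#     >>> batch = numpy.zeros((3, 32, 32, 128), dtype='float32')
#     >>> _zero_pad(batch, 2, axes=(1, 2)).shape
#     (3, 36, 36, 128)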
class WindowAndFlip(TrainExtension):
"""
An extension that allows an image dataset to be flipped and
windowed after each epoch of training.
Parameters
----------
window_shape : WRITEME
randomize : list, optional
If specified, a list of Datasets to randomly window and
flip at each epoch.
randomize_once : list, optional
If specified, a list of Datasets to randomly window and
flip once at the start of training.
center : list, optional
If specified, a list of Datasets to centrally window
once at the start of training.
rng : numpy.random.RandomState object or seed, optional
A random number generator or seed used to create one.
Seeded deterministically by default.
pad_randomized : int, optional
Amount of padding to add to each side of the images
in `randomize` and `randomize_once`. Useful if you
want to do zero-padded windowing with `window_shape`
the actual size of the dataset, and validate/test on
full-size images instead of central patches. Default
is 0.
flip : bool, optional
Reflect images on the horizontal axis with probability
0.5. `True` by default.
"""
def __init__(self,
window_shape,
randomize=None,
randomize_once=None,
center=None,
rng=(2013, 2, 20),
pad_randomized=0,
flip=True):
self._window_shape = tuple(window_shape)
# Defined in setup(). A dict that maps Datasets in self._randomize and
# self._randomize_once to zero-padded versions of their topological
# views.
self._original = None
self._randomize = randomize if randomize else []
self._randomize_once = randomize_once if randomize_once else []
self._center = center if center else []
self._pad_randomized = pad_randomized
self._flip = flip
assert isinstance(self._randomize, list), (
"The 'randomize' parameter of WindowAndFlip should be a list")
assert isinstance(self._randomize_once, list), (
"The 'randomize_once' parameter of WindowAndFlip should be a list")
assert isinstance(self._center, list), (
"The 'center' parameter of WindowAndFlip should be a list")
assert isinstance(self._pad_randomized, py_integer_types), (
"The 'pad_randomized' parameter of WindowAndFlip should be an int")
if randomize is None and randomize_once is None and center is None:
warnings.warn(self.__class__.__name__ + " instantiated without "
"any dataset arguments, and therefore does nothing",
stacklevel=2)
self._rng = make_np_rng(rng, which_method="random_integers")
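    # A minimal construction sketch (hypothetical dataset objects):
    # re-window padded 32x32 patches out of the training set every epoch
    # while centrally windowing the validation set once.
    #
    #     extension = WindowAndFlip(
    #         window_shape=(32, 32),
    #         randomize=[train_set],   # re-windowed and flipped each epoch
    #         center=[valid_set],      # windowed once, centrally
    #         pad_randomized=4,
    #     )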
def setup(self, model, dataset, algorithm):
"""
.. todo::
WRITEME
Notes
-----
`dataset` argument is ignored
"""
dataset = None
# Central windowing of auxiliary datasets (e.g. validation sets)
preprocessor = CentralWindow(self._window_shape)
for data in self._center:
preprocessor.apply(data)
#
# Do the initial random windowing
#
randomize_now = self._randomize + self._randomize_once
# maps each dataset in randomize_now to a zero-padded topological view
# of its data.
self._original = dict((data, _zero_pad(
data.get_topological_view().astype('float32'),
self._pad_randomized))
for data in randomize_now)
# For each dataset, for each image, extract a randomly positioned and
# potentially horizontal-flipped window
self.randomize_datasets(randomize_now)
def randomize_datasets(self, datasets):
"""
Applies random translations and flips to the selected datasets.
Parameters
----------
datasets : WRITEME
"""
for dataset in datasets:
if tuple(dataset.view_converter.axes) == ('c', 0, 1, 'b'):
wf_func = random_window_and_flip_c01b
elif tuple(dataset.view_converter.axes) == ('b', 0, 1, 'c'):
wf_func = random_window_and_flip_b01c
else:
raise ValueError("Axes of dataset is not supported: %s" %
(str(dataset.view_converter.axes)))
arr = wf_func(self._original[dataset],
self._window_shape,
rng=self._rng, flip=self._flip)
dataset.set_topological_view(arr, axes=dataset.view_converter.axes)
def on_monitor(self, model, dataset, algorithm):
"""
.. todo::
WRITEME
Notes
-----
All arguments are ignored.
"""
model = None
dataset = None
algorithm = None
self.randomize_datasets(self._randomize)
| bsd-3-clause |
s7v7nislands/flask | tests/test_instance_config.py | 157 | 4365 | # -*- coding: utf-8 -*-
"""
tests.test_instance
~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Flask Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import pytest
import flask
from flask._compat import PY2
def test_explicit_instance_paths(modules_tmpdir):
with pytest.raises(ValueError) as excinfo:
flask.Flask(__name__, instance_path='instance')
assert 'must be absolute' in str(excinfo.value)
app = flask.Flask(__name__, instance_path=str(modules_tmpdir))
assert app.instance_path == str(modules_tmpdir)
def test_main_module_paths(modules_tmpdir, purge_module):
app = modules_tmpdir.join('main_app.py')
app.write('import flask\n\napp = flask.Flask("__main__")')
purge_module('main_app')
from main_app import app
here = os.path.abspath(os.getcwd())
assert app.instance_path == os.path.join(here, 'instance')
def test_uninstalled_module_paths(modules_tmpdir, purge_module):
app = modules_tmpdir.join('config_module_app.py').write(
'import os\n'
'import flask\n'
'here = os.path.abspath(os.path.dirname(__file__))\n'
'app = flask.Flask(__name__)\n'
)
purge_module('config_module_app')
from config_module_app import app
assert app.instance_path == str(modules_tmpdir.join('instance'))
def test_uninstalled_package_paths(modules_tmpdir, purge_module):
app = modules_tmpdir.mkdir('config_package_app')
init = app.join('__init__.py')
init.write(
'import os\n'
'import flask\n'
'here = os.path.abspath(os.path.dirname(__file__))\n'
'app = flask.Flask(__name__)\n'
)
purge_module('config_package_app')
from config_package_app import app
assert app.instance_path == str(modules_tmpdir.join('instance'))
def test_installed_module_paths(modules_tmpdir, modules_tmpdir_prefix,
purge_module, site_packages, limit_loader):
site_packages.join('site_app.py').write(
'import flask\n'
'app = flask.Flask(__name__)\n'
)
purge_module('site_app')
from site_app import app
assert app.instance_path == \
modules_tmpdir.join('var').join('site_app-instance')
def test_installed_package_paths(limit_loader, modules_tmpdir,
modules_tmpdir_prefix, purge_module,
monkeypatch):
installed_path = modules_tmpdir.mkdir('path')
monkeypatch.syspath_prepend(installed_path)
app = installed_path.mkdir('installed_package')
init = app.join('__init__.py')
init.write('import flask\napp = flask.Flask(__name__)')
purge_module('installed_package')
from installed_package import app
assert app.instance_path == \
modules_tmpdir.join('var').join('installed_package-instance')
def test_prefix_package_paths(limit_loader, modules_tmpdir,
modules_tmpdir_prefix, purge_module,
site_packages):
app = site_packages.mkdir('site_package')
init = app.join('__init__.py')
init.write('import flask\napp = flask.Flask(__name__)')
purge_module('site_package')
import site_package
assert site_package.app.instance_path == \
modules_tmpdir.join('var').join('site_package-instance')
def test_egg_installed_paths(install_egg, modules_tmpdir,
modules_tmpdir_prefix):
modules_tmpdir.mkdir('site_egg').join('__init__.py').write(
'import flask\n\napp = flask.Flask(__name__)'
)
install_egg('site_egg')
try:
import site_egg
assert site_egg.app.instance_path == \
str(modules_tmpdir.join('var/').join('site_egg-instance'))
finally:
if 'site_egg' in sys.modules:
del sys.modules['site_egg']
@pytest.mark.skipif(not PY2, reason='This only works under Python 2.')
def test_meta_path_loader_without_is_package(request, modules_tmpdir):
app = modules_tmpdir.join('unimportable.py')
app.write('import flask\napp = flask.Flask(__name__)')
class Loader(object):
def find_module(self, name, path=None):
return self
sys.meta_path.append(Loader())
request.addfinalizer(sys.meta_path.pop)
with pytest.raises(AttributeError):
import unimportable
| bsd-3-clause |
canwe/NewsBlur | apps/feed_import/views.py | 5 | 12057 | import datetime
import pickle
import base64
import httplib2
from utils import log as logging
from oauth2client.client import OAuth2WebServerFlow, FlowExchangeError
from bson.errors import InvalidStringData
import uuid
from django.contrib.sites.models import Site
from django.contrib.auth.models import User
# from django.db import IntegrityError
from django.http import HttpResponse, HttpResponseRedirect
from django.conf import settings
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.contrib.auth import login as login_user
from django.shortcuts import render_to_response
from apps.reader.forms import SignupForm
from apps.reader.models import UserSubscription
from apps.feed_import.models import OAuthToken, GoogleReaderImporter
from apps.feed_import.models import OPMLImporter, OPMLExporter, UploadedOPML
from apps.feed_import.tasks import ProcessOPML, ProcessReaderImport, ProcessReaderStarredImport
from utils import json_functions as json
from utils.user_functions import ajax_login_required, get_user
from utils.feed_functions import TimeoutError
@ajax_login_required
def opml_upload(request):
xml_opml = None
message = "OK"
code = 1
payload = {}
if request.method == 'POST':
if 'file' in request.FILES:
logging.user(request, "~FR~SBOPML upload starting...")
file = request.FILES['file']
xml_opml = str(file.read().decode('utf-8', 'ignore'))
try:
UploadedOPML.objects.create(user_id=request.user.pk, opml_file=xml_opml)
except (UnicodeDecodeError, InvalidStringData):
folders = None
code = -1
message = "There was a Unicode decode error when reading your OPML file."
opml_importer = OPMLImporter(xml_opml, request.user)
try:
folders = opml_importer.try_processing()
except TimeoutError:
folders = None
ProcessOPML.delay(request.user.pk)
feed_count = opml_importer.count_feeds_in_opml()
logging.user(request, "~FR~SBOPML upload took too long, found %s feeds. Tasking..." % feed_count)
payload = dict(folders=folders, delayed=True, feed_count=feed_count)
code = 2
message = ""
except AttributeError:
code = -1
message = "OPML import failed. Couldn't parse XML file."
folders = None
if folders:
code = 1
feeds = UserSubscription.objects.filter(user=request.user).values()
payload = dict(folders=folders, feeds=feeds)
logging.user(request, "~FR~SBOPML Upload: ~SK%s~SN~SB~FR feeds" % (len(feeds)))
request.session['import_from_google_reader'] = False
else:
message = "Attach an .opml file."
code = -1
return HttpResponse(json.encode(dict(message=message, code=code, payload=payload)),
mimetype='text/html')
def opml_export(request):
user = get_user(request)
now = datetime.datetime.now()
if request.REQUEST.get('user_id') and user.is_staff:
user = User.objects.get(pk=request.REQUEST['user_id'])
exporter = OPMLExporter(user)
opml = exporter.process()
response = HttpResponse(opml, mimetype='text/xml')
response['Content-Disposition'] = 'attachment; filename=NewsBlur Subscriptions - %s - %s' % (
user.username,
now.strftime('%Y-%m-%d')
)
return response
def reader_authorize(request):
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META.get('REMOTE_ADDR', "")
reader_importer = GoogleReaderImporter(request.user)
if reader_importer.test():
logging.user(request, "~BB~FW~SBSkipping Google Reader import, already tokened")
return render_to_response('social/social_connect.xhtml', {
}, context_instance=RequestContext(request))
domain = Site.objects.get_current().domain
STEP2_URI = "http://%s%s" % (
(domain + '.com') if not domain.endswith('.com') else domain,
reverse('google-reader-callback'),
)
FLOW = OAuth2WebServerFlow(
client_id=settings.GOOGLE_OAUTH2_CLIENTID,
client_secret=settings.GOOGLE_OAUTH2_SECRET,
scope="http://www.google.com/reader/api",
redirect_uri=STEP2_URI,
user_agent='NewsBlur Pro, www.newsblur.com',
approval_prompt="force",
)
logging.user(request, "~BB~FW~SBAuthorize Google Reader import - %s" % (
request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META.get('REMOTE_ADDR', ""),
))
authorize_url = FLOW.step1_get_authorize_url(redirect_uri=STEP2_URI)
response = render_to_response('social/social_connect.xhtml', {
'next': authorize_url,
}, context_instance=RequestContext(request))
    # Save request token and delete old tokens. Reuse a single UUID for both
    # the stored token and the cookie, so the lookup by 'newsblur_reader_uuid'
    # in the callback can actually match.
    auth_token_dict = dict()
    token_uuid = str(uuid.uuid4())
    if request.user.is_authenticated():
        OAuthToken.objects.filter(user=request.user).delete()
        auth_token_dict['user'] = request.user
    else:
        OAuthToken.objects.filter(session_id=request.session.session_key).delete()
        OAuthToken.objects.filter(remote_ip=ip).delete()
        auth_token_dict['uuid'] = token_uuid
        auth_token_dict['session_id'] = request.session.session_key
        auth_token_dict['remote_ip'] = ip
    OAuthToken.objects.create(**auth_token_dict)
    response.set_cookie('newsblur_reader_uuid', token_uuid)
return response
def reader_callback(request):
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META.get('REMOTE_ADDR', "")
domain = Site.objects.get_current().domain
STEP2_URI = "http://%s%s" % (
(domain + '.com') if not domain.endswith('.com') else domain,
reverse('google-reader-callback'),
)
FLOW = OAuth2WebServerFlow(
client_id=settings.GOOGLE_OAUTH2_CLIENTID,
client_secret=settings.GOOGLE_OAUTH2_SECRET,
scope="http://www.google.com/reader/api",
redirect_uri=STEP2_URI,
user_agent='NewsBlur Pro, www.newsblur.com',
)
FLOW.redirect_uri = STEP2_URI
http = httplib2.Http()
http.disable_ssl_certificate_validation = True
try:
credential = FLOW.step2_exchange(request.REQUEST)
except FlowExchangeError:
logging.info(" ***> [%s] Bad token from Google Reader." % (request.user,))
return render_to_response('social/social_connect.xhtml', {
'error': 'There was an error trying to import from Google Reader. Trying again will probably fix the issue.'
}, context_instance=RequestContext(request))
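    # Look up the token with increasingly coarse fallbacks: the authenticated
    # user first, then the signup cookie UUID, the session key, and finally
    # the remote IP.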
user_token = None
if request.user.is_authenticated():
user_token = OAuthToken.objects.filter(user=request.user).order_by('-created_date')
if not user_token:
user_uuid = request.COOKIES.get('newsblur_reader_uuid')
if user_uuid:
user_token = OAuthToken.objects.filter(uuid=user_uuid).order_by('-created_date')
if not user_token:
session = request.session
if session.session_key:
user_token = OAuthToken.objects.filter(session_id=request.session.session_key).order_by('-created_date')
if not user_token:
user_token = OAuthToken.objects.filter(remote_ip=ip).order_by('-created_date')
if user_token:
user_token = user_token[0]
user_token.credential = base64.b64encode(pickle.dumps(credential))
user_token.session_id = request.session.session_key
user_token.save()
# Fetch imported feeds on next page load
request.session['import_from_google_reader'] = True
logging.user(request, "~BB~FW~SBFinishing Google Reader import - %s" % ip)
if request.user.is_authenticated():
return render_to_response('social/social_connect.xhtml', {}, context_instance=RequestContext(request))
return HttpResponseRedirect(reverse('import-signup'))
@json.json_view
def import_from_google_reader(request):
code = 0
feed_count = 0
starred_count = 0
delayed = False
if request.user.is_authenticated():
reader_importer = GoogleReaderImporter(request.user)
auto_active = bool(request.REQUEST.get('auto_active') or False)
try:
code = reader_importer.try_import_feeds(auto_active=auto_active)
except TimeoutError:
ProcessReaderImport.delay(request.user.pk, auto_active=auto_active)
feed_count = UserSubscription.objects.filter(user=request.user).count()
logging.user(request, "~FR~SBGoogle Reader import took too long, found %s feeds. Tasking..." % feed_count)
delayed = True
code = 2
if 'import_from_google_reader' in request.session:
del request.session['import_from_google_reader']
feed_count = UserSubscription.objects.filter(user=request.user).count()
return dict(code=code, delayed=delayed, feed_count=feed_count, starred_count=starred_count)
@json.json_view
def import_starred_stories_from_google_reader(request):
code = 0
feed_count = 0
starred_count = 0
delayed = False
if request.user.is_authenticated():
reader_importer = GoogleReaderImporter(request.user)
try:
starred_count = reader_importer.try_import_starred_stories()
except TimeoutError:
ProcessReaderStarredImport.delay(request.user.pk)
feed_count = UserSubscription.objects.filter(user=request.user).count()
logging.user(request, "~FR~SBGoogle Reader starred stories import took too long, found %s feeds, %s stories. Tasking..." % (feed_count, starred_count))
delayed = True
code = 2
feed_count = UserSubscription.objects.filter(user=request.user).count()
return dict(code=code, delayed=delayed, feed_count=feed_count, starred_count=starred_count)
def import_signup(request):
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META.get('REMOTE_ADDR', "")
if request.method == "POST":
signup_form = SignupForm(prefix='signup', data=request.POST)
if signup_form.is_valid():
new_user = signup_form.save()
user_token = OAuthToken.objects.filter(user=new_user)
if not user_token:
user_uuid = request.COOKIES.get('newsblur_reader_uuid')
if user_uuid:
user_token = OAuthToken.objects.filter(uuid=user_uuid).order_by('-created_date')
if not user_token:
if request.session.session_key:
user_token = OAuthToken.objects.filter(session_id=request.session.session_key).order_by('-created_date')
if not user_token:
user_token = OAuthToken.objects.filter(remote_ip=ip).order_by('-created_date')
if user_token:
user_token = user_token[0]
user_token.session_id = request.session.session_key
user_token.user = new_user
user_token.save()
login_user(request, new_user)
if request.user.profile.is_premium:
return HttpResponseRedirect(reverse('index'))
url = "https://%s%s" % (Site.objects.get_current().domain,
reverse('stripe-form'))
return HttpResponseRedirect(url)
else:
logging.user(request, "~BR~FW ***> Can't find user token during import/signup. Re-authenticating...")
return HttpResponseRedirect(reverse('google-reader-authorize'))
else:
signup_form = SignupForm(prefix='signup')
return render_to_response('import/signup.xhtml', {
'signup_form': signup_form,
}, context_instance=RequestContext(request)) | mit |
kzvyahin/cfme_tests | utils/units.py | 1 | 3393 | # -*- coding: utf-8 -*-
import math
import re
# TODO: Split the 1000 and 1024 factor out. Now it is not an issue as it is used FOR COMPARISON ONLY
FACTOR = 1024
PREFIXES = ['', 'K', 'M', 'G', 'T', 'P']
FACTORS = {prefix: int(math.pow(FACTOR, i)) for i, prefix in enumerate(PREFIXES)}
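# e.g. FACTORS == {'': 1, 'K': 1024, 'M': 1024 ** 2, 'G': 1024 ** 3, ...}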
UNITS = ['Byte', 'Bytes', 'B', 'b', 'Hz']
EQUAL_UNITS = {
'B': ('Byte', 'Bytes')
}
# Sanity check
for target_unit, units in EQUAL_UNITS.iteritems():
assert target_unit in UNITS
for unit in units:
assert unit in UNITS
REGEXP = re.compile(
r'^\s*(\d+(?:\.\d+)?)\s*({})?({})\s*$'.format('|'.join(PREFIXES), '|'.join(UNITS)))
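# REGEXP matches strings such as '2 GB', '1.5GHz' or '300 Bytes'.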
class Unit(object):
"""This class serves for simple comparison of numbers that have units.
    Imagine you pull a text value like ``2 GB`` from the UI. By doing ``Unit.parse('2 GB')`` you
    get an instance of :py:class:`Unit`, which is comparable.

    You can compare two :py:class:`Unit` instances, or you can compare a :py:class:`Unit` with an
    :py:class:`int`, a :py:class:`float` or any :py:class:`str` as long as it can be parsed by
    :py:meth:`Unit.parse`.

    If you compare :py:class:`Unit` instances only (or strings that get subsequently parsed), the
    kind of unit is also taken into account; you cannot compare bytes with hertz. The absolute
    value in base units is then calculated and compared.

    If you compare with a number, it is treated as a number of the same unit. So e.g.
    ``Unit.parse('2 GB') == 2 * 1024 * 1024 * 1024`` is True.
"""
__slots__ = ['number', 'prefix', 'unit_type']
@classmethod
def parse(cls, s):
s = str(s)
match = REGEXP.match(s)
if match is None:
raise ValueError('{} is not a proper value to be parsed!'.format(repr(s)))
number, prefix, unit_type = match.groups()
        # Check if it isn't just another name for another unit.
for target_unit, units in EQUAL_UNITS.iteritems():
if unit_type in units:
unit_type = target_unit
return cls(float(number), prefix, unit_type)
def __init__(self, number, prefix, unit_type):
self.number = float(number)
self.prefix = prefix
self.unit_type = unit_type
@property
def absolute(self):
return self.number * FACTORS[self.prefix]
def _as_same_unit(self, int_or_float):
return type(self)(int_or_float, PREFIXES[0], self.unit_type)
def __cmp__(self, other):
if isinstance(other, basestring):
other = self.parse(other)
elif isinstance(other, (int, float)):
other = self._as_same_unit(other)
elif not isinstance(other, Unit):
raise TypeError('Incomparable types {} and {}'.format(type(self), type(other)))
# other is instance of this class too now
if self.unit_type != other.unit_type:
raise TypeError('Incomparable units {} and {}'.format(self.unit_type, other.unit_type))
return cmp(self.absolute, other.absolute)
def __float__(self):
return self.absolute
def __int__(self):
return int(self.absolute)
def __repr__(self):
return '{}({}, {}, {})'.format(
type(self).__name__, repr(self.number), repr(self.prefix), repr(self.unit_type))
def __str__(self):
return '{} {}{}'.format(self.number, self.prefix, self.unit_type)
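
# Illustrative usage, based on the class docstring above (values are
# examples only):
#     Unit.parse('2 GB') == Unit.parse('2048 MB')   # True
#     Unit.parse('2 GB') == 2 * 1024 ** 3           # True
#     Unit.parse('2 GB') == Unit.parse('2 GHz')     # raises TypeError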
| gpl-2.0 |
ptitjes/quodlibet | quodlibet/ext/editing/resub.py | 2 | 1625 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import re
from gi.repository import Gtk, GObject
from quodlibet import _
from quodlibet.plugins.editing import RenameFilesPlugin, TagsFromPathPlugin
from quodlibet.util import connect_obj
from quodlibet.qltk import Icons
class RegExpSub(Gtk.HBox, RenameFilesPlugin, TagsFromPathPlugin):
PLUGIN_ID = "Regex Substitution"
PLUGIN_NAME = _("Regex Substitution")
PLUGIN_DESC = _("Allows arbitrary regex substitutions (s///) when "
"tagging or renaming files.")
PLUGIN_ICON = Icons.EDIT_FIND_REPLACE
__gsignals__ = {
"changed": (GObject.SignalFlags.RUN_LAST, None, ())
}
active = True
def __init__(self):
super(RegExpSub, self).__init__()
self._from = Gtk.Entry()
self._to = Gtk.Entry()
self.pack_start(Gtk.Label("s/"), True, True, 0)
self.pack_start(self._from, True, True, 0)
self.pack_start(Gtk.Label("/"), True, True, 0)
self.pack_start(self._to, True, True, 0)
self.pack_start(Gtk.Label("/"), True, True, 0)
connect_obj(self._from, 'changed', self.emit, 'changed')
connect_obj(self._to, 'changed', self.emit, 'changed')
def filter(self, orig_or_tag, value):
fr = self._from.get_text()
to = self._to.get_text()
try:
return re.sub(fr, to, value)
        except re.error:
return value
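
# Example: with pattern 'feat\.' and replacement 'ft.', filter() turns
# 'Song (feat. Someone)' into 'Song (ft. Someone)'; an invalid pattern
# leaves the value untouched.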
| gpl-2.0 |
pku9104038/edx-platform | common/djangoapps/student/migrations/0020_add_test_center_user.py | 188 | 15924 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'TestCenterUser'
db.create_table('student_testcenteruser', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['auth.User'], unique=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, db_index=True, blank=True)),
('user_updated_at', self.gf('django.db.models.fields.DateTimeField')(db_index=True)),
('candidate_id', self.gf('django.db.models.fields.IntegerField')(null=True, db_index=True)),
('client_candidate_id', self.gf('django.db.models.fields.CharField')(max_length=50, db_index=True)),
('first_name', self.gf('django.db.models.fields.CharField')(max_length=30, db_index=True)),
('last_name', self.gf('django.db.models.fields.CharField')(max_length=50, db_index=True)),
('middle_name', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
('suffix', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('salutation', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
('address_1', self.gf('django.db.models.fields.CharField')(max_length=40)),
('address_2', self.gf('django.db.models.fields.CharField')(max_length=40, blank=True)),
('address_3', self.gf('django.db.models.fields.CharField')(max_length=40, blank=True)),
('city', self.gf('django.db.models.fields.CharField')(max_length=32, db_index=True)),
('state', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=20, blank=True)),
('postal_code', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=16, blank=True)),
('country', self.gf('django.db.models.fields.CharField')(max_length=3, db_index=True)),
('phone', self.gf('django.db.models.fields.CharField')(max_length=35)),
('extension', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=8, blank=True)),
('phone_country_code', self.gf('django.db.models.fields.CharField')(max_length=3, db_index=True)),
('fax', self.gf('django.db.models.fields.CharField')(max_length=35, blank=True)),
('fax_country_code', self.gf('django.db.models.fields.CharField')(max_length=3, blank=True)),
('company_name', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
))
db.send_create_signal('student', ['TestCenterUser'])
def backwards(self, orm):
# Deleting model 'TestCenterUser'
db.delete_table('student_testcenteruser')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'student.courseenrollment': {
'Meta': {'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.pendingemailchange': {
'Meta': {'object_name': 'PendingEmailChange'},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_email': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.pendingnamechange': {
'Meta': {'object_name': 'PendingNameChange'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'rationale': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.registration': {
'Meta': {'object_name': 'Registration', 'db_table': "'auth_registration'"},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.testcenteruser': {
'Meta': {'object_name': 'TestCenterUser'},
'address_1': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'address_2': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
'address_3': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
'candidate_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_index': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'client_candidate_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'company_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '3', 'db_index': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'extension': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '8', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'fax_country_code': ('django.db.models.fields.CharField', [], {'max_length': '3', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'middle_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'phone_country_code': ('django.db.models.fields.CharField', [], {'max_length': '3', 'db_index': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '16', 'blank': 'True'}),
'salutation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'blank': 'True'}),
'suffix': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'unique': 'True'}),
'user_updated_at': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
},
'student.userprofile': {
'Meta': {'object_name': 'UserProfile', 'db_table': "'auth_userprofile'"},
'courseware': ('django.db.models.fields.CharField', [], {'default': "'course.xml'", 'max_length': '255', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
'goals': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'level_of_education': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'mailing_address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'meta': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"}),
'year_of_birth': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'})
},
'student.usertestgroup': {
'Meta': {'object_name': 'UserTestGroup'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'db_index': 'True', 'symmetrical': 'False'})
}
}
complete_apps = ['student']
| agpl-3.0 |
rickmendes/ansible-modules-extras | cloud/amazon/sns_topic.py | 33 | 13805 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This is a free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This Ansible library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
module: sns_topic
short_description: Manages AWS SNS topics and subscriptions
description:
- The M(sns_topic) module allows you to create, delete, and manage subscriptions for AWS SNS topics.
version_added: 2.0
author:
- "Joel Thompson (@joelthompson)"
- "Fernando Jose Pando (@nand0p)"
options:
name:
description:
- The name or ARN of the SNS topic to converge
required: True
state:
description:
- Whether to create or destroy an SNS topic
required: False
default: present
choices: ["absent", "present"]
display_name:
description:
- Display name of the topic
required: False
default: None
policy:
description:
- Policy to apply to the SNS topic
required: False
default: None
delivery_policy:
description:
- Delivery policy to apply to the SNS topic
required: False
default: None
subscriptions:
description:
- List of subscriptions to apply to the topic. Note that AWS requires
subscriptions to be confirmed, so you will need to confirm any new
subscriptions.
required: False
default: []
purge_subscriptions:
description:
- "Whether to purge any subscriptions not listed here. NOTE: AWS does not
allow you to purge any PendingConfirmation subscriptions, so if any
exist and would be purged, they are silently skipped. This means that
somebody could come back later and confirm the subscription. Sorry.
Blame Amazon."
required: False
default: True
extends_documentation_fragment: aws
requirements: [ "boto" ]
"""
EXAMPLES = """
- name: Create alarm SNS topic
sns_topic:
name: "alarms"
state: present
display_name: "alarm SNS topic"
delivery_policy:
http:
defaultHealthyRetryPolicy:
minDelayTarget: 2
maxDelayTarget: 4
numRetries: 3
numMaxDelayRetries: 5
backoffFunction: "<linear|arithmetic|geometric|exponential>"
disableSubscriptionOverrides: True
defaultThrottlePolicy:
maxReceivesPerSecond: 10
subscriptions:
- endpoint: "[email protected]"
protocol: "email"
- endpoint: "my_mobile_number"
protocol: "sms"
"""
RETURN = '''
sns_arn:
description: The ARN of the topic you are modifying
type: string
sample: "arn:aws:sns:us-east-1:123456789012:my_topic_name"
sns_topic:
description: Dict of sns topic details
type: dict
sample:
name: sns-topic-name
state: present
display_name: default
policy: {}
delivery_policy: {}
subscriptions_new: []
subscriptions_existing: []
subscriptions_deleted: []
subscriptions_added: []
        subscriptions_purge: false
check_mode: false
topic_created: false
topic_deleted: false
attributes_set: []
'''
import sys
import time
import json
import re
try:
import boto.sns
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
class SnsTopicManager(object):
""" Handles SNS Topic creation and destruction """
def __init__(self,
module,
name,
state,
display_name,
policy,
delivery_policy,
subscriptions,
purge_subscriptions,
check_mode,
region,
**aws_connect_params):
self.region = region
self.aws_connect_params = aws_connect_params
self.connection = self._get_boto_connection()
self.changed = False
self.module = module
self.name = name
self.state = state
self.display_name = display_name
self.policy = policy
self.delivery_policy = delivery_policy
self.subscriptions = subscriptions
self.subscriptions_existing = []
self.subscriptions_deleted = []
self.subscriptions_added = []
self.purge_subscriptions = purge_subscriptions
self.check_mode = check_mode
self.topic_created = False
self.topic_deleted = False
self.arn_topic = None
self.attributes_set = []
def _get_boto_connection(self):
try:
return connect_to_aws(boto.sns, self.region,
**self.aws_connect_params)
except BotoServerError, err:
self.module.fail_json(msg=err.message)
def _get_all_topics(self):
next_token = None
topics = []
while True:
try:
response = self.connection.get_all_topics(next_token)
except BotoServerError, err:
                self.module.fail_json(msg=err.message)
topics.extend(response['ListTopicsResponse']['ListTopicsResult']['Topics'])
next_token = response['ListTopicsResponse']['ListTopicsResult']['NextToken']
if not next_token:
break
return [t['TopicArn'] for t in topics]
def _arn_topic_lookup(self):
# topic names cannot have colons, so this captures the full topic name
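        # e.g. 'arn:aws:sns:us-east-1:123456789012:alarms' ends with ':alarms'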
all_topics = self._get_all_topics()
lookup_topic = ':%s' % self.name
for topic in all_topics:
if topic.endswith(lookup_topic):
return topic
def _create_topic(self):
self.changed = True
self.topic_created = True
if not self.check_mode:
self.connection.create_topic(self.name)
self.arn_topic = self._arn_topic_lookup()
while not self.arn_topic:
time.sleep(3)
self.arn_topic = self._arn_topic_lookup()
def _set_topic_attrs(self):
topic_attributes = self.connection.get_topic_attributes(self.arn_topic) \
['GetTopicAttributesResponse'] ['GetTopicAttributesResult'] \
['Attributes']
if self.display_name and self.display_name != topic_attributes['DisplayName']:
self.changed = True
self.attributes_set.append('display_name')
if not self.check_mode:
self.connection.set_topic_attributes(self.arn_topic, 'DisplayName',
self.display_name)
if self.policy and self.policy != json.loads(topic_attributes['Policy']):
self.changed = True
self.attributes_set.append('policy')
if not self.check_mode:
self.connection.set_topic_attributes(self.arn_topic, 'Policy',
json.dumps(self.policy))
if self.delivery_policy and ('DeliveryPolicy' not in topic_attributes or \
self.delivery_policy != json.loads(topic_attributes['DeliveryPolicy'])):
self.changed = True
self.attributes_set.append('delivery_policy')
if not self.check_mode:
self.connection.set_topic_attributes(self.arn_topic, 'DeliveryPolicy',
json.dumps(self.delivery_policy))
def _canonicalize_endpoint(self, protocol, endpoint):
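        # For SMS endpoints only the digits matter, so strip everything else;
        # e.g. '+1 (555) 010-0000' becomes '15550100000'.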
if protocol == 'sms':
return re.sub('[^0-9]*', '', endpoint)
return endpoint
def _get_topic_subs(self):
next_token = None
while True:
response = self.connection.get_all_subscriptions_by_topic(self.arn_topic, next_token)
self.subscriptions_existing.extend(response['ListSubscriptionsByTopicResponse'] \
['ListSubscriptionsByTopicResult']['Subscriptions'])
next_token = response['ListSubscriptionsByTopicResponse'] \
['ListSubscriptionsByTopicResult']['NextToken']
if not next_token:
break
def _set_topic_subs(self):
subscriptions_existing_list = []
desired_subscriptions = [(sub['protocol'],
self._canonicalize_endpoint(sub['protocol'], sub['endpoint'])) for sub in
self.subscriptions]
if self.subscriptions_existing:
for sub in self.subscriptions_existing:
sub_key = (sub['Protocol'], sub['Endpoint'])
subscriptions_existing_list.append(sub_key)
if self.purge_subscriptions and sub_key not in desired_subscriptions and \
sub['SubscriptionArn'] != 'PendingConfirmation':
self.changed = True
self.subscriptions_deleted.append(sub_key)
if not self.check_mode:
self.connection.unsubscribe(sub['SubscriptionArn'])
for (protocol, endpoint) in desired_subscriptions:
if (protocol, endpoint) not in subscriptions_existing_list:
self.changed = True
                # record the (protocol, endpoint) pair itself; 'sub' here would
                # be a stale loop variable left over from the loop above
                self.subscriptions_added.append((protocol, endpoint))
if not self.check_mode:
self.connection.subscribe(self.arn_topic, protocol, endpoint)
def _delete_subscriptions(self):
# NOTE: subscriptions in 'PendingConfirmation' timeout in 3 days
# https://forums.aws.amazon.com/thread.jspa?threadID=85993
for sub in self.subscriptions_existing:
if sub['SubscriptionArn'] != 'PendingConfirmation':
self.subscriptions_deleted.append(sub['SubscriptionArn'])
self.changed = True
if not self.check_mode:
self.connection.unsubscribe(sub['SubscriptionArn'])
def _delete_topic(self):
self.topic_deleted = True
self.changed = True
if not self.check_mode:
self.connection.delete_topic(self.arn_topic)
def ensure_ok(self):
self.arn_topic = self._arn_topic_lookup()
if not self.arn_topic:
self._create_topic()
self._set_topic_attrs()
self._get_topic_subs()
self._set_topic_subs()
def ensure_gone(self):
self.arn_topic = self._arn_topic_lookup()
if self.arn_topic:
self._get_topic_subs()
if self.subscriptions_existing:
self._delete_subscriptions()
self._delete_topic()
def get_info(self):
info = {
'name': self.name,
'state': self.state,
'display_name': self.display_name,
'policy': self.policy,
'delivery_policy': self.delivery_policy,
'subscriptions_new': self.subscriptions,
'subscriptions_existing': self.subscriptions_existing,
'subscriptions_deleted': self.subscriptions_deleted,
'subscriptions_added': self.subscriptions_added,
'subscriptions_purge': self.purge_subscriptions,
'check_mode': self.check_mode,
'topic_created': self.topic_created,
'topic_deleted': self.topic_deleted,
'attributes_set': self.attributes_set
}
return info
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
name=dict(type='str', required=True),
state=dict(type='str', default='present', choices=['present',
'absent']),
display_name=dict(type='str', required=False),
policy=dict(type='dict', required=False),
delivery_policy=dict(type='dict', required=False),
subscriptions=dict(default=[], type='list', required=False),
purge_subscriptions=dict(type='bool', default=True),
)
)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
name = module.params.get('name')
state = module.params.get('state')
display_name = module.params.get('display_name')
policy = module.params.get('policy')
delivery_policy = module.params.get('delivery_policy')
subscriptions = module.params.get('subscriptions')
purge_subscriptions = module.params.get('purge_subscriptions')
check_mode = module.check_mode
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if not region:
module.fail_json(msg="region must be specified")
sns_topic = SnsTopicManager(module,
name,
state,
display_name,
policy,
delivery_policy,
subscriptions,
purge_subscriptions,
check_mode,
region,
**aws_connect_params)
if state == 'present':
sns_topic.ensure_ok()
elif state == 'absent':
sns_topic.ensure_gone()
sns_facts = dict(changed=sns_topic.changed,
sns_arn=sns_topic.arn_topic,
sns_topic=sns_topic.get_info())
module.exit_json(**sns_facts)
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
| gpl-3.0 |
georchestra/cadastrapp | addons/cadastrapp/js/external/openlayers2/openlayers/tools/jsmin.py | 513 | 7471 | #!/usr/bin/python
# This code is original from jsmin by Douglas Crockford, it was translated to
# Python by Baruch Even. The original code had the following copyright and
# license.
#
# /* jsmin.c
# 2007-01-08
#
# Copyright (c) 2002 Douglas Crockford (www.crockford.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# The Software shall be used for Good, not Evil.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# */
from StringIO import StringIO
def jsmin(js):
ins = StringIO(js)
outs = StringIO()
JavascriptMinify().minify(ins, outs)
    # avoid shadowing the builtin 'str'
    result = outs.getvalue()
    if len(result) > 0 and result[0] == '\n':
        result = result[1:]
    return result
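
# Example (a minimal sketch):
#     jsmin('var answer = 42;  // the answer\n')
#     # comments removed, insignificant whitespace collapsed:
#     # -> roughly 'var answer=42;'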
def isAlphanum(c):
"""return true if the character is a letter, digit, underscore,
dollar sign, or non-ASCII character.
"""
return ((c >= 'a' and c <= 'z') or (c >= '0' and c <= '9') or
(c >= 'A' and c <= 'Z') or c == '_' or c == '$' or c == '\\' or (c is not None and ord(c) > 126));
class UnterminatedComment(Exception):
pass
class UnterminatedStringLiteral(Exception):
pass
class UnterminatedRegularExpression(Exception):
pass
class JavascriptMinify(object):
def _outA(self):
self.outstream.write(self.theA)
def _outB(self):
self.outstream.write(self.theB)
def _get(self):
"""return the next character from stdin. Watch out for lookahead. If
the character is a control character, translate it to a space or
linefeed.
"""
c = self.theLookahead
self.theLookahead = None
        if c is None:
c = self.instream.read(1)
if c >= ' ' or c == '\n':
return c
if c == '': # EOF
return '\000'
if c == '\r':
return '\n'
return ' '
def _peek(self):
self.theLookahead = self._get()
return self.theLookahead
def _next(self):
"""get the next character, excluding comments. peek() is used to see
if a '/' is followed by a '/' or '*'.
"""
c = self._get()
if c == '/':
p = self._peek()
if p == '/':
c = self._get()
while c > '\n':
c = self._get()
return c
if p == '*':
c = self._get()
while 1:
c = self._get()
if c == '*':
if self._peek() == '/':
self._get()
return ' '
if c == '\000':
raise UnterminatedComment()
return c
def _action(self, action):
"""do something! What you do is determined by the argument:
1 Output A. Copy B to A. Get the next B.
2 Copy B to A. Get the next B. (Delete A).
3 Get the next B. (Delete B).
action treats a string as a single character. Wow!
action recognizes a regular expression if it is preceded by ( or , or =.
"""
if action <= 1:
self._outA()
if action <= 2:
self.theA = self.theB
if self.theA == "'" or self.theA == '"':
while 1:
self._outA()
self.theA = self._get()
if self.theA == self.theB:
break
if self.theA <= '\n':
raise UnterminatedStringLiteral()
if self.theA == '\\':
self._outA()
self.theA = self._get()
if action <= 3:
self.theB = self._next()
if self.theB == '/' and (self.theA == '(' or self.theA == ',' or
self.theA == '=' or self.theA == ':' or
self.theA == '[' or self.theA == '?' or
self.theA == '!' or self.theA == '&' or
self.theA == '|'):
self._outA()
self._outB()
while 1:
self.theA = self._get()
if self.theA == '/':
break
elif self.theA == '\\':
self._outA()
self.theA = self._get()
elif self.theA <= '\n':
raise UnterminatedRegularExpression()
self._outA()
self.theB = self._next()
def _jsmin(self):
"""Copy the input to the output, deleting the characters which are
insignificant to JavaScript. Comments will be removed. Tabs will be
replaced with spaces. Carriage returns will be replaced with linefeeds.
Most spaces and linefeeds will be removed.
"""
self.theA = '\n'
self._action(3)
while self.theA != '\000':
if self.theA == ' ':
if isAlphanum(self.theB):
self._action(1)
else:
self._action(2)
elif self.theA == '\n':
if self.theB in ['{', '[', '(', '+', '-']:
self._action(1)
elif self.theB == ' ':
self._action(3)
else:
if isAlphanum(self.theB):
self._action(1)
else:
self._action(2)
else:
if self.theB == ' ':
if isAlphanum(self.theA):
self._action(1)
else:
self._action(3)
elif self.theB == '\n':
if self.theA in ['}', ']', ')', '+', '-', '"', '\'']:
self._action(1)
else:
if isAlphanum(self.theA):
self._action(1)
else:
self._action(3)
else:
self._action(1)
def minify(self, instream, outstream):
self.instream = instream
self.outstream = outstream
self.theA = None
        self.theB = None
self.theLookahead = None
self._jsmin()
self.instream.close()
if __name__ == '__main__':
import sys
jsm = JavascriptMinify()
jsm.minify(sys.stdin, sys.stdout)
| gpl-3.0 |
Venturi/cms | env/lib/python2.7/site-packages/django/contrib/postgres/fields/array.py | 1 | 8579 | import json
from django.contrib.postgres import lookups
from django.contrib.postgres.forms import SimpleArrayField
from django.contrib.postgres.validators import ArrayMaxLengthValidator
from django.core import checks, exceptions
from django.db.models import Field, IntegerField, Transform
from django.db.models.lookups import Exact
from django.utils import six
from django.utils.translation import string_concat, ugettext_lazy as _
from .utils import AttributeSetter
__all__ = ['ArrayField']
class ArrayField(Field):
empty_strings_allowed = False
default_error_messages = {
'item_invalid': _('Item %(nth)s in the array did not validate: '),
'nested_array_mismatch': _('Nested arrays must have the same length.'),
}
def __init__(self, base_field, size=None, **kwargs):
self.base_field = base_field
self.size = size
if self.size:
self.default_validators = self.default_validators[:]
self.default_validators.append(ArrayMaxLengthValidator(self.size))
super(ArrayField, self).__init__(**kwargs)
def check(self, **kwargs):
errors = super(ArrayField, self).check(**kwargs)
if self.base_field.rel:
errors.append(
checks.Error(
'Base field for array cannot be a related field.',
hint=None,
obj=self,
id='postgres.E002'
)
)
else:
# Remove the field name checks as they are not needed here.
base_errors = self.base_field.check()
if base_errors:
messages = '\n '.join('%s (%s)' % (error.msg, error.id) for error in base_errors)
errors.append(
checks.Error(
'Base field for array has errors:\n %s' % messages,
hint=None,
obj=self,
id='postgres.E001'
)
)
return errors
def set_attributes_from_name(self, name):
super(ArrayField, self).set_attributes_from_name(name)
self.base_field.set_attributes_from_name(name)
@property
def description(self):
return 'Array of %s' % self.base_field.description
def db_type(self, connection):
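        # e.g. an integer base field with size=10 maps to 'integer[10]',
        # and with no size to 'integer[]'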
size = self.size or ''
return '%s[%s]' % (self.base_field.db_type(connection), size)
def get_db_prep_value(self, value, connection, prepared=False):
if isinstance(value, list) or isinstance(value, tuple):
return [self.base_field.get_db_prep_value(i, connection, prepared) for i in value]
return value
def deconstruct(self):
name, path, args, kwargs = super(ArrayField, self).deconstruct()
if path == 'django.contrib.postgres.fields.array.ArrayField':
path = 'django.contrib.postgres.fields.ArrayField'
kwargs.update({
'base_field': self.base_field,
'size': self.size,
})
return name, path, args, kwargs
def to_python(self, value):
if isinstance(value, six.string_types):
# Assume we're deserializing
vals = json.loads(value)
value = [self.base_field.to_python(val) for val in vals]
return value
def value_to_string(self, obj):
values = []
vals = self._get_val_from_obj(obj)
base_field = self.base_field
for val in vals:
obj = AttributeSetter(base_field.attname, val)
values.append(base_field.value_to_string(obj))
return json.dumps(values)
def get_transform(self, name):
transform = super(ArrayField, self).get_transform(name)
if transform:
return transform
try:
index = int(name)
except ValueError:
pass
else:
index += 1 # postgres uses 1-indexing
return IndexTransformFactory(index, self.base_field)
try:
start, end = name.split('_')
start = int(start) + 1
end = int(end) # don't add one here because postgres slices are weird
except ValueError:
pass
else:
return SliceTransformFactory(start, end)
def validate(self, value, model_instance):
super(ArrayField, self).validate(value, model_instance)
for i, part in enumerate(value):
try:
self.base_field.validate(part, model_instance)
except exceptions.ValidationError as e:
raise exceptions.ValidationError(
string_concat(self.error_messages['item_invalid'], e.message),
code='item_invalid',
params={'nth': i},
)
if isinstance(self.base_field, ArrayField):
if len({len(i) for i in value}) > 1:
raise exceptions.ValidationError(
self.error_messages['nested_array_mismatch'],
code='nested_array_mismatch',
)
def run_validators(self, value):
super(ArrayField, self).run_validators(value)
for i, part in enumerate(value):
try:
self.base_field.run_validators(part)
except exceptions.ValidationError as e:
raise exceptions.ValidationError(
string_concat(self.error_messages['item_invalid'], ' '.join(e.messages)),
code='item_invalid',
params={'nth': i},
)
def formfield(self, **kwargs):
defaults = {
'form_class': SimpleArrayField,
'base_field': self.base_field.formfield(),
'max_length': self.size,
}
defaults.update(kwargs)
return super(ArrayField, self).formfield(**defaults)
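
# Illustrative model usage (a sketch; the model and field names here are
# assumptions for demonstration only):
#
#     from django.contrib.postgres.fields import ArrayField
#     from django.db import models
#
#     class Post(models.Model):
#         tags = ArrayField(models.CharField(max_length=32), size=4)
#
#     # The lookups/transforms registered below then allow, e.g.:
#     #     Post.objects.filter(tags__contains=['django'])
#     #     Post.objects.filter(tags__len=2)
#     #     Post.objects.filter(tags__0='django')  # first element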
@ArrayField.register_lookup
class ArrayContains(lookups.DataContains):
def as_sql(self, qn, connection):
sql, params = super(ArrayContains, self).as_sql(qn, connection)
sql = '%s::%s' % (sql, self.lhs.output_field.db_type(connection))
return sql, params
@ArrayField.register_lookup
class ArrayContainedBy(lookups.ContainedBy):
def as_sql(self, qn, connection):
sql, params = super(ArrayContainedBy, self).as_sql(qn, connection)
sql = '%s::%s' % (sql, self.lhs.output_field.db_type(connection))
return sql, params
@ArrayField.register_lookup
class ArrayExact(Exact):
def as_sql(self, qn, connection):
sql, params = super(ArrayExact, self).as_sql(qn, connection)
sql = '%s::%s' % (sql, self.lhs.output_field.db_type(connection))
return sql, params
@ArrayField.register_lookup
class ArrayOverlap(lookups.Overlap):
def as_sql(self, qn, connection):
sql, params = super(ArrayOverlap, self).as_sql(qn, connection)
sql = '%s::%s' % (sql, self.lhs.output_field.db_type(connection))
return sql, params
@ArrayField.register_lookup
class ArrayLenTransform(Transform):
lookup_name = 'len'
output_field = IntegerField()
def as_sql(self, compiler, connection):
lhs, params = compiler.compile(self.lhs)
return 'array_length(%s, 1)' % lhs, params
class IndexTransform(Transform):
def __init__(self, index, base_field, *args, **kwargs):
super(IndexTransform, self).__init__(*args, **kwargs)
self.index = index
self.base_field = base_field
def as_sql(self, compiler, connection):
lhs, params = compiler.compile(self.lhs)
return '%s[%s]' % (lhs, self.index), params
@property
def output_field(self):
return self.base_field
class IndexTransformFactory(object):
def __init__(self, index, base_field):
self.index = index
self.base_field = base_field
def __call__(self, *args, **kwargs):
return IndexTransform(self.index, self.base_field, *args, **kwargs)
class SliceTransform(Transform):
def __init__(self, start, end, *args, **kwargs):
super(SliceTransform, self).__init__(*args, **kwargs)
self.start = start
self.end = end
def as_sql(self, compiler, connection):
lhs, params = compiler.compile(self.lhs)
return '%s[%s:%s]' % (lhs, self.start, self.end), params
class SliceTransformFactory(object):
def __init__(self, start, end):
self.start = start
self.end = end
def __call__(self, *args, **kwargs):
return SliceTransform(self.start, self.end, *args, **kwargs)
| gpl-2.0 |
lord63-forks/flask | examples/persona/persona.py | 159 | 1442 | from flask import Flask, render_template, session, request, abort, g
import requests
app = Flask(__name__)
app.config.update(
DEBUG=True,
SECRET_KEY='my development key',
PERSONA_JS='https://login.persona.org/include.js',
PERSONA_VERIFIER='https://verifier.login.persona.org/verify',
)
app.config.from_envvar('PERSONA_SETTINGS', silent=True)
@app.before_request
def get_current_user():
g.user = None
email = session.get('email')
if email is not None:
g.user = email
@app.route('/')
def index():
"""Just a generic index page to show."""
return render_template('index.html')
@app.route('/_auth/login', methods=['GET', 'POST'])
def login_handler():
"""This is used by the persona.js file to kick off the
verification securely from the server side. If all is okay
the email address is remembered on the server.
"""
resp = requests.post(app.config['PERSONA_VERIFIER'], data={
'assertion': request.form['assertion'],
'audience': request.host_url,
}, verify=True)
if resp.ok:
verification_data = resp.json()
if verification_data['status'] == 'okay':
session['email'] = verification_data['email']
return 'OK'
abort(400)
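
# For reference, a successful verifier response is JSON along the lines of
# (only 'status' and 'email' are relied on above; other fields may vary):
#     {"status": "okay", "email": "user@example.com",
#      "audience": "http://localhost:5000/", "expires": 1438000000000}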
@app.route('/_auth/logout', methods=['POST'])
def logout_handler():
"""This is what persona.js will call to sign the user
out again.
"""
session.clear()
return 'OK'
| bsd-3-clause |
yaroslavvb/tensorflow | tensorflow/contrib/slim/python/slim/nets/inception_v1_test.py | 112 | 8960 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for nets.inception_v1."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.framework.python.ops import arg_scope
from tensorflow.contrib.framework.python.ops import variables as variables_lib
from tensorflow.contrib.slim.python.slim import model_analyzer
from tensorflow.contrib.slim.python.slim.nets import inception_v1
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class InceptionV1Test(test.TestCase):
def testBuildClassificationNetwork(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
inputs = random_ops.random_uniform((batch_size, height, width, 3))
logits, end_points = inception_v1.inception_v1(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('InceptionV1/Logits'))
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
self.assertTrue('Predictions' in end_points)
self.assertListEqual(end_points['Predictions'].get_shape().as_list(),
[batch_size, num_classes])
def testBuildBaseNetwork(self):
batch_size = 5
height, width = 224, 224
inputs = random_ops.random_uniform((batch_size, height, width, 3))
    mixed_5c, end_points = inception_v1.inception_v1_base(inputs)
    self.assertTrue(mixed_5c.op.name.startswith('InceptionV1/Mixed_5c'))
    self.assertListEqual(mixed_5c.get_shape().as_list(),
                         [batch_size, 7, 7, 1024])
expected_endpoints = [
'Conv2d_1a_7x7', 'MaxPool_2a_3x3', 'Conv2d_2b_1x1', 'Conv2d_2c_3x3',
'MaxPool_3a_3x3', 'Mixed_3b', 'Mixed_3c', 'MaxPool_4a_3x3', 'Mixed_4b',
'Mixed_4c', 'Mixed_4d', 'Mixed_4e', 'Mixed_4f', 'MaxPool_5a_2x2',
'Mixed_5b', 'Mixed_5c'
]
self.assertItemsEqual(end_points.keys(), expected_endpoints)
def testBuildOnlyUptoFinalEndpoint(self):
batch_size = 5
height, width = 224, 224
endpoints = [
'Conv2d_1a_7x7', 'MaxPool_2a_3x3', 'Conv2d_2b_1x1', 'Conv2d_2c_3x3',
'MaxPool_3a_3x3', 'Mixed_3b', 'Mixed_3c', 'MaxPool_4a_3x3', 'Mixed_4b',
'Mixed_4c', 'Mixed_4d', 'Mixed_4e', 'Mixed_4f', 'MaxPool_5a_2x2',
'Mixed_5b', 'Mixed_5c'
]
for index, endpoint in enumerate(endpoints):
with ops.Graph().as_default():
inputs = random_ops.random_uniform((batch_size, height, width, 3))
out_tensor, end_points = inception_v1.inception_v1_base(
inputs, final_endpoint=endpoint)
self.assertTrue(
out_tensor.op.name.startswith('InceptionV1/' + endpoint))
self.assertItemsEqual(endpoints[:index + 1], end_points)
def testBuildAndCheckAllEndPointsUptoMixed5c(self):
batch_size = 5
height, width = 224, 224
inputs = random_ops.random_uniform((batch_size, height, width, 3))
_, end_points = inception_v1.inception_v1_base(
inputs, final_endpoint='Mixed_5c')
endpoints_shapes = {
'Conv2d_1a_7x7': [5, 112, 112, 64],
'MaxPool_2a_3x3': [5, 56, 56, 64],
'Conv2d_2b_1x1': [5, 56, 56, 64],
'Conv2d_2c_3x3': [5, 56, 56, 192],
'MaxPool_3a_3x3': [5, 28, 28, 192],
'Mixed_3b': [5, 28, 28, 256],
'Mixed_3c': [5, 28, 28, 480],
'MaxPool_4a_3x3': [5, 14, 14, 480],
'Mixed_4b': [5, 14, 14, 512],
'Mixed_4c': [5, 14, 14, 512],
'Mixed_4d': [5, 14, 14, 512],
'Mixed_4e': [5, 14, 14, 528],
'Mixed_4f': [5, 14, 14, 832],
'MaxPool_5a_2x2': [5, 7, 7, 832],
'Mixed_5b': [5, 7, 7, 832],
'Mixed_5c': [5, 7, 7, 1024]
}
self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
for endpoint_name in endpoints_shapes:
expected_shape = endpoints_shapes[endpoint_name]
self.assertTrue(endpoint_name in end_points)
self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
expected_shape)
def testModelHasExpectedNumberOfParameters(self):
batch_size = 5
height, width = 224, 224
inputs = random_ops.random_uniform((batch_size, height, width, 3))
with arg_scope(inception_v1.inception_v1_arg_scope()):
inception_v1.inception_v1_base(inputs)
total_params, _ = model_analyzer.analyze_vars(
variables_lib.get_model_variables())
self.assertAlmostEqual(5607184, total_params)
def testHalfSizeImages(self):
batch_size = 5
height, width = 112, 112
inputs = random_ops.random_uniform((batch_size, height, width, 3))
mixed_5c, _ = inception_v1.inception_v1_base(inputs)
self.assertTrue(mixed_5c.op.name.startswith('InceptionV1/Mixed_5c'))
self.assertListEqual(mixed_5c.get_shape().as_list(),
[batch_size, 4, 4, 1024])
def testUnknownImageShape(self):
ops.reset_default_graph()
batch_size = 2
height, width = 224, 224
num_classes = 1000
input_np = np.random.uniform(0, 1, (batch_size, height, width, 3))
with self.test_session() as sess:
inputs = array_ops.placeholder(
dtypes.float32, shape=(batch_size, None, None, 3))
logits, end_points = inception_v1.inception_v1(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('InceptionV1/Logits'))
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
pre_pool = end_points['Mixed_5c']
feed_dict = {inputs: input_np}
variables.global_variables_initializer().run()
pre_pool_out = sess.run(pre_pool, feed_dict=feed_dict)
self.assertListEqual(list(pre_pool_out.shape), [batch_size, 7, 7, 1024])
def testUnknownBatchSize(self):
batch_size = 1
height, width = 224, 224
num_classes = 1000
inputs = array_ops.placeholder(dtypes.float32, (None, height, width, 3))
logits, _ = inception_v1.inception_v1(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('InceptionV1/Logits'))
self.assertListEqual(logits.get_shape().as_list(), [None, num_classes])
images = random_ops.random_uniform((batch_size, height, width, 3))
with self.test_session() as sess:
sess.run(variables.global_variables_initializer())
output = sess.run(logits, {inputs: images.eval()})
self.assertEqual(output.shape, (batch_size, num_classes))
def testEvaluation(self):
batch_size = 2
height, width = 224, 224
num_classes = 1000
eval_inputs = random_ops.random_uniform((batch_size, height, width, 3))
logits, _ = inception_v1.inception_v1(
eval_inputs, num_classes, is_training=False)
predictions = math_ops.argmax(logits, 1)
with self.test_session() as sess:
sess.run(variables.global_variables_initializer())
output = sess.run(predictions)
self.assertEqual(output.shape, (batch_size,))
def testTrainEvalWithReuse(self):
train_batch_size = 5
eval_batch_size = 2
height, width = 224, 224
num_classes = 1000
train_inputs = random_ops.random_uniform(
(train_batch_size, height, width, 3))
inception_v1.inception_v1(train_inputs, num_classes)
eval_inputs = random_ops.random_uniform((eval_batch_size, height, width, 3))
logits, _ = inception_v1.inception_v1(eval_inputs, num_classes, reuse=True)
predictions = math_ops.argmax(logits, 1)
with self.test_session() as sess:
sess.run(variables.global_variables_initializer())
output = sess.run(predictions)
self.assertEqual(output.shape, (eval_batch_size,))
def testLogitsNotSqueezed(self):
num_classes = 25
images = random_ops.random_uniform([1, 224, 224, 3])
logits, _ = inception_v1.inception_v1(
images, num_classes=num_classes, spatial_squeeze=False)
with self.test_session() as sess:
variables.global_variables_initializer().run()
logits_out = sess.run(logits)
self.assertListEqual(list(logits_out.shape), [1, 1, 1, num_classes])
if __name__ == '__main__':
test.main()
| apache-2.0 |
Garmelon/itbot | script.py | 1 | 20301 | # Rewrite of the original script; it worked, but not well enough...
# still ugly tho
CONFIGFILE = "config.txt"
import re
import json
import time
import requests
import datetime
import configparser
import imgurpython
# overwrite print() to only print ascii
import builtins
def asciify(text):
return ''.join([i if ord(i) < 128 else '?' for i in text])
def print(*args, **kwargs):
newargs = []
for text in args:
newargs.append(asciify(text))
builtins.print(*newargs, **kwargs)
class Client:
"""
Imgur API and config+authentication
"""
def __init__(self, config):
self.config = config
if not self.config.has_section("auth"):
self.config.modified = True
self.config["auth"] = {}
if ( not self.config.has_option("auth", "client_id")
and not self.config.has_option("auth", "client_secret")):
self.prompt_client_info()
self.connect()
self.account = self.client.get_account("me")
def prompt_client_info(self):
print("No client info found. If you haven't yet, visit")
print("https://api.imgur.com/oauth2/addclient and register an application.")
print("Pick 'OAuth 2 authorization without a callback URL'.")
print("If you have already registered an application, visit")
print("https://imgur.com/account/settings/apps and generate a new secret.")
print("Then, fill in the client id and secret below.")
self.config["auth"]["client_id"] = input("Client ID: ").strip()
self.config["auth"]["client_secret"] = input("Client Secret: ").strip()
self.config.modified = True
print("")
def prompt_pin(self):
"""
prompt_pin() -> pin
Assumes that there is already a client connected to Imgur.
"""
authorization_url = self.client.get_auth_url("pin")
print("Please visit {}".format(authorization_url))
print("and enter the PIN code displayed on the site.")
return input("PIN code: ").strip()
def connect(self):
"""
Creates and connects self.client.
"""
if self.config.has_option("auth", "refresh_token"):
self.client = imgurpython.ImgurClient(self.config["auth"]["client_id"],
self.config["auth"]["client_secret"],
refresh_token=self.config["auth"]["refresh_token"])
else:
self.client = imgurpython.ImgurClient(self.config["auth"]["client_id"],
self.config["auth"]["client_secret"])
credentials = self.client.authorize(self.prompt_pin(), "pin")
self.config["auth"]["refresh_token"] = credentials["refresh_token"]
self.config.modified = True
self.client.set_user_auth(credentials["access_token"], credentials["refresh_token"])
class Subscribers:
"""
Manages subscribers and subscribing/unsubscribing
"""
subregex = re.compile(r"^<?subscribe>?.?$",
flags=re.IGNORECASE)
unsubregex = re.compile(r"^<?unsubscribe>?\.?$",
flags=re.IGNORECASE)
askregex = re.compile(r"subscri|\bsign.*\b(up|in|on)\b|\b(join|tag|includ)|<.*>|\bdot|\b(leav|cancel)$",
flags=re.IGNORECASE)
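# Illustrative matches (a non-exhaustive sketch): subregex catches exact
# comments like "subscribe", "<subscribe>" or "Subscribe!"; unsubregex
# catches "unsubscribe" and "<unsubscribe>."; askregex flags ambiguous
# comments such as "sign me up" or "please tag me" for manual review.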
def __init__(self, subsfile):
self.subsfile = subsfile
self.subs = {}
self.modified = False
self.load()
def load(self):
try:
with open(self.subsfile) as f:
for line in f:
self.load_line(line)
except FileNotFoundError:
print("File not found: {}".format(repr(self.subsfile)))
print("If you already have a subscribers file, you can set it in the config file.")
print("A new file will be created.")
def load_line(self, line):
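# Expected line format: "<status> <nick> <timestamp>", where status is
# "s" (subscribed) or "u" (unsubscribed) and the timestamp (presumably
# Unix time taken from Imgur's comment data) tracks the latest change.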
if line[0] == "#":
return
parts = line[:-1].split(" ")
parts = [item for item in parts if item] # remove empty strings
if not parts:
return
status = parts[0]
nick = parts[1].lower()
datetime = int(parts[2])
self.subs[nick] = {"status": status, "dt": datetime}
def save(self):
with open(self.subsfile, "w") as f:
for sub, info in sorted(self.subs.items()):
f.write("{} {} {}\n".format(info["status"], sub, info["dt"]))
def add(self, nick, datetime=None):
print("Adding {}.".format(nick))
nick = nick.lower()
if nick in self.subs:
self.subs[nick] = {"status": "s", "dt": max(datetime or 0, self.subs[nick]["dt"])}
else:
self.subs[nick] = {"status": "s", "dt": datetime or 0}
self.modified = True
def remove(self, nick, datetime=None):
print("Removing {}.".format(nick))
nick = nick.lower()
if nick in self.subs:
self.subs[nick] = {"status": "u", "dt": max(datetime or 0, self.subs[nick]["dt"])}
else:
self.subs[nick] = {"status": "u", "dt": datetime or 0}
self.modified = True
def subscribed(self):
return {sub: info for sub, info in self.subs.items() if info["status"] == "s"}
def clean_up(self):
self.subs = self.subscribed()
self.modified = True
def count(self):
return len(self.subscribed())
def to_comments(self):
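# Pack "@nick" mentions into as few comment strings as possible while
# keeping each string within Imgur's 140-character comment limit.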
comments = []
comment = ""
for sub in self.subscribed():
sub = "@" + sub
if comment:
if len(comment) + len(sub) + 1 <= 140: #character limit
comment += " " + sub
continue
else:
comments.append(comment)
comment = sub
if comment:
comments.append(comment)
return comments
def check_comment(self, nick, comment, datetime):
"""
Returns True when comment is to be added to the ignore list.
"""
nick = nick.lower()
if nick in self.subs and self.subs[nick]["dt"] >= datetime:
return
if self.subregex.search(comment):
self.add(nick, datetime=datetime)
elif self.unsubregex.search(comment):
self.remove(nick, datetime=datetime)
elif self.askregex.search(comment):
action = self.ask_user_about_comment(comment)
if action == "add":
self.add(nick, datetime=datetime)
elif action == "remove":
self.remove(nick, datetime=datetime)
else:
return True
def ask_user_about_comment(self, comment):
print("\nWhat is the following comment?")
print(comment)
print("[s] subscribe | [d] unsubscribe | [anything else] neither")
action = input("[s/d/other] ").strip().lower()
print("")
if action == "s":
return "add"
elif action == "d":
return "remove"
class Albums:
"""
Manages added albums and keeps track of comments with uninteresting content
"""
def __init__(self, albumsfile):
self.albumsfile = albumsfile
self.albums = {}
self.modified = False
self.load()
def load(self):
try:
with open(self.albumsfile) as f:
for line in f:
self.load_line(line)
except FileNotFoundError:
print("File not found: {}".format(repr(self.albumsfile)))
print("If you already have an albums file, you can set it in the config file.")
print("A new file will be created.")
def load_line(self, line):
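# Expected line format: "<album_id> <JSON list of ignored comment ids>".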
if line[0] == "#":
return
parts = line[:-1].split(" ", 1)
if len(parts) < 2:
return
album = parts[0]
comments = json.loads(parts[1])
if album in self.albums:
for comment in comments:
if not comment in self.albums[album]:
self.albums[album].append(comment)
else:
self.albums[album] = comments
def save(self):
with open(self.albumsfile, "w") as f:
for album, comments in sorted(self.albums.items()):
f.write("{} {}\n".format(album, json.dumps(comments)))
def add(self, album):
print("Adding album {}".format(album))
if not album in self.albums:
self.albums[album] = []
self.modified = True
def remove(self, album):
print("Removing album {}".format(album))
if album in self.albums:
del self.albums[album]
self.modified = True
def add_comment(self, album, comment):
print("Adding comment {} to album {} ignore list".format(comment, album))
if not comment in self.albums[album]:
self.albums[album].append(comment)
self.modified = True
def in_album(self, album, comment):
return comment in self.albums[album]
class ITBot:
"""
Manage the input and resources
"""
def __init__(self, configfile="config.txt"):
"""
Load the config and connect to imgur.
"""
self.configfile = configfile
self.config = configparser.ConfigParser()
self.config.read(self.configfile)
self.config.modified = False
if not self.config.has_section("misc"):
self.config["misc"] = {}
self.config.modified = True
if not self.config.has_option("misc", "delay"):
self.config["misc"]["delay"] = "10"
self.config.modified = True
if not self.config.has_option("misc", "retry_delay"):
self.config["misc"]["retry_delay"] = "60"
self.config.modified = True
if not self.config.has_option("misc", "branches_per_node"):
self.config["misc"]["branches_per_node"] = "10"
self.config.modified = True
if not self.config.has_option("misc", "subsfile"):
self.config["misc"]["subsfile"] = "subscribers.txt"
self.config.modified = True
if not self.config.has_option("misc", "albumsfile"):
self.config["misc"]["albumsfile"] = "albums.txt"
self.config.modified = True
self.client = Client(self.config)
self.subs = Subscribers(self.config["misc"]["subsfile"])
self.albums = Albums(self.config["misc"]["albumsfile"])
self._commands = {}
self._add_command("quit", self.command_quit, "Quit.",
("It's just quitting. Why would you call help on that?\n"
"Ctrl+D (EOF) or Ctrl+C (KeyboardInterrupt) work too."))
self._add_command("q", self.command_quit, "Short for 'quit'.",
("You seem desparate... There really is nothing new here."))
self._add_command("help", self.command_help, "Show th- Oh, you already figured it out...",
("I believe there is nothing more I could tell you about this command.\n"
"Go and try out the other commands instead of doing - well, this :P"))
self._add_command("comment", self.command_comment, "Comment on an image with all your subs.",
("comment <image_id>\n"
"Posts a top-level comment and then replies with the full list of your subs."))
self._add_command("scan", self.command_scan, "Scan your albums' comments for (un)subscribers.",
("Scans through the comments below your albums and processes any obvious '(un)subscribe's.\n"
"In difficult cases, presents the comment to you and lets you decide."))
self._add_command("add", self.command_add, "Add subscribers.",
("add <nick> [<nick> [...]]\n"
"List all the nicks after the command and they'll be added to your\n"
"subs in the subscribers file."))
self._add_command("remove", self.command_remove, "Remove subscribers.",
("remove <nick> [<nick> [...]]\n"
"Works the same way as add, but in reverse :P"))
self._add_command("reg", self.command_reg, "Register albums.",
("reg <album_id> [<album_id> [...]]\n"
"Register albums to be scanned by the scan command."))
self._add_command("dereg", self.command_dereg, "Deregister albums.",
("dereg <album_id> [<album_id> [...]]\n"
"The albums will no longer be included in further calls to the scan command.\n"
"WARNING: This also deletes all info about messages from those albums which were\n"
"marked as \"ignore\" (neither a subscribe nor an unsubscribe)."))
self._add_command("count", self.command_count, "Boost ego.",
("Lean back and relax"))
self._add_command("cleanup", self.command_count, "Removes all unsubscribed nicks from the subsfile.",
("Don't do this unless your subsfile is too large.\n"
"Normally, it is not necessary to clean up at all."))
def _add_command(self, command, function, shorthelp, longhelp):
"""
Helps organising commands
"""
self._commands[command] = {
"function": function,
"shorthelp": shorthelp,
"longhelp": longhelp
}
def fancy_intro(self):
"""
Nothing important...
"""
logo = [" ___________________",
" .' '.",
" / _ \\",
"| (_)_ __ __ _ _ _ _ _ |",
"| | | ' \/ _` | || | '_| |",
"| |_|_|_|_\__, |\_,_|_| |",
" \\ |___/ /",
" '.___________________.'"]
for line in logo:
print(line)
time.sleep(0.1)
def fancy_outtro(self):
"""
Nothing important...
"""
logo = [" ________________",
" .' '.",
" / ____ _ \\",
"| | __ ) _ _ ___| | |",
"| | _ \| | | |/ _ \ | |",
"| | |_) | |_| | __/_| |",
"| |____/ \__, |\___(_) |",
" \\ |___/ /",
" '.________________.'"]
for line in logo:
print(line)
time.sleep(0.1)
def command_help(self, args):
if args:
if args[0] in self._commands:
print(self._commands[args[0]]["longhelp"])
else:
print("No help found for {}. You might want to check 'help'.".format(args[0]))
else:
print("Use 'help <command>' for a more detailed help text.\n")
for command, info in sorted(self._commands.items()):
print(" {} - {}".format(command.ljust(10), info["shorthelp"]))
def command_quit(self, args):
return True
def command_add(self, args):
if not args:
print("No names found, check the 'help subadd' or just enter some names...")
return
for arg in args:
self.subs.add(arg)
def command_remove(self, args):
if not args:
print("No names found, check the 'help subremove' or just enter some names...")
return
for arg in args:
self.subs.remove(arg)
def command_count(self, args):
print("You currently have {} subscribers.".format(self.subs.count()))
print("\\(^o^)/")
def command_cleanup(self, args):
self.subs.clean_up()
print("Removed all unsubscribed nicks from the subsfile.")
def command_comment(self, args):
try:
image_id = args[0]
except IndexError:
print("Image ID missing. Maybe check the 'help comment'?")
return
comments = self.subs.to_comments()
print("{} subscribers in {} comments.".format(self.subs.count(), len(comments)))
top_comment = input("Top-level comment: ").strip()
if not top_comment:
print("Comment can't be empty.")
return
if len(top_comment) > 140:
print("Too many characters (>140), aborting!")
return
print("\nYou entered the following:")
print("Image ID:", repr(image_id))
print("Top-level comment:", repr(top_comment))
if input("Do you want to continue? [Y/n] ").lower() != "y":
return
# use tree of comments to lower the lag on mobile
comment_count = len(comments)
print("\nBuilding tree")
tree = self.build_comment_tree(comments)
print("Posting top-level comment")
root_comment = self.client.client.post_comment(image_id, top_comment)
print("Posting rest of comments")
print("This may take a few hours.")
print("The number of branches per node can be adjusted in the config file.")
self.post_comment_tree(image_id, tree, root_comment["id"], comment_count)
# old comment posting code
"""
print("\nPosting top-level comment")
root_comment = self.client.client.post_comment(image_id, top_comment)
for index, comment in enumerate(comments):
print("Posting comment {} of {}".format(index+1, len(comments)))
while(True):
time.sleep(self.config.getint("misc", "delay"))
try:
self.client.client.post_comment_reply(root_comment["id"], image_id, comment)
except imgurpython.helpers.error.ImgurClientError:
print("An error occurred while sending this comment. Retrying...")
except imgurpython.helpers.error.ImgurClientRateLimitError:
print("Rate limit hit. Retrying...")
except requests.exceptions.ConnectionError:
delay = self.config.getint("misc", "retry_delay")
print("Connection problems, retrying in {}s...".format(delay))
time.sleep(delay)
else:
break
"""
def traverse_level(self, tree, level):
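# Yield every subtree dict at depth (level + 1) below the given tree;
# level 0 yields the tree's direct children.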
if level == 0:
yield from tree.values()
else:
for _, branch in tree.items():
yield from self.traverse_level(branch, level - 1)
def build_comment_tree(self, comments):
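# Fill the tree breadth-first: each node gets at most branches_per_node
# children, and a level is completed before the next one starts. Comments
# are consumed from the end of the list via pop(). A small sketch with
# branches_per_node = 2 and comments [a, b, c, d, e]: the root-level
# replies are e and d, e gets replies c and b, and d gets reply a.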
tree = {"root":{}}
level = 0
while True:
for branch in self.traverse_level(tree, level):
for i in range(self.config.getint("misc", "branches_per_node")):
if comments:
branch[comments.pop()] = {}
else:
return tree["root"]
level += 1
def post_comment_tree(self, image_id, tree, root_comment_id, comment_count):
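# Post depth-first: each key is posted as a reply to root_comment_id, then
# its whole subtree is posted beneath the new comment before moving on.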
for comment, branch in tree.items():
time.sleep(self.config.getint("misc", "delay"))
while True:
try:
comment_id = self.client.client.post_comment_reply(root_comment_id, image_id, comment)["id"]
except imgurpython.helpers.error.ImgurClientError as e:
print("An error occurred while sending this comment ({}: {}). Retrying...".format(e.status_code, e.error_message))
except imgurpython.helpers.error.ImgurClientRateLimitError:
print("Rate limit hit. Retrying...")
except requests.exceptions.ConnectionError:
print("Connection problems. Retrying...")
else:
time_per_comment = self.config.getint("misc", "delay") + 1
delta = datetime.timedelta(seconds=time_per_comment*comment_count)
print("{} comments left; estimated time: {}".format(comment_count, delta))
comment_count -= 1
break
time.sleep(self.config.getint("misc", "retry_delay")) # something went wrong, so we wait...
comment_count = self.post_comment_tree(image_id, branch, comment_id, comment_count)
return comment_count
def command_scan(self, args):
for album in self.albums.albums:
print("Scanning album {}...".format(album))
try:
comments = self.client.client.gallery_item_comments(album, sort="new")
except imgurpython.helpers.error.ImgurClientError:
print("Error while loading comments. You might want to double-check your albums file.")
else:
for comment in self.flatten_comments(comments):
if comment.author_id != self.client.account.id \
and not self.albums.in_album(album, comment.id) \
and self.subs.check_comment(comment.author, comment.comment, comment.datetime):
self.albums.add_comment(album, comment.id)
def command_reg(self, args):
if not args:
print("Album IDs missing. Maybe check the 'help reg'?")
for album in args:
self.albums.add(album)
def command_dereg(self, args):
if not args:
print("Album IDs missing. Maybe check the 'help dereg'?")
for album in args:
self.albums.remove(album)
def flatten_comments(self, comments):
for comment in comments:
yield comment
if comment.children:
yield from self.flatten_comments(comment.children)
def parse_command(self, inputstr):
"""
parse_command(inputstring) -> command, [args]
Kept separate in case command parsing needs to be improved in the future.
"""
args = inputstr.split(" ")
args = [arg for arg in args if arg] # remove empty strings
if not args: # no command found
return "", []
command = args[0]
args = args[1:]
return command, args
def prompt_command(self):
"""
prompt_command() -> exit
Takes a command and calls the respective functions.
Returns True if user exited.
"""
inputstr = input("\n>>> ")
command, args = self.parse_command(inputstr)
if not command:
return
if command in self._commands:
return self._commands[command]["function"](args)
else:
print("Invalid command. Type 'help' for a list of available commands.")
def interactive(self):
"""
Start the interactive mode (entering commands)
"""
self.fancy_intro()
print("\nWelcome to TITsBot v.2 *dial-up noises in background*")
print("('help' for a list of commands)")
try:
while True:
if self.prompt_command():
break
except (EOFError, KeyboardInterrupt):
print("")
if self.config.modified:
print("Saving config.")
with open(self.configfile, "w") as f:
self.config.write(f)
if self.subs.modified:
print("Saving subs.")
self.subs.save()
if self.albums.modified:
print("Saving albums.")
self.albums.save()
self.fancy_outtro()
print("\nGoodbye! *beeping noise, then bluescreen*")
if __name__ == "__main__":
bot = ITBot(CONFIGFILE)
bot.interactive()
| mit |
amitsela/incubator-beam | sdks/python/apache_beam/pipeline.py | 3 | 21582 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Pipeline, the top-level Dataflow object.
A pipeline holds a DAG of data transforms. Conceptually the nodes of the DAG
are transforms (PTransform objects) and the edges are values (mostly PCollection
objects). The transforms take as inputs one or more PValues and output one or
more PValues.
The pipeline offers functionality to traverse the graph. The actual operation
to be executed for each node visited is specified through a runner object.
Typical usage:
# Create a pipeline object using a local runner for execution.
p = beam.Pipeline('DirectRunner')
# Add to the pipeline a "Create" transform. When executed this
# transform will produce a PCollection object with the specified values.
pcoll = p | 'create' >> beam.Create([1, 2, 3])
# Another transform could be applied to pcoll, e.g., writing to a text file.
# For other transforms, refer to transforms/ directory.
pcoll | 'write' >> beam.io.WriteToText('./output')
# run() will execute the DAG stored in the pipeline. The execution of the
# nodes visited is done using the specified local runner.
p.run()
"""
from __future__ import absolute_import
import collections
import logging
import os
import shutil
import tempfile
from google.protobuf import wrappers_pb2
from apache_beam import pvalue
from apache_beam import typehints
from apache_beam.internal import pickler
from apache_beam.runners import create_runner
from apache_beam.runners import PipelineRunner
from apache_beam.transforms import ptransform
from apache_beam.typehints import TypeCheckError
from apache_beam.utils import proto_utils
from apache_beam.utils import urns
from apache_beam.utils.pipeline_options import PipelineOptions
from apache_beam.utils.pipeline_options import SetupOptions
from apache_beam.utils.pipeline_options import StandardOptions
from apache_beam.utils.pipeline_options import TypeOptions
from apache_beam.utils.pipeline_options_validator import PipelineOptionsValidator
class Pipeline(object):
"""A pipeline object that manages a DAG of PValues and their PTransforms.
Conceptually the PValues are the DAG's nodes and the PTransforms computing
the PValues are the edges.
All the transforms applied to the pipeline must have distinct full labels.
If the same transform instance needs to be applied again, a clone should be
created with a new label (e.g., transform.clone('new label')).
"""
def __init__(self, runner=None, options=None, argv=None):
"""Initialize a pipeline object.
Args:
runner: An object of type 'PipelineRunner' that will be used to execute
the pipeline. For registered runners, the runner name can be specified,
otherwise a runner object must be supplied.
options: A configured 'PipelineOptions' object containing arguments
that should be used for running the Dataflow job.
argv: a list of arguments (such as sys.argv) to be used for building a
'PipelineOptions' object. This will only be used if argument 'options'
is None.
Raises:
ValueError: if either the runner or options argument is not of the
expected type.
"""
if options is not None:
if isinstance(options, PipelineOptions):
self.options = options
else:
raise ValueError(
'Parameter options, if specified, must be of type PipelineOptions. '
'Received : %r' % options)
elif argv is not None:
if isinstance(argv, list):
self.options = PipelineOptions(argv)
else:
raise ValueError(
'Parameter argv, if specified, must be a list. Received : %r' % argv)
else:
self.options = PipelineOptions([])
if runner is None:
runner = self.options.view_as(StandardOptions).runner
if runner is None:
runner = StandardOptions.DEFAULT_RUNNER
logging.info(('Missing pipeline option (runner). Executing pipeline '
'using the default runner: %s.'), runner)
if isinstance(runner, str):
runner = create_runner(runner)
elif not isinstance(runner, PipelineRunner):
raise TypeError('Runner must be a PipelineRunner object or the '
'name of a registered runner.')
# Validate pipeline options
errors = PipelineOptionsValidator(self.options, runner).validate()
if errors:
raise ValueError(
'Pipeline has validation errors: \n' + '\n'.join(errors))
# Default runner to be used.
self.runner = runner
# Stack of transforms generated by nested apply() calls. The stack will
# contain a root node as an enclosing (parent) node for top transforms.
self.transforms_stack = [AppliedPTransform(None, None, '', None)]
# Set of transform labels (full labels) applied to the pipeline.
# If a transform is applied and the full label is already in the set
# then the transform will have to be cloned with a new label.
self.applied_labels = set()
def _current_transform(self):
"""Returns the transform currently on the top of the stack."""
return self.transforms_stack[-1]
def _root_transform(self):
"""Returns the root transform of the transform stack."""
return self.transforms_stack[0]
def run(self, test_runner_api=True):
"""Runs the pipeline. Returns whatever our runner returns after running."""
# When possible, invoke a round trip through the runner API.
if test_runner_api and self._verify_runner_api_compatible():
return Pipeline.from_runner_api(
self.to_runner_api(), self.runner, self.options).run(False)
if self.options.view_as(SetupOptions).save_main_session:
# If this option is chosen, verify we can pickle the main session early.
tmpdir = tempfile.mkdtemp()
try:
pickler.dump_session(os.path.join(tmpdir, 'main_session.pickle'))
finally:
shutil.rmtree(tmpdir)
return self.runner.run(self)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if not exc_type:
self.run().wait_until_finish()
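# A minimal usage sketch of the context-manager support above (assuming a
# configured runner):
#
#   with Pipeline('DirectRunner') as p:
#     p | 'create' >> beam.Create([1, 2, 3])
#   # on normal exit, run() is invoked and we block until the job finishes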
def visit(self, visitor):
"""Visits depth-first every node of a pipeline's DAG.
Args:
visitor: PipelineVisitor object whose callbacks will be called for each
node visited. See PipelineVisitor comments.
Raises:
TypeError: if node is specified and is not a PValue.
pipeline.PipelineError: if node is specified and does not belong to this
pipeline instance.
"""
visited = set()
self._root_transform().visit(visitor, self, visited)
def apply(self, transform, pvalueish=None, label=None):
"""Applies a custom transform using the pvalueish specified.
Args:
transform: the PTransform to apply.
pvalueish: the input for the PTransform (typically a PCollection).
label: label of the PTransform.
Raises:
TypeError: if the transform object extracted from the argument list is
not a PTransform.
RuntimeError: if the transform object was already applied to this pipeline
and needs to be cloned in order to apply again.
"""
if isinstance(transform, ptransform._NamedPTransform):
return self.apply(transform.transform, pvalueish,
label or transform.label)
if not isinstance(transform, ptransform.PTransform):
raise TypeError("Expected a PTransform object, got %s" % transform)
if label:
# Fix self.label as it is inspected by some PTransform operations
# (e.g. to produce error messages for type hint violations).
try:
old_label, transform.label = transform.label, label
return self.apply(transform, pvalueish)
finally:
transform.label = old_label
full_label = '/'.join([self._current_transform().full_label,
label or transform.label]).lstrip('/')
if full_label in self.applied_labels:
raise RuntimeError(
'Transform "%s" does not have a stable unique label. '
'This will prevent updating of pipelines. '
'To apply a transform with a specified label write '
'pvalue | "label" >> transform'
% full_label)
self.applied_labels.add(full_label)
pvalueish, inputs = transform._extract_input_pvalues(pvalueish)
try:
inputs = tuple(inputs)
for leaf_input in inputs:
if not isinstance(leaf_input, pvalue.PValue):
raise TypeError
except TypeError:
raise NotImplementedError(
'Unable to extract PValue inputs from %s; either %s does not accept '
'inputs of this format, or it does not properly override '
'_extract_input_pvalues' % (pvalueish, transform))
current = AppliedPTransform(
self._current_transform(), transform, full_label, inputs)
self._current_transform().add_part(current)
self.transforms_stack.append(current)
type_options = self.options.view_as(TypeOptions)
if type_options.pipeline_type_check:
transform.type_check_inputs(pvalueish)
pvalueish_result = self.runner.apply(transform, pvalueish)
if type_options is not None and type_options.pipeline_type_check:
transform.type_check_outputs(pvalueish_result)
for result in ptransform.GetPValues().visit(pvalueish_result):
assert isinstance(result, (pvalue.PValue, pvalue.DoOutputsTuple))
# Make sure we set the producer only for a leaf node in the transform DAG.
# This way we preserve the last transform of a composite transform as
# being the real producer of the result.
if result.producer is None:
result.producer = current
# TODO(robertwb): Multi-input, multi-output inference.
# TODO(robertwb): Ideally we'd do intersection here.
if (type_options is not None and type_options.pipeline_type_check
and isinstance(result, pvalue.PCollection)
and not result.element_type):
input_element_type = (
inputs[0].element_type
if len(inputs) == 1
else typehints.Any)
type_hints = transform.get_type_hints()
declared_output_type = type_hints.simple_output_type(transform.label)
if declared_output_type:
input_types = type_hints.input_types
if input_types and input_types[0]:
declared_input_type = input_types[0][0]
result.element_type = typehints.bind_type_variables(
declared_output_type,
typehints.match_type_variables(declared_input_type,
input_element_type))
else:
result.element_type = declared_output_type
else:
result.element_type = transform.infer_output_type(input_element_type)
assert isinstance(result.producer.inputs, tuple)
current.add_output(result)
if (type_options is not None and
type_options.type_check_strictness == 'ALL_REQUIRED' and
transform.get_type_hints().output_types is None):
ptransform_name = '%s(%s)' % (transform.__class__.__name__, full_label)
raise TypeCheckError('Pipeline type checking is enabled, however no '
'output type-hint was found for the '
'PTransform %s' % ptransform_name)
current.update_input_refcounts()
self.transforms_stack.pop()
return pvalueish_result
def _verify_runner_api_compatible(self):
class Visitor(PipelineVisitor): # pylint: disable=used-before-assignment
ok = True # Really a nonlocal.
def visit_transform(self, transform_node):
if transform_node.side_inputs:
# No side inputs (yet).
Visitor.ok = False
try:
# Transforms must be picklable.
pickler.loads(pickler.dumps(transform_node.transform))
except Exception:
Visitor.ok = False
def visit_value(self, value, _):
if isinstance(value, pvalue.PDone):
Visitor.ok = False
self.visit(Visitor())
return Visitor.ok
def to_runner_api(self):
from apache_beam.runners import pipeline_context
from apache_beam.runners.api import beam_runner_api_pb2
context = pipeline_context.PipelineContext()
# Mutates context; placing inline would force dependence on
# argument evaluation order.
root_transform_id = context.transforms.get_id(self._root_transform())
proto = beam_runner_api_pb2.Pipeline(
root_transform_id=root_transform_id,
components=context.to_runner_api())
return proto
@staticmethod
def from_runner_api(proto, runner, options):
p = Pipeline(runner=runner, options=options)
from apache_beam.runners import pipeline_context
context = pipeline_context.PipelineContext(proto.components)
p.transforms_stack = [
context.transforms.get_by_id(proto.root_transform_id)]
# TODO(robertwb): These are only needed to continue construction. Omit?
p.applied_labels = set([
t.unique_name for t in proto.components.transforms.values()])
for id in proto.components.pcollections:
context.pcollections.get_by_id(id).pipeline = p
return p
class PipelineVisitor(object):
"""Visitor pattern class used to traverse a DAG of transforms.
This is an internal class used for bookkeeping by a Pipeline.
"""
def visit_value(self, value, producer_node):
"""Callback for visiting a PValue in the pipeline DAG.
Args:
value: PValue visited (typically a PCollection instance).
producer_node: AppliedPTransform object whose transform produced the
pvalue.
"""
pass
def visit_transform(self, transform_node):
"""Callback for visiting a transform node in the pipeline DAG."""
pass
def enter_composite_transform(self, transform_node):
"""Callback for entering traversal of a composite transform node."""
pass
def leave_composite_transform(self, transform_node):
"""Callback for leaving traversal of a composite transform node."""
pass
class AppliedPTransform(object):
"""A transform node representing an instance of applying a PTransform.
This is an internal class used for bookkeeping by a Pipeline.
"""
def __init__(self, parent, transform, full_label, inputs):
self.parent = parent
self.transform = transform
# Note that we want the PipelineVisitor classes to use the full_label,
# inputs, side_inputs, and outputs fields from this instance instead of the
# ones of the PTransform instance associated with it. Doing this permits
# reusing PTransform instances in different contexts (apply() calls) without
# any interference. This is particularly useful for composite transforms.
self.full_label = full_label
self.inputs = inputs or ()
self.side_inputs = () if transform is None else tuple(transform.side_inputs)
self.outputs = {}
self.parts = []
# Per tag refcount dictionary for PValues for which this node is a
# root producer.
self.refcounts = collections.defaultdict(int)
def __repr__(self):
return "%s(%s, %s)" % (self.__class__.__name__, self.full_label,
type(self.transform).__name__)
def update_input_refcounts(self):
"""Increment refcounts for all transforms providing inputs."""
def real_producer(pv):
real = pv.producer
while real.parts:
real = real.parts[-1]
return real
if not self.is_composite():
for main_input in self.inputs:
if not isinstance(main_input, pvalue.PBegin):
real_producer(main_input).refcounts[main_input.tag] += 1
for side_input in self.side_inputs:
real_producer(side_input.pvalue).refcounts[side_input.pvalue.tag] += 1
def add_output(self, output, tag=None):
if isinstance(output, pvalue.DoOutputsTuple):
self.add_output(output[output._main_tag])
elif isinstance(output, pvalue.PValue):
# TODO(BEAM-1833): Require tags when calling this method.
if tag is None and None in self.outputs:
tag = len(self.outputs)
assert tag not in self.outputs
self.outputs[tag] = output
else:
raise TypeError("Unexpected output type: %s" % output)
def add_part(self, part):
assert isinstance(part, AppliedPTransform)
self.parts.append(part)
def is_composite(self):
"""Returns whether this is a composite transform.
A composite transform has parts (inner transforms) or isn't the
producer for any of its outputs. (An example of a transform that
is not a producer is one that returns its inputs instead.)
"""
return bool(self.parts) or all(
pval.producer is not self for pval in self.outputs.values())
def visit(self, visitor, pipeline, visited):
"""Visits all nodes reachable from the current node."""
for pval in self.inputs:
if pval not in visited and not isinstance(pval, pvalue.PBegin):
assert pval.producer is not None
pval.producer.visit(visitor, pipeline, visited)
# The value should be visited now since we visit outputs too.
assert pval in visited, pval
# Visit side inputs.
for pval in self.side_inputs:
if isinstance(pval, pvalue.AsSideInput) and pval.pvalue not in visited:
pval = pval.pvalue # Unpack marker-object-wrapped pvalue.
assert pval.producer is not None
pval.producer.visit(visitor, pipeline, visited)
# The value should be visited now since we visit outputs too.
assert pval in visited
# TODO(silviuc): Is there a way to signal that we are visiting a side
# value? The issue is that the same PValue can be reachable through
# multiple paths and therefore it is not guaranteed that the value
# will be visited as a side value.
# Visit a composite or primitive transform.
if self.is_composite():
visitor.enter_composite_transform(self)
for part in self.parts:
part.visit(visitor, pipeline, visited)
visitor.leave_composite_transform(self)
else:
visitor.visit_transform(self)
# Visit the outputs (one or more). It is essential to mark as visited the
# tagged PCollections of the DoOutputsTuple object. A tagged PCollection is
# connected directly with its producer (a multi-output ParDo), but the
# output of such a transform is the containing DoOutputsTuple, not the
# PCollection inside it. Without the code below a tagged PCollection will
# not be marked as visited while visiting its producer.
for pval in self.outputs.values():
if isinstance(pval, pvalue.DoOutputsTuple):
pvals = (v for v in pval)
else:
pvals = (pval,)
for v in pvals:
if v not in visited:
visited.add(v)
visitor.visit_value(v, self)
def named_inputs(self):
# TODO(BEAM-1833): Push names up into the sdk construction.
return {str(ix): input for ix, input in enumerate(self.inputs)
if isinstance(input, pvalue.PCollection)}
def named_outputs(self):
return {str(tag): output for tag, output in self.outputs.items()
if isinstance(output, pvalue.PCollection)}
def to_runner_api(self, context):
from apache_beam.runners.api import beam_runner_api_pb2
return beam_runner_api_pb2.PTransform(
unique_name=self.full_label,
spec=beam_runner_api_pb2.FunctionSpec(
urn=urns.PICKLED_TRANSFORM,
parameter=proto_utils.pack_Any(
wrappers_pb2.BytesValue(value=pickler.dumps(self.transform)))),
subtransforms=[context.transforms.get_id(part) for part in self.parts],
# TODO(BEAM-115): Side inputs.
inputs={tag: context.pcollections.get_id(pc)
for tag, pc in self.named_inputs().items()},
outputs={str(tag): context.pcollections.get_id(out)
for tag, out in self.named_outputs().items()},
# TODO(BEAM-115): display_data
display_data=None)
@staticmethod
def from_runner_api(proto, context):
result = AppliedPTransform(
parent=None,
transform=pickler.loads(
proto_utils.unpack_Any(proto.spec.parameter,
wrappers_pb2.BytesValue).value),
full_label=proto.unique_name,
inputs=[
context.pcollections.get_by_id(id) for id in proto.inputs.values()])
result.parts = [
context.transforms.get_by_id(id) for id in proto.subtransforms]
result.outputs = {
None if tag == 'None' else tag: context.pcollections.get_by_id(id)
for tag, id in proto.outputs.items()}
if not result.parts:
for tag, pc in result.outputs.items():
if pc not in result.inputs:
pc.producer = result
pc.tag = tag
result.update_input_refcounts()
return result
| apache-2.0 |
aarchiba/numpy | numpy/ma/tests/test_extras.py | 41 | 44785 | # pylint: disable-msg=W0611, W0612, W0511
"""Tests suite for MaskedArray.
Adapted from the original test_ma by Pierre Gerard-Marchant
:author: Pierre Gerard-Marchant
:contact: pierregm_at_uga_dot_edu
:version: $Id: test_extras.py 3473 2007-10-29 15:18:13Z jarrod.millman $
"""
from __future__ import division, absolute_import, print_function
import warnings
import numpy as np
from numpy.testing import (
TestCase, run_module_suite, assert_warns, clear_and_catch_warnings
)
from numpy.ma.testutils import (
assert_, assert_array_equal, assert_equal, assert_almost_equal
)
from numpy.ma.core import (
array, arange, masked, MaskedArray, masked_array, getmaskarray, shape,
nomask, ones, zeros, count
)
from numpy.ma.extras import (
atleast_2d, mr_, dot, polyfit, cov, corrcoef, median, average, unique,
setxor1d, setdiff1d, union1d, intersect1d, in1d, ediff1d,
apply_over_axes, apply_along_axis, compress_nd, compress_rowcols,
mask_rowcols, clump_masked, clump_unmasked, flatnotmasked_contiguous,
notmasked_contiguous, notmasked_edges, masked_all, masked_all_like
)
import numpy.ma.extras as mae
class TestGeneric(TestCase):
#
def test_masked_all(self):
# Tests masked_all
# Standard dtype
test = masked_all((2,), dtype=float)
control = array([1, 1], mask=[1, 1], dtype=float)
assert_equal(test, control)
# Flexible dtype
dt = np.dtype({'names': ['a', 'b'], 'formats': ['f', 'f']})
test = masked_all((2,), dtype=dt)
control = array([(0, 0), (0, 0)], mask=[(1, 1), (1, 1)], dtype=dt)
assert_equal(test, control)
test = masked_all((2, 2), dtype=dt)
control = array([[(0, 0), (0, 0)], [(0, 0), (0, 0)]],
mask=[[(1, 1), (1, 1)], [(1, 1), (1, 1)]],
dtype=dt)
assert_equal(test, control)
# Nested dtype
dt = np.dtype([('a', 'f'), ('b', [('ba', 'f'), ('bb', 'f')])])
test = masked_all((2,), dtype=dt)
control = array([(1, (1, 1)), (1, (1, 1))],
mask=[(1, (1, 1)), (1, (1, 1))], dtype=dt)
assert_equal(test, control)
test = masked_all((2,), dtype=dt)
control = array([(1, (1, 1)), (1, (1, 1))],
mask=[(1, (1, 1)), (1, (1, 1))], dtype=dt)
assert_equal(test, control)
test = masked_all((1, 1), dtype=dt)
control = array([[(1, (1, 1))]], mask=[[(1, (1, 1))]], dtype=dt)
assert_equal(test, control)
def test_masked_all_like(self):
# Tests masked_all
# Standard dtype
base = array([1, 2], dtype=float)
test = masked_all_like(base)
control = array([1, 1], mask=[1, 1], dtype=float)
assert_equal(test, control)
# Flexible dtype
dt = np.dtype({'names': ['a', 'b'], 'formats': ['f', 'f']})
base = array([(0, 0), (0, 0)], mask=[(1, 1), (1, 1)], dtype=dt)
test = masked_all_like(base)
control = array([(10, 10), (10, 10)], mask=[(1, 1), (1, 1)], dtype=dt)
assert_equal(test, control)
# Nested dtype
dt = np.dtype([('a', 'f'), ('b', [('ba', 'f'), ('bb', 'f')])])
control = array([(1, (1, 1)), (1, (1, 1))],
mask=[(1, (1, 1)), (1, (1, 1))], dtype=dt)
test = masked_all_like(control)
assert_equal(test, control)
def test_clump_masked(self):
# Test clump_masked
a = masked_array(np.arange(10))
a[[0, 1, 2, 6, 8, 9]] = masked
#
test = clump_masked(a)
control = [slice(0, 3), slice(6, 7), slice(8, 10)]
assert_equal(test, control)
def test_clump_unmasked(self):
# Test clump_unmasked
a = masked_array(np.arange(10))
a[[0, 1, 2, 6, 8, 9]] = masked
test = clump_unmasked(a)
control = [slice(3, 6), slice(7, 8), ]
assert_equal(test, control)
def test_flatnotmasked_contiguous(self):
# Test flatnotmasked_contiguous
a = arange(10)
# No mask
test = flatnotmasked_contiguous(a)
assert_equal(test, slice(0, a.size))
# Some mask
a[(a < 3) | (a > 8) | (a == 5)] = masked
test = flatnotmasked_contiguous(a)
assert_equal(test, [slice(3, 5), slice(6, 9)])
#
a[:] = masked
test = flatnotmasked_contiguous(a)
assert_equal(test, None)
class TestAverage(TestCase):
# Several tests of average. Why so many? Good point...
def test_testAverage1(self):
# Test of average.
ott = array([0., 1., 2., 3.], mask=[True, False, False, False])
assert_equal(2.0, average(ott, axis=0))
assert_equal(2.0, average(ott, weights=[1., 1., 2., 1.]))
result, wts = average(ott, weights=[1., 1., 2., 1.], returned=1)
assert_equal(2.0, result)
self.assertTrue(wts == 4.0)
ott[:] = masked
assert_equal(average(ott, axis=0).mask, [True])
ott = array([0., 1., 2., 3.], mask=[True, False, False, False])
ott = ott.reshape(2, 2)
ott[:, 1] = masked
assert_equal(average(ott, axis=0), [2.0, 0.0])
assert_equal(average(ott, axis=1).mask[0], [True])
assert_equal([2., 0.], average(ott, axis=0))
result, wts = average(ott, axis=0, returned=1)
assert_equal(wts, [1., 0.])
def test_testAverage2(self):
# More tests of average.
w1 = [0, 1, 1, 1, 1, 0]
w2 = [[0, 1, 1, 1, 1, 0], [1, 0, 0, 0, 0, 1]]
x = arange(6, dtype=np.float_)
assert_equal(average(x, axis=0), 2.5)
assert_equal(average(x, axis=0, weights=w1), 2.5)
y = array([arange(6, dtype=np.float_), 2.0 * arange(6)])
assert_equal(average(y, None), np.add.reduce(np.arange(6)) * 3. / 12.)
assert_equal(average(y, axis=0), np.arange(6) * 3. / 2.)
assert_equal(average(y, axis=1),
[average(x, axis=0), average(x, axis=0) * 2.0])
assert_equal(average(y, None, weights=w2), 20. / 6.)
assert_equal(average(y, axis=0, weights=w2),
[0., 1., 2., 3., 4., 10.])
assert_equal(average(y, axis=1),
[average(x, axis=0), average(x, axis=0) * 2.0])
m1 = zeros(6)
m2 = [0, 0, 1, 1, 0, 0]
m3 = [[0, 0, 1, 1, 0, 0], [0, 1, 1, 1, 1, 0]]
m4 = ones(6)
m5 = [0, 1, 1, 1, 1, 1]
assert_equal(average(masked_array(x, m1), axis=0), 2.5)
assert_equal(average(masked_array(x, m2), axis=0), 2.5)
assert_equal(average(masked_array(x, m4), axis=0).mask, [True])
assert_equal(average(masked_array(x, m5), axis=0), 0.0)
assert_equal(count(average(masked_array(x, m4), axis=0)), 0)
z = masked_array(y, m3)
assert_equal(average(z, None), 20. / 6.)
assert_equal(average(z, axis=0), [0., 1., 99., 99., 4.0, 7.5])
assert_equal(average(z, axis=1), [2.5, 5.0])
assert_equal(average(z, axis=0, weights=w2),
[0., 1., 99., 99., 4.0, 10.0])
def test_testAverage3(self):
# Yet more tests of average!
a = arange(6)
b = arange(6) * 3
r1, w1 = average([[a, b], [b, a]], axis=1, returned=1)
assert_equal(shape(r1), shape(w1))
assert_equal(r1.shape, w1.shape)
r2, w2 = average(ones((2, 2, 3)), axis=0, weights=[3, 1], returned=1)
assert_equal(shape(w2), shape(r2))
r2, w2 = average(ones((2, 2, 3)), returned=1)
assert_equal(shape(w2), shape(r2))
r2, w2 = average(ones((2, 2, 3)), weights=ones((2, 2, 3)), returned=1)
assert_equal(shape(w2), shape(r2))
a2d = array([[1, 2], [0, 4]], float)
a2dm = masked_array(a2d, [[False, False], [True, False]])
a2da = average(a2d, axis=0)
assert_equal(a2da, [0.5, 3.0])
a2dma = average(a2dm, axis=0)
assert_equal(a2dma, [1.0, 3.0])
a2dma = average(a2dm, axis=None)
assert_equal(a2dma, 7. / 3.)
a2dma = average(a2dm, axis=1)
assert_equal(a2dma, [1.5, 4.0])
def test_onintegers_with_mask(self):
# Test average on integers with mask
a = average(array([1, 2]))
assert_equal(a, 1.5)
a = average(array([1, 2, 3, 4], mask=[False, False, True, True]))
assert_equal(a, 1.5)
def test_complex(self):
# Test with complex data.
# (Regression test for https://github.com/numpy/numpy/issues/2684)
mask = np.array([[0, 0, 0, 1, 0],
[0, 1, 0, 0, 0]], dtype=bool)
a = masked_array([[0, 1+2j, 3+4j, 5+6j, 7+8j],
[9j, 0+1j, 2+3j, 4+5j, 7+7j]],
mask=mask)
av = average(a)
expected = np.average(a.compressed())
assert_almost_equal(av.real, expected.real)
assert_almost_equal(av.imag, expected.imag)
av0 = average(a, axis=0)
expected0 = average(a.real, axis=0) + average(a.imag, axis=0)*1j
assert_almost_equal(av0.real, expected0.real)
assert_almost_equal(av0.imag, expected0.imag)
av1 = average(a, axis=1)
expected1 = average(a.real, axis=1) + average(a.imag, axis=1)*1j
assert_almost_equal(av1.real, expected1.real)
assert_almost_equal(av1.imag, expected1.imag)
# Test with the 'weights' argument.
wts = np.array([[0.5, 1.0, 2.0, 1.0, 0.5],
[1.0, 1.0, 1.0, 1.0, 1.0]])
wav = average(a, weights=wts)
expected = np.average(a.compressed(), weights=wts[~mask])
assert_almost_equal(wav.real, expected.real)
assert_almost_equal(wav.imag, expected.imag)
wav0 = average(a, weights=wts, axis=0)
expected0 = (average(a.real, weights=wts, axis=0) +
average(a.imag, weights=wts, axis=0)*1j)
assert_almost_equal(wav0.real, expected0.real)
assert_almost_equal(wav0.imag, expected0.imag)
wav1 = average(a, weights=wts, axis=1)
expected1 = (average(a.real, weights=wts, axis=1) +
average(a.imag, weights=wts, axis=1)*1j)
assert_almost_equal(wav1.real, expected1.real)
assert_almost_equal(wav1.imag, expected1.imag)
class TestConcatenator(TestCase):
# Tests for mr_, the equivalent of r_ for masked arrays.
def test_1d(self):
# Tests mr_ on 1D arrays.
assert_array_equal(mr_[1, 2, 3, 4, 5, 6], array([1, 2, 3, 4, 5, 6]))
b = ones(5)
m = [1, 0, 0, 0, 0]
d = masked_array(b, mask=m)
c = mr_[d, 0, 0, d]
self.assertTrue(isinstance(c, MaskedArray))
assert_array_equal(c, [1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1])
assert_array_equal(c.mask, mr_[m, 0, 0, m])
def test_2d(self):
# Tests mr_ on 2D arrays.
a_1 = np.random.rand(5, 5)
a_2 = np.random.rand(5, 5)
m_1 = np.round_(np.random.rand(5, 5), 0)
m_2 = np.round_(np.random.rand(5, 5), 0)
b_1 = masked_array(a_1, mask=m_1)
b_2 = masked_array(a_2, mask=m_2)
# append columns
d = mr_['1', b_1, b_2]
self.assertTrue(d.shape == (5, 10))
assert_array_equal(d[:, :5], b_1)
assert_array_equal(d[:, 5:], b_2)
assert_array_equal(d.mask, np.r_['1', m_1, m_2])
d = mr_[b_1, b_2]
self.assertTrue(d.shape == (10, 5))
assert_array_equal(d[:5,:], b_1)
assert_array_equal(d[5:,:], b_2)
assert_array_equal(d.mask, np.r_[m_1, m_2])
class TestNotMasked(TestCase):
# Tests notmasked_edges and notmasked_contiguous.
def test_edges(self):
# Tests unmasked_edges
data = masked_array(np.arange(25).reshape(5, 5),
mask=[[0, 0, 1, 0, 0],
[0, 0, 0, 1, 1],
[1, 1, 0, 0, 0],
[0, 0, 0, 0, 0],
[1, 1, 1, 0, 0]],)
test = notmasked_edges(data, None)
assert_equal(test, [0, 24])
test = notmasked_edges(data, 0)
assert_equal(test[0], [(0, 0, 1, 0, 0), (0, 1, 2, 3, 4)])
assert_equal(test[1], [(3, 3, 3, 4, 4), (0, 1, 2, 3, 4)])
test = notmasked_edges(data, 1)
assert_equal(test[0], [(0, 1, 2, 3, 4), (0, 0, 2, 0, 3)])
assert_equal(test[1], [(0, 1, 2, 3, 4), (4, 2, 4, 4, 4)])
#
test = notmasked_edges(data.data, None)
assert_equal(test, [0, 24])
test = notmasked_edges(data.data, 0)
assert_equal(test[0], [(0, 0, 0, 0, 0), (0, 1, 2, 3, 4)])
assert_equal(test[1], [(4, 4, 4, 4, 4), (0, 1, 2, 3, 4)])
test = notmasked_edges(data.data, -1)
assert_equal(test[0], [(0, 1, 2, 3, 4), (0, 0, 0, 0, 0)])
assert_equal(test[1], [(0, 1, 2, 3, 4), (4, 4, 4, 4, 4)])
#
data[-2] = masked
test = notmasked_edges(data, 0)
assert_equal(test[0], [(0, 0, 1, 0, 0), (0, 1, 2, 3, 4)])
assert_equal(test[1], [(1, 1, 2, 4, 4), (0, 1, 2, 3, 4)])
test = notmasked_edges(data, -1)
assert_equal(test[0], [(0, 1, 2, 4), (0, 0, 2, 3)])
assert_equal(test[1], [(0, 1, 2, 4), (4, 2, 4, 4)])
def test_contiguous(self):
# Tests notmasked_contiguous
a = masked_array(np.arange(24).reshape(3, 8),
mask=[[0, 0, 0, 0, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 0], ])
tmp = notmasked_contiguous(a, None)
assert_equal(tmp[-1], slice(23, 24, None))
assert_equal(tmp[-2], slice(16, 22, None))
assert_equal(tmp[-3], slice(0, 4, None))
#
tmp = notmasked_contiguous(a, 0)
self.assertTrue(len(tmp[-1]) == 1)
self.assertTrue(tmp[-2] is None)
assert_equal(tmp[-3], tmp[-1])
self.assertTrue(len(tmp[0]) == 2)
#
tmp = notmasked_contiguous(a, 1)
assert_equal(tmp[0][-1], slice(0, 4, None))
self.assertTrue(tmp[1] is None)
assert_equal(tmp[2][-1], slice(7, 8, None))
assert_equal(tmp[2][-2], slice(0, 6, None))
class TestCompressFunctions(TestCase):
def test_compress_nd(self):
# Tests compress_nd
x = np.array(list(range(3*4*5))).reshape(3, 4, 5)
m = np.zeros((3,4,5)).astype(bool)
m[1,1,1] = True
x = array(x, mask=m)
# axis=None
a = compress_nd(x)
assert_equal(a, [[[ 0, 2, 3, 4],
[10, 12, 13, 14],
[15, 17, 18, 19]],
[[40, 42, 43, 44],
[50, 52, 53, 54],
[55, 57, 58, 59]]])
# axis=0
a = compress_nd(x, 0)
assert_equal(a, [[[ 0, 1, 2, 3, 4],
[ 5, 6, 7, 8, 9],
[10, 11, 12, 13, 14],
[15, 16, 17, 18, 19]],
[[40, 41, 42, 43, 44],
[45, 46, 47, 48, 49],
[50, 51, 52, 53, 54],
[55, 56, 57, 58, 59]]])
# axis=1
a = compress_nd(x, 1)
assert_equal(a, [[[ 0, 1, 2, 3, 4],
[10, 11, 12, 13, 14],
[15, 16, 17, 18, 19]],
[[20, 21, 22, 23, 24],
[30, 31, 32, 33, 34],
[35, 36, 37, 38, 39]],
[[40, 41, 42, 43, 44],
[50, 51, 52, 53, 54],
[55, 56, 57, 58, 59]]])
a2 = compress_nd(x, (1,))
a3 = compress_nd(x, -2)
a4 = compress_nd(x, (-2,))
assert_equal(a, a2)
assert_equal(a, a3)
assert_equal(a, a4)
# axis=2
a = compress_nd(x, 2)
assert_equal(a, [[[ 0, 2, 3, 4],
[ 5, 7, 8, 9],
[10, 12, 13, 14],
[15, 17, 18, 19]],
[[20, 22, 23, 24],
[25, 27, 28, 29],
[30, 32, 33, 34],
[35, 37, 38, 39]],
[[40, 42, 43, 44],
[45, 47, 48, 49],
[50, 52, 53, 54],
[55, 57, 58, 59]]])
a2 = compress_nd(x, (2,))
a3 = compress_nd(x, -1)
a4 = compress_nd(x, (-1,))
assert_equal(a, a2)
assert_equal(a, a3)
assert_equal(a, a4)
# axis=(0, 1)
a = compress_nd(x, (0, 1))
assert_equal(a, [[[ 0, 1, 2, 3, 4],
[10, 11, 12, 13, 14],
[15, 16, 17, 18, 19]],
[[40, 41, 42, 43, 44],
[50, 51, 52, 53, 54],
[55, 56, 57, 58, 59]]])
a2 = compress_nd(x, (0, -2))
assert_equal(a, a2)
# axis=(1, 2)
a = compress_nd(x, (1, 2))
assert_equal(a, [[[ 0, 2, 3, 4],
[10, 12, 13, 14],
[15, 17, 18, 19]],
[[20, 22, 23, 24],
[30, 32, 33, 34],
[35, 37, 38, 39]],
[[40, 42, 43, 44],
[50, 52, 53, 54],
[55, 57, 58, 59]]])
a2 = compress_nd(x, (-2, 2))
a3 = compress_nd(x, (1, -1))
a4 = compress_nd(x, (-2, -1))
assert_equal(a, a2)
assert_equal(a, a3)
assert_equal(a, a4)
# axis=(0, 2)
a = compress_nd(x, (0, 2))
assert_equal(a, [[[ 0, 2, 3, 4],
[ 5, 7, 8, 9],
[10, 12, 13, 14],
[15, 17, 18, 19]],
[[40, 42, 43, 44],
[45, 47, 48, 49],
[50, 52, 53, 54],
[55, 57, 58, 59]]])
a2 = compress_nd(x, (0, -1))
assert_equal(a, a2)
def test_compress_rowcols(self):
# Tests compress_rowcols
x = array(np.arange(9).reshape(3, 3),
mask=[[1, 0, 0], [0, 0, 0], [0, 0, 0]])
assert_equal(compress_rowcols(x), [[4, 5], [7, 8]])
assert_equal(compress_rowcols(x, 0), [[3, 4, 5], [6, 7, 8]])
assert_equal(compress_rowcols(x, 1), [[1, 2], [4, 5], [7, 8]])
x = array(x._data, mask=[[0, 0, 0], [0, 1, 0], [0, 0, 0]])
assert_equal(compress_rowcols(x), [[0, 2], [6, 8]])
assert_equal(compress_rowcols(x, 0), [[0, 1, 2], [6, 7, 8]])
assert_equal(compress_rowcols(x, 1), [[0, 2], [3, 5], [6, 8]])
x = array(x._data, mask=[[1, 0, 0], [0, 1, 0], [0, 0, 0]])
assert_equal(compress_rowcols(x), [[8]])
assert_equal(compress_rowcols(x, 0), [[6, 7, 8]])
assert_equal(compress_rowcols(x, 1,), [[2], [5], [8]])
x = array(x._data, mask=[[1, 0, 0], [0, 1, 0], [0, 0, 1]])
assert_equal(compress_rowcols(x).size, 0)
assert_equal(compress_rowcols(x, 0).size, 0)
assert_equal(compress_rowcols(x, 1).size, 0)
def test_mask_rowcols(self):
# Tests mask_rowcols.
x = array(np.arange(9).reshape(3, 3),
mask=[[1, 0, 0], [0, 0, 0], [0, 0, 0]])
assert_equal(mask_rowcols(x).mask,
[[1, 1, 1], [1, 0, 0], [1, 0, 0]])
assert_equal(mask_rowcols(x, 0).mask,
[[1, 1, 1], [0, 0, 0], [0, 0, 0]])
assert_equal(mask_rowcols(x, 1).mask,
[[1, 0, 0], [1, 0, 0], [1, 0, 0]])
x = array(x._data, mask=[[0, 0, 0], [0, 1, 0], [0, 0, 0]])
assert_equal(mask_rowcols(x).mask,
[[0, 1, 0], [1, 1, 1], [0, 1, 0]])
assert_equal(mask_rowcols(x, 0).mask,
[[0, 0, 0], [1, 1, 1], [0, 0, 0]])
assert_equal(mask_rowcols(x, 1).mask,
[[0, 1, 0], [0, 1, 0], [0, 1, 0]])
x = array(x._data, mask=[[1, 0, 0], [0, 1, 0], [0, 0, 0]])
assert_equal(mask_rowcols(x).mask,
[[1, 1, 1], [1, 1, 1], [1, 1, 0]])
assert_equal(mask_rowcols(x, 0).mask,
[[1, 1, 1], [1, 1, 1], [0, 0, 0]])
assert_equal(mask_rowcols(x, 1,).mask,
[[1, 1, 0], [1, 1, 0], [1, 1, 0]])
x = array(x._data, mask=[[1, 0, 0], [0, 1, 0], [0, 0, 1]])
self.assertTrue(mask_rowcols(x).all() is masked)
self.assertTrue(mask_rowcols(x, 0).all() is masked)
self.assertTrue(mask_rowcols(x, 1).all() is masked)
self.assertTrue(mask_rowcols(x).mask.all())
self.assertTrue(mask_rowcols(x, 0).mask.all())
self.assertTrue(mask_rowcols(x, 1).mask.all())
def test_dot(self):
# Tests dot product
n = np.arange(1, 7)
#
m = [1, 0, 0, 0, 0, 0]
a = masked_array(n, mask=m).reshape(2, 3)
b = masked_array(n, mask=m).reshape(3, 2)
c = dot(a, b, True)
assert_equal(c.mask, [[1, 1], [1, 0]])
c = dot(b, a, True)
assert_equal(c.mask, [[1, 1, 1], [1, 0, 0], [1, 0, 0]])
c = dot(a, b, False)
assert_equal(c, np.dot(a.filled(0), b.filled(0)))
c = dot(b, a, False)
assert_equal(c, np.dot(b.filled(0), a.filled(0)))
#
m = [0, 0, 0, 0, 0, 1]
a = masked_array(n, mask=m).reshape(2, 3)
b = masked_array(n, mask=m).reshape(3, 2)
c = dot(a, b, True)
assert_equal(c.mask, [[0, 1], [1, 1]])
c = dot(b, a, True)
assert_equal(c.mask, [[0, 0, 1], [0, 0, 1], [1, 1, 1]])
c = dot(a, b, False)
assert_equal(c, np.dot(a.filled(0), b.filled(0)))
assert_equal(c, dot(a, b))
c = dot(b, a, False)
assert_equal(c, np.dot(b.filled(0), a.filled(0)))
#
m = [0, 0, 0, 0, 0, 0]
a = masked_array(n, mask=m).reshape(2, 3)
b = masked_array(n, mask=m).reshape(3, 2)
c = dot(a, b)
assert_equal(c.mask, nomask)
c = dot(b, a)
assert_equal(c.mask, nomask)
#
a = masked_array(n, mask=[1, 0, 0, 0, 0, 0]).reshape(2, 3)
b = masked_array(n, mask=[0, 0, 0, 0, 0, 0]).reshape(3, 2)
c = dot(a, b, True)
assert_equal(c.mask, [[1, 1], [0, 0]])
c = dot(a, b, False)
assert_equal(c, np.dot(a.filled(0), b.filled(0)))
c = dot(b, a, True)
assert_equal(c.mask, [[1, 0, 0], [1, 0, 0], [1, 0, 0]])
c = dot(b, a, False)
assert_equal(c, np.dot(b.filled(0), a.filled(0)))
#
a = masked_array(n, mask=[0, 0, 0, 0, 0, 1]).reshape(2, 3)
b = masked_array(n, mask=[0, 0, 0, 0, 0, 0]).reshape(3, 2)
c = dot(a, b, True)
assert_equal(c.mask, [[0, 0], [1, 1]])
c = dot(a, b)
assert_equal(c, np.dot(a.filled(0), b.filled(0)))
c = dot(b, a, True)
assert_equal(c.mask, [[0, 0, 1], [0, 0, 1], [0, 0, 1]])
c = dot(b, a, False)
assert_equal(c, np.dot(b.filled(0), a.filled(0)))
#
a = masked_array(n, mask=[0, 0, 0, 0, 0, 1]).reshape(2, 3)
b = masked_array(n, mask=[0, 0, 1, 0, 0, 0]).reshape(3, 2)
c = dot(a, b, True)
assert_equal(c.mask, [[1, 0], [1, 1]])
c = dot(a, b, False)
assert_equal(c, np.dot(a.filled(0), b.filled(0)))
c = dot(b, a, True)
assert_equal(c.mask, [[0, 0, 1], [1, 1, 1], [0, 0, 1]])
c = dot(b, a, False)
assert_equal(c, np.dot(b.filled(0), a.filled(0)))
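# Editorial note (not part of the original suite): the dot cases above all
# exercise one rule -- with strict=True, an output cell of dot(a, b) is
# masked whenever any masked element of the corresponding row of a or
# column of b contributes to it; with strict=False, masked entries are
# treated as 0 (hence the filled(0) comparisons above) before np.dot runs.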
class TestApplyAlongAxis(TestCase):
# Tests 2D functions
def test_3d(self):
a = arange(12.).reshape(2, 2, 3)
def myfunc(b):
return b[1]
xa = apply_along_axis(myfunc, 2, a)
assert_equal(xa, [[1, 4], [7, 10]])
# Tests kwargs functions
def test_3d_kwargs(self):
a = arange(12).reshape(2, 2, 3)
def myfunc(b, offset=0):
return b[1+offset]
xa = apply_along_axis(myfunc, 2, a, offset=1)
assert_equal(xa, [[2, 5], [8, 11]])
class TestApplyOverAxes(TestCase):
# Tests apply_over_axes
def test_basic(self):
a = arange(24).reshape(2, 3, 4)
test = apply_over_axes(np.sum, a, [0, 2])
ctrl = np.array([[[60], [92], [124]]])
assert_equal(test, ctrl)
a[(a % 2).astype(bool)] = masked
test = apply_over_axes(np.sum, a, [0, 2])
ctrl = np.array([[[28], [44], [60]]])
assert_equal(test, ctrl)
class TestMedian(TestCase):
def test_pytype(self):
r = np.ma.median([[np.inf, np.inf], [np.inf, np.inf]], axis=-1)
assert_equal(r, np.inf)
def test_non_masked(self):
assert_equal(np.ma.median(np.arange(9)), 4.)
assert_equal(np.ma.median(range(9)), 4)
def test_2d(self):
# Tests median w/ 2D
(n, p) = (101, 30)
x = masked_array(np.linspace(-1., 1., n),)
x[:10] = x[-10:] = masked
z = masked_array(np.empty((n, p), dtype=float))
z[:, 0] = x[:]
idx = np.arange(len(x))
for i in range(1, p):
np.random.shuffle(idx)
z[:, i] = x[idx]
assert_equal(median(z[:, 0]), 0)
assert_equal(median(z), 0)
assert_equal(median(z, axis=0), np.zeros(p))
assert_equal(median(z.T, axis=1), np.zeros(p))
def test_2d_waxis(self):
# Tests median w/ 2D arrays and different axis.
x = masked_array(np.arange(30).reshape(10, 3))
x[:3] = x[-3:] = masked
assert_equal(median(x), 14.5)
assert_equal(median(x, axis=0), [13.5, 14.5, 15.5])
assert_equal(median(x, axis=1), [0, 0, 0, 10, 13, 16, 19, 0, 0, 0])
assert_equal(median(x, axis=1).mask, [1, 1, 1, 0, 0, 0, 0, 1, 1, 1])
def test_3d(self):
# Tests median w/ 3D
x = np.ma.arange(24).reshape(3, 4, 2)
x[x % 3 == 0] = masked
assert_equal(median(x, 0), [[12, 9], [6, 15], [12, 9], [18, 15]])
x.shape = (4, 3, 2)
assert_equal(median(x, 0), [[99, 10], [11, 99], [13, 14]])
x = np.ma.arange(24).reshape(4, 3, 2)
x[x % 5 == 0] = masked
assert_equal(median(x, 0), [[12, 10], [8, 9], [16, 17]])
def test_neg_axis(self):
x = masked_array(np.arange(30).reshape(10, 3))
x[:3] = x[-3:] = masked
assert_equal(median(x, axis=-1), median(x, axis=1))
def test_out(self):
x = masked_array(np.arange(30).reshape(10, 3))
x[:3] = x[-3:] = masked
out = masked_array(np.ones(10))
r = median(x, axis=1, out=out)
assert_equal(r, out)
assert_(type(r) == MaskedArray)
class TestCov(TestCase):
def setUp(self):
self.data = array(np.random.rand(12))
def test_1d_wo_missing(self):
# Test cov on 1D variable w/o missing values
x = self.data
assert_almost_equal(np.cov(x), cov(x))
assert_almost_equal(np.cov(x, rowvar=False), cov(x, rowvar=False))
assert_almost_equal(np.cov(x, rowvar=False, bias=True),
cov(x, rowvar=False, bias=True))
def test_2d_wo_missing(self):
# Test cov on one 2D variable w/o missing values
x = self.data.reshape(3, 4)
assert_almost_equal(np.cov(x), cov(x))
assert_almost_equal(np.cov(x, rowvar=False), cov(x, rowvar=False))
assert_almost_equal(np.cov(x, rowvar=False, bias=True),
cov(x, rowvar=False, bias=True))
def test_1d_w_missing(self):
# Test cov on one 1D variable w/ missing values
x = self.data
x[-1] = masked
x -= x.mean()
nx = x.compressed()
assert_almost_equal(np.cov(nx), cov(x))
assert_almost_equal(np.cov(nx, rowvar=False), cov(x, rowvar=False))
assert_almost_equal(np.cov(nx, rowvar=False, bias=True),
cov(x, rowvar=False, bias=True))
#
# cov must reject masked values when allow_masked=False
self.assertRaises(ValueError, cov, x, allow_masked=False)
#
# 2 1D variables w/ missing values
nx = x[1:-1]
assert_almost_equal(np.cov(nx, nx[::-1]), cov(x, x[::-1]))
assert_almost_equal(np.cov(nx, nx[::-1], rowvar=False),
cov(x, x[::-1], rowvar=False))
assert_almost_equal(np.cov(nx, nx[::-1], rowvar=False, bias=True),
cov(x, x[::-1], rowvar=False, bias=True))
def test_2d_w_missing(self):
# Test cov on 2D variable w/ missing value
x = self.data
x[-1] = masked
x = x.reshape(3, 4)
valid = np.logical_not(getmaskarray(x)).astype(int)
frac = np.dot(valid, valid.T)
xf = (x - x.mean(1)[:, None]).filled(0)
assert_almost_equal(cov(x),
np.cov(xf) * (x.shape[1] - 1) / (frac - 1.))
assert_almost_equal(cov(x, bias=True),
np.cov(xf, bias=True) * x.shape[1] / frac)
frac = np.dot(valid.T, valid)
xf = (x - x.mean(0)).filled(0)
assert_almost_equal(cov(x, rowvar=False),
(np.cov(xf, rowvar=False) *
(x.shape[0] - 1) / (frac - 1.)))
assert_almost_equal(cov(x, rowvar=False, bias=True),
(np.cov(xf, rowvar=False, bias=True) *
x.shape[0] / frac))
class catch_warn_mae(clear_and_catch_warnings):
""" Context manager to catch, reset warnings in ma.extras module
"""
class_modules = (mae,)
class TestCorrcoef(TestCase):
def setUp(self):
self.data = array(np.random.rand(12))
self.data2 = array(np.random.rand(12))
def test_ddof(self):
# ddof raises DeprecationWarning
x, y = self.data, self.data2
expected = np.corrcoef(x)
expected2 = np.corrcoef(x, y)
with catch_warn_mae():
warnings.simplefilter("always")
assert_warns(DeprecationWarning, corrcoef, x, ddof=-1)
warnings.simplefilter("ignore")
# ddof has no or negligible effect on the function
assert_almost_equal(np.corrcoef(x, ddof=0), corrcoef(x, ddof=0))
assert_almost_equal(corrcoef(x, ddof=-1), expected)
assert_almost_equal(corrcoef(x, y, ddof=-1), expected2)
assert_almost_equal(corrcoef(x, ddof=3), expected)
assert_almost_equal(corrcoef(x, y, ddof=3), expected2)
def test_bias(self):
x, y = self.data, self.data2
expected = np.corrcoef(x)
# bias raises DeprecationWarning
with catch_warn_mae():
warnings.simplefilter("always")
assert_warns(DeprecationWarning, corrcoef, x, y, True, False)
assert_warns(DeprecationWarning, corrcoef, x, y, True, True)
assert_warns(DeprecationWarning, corrcoef, x, bias=False)
warnings.simplefilter("ignore")
# bias has no or negligible effect on the function
assert_almost_equal(corrcoef(x, bias=1), expected)
def test_1d_wo_missing(self):
# Test corrcoef on 1D variable w/o missing values
x = self.data
assert_almost_equal(np.corrcoef(x), corrcoef(x))
assert_almost_equal(np.corrcoef(x, rowvar=False),
corrcoef(x, rowvar=False))
with catch_warn_mae():
warnings.simplefilter("ignore")
assert_almost_equal(np.corrcoef(x, rowvar=False, bias=True),
corrcoef(x, rowvar=False, bias=True))
def test_2d_wo_missing(self):
# Test corrcoef on one 2D variable w/o missing values
x = self.data.reshape(3, 4)
assert_almost_equal(np.corrcoef(x), corrcoef(x))
assert_almost_equal(np.corrcoef(x, rowvar=False),
corrcoef(x, rowvar=False))
with catch_warn_mae():
warnings.simplefilter("ignore")
assert_almost_equal(np.corrcoef(x, rowvar=False, bias=True),
corrcoef(x, rowvar=False, bias=True))
def test_1d_w_missing(self):
# Test corrcoef on one 1D variable w/ missing values
x = self.data
x[-1] = masked
x -= x.mean()
nx = x.compressed()
assert_almost_equal(np.corrcoef(nx), corrcoef(x))
assert_almost_equal(np.corrcoef(nx, rowvar=False),
corrcoef(x, rowvar=False))
with catch_warn_mae():
warnings.simplefilter("ignore")
assert_almost_equal(np.corrcoef(nx, rowvar=False, bias=True),
corrcoef(x, rowvar=False, bias=True))
# corrcoef must reject masked values when allow_masked=False
self.assertRaises(ValueError, corrcoef, x, allow_masked=False)
# 2 1D variables w/ missing values
nx = x[1:-1]
assert_almost_equal(np.corrcoef(nx, nx[::-1]), corrcoef(x, x[::-1]))
assert_almost_equal(np.corrcoef(nx, nx[::-1], rowvar=False),
corrcoef(x, x[::-1], rowvar=False))
with catch_warn_mae():
warnings.simplefilter("ignore")
# ddof and bias have no or negligible effect on the function
assert_almost_equal(np.corrcoef(nx, nx[::-1]),
corrcoef(x, x[::-1], bias=1))
assert_almost_equal(np.corrcoef(nx, nx[::-1]),
corrcoef(x, x[::-1], ddof=2))
def test_2d_w_missing(self):
# Test corrcoef on 2D variable w/ missing value
x = self.data
x[-1] = masked
x = x.reshape(3, 4)
test = corrcoef(x)
control = np.corrcoef(x)
assert_almost_equal(test[:-1, :-1], control[:-1, :-1])
with catch_warn_mae():
warnings.simplefilter("ignore")
# ddof and bias have no or negligible effect on the function
assert_almost_equal(corrcoef(x, ddof=-2)[:-1, :-1],
control[:-1, :-1])
assert_almost_equal(corrcoef(x, ddof=3)[:-1, :-1],
control[:-1, :-1])
assert_almost_equal(corrcoef(x, bias=1)[:-1, :-1],
control[:-1, :-1])
class TestPolynomial(TestCase):
#
def test_polyfit(self):
# Tests polyfit
# On ndarrays
x = np.random.rand(10)
y = np.random.rand(20).reshape(-1, 2)
assert_almost_equal(polyfit(x, y, 3), np.polyfit(x, y, 3))
# ON 1D maskedarrays
x = x.view(MaskedArray)
x[0] = masked
y = y.view(MaskedArray)
y[0, 0] = y[-1, -1] = masked
#
(C, R, K, S, D) = polyfit(x, y[:, 0], 3, full=True)
(c, r, k, s, d) = np.polyfit(x[1:], y[1:, 0].compressed(), 3,
full=True)
for (a, a_) in zip((C, R, K, S, D), (c, r, k, s, d)):
assert_almost_equal(a, a_)
#
(C, R, K, S, D) = polyfit(x, y[:, -1], 3, full=True)
(c, r, k, s, d) = np.polyfit(x[1:-1], y[1:-1, -1], 3, full=True)
for (a, a_) in zip((C, R, K, S, D), (c, r, k, s, d)):
assert_almost_equal(a, a_)
#
(C, R, K, S, D) = polyfit(x, y, 3, full=True)
(c, r, k, s, d) = np.polyfit(x[1:-1], y[1:-1,:], 3, full=True)
for (a, a_) in zip((C, R, K, S, D), (c, r, k, s, d)):
assert_almost_equal(a, a_)
#
w = np.random.rand(10) + 1
wo = w.copy()
xs = x[1:-1]
ys = y[1:-1]
ws = w[1:-1]
(C, R, K, S, D) = polyfit(x, y, 3, full=True, w=w)
(c, r, k, s, d) = np.polyfit(xs, ys, 3, full=True, w=ws)
assert_equal(w, wo)
for (a, a_) in zip((C, R, K, S, D), (c, r, k, s, d)):
assert_almost_equal(a, a_)
def test_polyfit_with_masked_NaNs(self):
x = np.random.rand(10)
y = np.random.rand(20).reshape(-1, 2)
x[0] = np.nan
y[-1,-1] = np.nan
x = x.view(MaskedArray)
y = y.view(MaskedArray)
x[0] = masked
y[-1,-1] = masked
(C, R, K, S, D) = polyfit(x, y, 3, full=True)
(c, r, k, s, d) = np.polyfit(x[1:-1], y[1:-1,:], 3, full=True)
for (a, a_) in zip((C, R, K, S, D), (c, r, k, s, d)):
assert_almost_equal(a, a_)
class TestArraySetOps(TestCase):
def test_unique_onlist(self):
# Test unique on list
data = [1, 1, 1, 2, 2, 3]
test = unique(data, return_index=True, return_inverse=True)
self.assertTrue(isinstance(test[0], MaskedArray))
assert_equal(test[0], masked_array([1, 2, 3], mask=[0, 0, 0]))
assert_equal(test[1], [0, 3, 5])
assert_equal(test[2], [0, 0, 0, 1, 1, 2])
def test_unique_onmaskedarray(self):
# Test unique on masked data w/use_mask=True
data = masked_array([1, 1, 1, 2, 2, 3], mask=[0, 0, 1, 0, 1, 0])
test = unique(data, return_index=True, return_inverse=True)
assert_equal(test[0], masked_array([1, 2, 3, -1], mask=[0, 0, 0, 1]))
assert_equal(test[1], [0, 3, 5, 2])
assert_equal(test[2], [0, 0, 3, 1, 3, 2])
#
data.fill_value = 3
data = masked_array(data=[1, 1, 1, 2, 2, 3],
mask=[0, 0, 1, 0, 1, 0], fill_value=3)
test = unique(data, return_index=True, return_inverse=True)
assert_equal(test[0], masked_array([1, 2, 3, -1], mask=[0, 0, 0, 1]))
assert_equal(test[1], [0, 3, 5, 2])
assert_equal(test[2], [0, 0, 3, 1, 3, 2])
def test_unique_allmasked(self):
# Test all masked
data = masked_array([1, 1, 1], mask=True)
test = unique(data, return_index=True, return_inverse=True)
assert_equal(test[0], masked_array([1, ], mask=[True]))
assert_equal(test[1], [0])
assert_equal(test[2], [0, 0, 0])
#
# Test masked
data = masked
test = unique(data, return_index=True, return_inverse=True)
assert_equal(test[0], masked_array(masked))
assert_equal(test[1], [0])
assert_equal(test[2], [0])
def test_ediff1d(self):
# Tests ediff1d
x = masked_array(np.arange(5), mask=[1, 0, 0, 0, 1])
control = array([1, 1, 1, 4], mask=[1, 0, 0, 1])
test = ediff1d(x)
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
def test_ediff1d_tobegin(self):
# Test ediff1d w/ to_begin
x = masked_array(np.arange(5), mask=[1, 0, 0, 0, 1])
test = ediff1d(x, to_begin=masked)
control = array([0, 1, 1, 1, 4], mask=[1, 1, 0, 0, 1])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
#
test = ediff1d(x, to_begin=[1, 2, 3])
control = array([1, 2, 3, 1, 1, 1, 4], mask=[0, 0, 0, 1, 0, 0, 1])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
def test_ediff1d_toend(self):
# Test ediff1d w/ to_end
x = masked_array(np.arange(5), mask=[1, 0, 0, 0, 1])
test = ediff1d(x, to_end=masked)
control = array([1, 1, 1, 4, 0], mask=[1, 0, 0, 1, 1])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
#
test = ediff1d(x, to_end=[1, 2, 3])
control = array([1, 1, 1, 4, 1, 2, 3], mask=[1, 0, 0, 1, 0, 0, 0])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
def test_ediff1d_tobegin_toend(self):
# Test ediff1d w/ to_begin and to_end
x = masked_array(np.arange(5), mask=[1, 0, 0, 0, 1])
test = ediff1d(x, to_end=masked, to_begin=masked)
control = array([0, 1, 1, 1, 4, 0], mask=[1, 1, 0, 0, 1, 1])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
#
test = ediff1d(x, to_end=[1, 2, 3], to_begin=masked)
control = array([0, 1, 1, 1, 4, 1, 2, 3],
mask=[1, 1, 0, 0, 1, 0, 0, 0])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
def test_ediff1d_ndarray(self):
# Test ediff1d w/ a ndarray
x = np.arange(5)
test = ediff1d(x)
control = array([1, 1, 1, 1], mask=[0, 0, 0, 0])
assert_equal(test, control)
self.assertTrue(isinstance(test, MaskedArray))
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
#
test = ediff1d(x, to_end=masked, to_begin=masked)
control = array([0, 1, 1, 1, 1, 0], mask=[1, 0, 0, 0, 0, 1])
self.assertTrue(isinstance(test, MaskedArray))
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
def test_intersect1d(self):
# Test intersect1d
x = array([1, 3, 3, 3], mask=[0, 0, 0, 1])
y = array([3, 1, 1, 1], mask=[0, 0, 0, 1])
test = intersect1d(x, y)
control = array([1, 3, -1], mask=[0, 0, 1])
assert_equal(test, control)
def test_setxor1d(self):
# Test setxor1d
a = array([1, 2, 5, 7, -1], mask=[0, 0, 0, 0, 1])
b = array([1, 2, 3, 4, 5, -1], mask=[0, 0, 0, 0, 0, 1])
test = setxor1d(a, b)
assert_equal(test, array([3, 4, 7]))
#
a = array([1, 2, 5, 7, -1], mask=[0, 0, 0, 0, 1])
b = [1, 2, 3, 4, 5]
test = setxor1d(a, b)
assert_equal(test, array([3, 4, 7, -1], mask=[0, 0, 0, 1]))
#
a = array([1, 2, 3])
b = array([6, 5, 4])
test = setxor1d(a, b)
assert_(isinstance(test, MaskedArray))
assert_equal(test, [1, 2, 3, 4, 5, 6])
#
a = array([1, 8, 2, 3], mask=[0, 1, 0, 0])
b = array([6, 5, 4, 8], mask=[0, 0, 0, 1])
test = setxor1d(a, b)
assert_(isinstance(test, MaskedArray))
assert_equal(test, [1, 2, 3, 4, 5, 6])
#
assert_array_equal([], setxor1d([], []))
def test_in1d(self):
# Test in1d
a = array([1, 2, 5, 7, -1], mask=[0, 0, 0, 0, 1])
b = array([1, 2, 3, 4, 5, -1], mask=[0, 0, 0, 0, 0, 1])
test = in1d(a, b)
assert_equal(test, [True, True, True, False, True])
#
a = array([5, 5, 2, 1, -1], mask=[0, 0, 0, 0, 1])
b = array([1, 5, -1], mask=[0, 0, 1])
test = in1d(a, b)
assert_equal(test, [True, True, False, True, True])
#
assert_array_equal([], in1d([], []))
def test_in1d_invert(self):
# Test in1d's invert parameter
a = array([1, 2, 5, 7, -1], mask=[0, 0, 0, 0, 1])
b = array([1, 2, 3, 4, 5, -1], mask=[0, 0, 0, 0, 0, 1])
assert_equal(np.invert(in1d(a, b)), in1d(a, b, invert=True))
a = array([5, 5, 2, 1, -1], mask=[0, 0, 0, 0, 1])
b = array([1, 5, -1], mask=[0, 0, 1])
assert_equal(np.invert(in1d(a, b)), in1d(a, b, invert=True))
assert_array_equal([], in1d([], [], invert=True))
def test_union1d(self):
# Test union1d
a = array([1, 2, 5, 7, 5, -1], mask=[0, 0, 0, 0, 0, 1])
b = array([1, 2, 3, 4, 5, -1], mask=[0, 0, 0, 0, 0, 1])
test = union1d(a, b)
control = array([1, 2, 3, 4, 5, 7, -1], mask=[0, 0, 0, 0, 0, 0, 1])
assert_equal(test, control)
#
assert_array_equal([], union1d([], []))
def test_setdiff1d(self):
# Test setdiff1d
a = array([6, 5, 4, 7, 7, 1, 2, 1], mask=[0, 0, 0, 0, 0, 0, 0, 1])
b = array([2, 4, 3, 3, 2, 1, 5])
test = setdiff1d(a, b)
assert_equal(test, array([6, 7, -1], mask=[0, 0, 1]))
#
a = arange(10)
b = arange(8)
assert_equal(setdiff1d(a, b), array([8, 9]))
a = array([], np.uint32, mask=[])
assert_equal(setdiff1d(a, []).dtype, np.uint32)
def test_setdiff1d_char_array(self):
# Test setdiff1d on character arrays
a = np.array(['a', 'b', 'c'])
b = np.array(['a', 'b', 's'])
assert_array_equal(setdiff1d(a, b), np.array(['c']))
class TestShapeBase(TestCase):
def test_atleast2d(self):
# Test atleast_2d
a = masked_array([0, 1, 2], mask=[0, 1, 0])
b = atleast_2d(a)
assert_equal(b.shape, (1, 3))
assert_equal(b.mask.shape, b.data.shape)
assert_equal(a.shape, (3,))
assert_equal(a.mask.shape, a.data.shape)
if __name__ == "__main__":
run_module_suite()
| bsd-3-clause |
frankiecjunle/yunblog | venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/util/response.py | 515 | 2165 | from __future__ import absolute_import
from ..packages.six.moves import http_client as httplib
from ..exceptions import HeaderParsingError
def is_fp_closed(obj):
"""
Checks whether a given file-like object is closed.
:param obj:
The file-like object to check.
"""
try:
# Check via the official file-like-object way.
return obj.closed
except AttributeError:
pass
try:
# Check if the object is a container for another file-like object that
# gets released on exhaustion (e.g. HTTPResponse).
return obj.fp is None
except AttributeError:
pass
raise ValueError("Unable to determine whether fp is closed.")
def assert_header_parsing(headers):
"""
Asserts that all headers have been successfully parsed.
Extracts encountered errors from the result of parsing headers.
Only works on Python 3.
:param headers: Headers to verify.
:type headers: `httplib.HTTPMessage`.
:raises urllib3.exceptions.HeaderParsingError:
If parsing errors are found.
"""
# This will fail silently if we pass in the wrong kind of parameter.
# To make debugging easier add an explicit check.
if not isinstance(headers, httplib.HTTPMessage):
raise TypeError('expected httplib.HTTPMessage, got {0}.'.format(
type(headers)))
defects = getattr(headers, 'defects', None)
get_payload = getattr(headers, 'get_payload', None)
unparsed_data = None
if get_payload: # Platform-specific: Python 3.
unparsed_data = get_payload()
if defects or unparsed_data:
raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
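# Illustrative call (assumes a live Python 3 response; the connection
# setup is hypothetical, only the .msg attribute matters here):
#
# >>> conn = httplib.HTTPConnection("example.com")
# >>> conn.request("GET", "/")
# >>> resp = conn.getresponse()
# >>> assert_header_parsing(resp.msg) # raises HeaderParsingError on defects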
def is_response_to_head(response):
"""
Checks whether the request of a response has been a HEAD-request.
Handles the quirks of AppEngine.
:param response:
:type response: :class:`httplib.HTTPResponse`
"""
# FIXME: Can we do this somehow without accessing private httplib _method?
method = response._method
if isinstance(method, int): # Platform-specific: Appengine
return method == 3
return method.upper() == 'HEAD'
| mit |
cloudendpoints/endpoints-management-python | test/test_timestamp.py | 3 | 4311 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import datetime
import unittest2
from expects import be_below_or_equal, expect, equal, raise_error
from endpoints_management.control import timestamp
class TestToRfc3339(unittest2.TestCase):
A_LONG_TIME_AGO = datetime.datetime(1971, 12, 31, 21, 0, 20, 21000)
TESTS = [
(A_LONG_TIME_AGO, u'1971-12-31T21:00:20.021Z'),
(A_LONG_TIME_AGO - datetime.datetime(1970, 1, 1),
u'1971-12-31T21:00:20.021Z')
]
def test_should_convert_correctly(self):
for t in self.TESTS:
expect(timestamp.to_rfc3339(t[0])).to(equal(t[1]))
def test_should_fail_on_invalid_input(self):
testf = lambda: timestamp.to_rfc3339(u'this will not work')
expect(testf).to(raise_error(ValueError))
class TestFromRfc3339(unittest2.TestCase):
TOLERANCE = 10000 # 1e-5 * 1e9
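# That is, parsed nanosecond values may differ from the expected ones by up
# to 1e4 ns (10 microseconds) and still satisfy the with_nanos checks below.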
TESTS = [
# Simple
(u'1971-12-31T21:00:20.021Z',
datetime.datetime(1971, 12, 31, 21, 0, 20, 21000)),
# different timezone
(u'1996-12-19T16:39:57-08:00',
datetime.datetime(1996, 12, 20, 0, 39, 57, 0)),
# microseconds
(u'1996-12-19T16:39:57.123456-08:00',
datetime.datetime(1996, 12, 20, 0, 39, 57, 123456)),
# Beyond 2038
(u'2100-01-01T00:00:00Z',
datetime.datetime(2100, 1, 1, 0, 0, 0, 0))
]
NANO_TESTS = [
# Simple
(u'1971-12-31T21:00:20.021Z',
(datetime.datetime(1971, 12, 31, 21, 0, 20, 21000), 21000000)),
# different timezone
(u'1996-12-19T16:39:57-08:00',
(datetime.datetime(1996, 12, 20, 0, 39, 57, 0), 0)),
# microseconds
(u'1996-12-19T16:39:57.123456789-08:00',
(datetime.datetime(1996, 12, 20, 0, 39, 57, 123457), 123456789)),
]
def test_should_convert_correctly_without_nanos(self):
for t in self.TESTS:
expect(timestamp.from_rfc3339(t[0])).to(equal(t[1]))
def test_should_convert_correctly_with_nanos(self):
for t in self.NANO_TESTS:
dt, nanos = timestamp.from_rfc3339(t[0], with_nanos=True)
expect(dt).to(equal(t[1][0]))
epsilon = abs(nanos - t[1][1])
# expect(epsilon).to(equal(0))
expect(epsilon).to(be_below_or_equal(self.TOLERANCE))
class TestCompare(unittest2.TestCase):
TESTS = [
# Strings
(u'1971-10-31T21:00:20.021Z', u'1971-11-30T21:00:20.021Z', -1),
(u'1971-11-30T21:00:20.021Z', u'1971-10-30T21:00:20.021Z', 1),
(u'1971-11-30T21:00:20Z', u'1971-11-30T21:00:20Z', 0),
(u'1971-11-30T21:00:20.021Z', u'1971-11-30T21:00:20.041Z', -1),
(u'1971-11-30T21:00:20.021Z', u'1971-11-30T21:00:20.001Z', 1),
# Datetimes
(datetime.datetime(1996, 10, 20, 0, 39, 57, 0),
datetime.datetime(1996, 11, 20, 0, 39, 57, 0),
-1),
(datetime.datetime(1996, 10, 20, 0, 39, 57, 0),
datetime.datetime(1996, 10, 20, 0, 39, 57, 0),
0),
(datetime.datetime(1996, 11, 20, 0, 39, 57, 0),
datetime.datetime(1996, 10, 20, 0, 39, 57, 0),
1)
]
def test_should_compare_correctly(self):
for t in self.TESTS:
a, b, want = t
expect(timestamp.compare(a, b)).to(equal(want))
def test_should_fail_if_inputs_do_not_have_the_same_type(self):
testf = lambda: timestamp.compare(self.TESTS[0][0],
datetime.datetime.utcnow())
expect(testf).to(raise_error(ValueError))
testf = lambda: timestamp.compare(self.TESTS[0],
datetime.datetime.utcnow())
expect(testf).to(raise_error(ValueError))
| apache-2.0 |
oppo-source/Find7-5.1-kernel-source | scripts/rt-tester/rt-tester.py | 11005 | 5307 | #!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
cmd_opcodes = {
"schedother" : "1",
"schedfifo" : "2",
"lock" : "3",
"locknowait" : "4",
"lockint" : "5",
"lockintnowait" : "6",
"lockcont" : "7",
"unlock" : "8",
"signal" : "11",
"resetevent" : "98",
"reset" : "99",
}
test_opcodes = {
"prioeq" : ["P" , "eq" , None],
"priolt" : ["P" , "lt" , None],
"priogt" : ["P" , "gt" , None],
"nprioeq" : ["N" , "eq" , None],
"npriolt" : ["N" , "lt" , None],
"npriogt" : ["N" , "gt" , None],
"unlocked" : ["M" , "eq" , 0],
"trylock" : ["M" , "eq" , 1],
"blocked" : ["M" , "eq" , 2],
"blockedwake" : ["M" , "eq" , 3],
"locked" : ["M" , "eq" , 4],
"opcodeeq" : ["O" , "eq" , None],
"opcodelt" : ["O" , "lt" , None],
"opcodegt" : ["O" , "gt" , None],
"eventeq" : ["E" , "eq" , None],
"eventlt" : ["E" , "lt" , None],
"eventgt" : ["E" , "gt" , None],
}
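# A test specification line, as parsed below, has the colon-separated form
# "cmd: opcode: threadid: data". Illustrative lines (not from a real test
# file):
# C: schedfifo: 0: 80 -> send command: make thread 0 SCHED_FIFO, prio 80
# W: locked: 0: 0 -> poll thread 0's status until lock 0 reads "locked"
# T: opcodeeq: 0: lock -> check once that thread 0's last opcode was "lock"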
# Print usage information
def usage():
print "rt-tester.py <-c -h -q -t> <testfile>"
print " -c display comments after first command"
print " -h help"
print " -q quiet mode"
print " -t test mode (syntax check)"
print " testfile: read test specification from testfile"
print " otherwise from stdin"
return
# Print progress when not in quiet mode
def progress(str):
if not quiet:
print str
# Analyse a status value
def analyse(val, top, arg):
intval = int(val)
if top[0] == "M":
intval = intval / (10 ** int(arg))
intval = intval % 10
argval = top[2]
elif top[0] == "O":
argval = int(cmd_opcodes.get(arg, arg))
else:
argval = int(arg)
# progress("%d %s %d" %(intval, top[1], argval))
if top[1] == "eq" and intval == argval:
return 1
if top[1] == "lt" and intval < argval:
return 1
if top[1] == "gt" and intval > argval:
return 1
return 0
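# Worked example for the "M" branch above (the status value is made up): if
# a thread reports the packed lock-status value 40321, then for arg "0" the
# digit extracted is 40321 / 10**0 % 10 == 1; against testop ["M", "eq", 4]
# ("locked") the comparison 1 == 4 fails, analyse() returns 0, and the
# caller keeps polling.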
# Parse the commandline
try:
(options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
usage()
sys.exit(1)
# Parse commandline options
for option, value in options:
if option == "-c":
comments = 1
elif option == "-q":
quiet = 1
elif option == "-t":
test = 1
elif option == '-h':
usage()
sys.exit(0)
# Select the input source
if arguments:
try:
fd = open(arguments[0])
except Exception,ex:
sys.stderr.write("File not found %s\n" %(arguments[0]))
sys.exit(1)
else:
fd = sys.stdin
linenr = 0
# Read the test patterns
while 1:
linenr = linenr + 1
line = fd.readline()
if not len(line):
break
line = line.strip()
parts = line.split(":")
if not parts:
continue
if len(parts[0]) == 0:
continue
if parts[0].startswith("#"):
if comments > 1:
progress(line)
continue
if comments == 1:
comments = 2
progress(line)
cmd = parts[0].strip().lower()
opc = parts[1].strip().lower()
tid = parts[2].strip()
dat = parts[3].strip()
try:
# Test or wait for a status value
if cmd == "t" or cmd == "w":
testop = test_opcodes[opc]
fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
if test:
print fname
continue
while 1:
query = 1
fsta = open(fname, 'r')
status = fsta.readline().strip()
fsta.close()
stat = status.split(",")
for s in stat:
s = s.strip()
if s.startswith(testop[0]):
# Separate status value
val = s[2:].strip()
query = analyse(val, testop, dat)
break
if query or cmd == "t":
break
progress(" " + status)
if not query:
sys.stderr.write("Test failed in line %d\n" %(linenr))
sys.exit(1)
# Issue a command to the tester
elif cmd == "c":
cmdnr = cmd_opcodes[opc]
# Build command string and sys filename
cmdstr = "%s:%s" %(cmdnr, dat)
fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
if test:
print fname
continue
fcmd = open(fname, 'w')
fcmd.write(cmdstr)
fcmd.close()
except Exception,ex:
sys.stderr.write(str(ex))
sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
if not test:
fd.close()
sys.exit(1)
# Normal exit pass
print "Pass"
sys.exit(0)
| gpl-2.0 |
ville-k/tensorflow | tensorflow/compiler/tests/pooling_ops_3d_test.py | 85 | 12646 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for 3d pooling operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.compiler.tests.xla_test import XLATestCase
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_nn_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.platform import test
# Wrapper around AvgPoolGrad that ignores extra arguments needed by
# MaxPoolGrad.
def _AvgPoolGrad(inputs, outputs, output_gradients, ksize, strides, padding):
del outputs # Unused by average-pooling gradients.
return gen_nn_ops._avg_pool3d_grad(
inputs.get_shape().as_list(),
output_gradients,
ksize=ksize,
strides=strides,
padding=padding)
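# With this wrapper both pooling flavours can be driven through the single
# call shape used by _VerifyGradient below (sketch):
# pool_grad_func(inputs, outputs, output_gradients,
# ksize=ksize, strides=strides, padding=padding)
# where pool_grad_func is either _AvgPoolGrad or gen_nn_ops._max_pool3d_grad.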
class Pooling3DTest(XLATestCase):
def _VerifyValues(self, pool_func, input_sizes, window, strides, padding,
expected):
"""Verifies the output values of the pooling function.
Args:
pool_func: Function to be called, e.g. nn_ops.max_pool3d or nn_ops.avg_pool3d.
input_sizes: Input tensor dimensions.
window: Tuple of kernel dims: planes, rows, cols.
strides: Tuple of strides for dims: planes, rows, cols.
padding: Padding type.
expected: An array containing the expected operation outputs.
"""
total_size = 1
for s in input_sizes:
total_size *= s
# Initializes the input tensor with array containing incrementing
# numbers from 1.
x = np.arange(1.0, total_size + 1, dtype=np.float32)
x = x.reshape(input_sizes)
with self.test_session() as sess, self.test_scope():
inputs = array_ops.placeholder(dtypes.float32)
t = pool_func(
inputs,
ksize=[1] + window + [1],
strides=[1] + strides + [1],
padding=padding)
vals = sess.run(t, {inputs: x})
# Verifies values.
actual = vals.flatten()
self.assertAllClose(expected, actual)
def testAvgPool3dValidPadding(self):
expected_output = [20.5, 21.5, 22.5]
self._VerifyValues(
nn_ops.avg_pool3d,
input_sizes=[1, 3, 3, 3, 3],
window=[2, 2, 2],
strides=[2, 2, 2],
padding="VALID",
expected=expected_output)
def testAvgPool3dSamePadding(self):
expected_output = [20.5, 21.5, 22.5, 26.5, 27.5, 28.5]
self._VerifyValues(
nn_ops.avg_pool3d,
input_sizes=[1, 2, 2, 4, 3],
window=[2, 2, 2],
strides=[2, 2, 2],
padding="SAME",
expected=expected_output)
def testAvgPool3dSamePaddingDifferentStrides(self):
expected_output = [1.5, 4.5, 7.5, 17.5, 20.5, 23.5, 33.5, 36.5, 39.5]
self._VerifyValues(
nn_ops.avg_pool3d,
input_sizes=[1, 5, 8, 1, 1],
window=[1, 2, 3],
strides=[2, 3, 1],
padding="SAME",
expected=expected_output)
def testMaxPool3dValidPadding(self):
expected_output = [40.0, 41.0, 42.0]
self._VerifyValues(
nn_ops.max_pool3d,
input_sizes=[1, 3, 3, 3, 3],
window=[2, 2, 2],
strides=[2, 2, 2],
padding="VALID",
expected=expected_output)
def testMaxPool3dSamePadding(self):
expected_output = [31., 32., 33., 34., 35., 36.]
self._VerifyValues(
nn_ops.max_pool3d,
input_sizes=[1, 2, 2, 3, 3],
window=[2, 2, 2],
strides=[2, 2, 2],
padding="SAME",
expected=expected_output)
def testMaxPool3dSamePaddingDifferentStrides(self):
expected_output = [2., 5., 8., 18., 21., 24., 34., 37., 40.]
self._VerifyValues(
nn_ops.max_pool3d,
input_sizes=[1, 5, 8, 1, 1],
window=[1, 2, 3],
strides=[2, 3, 1],
padding="SAME",
expected=expected_output)
# Test pooling on a larger input, with different stride and kernel
# size for the 'z' dimension.
# Simulate max pooling in numpy to get the expected output.
input_data = np.arange(1, 5 * 27 * 27 * 64 + 1).reshape((5, 27, 27, 64))
input_data = np.pad(input_data, [[0, 0], [0, 1], [0, 1], [0, 0]],
mode="constant")
expected_output = input_data[:, 1::2, 1::2, :]
expected_output[:, -1, :, :] = input_data[:, -2, 1::2, :]
expected_output[:, :, -1, :] = input_data[:, 1::2, -2, :]
expected_output[:, -1, -1, :] = input_data[:, -2, -2, :]
self._VerifyValues(
nn_ops.max_pool3d,
input_sizes=[1, 5, 27, 27, 64],
window=[1, 2, 2],
strides=[1, 2, 2],
padding="SAME",
expected=expected_output.flatten())
def testKernelSmallerThanStride(self):
self._VerifyValues(
nn_ops.max_pool3d,
input_sizes=[1, 3, 3, 3, 1],
window=[1, 1, 1],
strides=[2, 2, 2],
padding="SAME",
expected=[1, 3, 7, 9, 19, 21, 25, 27])
self._VerifyValues(
nn_ops.max_pool3d,
input_sizes=[1, 7, 7, 7, 1],
window=[2, 2, 2],
strides=[3, 3, 3],
padding="VALID",
expected=[58, 61, 79, 82, 205, 208, 226, 229])
self._VerifyValues(
nn_ops.avg_pool3d,
input_sizes=[1, 3, 3, 3, 1],
window=[1, 1, 1],
strides=[2, 2, 2],
padding="SAME",
expected=[1, 3, 7, 9, 19, 21, 25, 27])
self._VerifyValues(
nn_ops.avg_pool3d,
input_sizes=[1, 7, 7, 7, 1],
window=[2, 2, 2],
strides=[3, 3, 3],
padding="VALID",
expected=[29.5, 32.5, 50.5, 53.5, 176.5, 179.5, 197.5, 200.5])
def _VerifyGradient(self, pool_func, pool_grad_func, input_sizes, ksize,
strides, padding):
"""Verifies the output values of the pooling gradient function.
Args:
pool_func: Forward pooling function
pool_grad_func: Pooling gradient function corresponding to pool_func
input_sizes: Input tensor dimensions.
ksize: The kernel size dimensions
strides: The stride dimensions
padding: Padding type.
"""
ksize = [1] + ksize + [1]
strides = [1] + strides + [1]
total_size = np.prod(input_sizes)
x = np.arange(1, total_size + 1, dtype=np.float32).reshape(input_sizes)
with self.test_session() as sess:
# Use the forward pool function to compute some corresponding outputs
# (needed for the CPU device, and we need the shape in both cases).
with ops.device("CPU"):
inputs = array_ops.placeholder(dtypes.float32, shape=input_sizes)
outputs = pool_func(
inputs,
ksize=ksize,
strides=strides,
padding=padding)
output_vals = np.array(sess.run(outputs, {inputs: x}))
output_gradient_vals = np.arange(
1, output_vals.size + 1, dtype=np.float32)
output_gradient_vals = output_gradient_vals.reshape(output_vals.shape)
# Use the Tensorflow CPU pooling gradient to compute the expected input
# gradients.
with ops.device("CPU"):
output_gradients = array_ops.placeholder(
dtypes.float32, shape=output_vals.shape)
expected_input_gradients = pool_grad_func(
inputs,
outputs,
output_gradients,
ksize=ksize,
strides=strides,
padding=padding)
expected_input_gradient_vals = sess.run(
expected_input_gradients,
{inputs: x,
output_gradients: output_gradient_vals})
# Run the gradient op on the XLA device
with self.test_scope():
outputs = array_ops.placeholder(dtypes.float32, shape=output_vals.shape)
actual_input_gradients = pool_grad_func(
inputs,
outputs,
output_gradients,
ksize=ksize,
strides=strides,
padding=padding)
actual = sess.run(actual_input_gradients, {
inputs: x,
outputs: output_vals,
output_gradients: output_gradient_vals
})
# Compare the Tensorflow and XLA results.
self.assertAllClose(
expected_input_gradient_vals.flatten(),
actual.flatten(),
rtol=1e-5,
atol=1e-6)
self.assertShapeEqual(actual, inputs)
def testMaxPoolGradValidPadding1_1_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[1, 3, 3, 3, 1],
ksize=[1, 1, 1],
strides=[1, 1, 1],
padding="VALID")
def testMaxPoolGradValidPadding2_1_6_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[2, 3, 3, 6, 3],
ksize=[2, 2, 2],
strides=[1, 1, 1],
padding="VALID")
def testMaxPoolGradValidPadding2_1_7_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[2, 3, 5, 7, 3],
ksize=[2, 2, 2],
strides=[1, 1, 1],
padding="VALID")
def testMaxPoolGradValidPadding2_2_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[2, 2, 2, 2, 3],
ksize=[2, 2, 2],
strides=[2, 2, 2],
padding="VALID")
def testMaxPoolGradSamePadding1_1_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[2, 3, 2, 4, 1],
ksize=[1, 1, 1],
strides=[1, 1, 1],
padding="SAME")
def testMaxPoolGradSamePadding2_1_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[2, 3, 2, 4, 1],
ksize=[2, 2, 2],
strides=[1, 1, 1],
padding="SAME")
def testMaxPoolGradSamePadding2_2_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[2, 5, 2, 4, 3],
ksize=[2, 2, 2],
strides=[2, 2, 2],
padding="SAME")
def testMaxPoolGradSamePadding3_1_3d(self):
self._VerifyGradient(
nn_ops.max_pool3d,
gen_nn_ops._max_pool3d_grad,
input_sizes=[1, 3, 3, 7, 1],
ksize=[3, 3, 3],
strides=[1, 1, 1],
padding="SAME")
def testAvgPoolGradValidPadding1_1_3d(self):
self._VerifyGradient(
nn_ops.avg_pool3d,
_AvgPoolGrad,
input_sizes=[2, 3, 3, 3, 3],
ksize=[1, 1, 1],
strides=[1, 1, 1],
padding="VALID")
def testAvgPoolGradValidPadding2_1_3d(self):
self._VerifyGradient(
nn_ops.avg_pool3d,
_AvgPoolGrad,
input_sizes=[2, 3, 3, 3, 3],
ksize=[2, 2, 2],
strides=[1, 1, 1],
padding="VALID")
def testAvgPoolGradValidPadding2_2_3d(self):
self._VerifyGradient(
nn_ops.avg_pool3d,
_AvgPoolGrad,
input_sizes=[2, 2, 2, 2, 3],
ksize=[2, 2, 2],
strides=[2, 2, 2],
padding="VALID")
def testAvgPoolGradSamePadding1_1_3d(self):
self._VerifyGradient(
nn_ops.avg_pool3d,
_AvgPoolGrad,
input_sizes=[2, 3, 2, 4, 3],
ksize=[1, 1, 1],
strides=[1, 1, 1],
padding="SAME")
def testAvgPoolGradSamePadding2_1_3d(self):
self._VerifyGradient(
nn_ops.avg_pool3d,
_AvgPoolGrad,
input_sizes=[1, 2, 2, 2, 1],
ksize=[2, 2, 2],
strides=[1, 1, 1],
padding="SAME")
def testAvgPoolGradSamePadding2_2_3d(self):
self._VerifyGradient(
nn_ops.avg_pool3d,
_AvgPoolGrad,
input_sizes=[2, 5, 2, 4, 3],
ksize=[2, 2, 2],
strides=[2, 2, 2],
padding="SAME")
def testAvgPoolGradSamePadding3_1_3d(self):
self._VerifyGradient(
nn_ops.avg_pool3d,
_AvgPoolGrad,
input_sizes=[1, 3, 6, 7, 1],
ksize=[3, 3, 3],
strides=[1, 1, 1],
padding="SAME")
if __name__ == "__main__":
test.main()
| apache-2.0 |
teamtuga4/teamtuga4ever.repository | plugin.video.traquinas/resources/lib/resolvers/v_vids.py | 23 | 1385 | # -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib
from resources.lib.libraries import client
def resolve(url):
try:
result = client.request(url)
post = {}
f = client.parseDOM(result, 'Form', attrs = {'name': 'F1'})[0]
k = client.parseDOM(f, 'input', ret='name', attrs = {'type': 'hidden'})
for i in k: post.update({i: client.parseDOM(f, 'input', ret='value', attrs = {'name': i})[0]})
post.update({'method_free': '', 'method_premium': ''})
result = client.request(url, post=post)
url = client.parseDOM(result, 'a', ret='href', attrs = {'id': 'downloadbutton'})[0]
return url
except:
return
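# Editorial summary of the flow above (names taken from the code; the
# pattern is the usual file-host free-download dance):
# 1. GET the page and collect the hidden inputs of form "F1";
# 2. re-POST them with method_free/method_premium blanked to pick the
# free download;
# 3. scrape the direct link off the anchor with id "downloadbutton".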
| gpl-2.0 |
uzh/vm-mad | vmmad/provider/libcloud.py | 1 | 9309 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Interfaces to cloud providers, using `Apache LibCloud <http://libcloud.apache.org>`
"""
# Copyright (C) 2011, 2012 ETH Zurich and University of Zurich. All rights reserved.
#
# Authors:
# Riccardo Murri <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
__docformat__ = 'reStructuredText'
__version__ = '$Revision$'
# stdlib imports
from abc import abstractmethod
from copy import copy
import os
import sys
# libcloud imports
import libcloud.compute.types
import libcloud.compute.providers
# local imports
from vmmad import log
from vmmad.orchestrator import VmInfo
from vmmad.provider import NodeProvider
class CloudNodeProvider(NodeProvider):
"""
Abstract base class implementing common functionality for all
LibCloud providers.
"""
@staticmethod
def _vminfo_state_from_libcloud_status(status):
"""
Return the `orchestrator.VmInfo` state word corresponding to
LibCloud's `NodeState`.
"""
return {
libcloud.compute.types.NodeState.PENDING: VmInfo.STARTING,
libcloud.compute.types.NodeState.REBOOTING: VmInfo.STARTING,
libcloud.compute.types.NodeState.RUNNING: None,
libcloud.compute.types.NodeState.TERMINATED: VmInfo.DOWN,
libcloud.compute.types.NodeState.UNKNOWN: VmInfo.OTHER,
}[status]
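# Illustrative mapping (sketch, not executed here):
# _vminfo_state_from_libcloud_status(NodeState.PENDING) -> VmInfo.STARTING
# _vminfo_state_from_libcloud_status(NodeState.RUNNING) -> None
# A None result is the "leave unchanged" signal: update_vm_status() in the
# concrete providers only overwrites vm.state when the mapped value is not
# None.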
class DummyCloud(CloudNodeProvider):
"""
Interface to the "dummy" cloud provider of `Apache LibCloud <http://libcloud.apache.org/>`.
"""
def __init__(self, image='1', kind='1'):
self.image = image
self.kind = kind
log.debug("Creating LibCloud's 'Dummy' provider ...")
driver = libcloud.compute.providers.get_driver(libcloud.compute.types.Provider.DUMMY)
self.provider = driver(0)
# LibCloud's "dummy" provider always starts two instances; remove them
for node in copy(self.provider.list_nodes()):
node.destroy()
log.info("Using cloud provider '%s'.", self.provider.__class__.__name__)
log.info("Listing available images ...")
self._images = dict((img.id, img) for img in self.provider.list_images())
log.debug("Available images: %s", self._images.keys())
if image not in self._images:
raise RuntimeError("Image '%s' not available on %s"
% (image, self.provider.__class__.__name__))
log.info("... done: %d images available.", len(self._images))
log.info("Listing available kinds ...")
self._kinds = dict((kind.id, kind) for kind in self.provider.list_sizes())
log.debug("Available kinds: %s", self._kinds.keys())
if kind not in self._kinds:
raise RuntimeError("Kind '%s' not available on %s"
% (kind, self.provider.__class__.__name__))
log.info("... done: %d kinds available.", len(self._kinds))
log.info("VMs will use image '%s' (%s) on hardware kind '%s' (%s)",
self.image, self._images[self.image].name,
self.kind, self._kinds[self.kind].name)
# associate the Node ID we get from the cloud provider with
# the VM object we get from the orchestrator
self._instance_to_vm_map = { }
def start_vm(self, vm):
vm.instance = self.provider.create_node(
name=str(vm.vmid), image=self._images[self.image], size=self._kinds[self.kind])
uuid = vm.instance.uuid
assert uuid not in self._instance_to_vm_map, (
"Instance UUID %s already registered as belonging to VM %s"
% (uuid, vm.vmid))
vm.cloud = self.provider
self._instance_to_vm_map[uuid] = vm
assert uuid in self._instance_to_vm_map, (
"BUG: Instance UUID %s has not been inserted in `self._instance_to_vm_map`"
% uuid)
def stop_vm(self, vm):
uuid = vm.instance.uuid
assert uuid in self._instance_to_vm_map, (
"Instance UUID %s (of VM %s) not registered to any instance!"
% (uuid, vm.vmid))
self.provider.destroy_node(vm.instance)
del self._instance_to_vm_map[uuid]
vm.state = VmInfo.DOWN
def update_vm_status(self, vms):
nodes = [ node for node in self.provider.list_nodes()
if node.id in self._instance_to_vm_map ]
for node in nodes:
vm = self._instance_to_vm_map[node.uuid]
vm.instance = node
state = self._vminfo_state_from_libcloud_status(node.state)
if state is not None:
vm.state = state
class EC2Cloud(CloudNodeProvider):
"""
Interface to Amazon EC2 on top of `Apache LibCloud <http://libcloud.apache.org/>`.
"""
def __init__(self, image, kind, access_id=None, secret_key=None):
self.image = image
self.kind = kind
if access_id is not None:
self.access_id = access_id
self.secret_key = secret_key
else:
# use same environment variables as Boto
self.access_id = os.environ['AWS_ACCESS_KEY_ID']
self.secret_key = os.environ['AWS_SECRET_ACCESS_KEY']
log.debug("Creating EC2 cloud provider with access ID '%s' ...", access_id)
driver = libcloud.compute.providers.get_driver(libcloud.compute.types.Provider.EC2)
self.provider = driver(self.access_id, self.secret_key)
log.info("Using cloud provider '%s'.", self.provider.friendly_name)
log.info("Listing available images ...")
self._images = dict((img.id, img) for img in self.provider.list_images())
if image not in self._images:
raise RuntimeError("Image '%s' not available on %s"
% (image, self.provider.friendly_name))
log.info("... done: %d images available.", len(self._images))
log.info("Listing available sizes ...")
self._kinds = dict((kind.id, kind) for kind in self.provider.list_sizes())
if kind not in self._kinds:
raise RuntimeError("Kind '%s' not available on %s"
% (kind, self.provider.friendly_name))
log.info("... done: %d kinds available.", len(self._kinds))
log.info("VMs will use image '%s' (%s) on hardware kind '%s' (%s)",
self.image, self._images[self.image].name,
self.kind, self._kinds[self.kind].name)
# log.info("Getting list of running instances ...")
# self.instances = dict((node.uuid, node) for node in self.provider.list_nodes())
# log.info("... Done: %d instances available.", len(self._sizes))
# associate the Node ID we get from the cloud provider with
# the VM object we get from the orchestrator
self._instance_to_vm_map = { }
def start_vm(self, vm):
vm.instance = self.provider.create_node(
name=str(vm.vmid), image=self._images[self.image],
size=self._kinds[self.kind],
ex_keyname='vm-mad', ex_securitygroup='vm-mad',
ex_userdata=("VMMAD_AUTH='%s'" % vm.auth))
vm.cloud = self.provider
self._instance_to_vm_map[vm.instance.uuid] = vm
def stop_vm(self, vm):
# XXX: this is tricky: we must:
# 1. gracefully shutdown the node, and (after a timeout) proceed to:
# 2. destroy the node
# In addition this should not block the main Orchestrator thread.
uuid = vm.instance.uuid
self.provider.destroy_node(vm.instance)
del self._instance_to_vm_map[uuid]
def update_vm_status(self, vms):
nodes = self.provider.list_nodes(ex_node_ids=[vm.instance.id for vm in vms])
for node in nodes:
if node.uuid in self._instance_to_vm_map:
vm = self._instance_to_vm_map[node.uuid]
vm.instance = node
state = self._vminfo_state_from_libcloud_status(node.state)
if state is not None:
vm.state = state
else:
# Ignore VMs that were not started by us. There are
# two reasons for this policy:
#
# - the same AWS account could be used for other purposes
# or `Orchestrator` instances, so we should not assume that
# all VMs are under our control.
#
# - the AWS interface keeps reporting *terminated* instances
# for some time after they have been shut down.
#
log.debug("Ignoring VM '%s', which was not started by this orchestrator.",
node.uuid)
| apache-2.0 |
ehashman/oh-mainline | vendor/packages/Django/tests/regressiontests/signals_regress/tests.py | 102 | 3907 | from __future__ import absolute_import
from django.db import models
from django.test import TestCase
from .models import Author, Book
class SignalsRegressTests(TestCase):
"""
Testing signals before/after saving and deleting.
"""
def get_signal_output(self, fn, *args, **kwargs):
# Flush any existing signal output
self.signal_output = []
fn(*args, **kwargs)
return self.signal_output
def pre_save_test(self, signal, sender, instance, **kwargs):
self.signal_output.append('pre_save signal, %s' % instance)
if kwargs.get('raw'):
self.signal_output.append('Is raw')
def post_save_test(self, signal, sender, instance, **kwargs):
self.signal_output.append('post_save signal, %s' % instance)
if 'created' in kwargs:
if kwargs['created']:
self.signal_output.append('Is created')
else:
self.signal_output.append('Is updated')
if kwargs.get('raw'):
self.signal_output.append('Is raw')
def pre_delete_test(self, signal, sender, instance, **kwargs):
self.signal_output.append('pre_save signal, %s' % instance)
self.signal_output.append('instance.id is not None: %s' % (instance.id != None))
def post_delete_test(self, signal, sender, instance, **kwargs):
self.signal_output.append('post_delete signal, %s' % instance)
self.signal_output.append('instance.id is not None: %s' % (instance.id != None))
def setUp(self):
self.signal_output = []
# Save up the number of connected signals so that we can check at the end
# that all the signals we register get properly unregistered (#9989)
self.pre_signals = (len(models.signals.pre_save.receivers),
len(models.signals.post_save.receivers),
len(models.signals.pre_delete.receivers),
len(models.signals.post_delete.receivers))
models.signals.pre_save.connect(self.pre_save_test)
models.signals.post_save.connect(self.post_save_test)
models.signals.pre_delete.connect(self.pre_delete_test)
models.signals.post_delete.connect(self.post_delete_test)
def tearDown(self):
models.signals.post_delete.disconnect(self.post_delete_test)
models.signals.pre_delete.disconnect(self.pre_delete_test)
models.signals.post_save.disconnect(self.post_save_test)
models.signals.pre_save.disconnect(self.pre_save_test)
# Check that all our signals got disconnected properly.
post_signals = (len(models.signals.pre_save.receivers),
len(models.signals.post_save.receivers),
len(models.signals.pre_delete.receivers),
len(models.signals.post_delete.receivers))
self.assertEqual(self.pre_signals, post_signals)
def test_model_signals(self):
""" Model saves should throw some signals. """
a1 = Author(name='Neal Stephenson')
self.assertEqual(self.get_signal_output(a1.save), [
"pre_save signal, Neal Stephenson",
"post_save signal, Neal Stephenson",
"Is created"
])
b1 = Book(name='Snow Crash')
self.assertEqual(self.get_signal_output(b1.save), [
"pre_save signal, Snow Crash",
"post_save signal, Snow Crash",
"Is created"
])
def test_m2m_signals(self):
""" Assigning and removing to/from m2m shouldn't generate an m2m signal """
b1 = Book(name='Snow Crash')
self.get_signal_output(b1.save)
a1 = Author(name='Neal Stephenson')
self.get_signal_output(a1.save)
self.assertEqual(self.get_signal_output(setattr, b1, 'authors', [a1]), [])
self.assertEqual(self.get_signal_output(setattr, b1, 'authors', []), [])
| agpl-3.0 |
tntnatbry/tensorflow | tensorflow/examples/learn/iris_custom_model.py | 50 | 2613 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example of Estimator for Iris plant dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from sklearn import cross_validation
from sklearn import datasets
from sklearn import metrics
import tensorflow as tf
layers = tf.contrib.layers
learn = tf.contrib.learn
def my_model(features, target):
"""DNN with three hidden layers, and dropout of 0.1 probability."""
# Convert the target to a one-hot tensor of shape (length of features, 3),
# with an on-value of 1 for each one-hot vector of length 3.
target = tf.one_hot(target, 3, 1, 0)
# Create three fully connected layers respectively of size 10, 20, and 10 with
# each layer having a dropout probability of 0.1.
normalizer_fn = layers.dropout
normalizer_params = {'keep_prob': 0.9}
features = layers.stack(
features,
layers.fully_connected, [10, 20, 10],
normalizer_fn=normalizer_fn,
normalizer_params=normalizer_params)
# Compute logits (1 per class) and compute loss.
logits = layers.fully_connected(features, 3, activation_fn=None)
loss = tf.losses.softmax_cross_entropy(target, logits)
# Create a tensor for training op.
train_op = tf.contrib.layers.optimize_loss(
loss,
tf.contrib.framework.get_global_step(),
optimizer='Adagrad',
learning_rate=0.1)
return ({
'class': tf.argmax(logits, 1),
'prob': tf.nn.softmax(logits)
}, loss, train_op)
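# Editorial note: the (predictions, loss, train_op) tuple returned above is
# the contract tf.contrib.learn expects from a model_fn; main() below wires
# it up via learn.Estimator(model_fn=my_model) and classifier.fit(...).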
def main(unused_argv):
iris = datasets.load_iris()
x_train, x_test, y_train, y_test = cross_validation.train_test_split(
iris.data, iris.target, test_size=0.2, random_state=42)
classifier = learn.Estimator(model_fn=my_model)
classifier.fit(x_train, y_train, steps=1000)
y_predicted = [
p['class'] for p in classifier.predict(
x_test, as_iterable=True)
]
score = metrics.accuracy_score(y_test, y_predicted)
print('Accuracy: {0:f}'.format(score))
if __name__ == '__main__':
tf.app.run()
| apache-2.0 |
ClearCorp/odoo-clearcorp | TODO-9.0/base_currency_symbol/__terp__.py | 4 | 2547 | # -*- encoding: utf-8 -*-
##############################################################################
#
# __terp__.py
# base_currency_symbol
# First author: Carlos Vásquez <[email protected]> (ClearCorp S.A.)
# Copyright (c) 2010-TODAY ClearCorp S.A. (http://clearcorp.co.cr). All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are
# permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
# of conditions and the following disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY <COPYRIGHT HOLDER> ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those of the
# authors and should not be interpreted as representing official policies, either expressed
# or implied, of ClearCorp S.A..
#
##############################################################################
{
'name': 'Base currency symbol',
'version': '0.1',
'url': 'http://launchpad.net/openerp-ccorp-addons',
'author': 'ClearCorp S.A.',
'website': 'http://clearcorp.co.cr',
'category': 'General Modules/Base',
'description': """Adds symbol to currency:
Use symbol_prefix and symbol_suffix depending on the currency standard.
""",
'depends': ['base'],
'init_xml': [],
'demo_xml': [],
'update_xml': [
'base_currency_symbol_data.xml',
'base_currency_symbol_view.xml',
],
'license': 'Other OSI approved licence',
'installable': True,
'active': True,
}
| agpl-3.0 |
matplotlib/viscm | tests.py | 1 | 4429 | from viscm.gui import *
from viscm.bezierbuilder import *
import numpy as np
import matplotlib as mpl
from matplotlib.backends.qt_compat import QtGui, QtCore
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
cms = {"viscm/examples/sample_linear.jscm",
"viscm/examples/sample_diverging.jscm",
"viscm/examples/sample_diverging_continuous.jscm"}
def test_editor_loads_native():
for k in cms:
with open(k) as f:
data = json.loads(f.read())
cm = Colormap(None, "CatmulClark", "CAM02-UCS")
cm.load(k)
viscm = viscm_editor(uniform_space=cm.uniform_space, cmtype=cm.cmtype, method=cm.method, **cm.params)
assert viscm.name == data["name"]
extensions = data["extensions"]["https://matplotlib.org/viscm"]
xp, yp, fixed = viscm.control_point_model.get_control_points()
assert extensions["fixed"] == fixed
assert len(extensions["xp"]) == len(xp)
assert len(extensions["yp"]) == len(yp)
assert len(xp) == len(yp)
for i in range(len(xp)):
assert extensions["xp"][i] == xp[i]
assert extensions["yp"][i] == yp[i]
assert extensions["min_Jp"] == viscm.min_Jp
assert extensions["max_Jp"] == viscm.max_Jp
assert extensions["filter_k"] == viscm.filter_k
assert extensions["cmtype"] == viscm.cmtype
colors = data["colors"]
colors = [[int(c[i:i + 2], 16) / 256 for i in range(0, 6, 2)] for c in [colors[i:i + 6] for i in range(0, len(colors), 6)]]
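# Editor's note: "colors" in the .jscm file is one long hex string; the line
# above slices it into 6-character RRGGBB chunks and scales each 8-bit
# channel into [0, 1).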
editor_colors = viscm.cmap_model.get_sRGB(num=256)[0].tolist()
for i in range(len(colors)):
for z in range(3):
# quantize the editor's float channel onto the same 1/256 grid before comparing
assert colors[i][z] == np.rint(editor_colors[i][z] * 256) / 256
# def test_editor_add_point():
# # Testing linear
# fig = plt.figure()
# figure_canvas = FigureCanvas(fig)
# linear = viscm_editor(min_Jp=40, max_Jp=60, xp=[-10, 10], yp=[0,0], figure=fig, cmtype="linear")
# Jp, ap, bp = linear.cmap_model.get_Jpapbp(3)
# eJp, eap, ebp = [40, 50, 60], [-10, 0, 10], [0, 0, 0]
# for i in range(3):
# assert approxeq(Jp[i], eJp[i])
# assert approxeq(ap[i], eap[i])
# assert approxeq(bp[i], ebp[i])
# rgb = linear.cmap_model.get_sRGB(3)[0]
# ergb = [[ 0.27446483, 0.37479529, 0.34722738],
# [ 0.44884374, 0.44012037, 0.43848162],
# [ 0.63153956, 0.49733664, 0.53352363]]
# for i in range(3):
# for z in range(3):
# assert approxeq(rgb[i][z], ergb[i][z])
# # Testing adding a point to linear
# linear.bezier_builder.mode = "add"
# qtEvent = QtGui.QMouseEvent(QtCore.QEvent.MouseButtonPress, QtCore.QPoint(), QtCore.Qt.LeftButton, QtCore.Qt.LeftButton, QtCore.Qt.ShiftModifier)
# event = mpl.backend_bases.MouseEvent("button_press_event", figure_canvas, 0, 10, guiEvent=qtEvent)
# event.xdata = 0
# event.ydata = 10
# event.inaxes = linear.bezier_builder.ax
# linear.bezier_builder.on_button_press(event)
# Jp, ap, bp = linear.cmap_model.get_Jpapbp(3)
# eJp, eap, ebp = [40, 50, 60], [-10, 0, 10], [0, 5, 0]
# for i in range(3):
# assert approxeq(Jp[i], eJp[i])
# assert approxeq(ap[i], eap[i])
# assert approxeq(bp[i], ebp[i])
# rgb = linear.cmap_model.get_sRGB(3)[0]
# ergb = [[ 0.27446483, 0.37479529, 0.34722738],
# [ 0.46101392, 0.44012069, 0.38783966],
# [ 0.63153956, 0.49733664, 0.53352363]]
# for i in range(3):
# for z in range(3):
# assert approxeq(rgb[i][z], ergb[i][z])
# # Removing a point from linear
# linear.bezier_builder.mode = "remove"
# qtEvent = QtGui.QMouseEvent(QtCore.QEvent.MouseButtonPress, QtCore.QPoint(), QtCore.Qt.LeftButton, QtCore.Qt.LeftButton, QtCore.Qt.ControlModifier)
# event = mpl.backend_bases.MouseEvent("button_press_event", figure_canvas, 0, 10, guiEvent=qtEvent)
# event.xdata = 0
# event.ydata = 10
# event.inaxes = linear.bezier_builder.ax
# linear.bezier_builder.on_button_press(event)
# # Jp, ap, bp = linear.cmap_model.get_Jpapbp(3)
# # print(Jp, ap, bp)
# # print(rgb)
# # use mpl transformations
# print(linear.control_point_model.get_control_points())
# # print(linear.cmap_model.get_Jpapbp(3))
def approxeq(x, y, err=0.0001):
return abs(y - x) < err
| mit |
glizer/nw.js | test/remoting/testcfg.py | 68 | 2628 | import test
import os
from os.path import join, dirname, exists, basename, isdir
import re
import utils
class RemotingTestCase(test.TestCase):
def __init__(self, path, file, arch, mode, nwdir, context, config, additional=[]):
super(RemotingTestCase, self).__init__(context, path, arch, mode, nwdir)
self.file = file
self.config = config
self.arch = arch
self.mode = mode
self.additional_flags = additional
self.nwdir = nwdir
def GetTmpDir(self):
return "%s.%d" % (self.tmpdir, self.thread_id)
def GetChromeDriver(self, arch, mode, nwdir):
if utils.IsWindows():
return os.path.abspath(join(nwdir, 'chromedriver.exe'))
return os.path.abspath(join(nwdir, 'chromedriver'))
def AfterRun(self, result):
return
def BeforeRun(self):
return
def GetLabel(self):
return "%s %s" % (self.mode, self.GetName())
def GetName(self):
return self.path[-1]
def GetEnv(self):
libpath = join(self.file, '..', '..', '..', '..', '..', 'third_party/webdriver/pylib')
return {'PYTHONPATH': libpath, 'CHROMEDRIVER': self.GetChromeDriver(self.arch, self.mode, self.nwdir)}
def GetCommand(self):
result = ['python']
result += [self.file + '/test.py']
return result
def IsFailureOutput(self, output):
return output.exit_code != 0
def GetSource(self):
return open(self.file).read()
class RemotingTestConfiguration(test.TestConfiguration):
def __init__(self, context, root, section, additional=[]):
super(RemotingTestConfiguration, self).__init__(context, root)
self.section = section
self.additional_flags = additional
def Ls(self, path):
def SelectTest(name):
return os.path.isdir(os.path.join(path, name))
return [f[0:] for f in os.listdir(path) if SelectTest(f)]
def ListTests(self, current_path, path, arch, mode, nwdir):
all_tests = [current_path + [t] for t in self.Ls(join(self.root))]
result = []
for test in all_tests:
if self.Contains(path, test):
file_path = join(self.root, reduce(join, test[1:], ""))
result.append(RemotingTestCase(test, file_path, arch, mode, nwdir, self.context,
self, self.additional_flags))
return result
def GetBuildRequirements(self):
return ['sample', 'sample=shell']
def GetTestStatus(self, sections, defs):
status_file = join(self.root, '%s.status' % (self.section))
if exists(status_file):
test.ReadConfigurationInto(status_file, sections, defs)
def GetConfiguration(context, root):
return RemotingTestConfiguration(context, root, 'remoting')
| mit |
etingof/pysnmp | pysnmp/smi/mibs/TRANSPORT-ADDRESS-MIB.py | 1 | 27607 | #
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <[email protected]>
# License: http://snmplabs.com/pysnmp/license.html
#
# ASN.1 source http://mibs.snmplabs.com:80/asn1/TRANSPORT-ADDRESS-MIB
# Produced by pysmi-0.4.0 at Sun Feb 17 08:56:38 2019
#
# Parts of otherwise autogenerated MIB has been updated manually.
#
import socket
from pyasn1.compat.octets import int2oct
from pyasn1.compat.octets import oct2int
from pysnmp import error
has_ipv6 = socket.has_ipv6
if hasattr(socket, 'inet_ntop') and hasattr(socket, 'inet_pton'):
inet_ntop = socket.inet_ntop
inet_pton = socket.inet_pton
else:
import sys
if sys.platform != "win32":
from socket import inet_ntoa, inet_aton
inet_ntop = lambda x, y: inet_ntoa(y)
inet_pton = lambda x, y: inet_aton(y)
has_ipv6 = False
elif has_ipv6:
import struct # The case of old Python at old Windows
def inet_pton(address_family, ip_string):
if address_family == socket.AF_INET:
return socket.inet_aton(ip_string)
elif address_family != socket.AF_INET6:
raise socket.error(
'Unknown address family %s' % (address_family,)
)
groups = ip_string.split(":")
spaces = groups.count('')
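# Zero-compression handling: one empty group means a single embedded '::',
# two mean the address starts or ends with '::', three mean '::' itself.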
if '.' in groups[-1]:
groups[-1:] = ["%x" % x for x in struct.unpack("!HH", socket.inet_aton(groups[-1]))]
if spaces == 1:
idx = groups.index('')
groups[idx:idx + 1] = ['0'] * (8 - len(groups) + 1)
elif spaces == 2:
zeros = ['0'] * (8 - len(groups) + 2)
if ip_string.startswith('::'):
groups[:2] = zeros
elif ip_string.endswith('::'):
groups[-2:] = zeros
else:
raise socket.error(
'Invalid IPv6 address: "%s"' % (ip_string,)
)
elif spaces == 3:
if ip_string != '::':
raise socket.error(
'Invalid IPv6 address: "%s"' % (ip_string,)
)
return '\x00' * 16
elif spaces > 3:
raise socket.error(
'Invalid IPv6 address: "%s"' % (ip_string,)
)
groups = [t for t in [int(t, 16) for t in groups] if t & 0xFFFF == t]
if len(groups) != 8:
raise socket.error(
'Invalid IPv6 address: "%s"' % (ip_string,)
)
return struct.pack('!8H', *groups)
def inet_ntop(address_family, packed_ip):
if address_family == socket.AF_INET:
# In this fallback branch socket.inet_ntop() is unavailable (that is why
# this shim exists), and it takes two arguments anyway; inet_ntoa() is the
# correct one-argument call for the IPv4 case.
return socket.inet_ntoa(packed_ip)
elif address_family != socket.AF_INET6:
raise socket.error(
'Unknown address family %s' % (address_family,)
)
if len(packed_ip) != 16:
raise socket.error(
'incorrect address length: %s' % len(packed_ip)
)
groups = list(struct.unpack('!8H', packed_ip))
cur_base = best_base = cur_len = best_len = -1
for idx in range(8):
if groups[idx]:
if cur_base != -1:
if best_base == -1 or cur_len > best_len:
best_base, best_len = cur_base, cur_len
cur_base = -1
else:
if cur_base == -1:
cur_base, cur_len = idx, 1
else:
cur_len += 1
if cur_base != -1:
if best_base == -1 or cur_len > best_len:
best_base, best_len = cur_base, cur_len
if best_base != -1 and best_len > 1:
groups[best_base:best_base + best_len] = [':']
if groups[0] == ':':
groups.insert(0, ':')
if groups[-1] == ':':
groups.append(':')
f = lambda x: x != ':' and '%x' % x or ''
return ':'.join([f(x) for x in groups])
if 'mibBuilder' not in globals():
import sys
sys.stderr.write(__doc__)
sys.exit(1)
(Integer,
OctetString,
ObjectIdentifier) = mibBuilder.importSymbols(
"ASN1",
"Integer",
"OctetString",
"ObjectIdentifier")
(NamedValues,) = mibBuilder.importSymbols(
"ASN1-ENUMERATION",
"NamedValues")
(ConstraintsIntersection,
SingleValueConstraint,
ValueRangeConstraint,
ValueSizeConstraint,
ConstraintsUnion) = mibBuilder.importSymbols(
"ASN1-REFINEMENT",
"ConstraintsIntersection",
"SingleValueConstraint",
"ValueRangeConstraint",
"ValueSizeConstraint",
"ConstraintsUnion")
(ModuleCompliance,
NotificationGroup) = mibBuilder.importSymbols(
"SNMPv2-CONF",
"ModuleCompliance",
"NotificationGroup")
(Counter32,
TimeTicks,
MibScalar,
MibTable,
MibTableRow,
MibTableColumn,
Counter64,
IpAddress,
ObjectIdentity,
Gauge32,
MibIdentifier,
iso,
Unsigned32,
Bits,
NotificationType,
Integer32,
ModuleIdentity,
mib_2) = mibBuilder.importSymbols(
"SNMPv2-SMI",
"Counter32",
"TimeTicks",
"MibScalar",
"MibTable",
"MibTableRow",
"MibTableColumn",
"Counter64",
"IpAddress",
"ObjectIdentity",
"Gauge32",
"MibIdentifier",
"iso",
"Unsigned32",
"Bits",
"NotificationType",
"Integer32",
"ModuleIdentity",
"mib-2")
(DisplayString,
TextualConvention) = mibBuilder.importSymbols(
"SNMPv2-TC",
"DisplayString",
"TextualConvention")
transportAddressMIB = ModuleIdentity(
(1, 3, 6, 1, 2, 1, 100)
)
transportAddressMIB.setRevisions(
("2002-11-01 00:00",)
)
transportAddressMIB.setLastUpdated("200211010000Z")
if mibBuilder.loadTexts:
transportAddressMIB.setOrganization("""\
IETF Operations and Management Area
""")
transportAddressMIB.setContactInfo("""\
Juergen Schoenwaelder (Editor) TU Braunschweig Bueltenweg 74/75 38106
Braunschweig, Germany Phone: +49 531 391-3289 EMail: [email protected]
Send comments to <[email protected]>.
""")
if mibBuilder.loadTexts:
transportAddressMIB.setDescription("""\
This MIB module provides commonly used transport address definitions. Copyright
(C) The Internet Society (2002). This version of this MIB module is part of RFC
3419; see the RFC itself for full legal notices.
""")
class TransportDomain(TextualConvention, ObjectIdentifier):
status = "current"
if mibBuilder.loadTexts:
description = """\
A value that represents a transport domain. Some possible values, such as
transportDomainUdpIpv4, are defined in this module. Other possible values can
be defined in other MIB modules.
"""
class TransportAddressType(TextualConvention, Integer32):
status = "current"
subtypeSpec = Integer32.subtypeSpec
subtypeSpec += ConstraintsUnion(
SingleValueConstraint(
*(0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16)
)
)
namedValues = NamedValues(
*(("local", 13),
("sctpDns", 16),
("sctpIpv4", 9),
("sctpIpv4z", 11),
("sctpIpv6", 10),
("sctpIpv6z", 12),
("tcpDns", 15),
("tcpIpv4", 5),
("tcpIpv4z", 7),
("tcpIpv6", 6),
("tcpIpv6z", 8),
("udpDns", 14),
("udpIpv4", 1),
("udpIpv4z", 3),
("udpIpv6", 2),
("udpIpv6z", 4),
("unknown", 0))
)
if mibBuilder.loadTexts:
description = """\
A value that represents a transport domain. This is the enumerated version of
the transport domain registrations in this MIB module. The enumerated values
have the following meaning: unknown(0) unknown transport address type
udpIpv4(1) transportDomainUdpIpv4 udpIpv6(2) transportDomainUdpIpv6 udpIpv4z(3)
transportDomainUdpIpv4z udpIpv6z(4) transportDomainUdpIpv6z tcpIpv4(5)
transportDomainTcpIpv4 tcpIpv6(6) transportDomainTcpIpv6 tcpIpv4z(7)
transportDomainTcpIpv4z tcpIpv6z(8) transportDomainTcpIpv6z sctpIpv4(9)
transportDomainSctpIpv4 sctpIpv6(10) transportDomainSctpIpv6 sctpIpv4z(11)
transportDomainSctpIpv4z sctpIpv6z(12) transportDomainSctpIpv6z local(13)
transportDomainLocal udpDns(14) transportDomainUdpDns tcpDns(15)
transportDomainTcpDns sctpDns(16) transportDomainSctpDns This textual
convention can be used to represent transport domains in situations where a
syntax of TransportDomain is unwieldy (for example, when used as an index). The
usage of this textual convention implies that additional transport domains can
only be supported by updating this MIB module. This extensibility restriction
does not apply for the TransportDomain textual convention which allows MIB
authors to define additional transport domains independently in other MIB
modules.
"""
class TransportAddress(TextualConvention, OctetString):
status = "current"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(0, 255),
)
if mibBuilder.loadTexts:
description = """\
Denotes a generic transport address. A TransportAddress value is always
interpreted within the context of a TransportAddressType or TransportDomain
value. Every usage of the TransportAddress textual convention MUST specify the
TransportAddressType or TransportDomain object which provides the context.
Furthermore, MIB authors SHOULD define a separate TransportAddressType or
TransportDomain object for each TransportAddress object. It is suggested that
the TransportAddressType or TransportDomain is logically registered before the
object(s) which use the TransportAddress textual convention if they appear in
the same logical row. The value of a TransportAddress object must always be
consistent with the value of the associated TransportAddressType or
TransportDomain object. Attempts to set a TransportAddress object to a value
which is inconsistent with the associated TransportAddressType or
TransportDomain must fail with an inconsistentValue error. When this textual
convention is used as a syntax of an index object, there may be issues with the
limit of 128 sub-identifiers specified in SMIv2, STD 58. In this case, the
OBJECT-TYPE declaration MUST include a 'SIZE' clause to limit the number of
potential instance sub-identifiers.
"""
class TransportAddressIPv4(TextualConvention, OctetString):
status = "current"
displayHint = "1d.1d.1d.1d:2d"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(6, 6),
)
if mibBuilder.loadTexts:
description = """\
Represents a transport address consisting of an IPv4 address and a port number
(as used for example by UDP, TCP and SCTP): octets contents encoding 1-4 IPv4
address network-byte order 5-6 port number network-byte order This textual
convention SHOULD NOT be used directly in object definitions since it restricts
addresses to a specific format. However, if it is used, it MAY be used either
on its own or in conjunction with TransportAddressType or TransportDomain as a
pair.
"""
fixedLength = 6
def prettyIn(self, value):
if isinstance(value, tuple):
# Wild hack -- need to implement TextualConvention.prettyIn
value = inet_pton(socket.AF_INET, value[0]) + int2oct((value[1] >> 8) & 0xff) + int2oct(value[1] & 0xff)
return OctetString.prettyIn(self, value)
# Socket address syntax coercion
def __asSocketAddress(self):
if not hasattr(self, '__tuple_value'):
v = self.asOctets()
self.__tuple_value = (
inet_ntop(socket.AF_INET, v[:4]),
oct2int(v[4]) << 8 | oct2int(v[5]),
)
return self.__tuple_value
def __iter__(self):
return iter(self.__asSocketAddress())
def __getitem__(self, item):
return self.__asSocketAddress()[item]
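# Editor's sketch (not part of the generated MIB): the 6-octet layout the
# description above specifies -- 4 address octets followed by a big-endian
# port -- can be assembled by hand from names already defined in this module.
# '192.0.2.1' port 161 packs to exactly six octets.
_example_ta_ipv4 = inet_pton(socket.AF_INET, '192.0.2.1') + int2oct(161 >> 8) + int2oct(161 & 0xff)
assert len(_example_ta_ipv4) == 6
del _example_ta_ipv4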
class TransportAddressIPv6(TextualConvention, OctetString):
status = "current"
displayHint = "0a[2x:2x:2x:2x:2x:2x:2x:2x]0a:2d"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(18, 18),
)
if mibBuilder.loadTexts:
description = """\
Represents a transport address consisting of an IPv6 address and a port number
(as used for example by UDP, TCP and SCTP): octets contents encoding 1-16 IPv6
address network-byte order 17-18 port number network-byte order This textual
convention SHOULD NOT be used directly in object definitions since it restricts
addresses to a specific format. However, if it is used, it MAY be used either
on its own or in conjunction with TransportAddressType or TransportDomain as a
pair.
"""
fixedLength = 18
def prettyIn(self, value):
if not has_ipv6:
raise error.PySnmpError('IPv6 not supported by platform')
if isinstance(value, tuple):
value = inet_pton(socket.AF_INET6, value[0]) + int2oct((value[1] >> 8) & 0xff) + int2oct(value[1] & 0xff)
return OctetString.prettyIn(self, value)
# Socket address syntax coercion
def __asSocketAddress(self):
if not hasattr(self, '__tuple_value'):
if not has_ipv6:
raise error.PySnmpError('IPv6 not supported by platform')
v = self.asOctets()
self.__tuple_value = (
inet_ntop(socket.AF_INET6, v[:16]),
oct2int(v[16]) << 8 | oct2int(v[17]),
0, # flowinfo
0) # scopeid
return self.__tuple_value
def __iter__(self):
return iter(self.__asSocketAddress())
def __getitem__(self, item):
return self.__asSocketAddress()[item]
class TransportAddressIPv4z(TextualConvention, OctetString):
status = "current"
displayHint = "1d.1d.1d.1d%4d:2d"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(10, 10),
)
if mibBuilder.loadTexts:
description = """\
Represents a transport address consisting of an IPv4 address, a zone index and
a port number (as used for example by UDP, TCP and SCTP): octets contents
encoding 1-4 IPv4 address network-byte order 5-8 zone index network-byte order
9-10 port number network-byte order This textual convention SHOULD NOT be used
directly in object definitions since it restricts addresses to a specific
format. However, if it is used, it MAY be used either on its own or in
conjunction with TransportAddressType or TransportDomain as a pair.
"""
fixedLength = 10
class TransportAddressIPv6z(TextualConvention, OctetString):
status = "current"
displayHint = "0a[2x:2x:2x:2x:2x:2x:2x:2x%4d]0a:2d"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(22, 22),
)
if mibBuilder.loadTexts:
description = """\
Represents a transport address consisting of an IPv6 address, a zone index and
a port number (as used for example by UDP, TCP and SCTP): octets contents
encoding 1-16 IPv6 address network-byte order 17-20 zone index network-byte
order 21-22 port number network-byte order This textual convention SHOULD NOT
be used directly in object definitions since it restricts addresses to a
specific format. However, if it is used, it MAY be used either on its own or in
conjunction with TransportAddressType or TransportDomain as a pair.
"""
fixedLength = 22
class TransportAddressLocal(TextualConvention, OctetString):
status = "current"
displayHint = "1a"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(1, 255),
)
if mibBuilder.loadTexts:
description = """\
Represents a POSIX Local IPC transport address: octets contents encoding all
POSIX Local IPC address string The Posix Local IPC transport domain subsumes
UNIX domain sockets. This textual convention SHOULD NOT be used directly in
object definitions since it restricts addresses to a specific format. However,
if it is used, it MAY be used either on its own or in conjunction with
TransportAddressType or TransportDomain as a pair. When this textual convention
is used as a syntax of an index object, there may be issues with the limit of
128 sub-identifiers specified in SMIv2, STD 58. In this case, the OBJECT-TYPE
declaration MUST include a 'SIZE' clause to limit the number of potential
instance sub-identifiers.
"""
class TransportAddressDns(TextualConvention, OctetString):
status = "current"
displayHint = "1a"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(1, 255),
)
if mibBuilder.loadTexts:
description = """\
Represents a DNS domain name followed by a colon ':' (ASCII character 0x3A) and
a port number in ASCII. The name SHOULD be fully qualified whenever possible.
Values of this textual convention are not directly useable as transport-layer
addressing information, and require runtime resolution. As such, applications
that write them must be prepared for handling errors if such values are not
supported, or cannot be resolved (if resolution occurs at the time of the
management operation). The DESCRIPTION clause of TransportAddress objects that
may have TransportAddressDns values must fully describe how (and when) such
names are to be resolved to IP addresses and vice versa. This textual
convention SHOULD NOT be used directly in object definitions since it restricts
addresses to a specific format. However, if it is used, it MAY be used either
on its own or in conjunction with TransportAddressType or TransportDomain as a
pair. When this textual convention is used as a syntax of an index object,
there may be issues with the limit of 128 sub-identifiers specified in SMIv2,
STD 58. In this case, the OBJECT-TYPE declaration MUST include a 'SIZE' clause
to limit the number of potential instance sub-identifiers.
"""
_TransportDomains_ObjectIdentity = ObjectIdentity
transportDomains = _TransportDomains_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1)
)
_TransportDomainUdpIpv4_ObjectIdentity = ObjectIdentity
transportDomainUdpIpv4 = _TransportDomainUdpIpv4_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 1)
)
if mibBuilder.loadTexts:
transportDomainUdpIpv4.setStatus("current")
if mibBuilder.loadTexts:
transportDomainUdpIpv4.setDescription("""\
The UDP over IPv4 transport domain. The corresponding transport address is of
type TransportAddressIPv4 for global IPv4 addresses.
""")
_TransportDomainUdpIpv6_ObjectIdentity = ObjectIdentity
transportDomainUdpIpv6 = _TransportDomainUdpIpv6_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 2)
)
if mibBuilder.loadTexts:
transportDomainUdpIpv6.setStatus("current")
if mibBuilder.loadTexts:
transportDomainUdpIpv6.setDescription("""\
The UDP over IPv6 transport domain. The corresponding transport address is of
type TransportAddressIPv6 for global IPv6 addresses.
""")
_TransportDomainUdpIpv4z_ObjectIdentity = ObjectIdentity
transportDomainUdpIpv4z = _TransportDomainUdpIpv4z_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 3)
)
if mibBuilder.loadTexts:
transportDomainUdpIpv4z.setStatus("current")
if mibBuilder.loadTexts:
transportDomainUdpIpv4z.setDescription("""\
The UDP over IPv4 transport domain. The corresponding transport address is of
type TransportAddressIPv4z for scoped IPv4 addresses with a zone index.
""")
_TransportDomainUdpIpv6z_ObjectIdentity = ObjectIdentity
transportDomainUdpIpv6z = _TransportDomainUdpIpv6z_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 4)
)
if mibBuilder.loadTexts:
transportDomainUdpIpv6z.setStatus("current")
if mibBuilder.loadTexts:
transportDomainUdpIpv6z.setDescription("""\
The UDP over IPv6 transport domain. The corresponding transport address is of
type TransportAddressIPv6z for scoped IPv6 addresses with a zone index.
""")
_TransportDomainTcpIpv4_ObjectIdentity = ObjectIdentity
transportDomainTcpIpv4 = _TransportDomainTcpIpv4_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 5)
)
if mibBuilder.loadTexts:
transportDomainTcpIpv4.setStatus("current")
if mibBuilder.loadTexts:
transportDomainTcpIpv4.setDescription("""\
The TCP over IPv4 transport domain. The corresponding transport address is of
type TransportAddressIPv4 for global IPv4 addresses.
""")
_TransportDomainTcpIpv6_ObjectIdentity = ObjectIdentity
transportDomainTcpIpv6 = _TransportDomainTcpIpv6_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 6)
)
if mibBuilder.loadTexts:
transportDomainTcpIpv6.setStatus("current")
if mibBuilder.loadTexts:
transportDomainTcpIpv6.setDescription("""\
The TCP over IPv6 transport domain. The corresponding transport address is of
type TransportAddressIPv6 for global IPv6 addresses.
""")
_TransportDomainTcpIpv4z_ObjectIdentity = ObjectIdentity
transportDomainTcpIpv4z = _TransportDomainTcpIpv4z_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 7)
)
if mibBuilder.loadTexts:
transportDomainTcpIpv4z.setStatus("current")
if mibBuilder.loadTexts:
transportDomainTcpIpv4z.setDescription("""\
The TCP over IPv4 transport domain. The corresponding transport address is of
type TransportAddressIPv4z for scoped IPv4 addresses with a zone index.
""")
_TransportDomainTcpIpv6z_ObjectIdentity = ObjectIdentity
transportDomainTcpIpv6z = _TransportDomainTcpIpv6z_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 8)
)
if mibBuilder.loadTexts:
transportDomainTcpIpv6z.setStatus("current")
if mibBuilder.loadTexts:
transportDomainTcpIpv6z.setDescription("""\
The TCP over IPv6 transport domain. The corresponding transport address is of
type TransportAddressIPv6z for scoped IPv6 addresses with a zone index.
""")
_TransportDomainSctpIpv4_ObjectIdentity = ObjectIdentity
transportDomainSctpIpv4 = _TransportDomainSctpIpv4_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 9)
)
if mibBuilder.loadTexts:
transportDomainSctpIpv4.setStatus("current")
if mibBuilder.loadTexts:
transportDomainSctpIpv4.setDescription("""\
The SCTP over IPv4 transport domain. The corresponding transport address is of
type TransportAddressIPv4 for global IPv4 addresses. This transport domain
usually represents the primary address on multihomed SCTP endpoints.
""")
_TransportDomainSctpIpv6_ObjectIdentity = ObjectIdentity
transportDomainSctpIpv6 = _TransportDomainSctpIpv6_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 10)
)
if mibBuilder.loadTexts:
transportDomainSctpIpv6.setStatus("current")
if mibBuilder.loadTexts:
transportDomainSctpIpv6.setDescription("""\
The SCTP over IPv6 transport domain. The corresponding transport address is of
type TransportAddressIPv6 for global IPv6 addresses. This transport domain
usually represents the primary address on multihomed SCTP endpoints.
""")
_TransportDomainSctpIpv4z_ObjectIdentity = ObjectIdentity
transportDomainSctpIpv4z = _TransportDomainSctpIpv4z_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 11)
)
if mibBuilder.loadTexts:
transportDomainSctpIpv4z.setStatus("current")
if mibBuilder.loadTexts:
transportDomainSctpIpv4z.setDescription("""\
The SCTP over IPv4 transport domain. The corresponding transport address is of
type TransportAddressIPv4z for scoped IPv4 addresses with a zone index. This
transport domain usually represents the primary address on multihomed SCTP
endpoints.
""")
_TransportDomainSctpIpv6z_ObjectIdentity = ObjectIdentity
transportDomainSctpIpv6z = _TransportDomainSctpIpv6z_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 12)
)
if mibBuilder.loadTexts:
transportDomainSctpIpv6z.setStatus("current")
if mibBuilder.loadTexts:
transportDomainSctpIpv6z.setDescription("""\
The SCTP over IPv6 transport domain. The corresponding transport address is of
type TransportAddressIPv6z for scoped IPv6 addresses with a zone index. This
transport domain usually represents the primary address on multihomed SCTP
endpoints.
""")
_TransportDomainLocal_ObjectIdentity = ObjectIdentity
transportDomainLocal = _TransportDomainLocal_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 13)
)
if mibBuilder.loadTexts:
transportDomainLocal.setStatus("current")
if mibBuilder.loadTexts:
transportDomainLocal.setDescription("""\
The Posix Local IPC transport domain. The corresponding transport address is of
type TransportAddressLocal. The Posix Local IPC transport domain incorporates
the well-known UNIX domain sockets.
""")
_TransportDomainUdpDns_ObjectIdentity = ObjectIdentity
transportDomainUdpDns = _TransportDomainUdpDns_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 14)
)
if mibBuilder.loadTexts:
transportDomainUdpDns.setStatus("current")
if mibBuilder.loadTexts:
transportDomainUdpDns.setDescription("""\
The UDP transport domain using fully qualified domain names. The corresponding
transport address is of type TransportAddressDns.
""")
_TransportDomainTcpDns_ObjectIdentity = ObjectIdentity
transportDomainTcpDns = _TransportDomainTcpDns_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 15)
)
if mibBuilder.loadTexts:
transportDomainTcpDns.setStatus("current")
if mibBuilder.loadTexts:
transportDomainTcpDns.setDescription("""\
The TCP transport domain using fully qualified domain names. The corresponding
transport address is of type TransportAddressDns.
""")
_TransportDomainSctpDns_ObjectIdentity = ObjectIdentity
transportDomainSctpDns = _TransportDomainSctpDns_ObjectIdentity(
(1, 3, 6, 1, 2, 1, 100, 1, 16)
)
if mibBuilder.loadTexts:
transportDomainSctpDns.setStatus("current")
if mibBuilder.loadTexts:
transportDomainSctpDns.setDescription("""\
The SCTP transport domain using fully qualified domain names. The corresponding
transport address is of type TransportAddressDns.
""")
mibBuilder.exportSymbols(
"TRANSPORT-ADDRESS-MIB",
**{"TransportDomain": TransportDomain,
"TransportAddressType": TransportAddressType,
"TransportAddress": TransportAddress,
"TransportAddressIPv4": TransportAddressIPv4,
"TransportAddressIPv6": TransportAddressIPv6,
"TransportAddressIPv4z": TransportAddressIPv4z,
"TransportAddressIPv6z": TransportAddressIPv6z,
"TransportAddressLocal": TransportAddressLocal,
"TransportAddressDns": TransportAddressDns,
"transportAddressMIB": transportAddressMIB,
"transportDomains": transportDomains,
"transportDomainUdpIpv4": transportDomainUdpIpv4,
"transportDomainUdpIpv6": transportDomainUdpIpv6,
"transportDomainUdpIpv4z": transportDomainUdpIpv4z,
"transportDomainUdpIpv6z": transportDomainUdpIpv6z,
"transportDomainTcpIpv4": transportDomainTcpIpv4,
"transportDomainTcpIpv6": transportDomainTcpIpv6,
"transportDomainTcpIpv4z": transportDomainTcpIpv4z,
"transportDomainTcpIpv6z": transportDomainTcpIpv6z,
"transportDomainSctpIpv4": transportDomainSctpIpv4,
"transportDomainSctpIpv6": transportDomainSctpIpv6,
"transportDomainSctpIpv4z": transportDomainSctpIpv4z,
"transportDomainSctpIpv6z": transportDomainSctpIpv6z,
"transportDomainLocal": transportDomainLocal,
"transportDomainUdpDns": transportDomainUdpDns,
"transportDomainTcpDns": transportDomainTcpDns,
"transportDomainSctpDns": transportDomainSctpDns}
)
| bsd-2-clause |
jantman/pelican-plugins | pelican_comment_system/avatars.py | 25 | 2732 | # -*- coding: utf-8 -*-
"""
Author: Bernhard Scheirle
"""
from __future__ import unicode_literals
import logging
import os
import hashlib
logger = logging.getLogger(__name__)
_log = "pelican_comment_system: avatars: "
try:
from . identicon import identicon
_identiconImported = True
except ImportError as e:
logger.warning(_log + "identicon deactivated: " + str(e))
_identiconImported = False
# Global Variables
_identicon_save_path = None
_identicon_output_path = None
_identicon_data = None
_identicon_size = None
_initialized = False
_authors = None
_missingAvatars = []
def _ready():
if not _initialized:
logger.warning(_log + "Module not initialized. use init")
if not _identicon_data:
logger.debug(_log + "No identicon data set")
return _identiconImported and _initialized and _identicon_data
def init(pelican_output_path, identicon_output_path, identicon_data,
identicon_size, authors):
global _identicon_save_path
global _identicon_output_path
global _identicon_data
global _identicon_size
global _initialized
global _authors
global _missingAvatars
_identicon_save_path = os.path.join(pelican_output_path,
identicon_output_path)
_identicon_output_path = identicon_output_path
_identicon_data = identicon_data
_identicon_size = identicon_size
_authors = authors
_missingAvatars = []
_initialized = True
def _createIdenticonOutputFolder():
if not _ready():
return
if not os.path.exists(_identicon_save_path):
os.makedirs(_identicon_save_path)
def getAvatarPath(comment_id, metadata):
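# Build an md5 over the configured metadata fields (e.g. the author's
# email); the hex digest later doubles as the identicon's file name.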
if not _ready():
return ''
md5 = hashlib.md5()
author = tuple()
for data in _identicon_data:
if data in metadata:
string = "{}".format(metadata[data])
md5.update(string.encode('utf-8'))
author += tuple([string])
else:
logger.warning(_log + data +
" is missing in comment: " + comment_id)
if author in _authors:
return _authors[author]
global _missingAvatars
code = md5.hexdigest()
if code not in _missingAvatars:
_missingAvatars.append(code)
return os.path.join(_identicon_output_path, '%s.png' % code)
def generateAndSaveMissingAvatars():
_createIdenticonOutputFolder()
global _missingAvatars
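# Render one identicon PNG per digest collected by getAvatarPath().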
for code in _missingAvatars:
avatar_path = '%s.png' % code
avatar = identicon.render_identicon(int(code, 16), _identicon_size)
avatar_save_path = os.path.join(_identicon_save_path, avatar_path)
avatar.save(avatar_save_path, 'PNG')
_missingAvatars = []
| agpl-3.0 |
Potato42/primitivepyg | primitivepyg/convertcolors.py | 1 | 1734 | """
convertcolors.py provides functions for converting various color formats to (red, green, blue, alpha)
"""
RED_MASK = 0xff000000
GREEN_MASK = 0x00ff0000
BLUE_MASK = 0x0000ff00
ALPHA_MASK = 0x000000ff
def color_from_hex(hex_color:int)->(int,int,int,int):
"""
Takes a hex value of the form 0xRRGGBBAA and returns a tuple containing the R, G, B, and A components, each from 0 to 255.
"""
# this commented-out code adds ambiguity as there would be no way to have a 0 for red and also have an alpha.
# if hex_color <= 0xffffff: # if alpha value not included
# # fill in the AA part with 255
# hex_color <<= 8
# hex_color |= 0xff
return (hex_color & RED_MASK) >> 24, (hex_color & GREEN_MASK) >> 16,\
(hex_color & BLUE_MASK) >> 8, (hex_color & ALPHA_MASK)
def get_color(color)->(int,int,int,int):
"""
Return an R, G, B, A tuple from color, which can be any of the following formats:
0xRRGGBBAA
(grey,)
(grey,alpha)
(red,green,blue)
(red,green,blue,alpha)
"""
if isinstance(color, int):
# color is a hex color
return color_from_hex(color)
if isinstance(color, tuple): # todo: perhaps allow all tuple-like types?
# color is already a tuple, but may need to be formatted correctly
if len(color) == 1: # grey scale
return color[0],color[0],color[0],255
if len(color) == 2: # alpha grey scale
return color[0],color[0],color[0],color[1]
if len(color) == 3: # R,G,B
return color+(255,)
if len(color) == 4: # R,G,B,A
return color
raise ValueError("color has invalid length")
raise ValueError("color must be int or tuple") | mit |
jostschmithals/three.js | utils/exporters/blender/addons/io_three/exporter/texture.py | 173 | 1407 | from .. import constants, logger
from . import base_classes, image, api
class Texture(base_classes.BaseNode):
"""Class that wraps a texture node"""
def __init__(self, node, parent):
logger.debug("Texture().__init__(%s)", node)
base_classes.BaseNode.__init__(self, node, parent, constants.TEXTURE)
num = constants.NUMERIC
img_inst = self.scene.image(api.texture.file_name(self.node))
if not img_inst:
image_node = api.texture.image_node(self.node)
img_inst = image.Image(image_node.name, self.scene)
self.scene[constants.IMAGES].append(img_inst)
self[constants.IMAGE] = img_inst[constants.UUID]
wrap = api.texture.wrap(self.node)
self[constants.WRAP] = (num[wrap[0]], num[wrap[1]])
if constants.WRAPPING.REPEAT in wrap:
self[constants.REPEAT] = api.texture.repeat(self.node)
self[constants.ANISOTROPY] = api.texture.anisotropy(self.node)
self[constants.MAG_FILTER] = num[api.texture.mag_filter(self.node)]
self[constants.MIN_FILTER] = num[api.texture.min_filter(self.node)]
self[constants.MAPPING] = num[api.texture.mapping(self.node)]
@property
def image(self):
"""
:return: the image object of the current texture
:rtype: image.Image
"""
return self.scene.image(self[constants.IMAGE])
| mit |
ricotabor/opendrop | opendrop/__main__.py | 2 | 1861 | # Copyright © 2020, Joseph Berry, Rico Tabor ([email protected])
# OpenDrop is released under the GNU GPL License. You are free to
# modify and distribute the code, but always under the same license
# (i.e. you cannot make commercial derivatives).
#
# If you use this software in your research, please cite the following
# journal articles:
#
# J. D. Berry, M. J. Neeson, R. R. Dagastine, D. Y. C. Chan and
# R. F. Tabor, Measurement of surface and interfacial tension using
# pendant drop tensiometry. Journal of Colloid and Interface Science 454
# (2015) 226–237. https://doi.org/10.1016/j.jcis.2015.05.012
#
# E. Huang, T. Denning, A. Skoufis, J. Qi, R. R. Dagastine, R. F. Tabor
# and J. D. Berry, OpenDrop: Open-source software for pendant drop
# tensiometry & contact angle measurements, submitted to the Journal of
# Open Source Software
#
# These citations help us not only to understand who is using and
# developing OpenDrop, and for what purpose, but also to justify
# continued development of this code and other open source resources.
#
# OpenDrop is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details. You
# should have received a copy of the GNU General Public License along
# with this software. If not, see <https://www.gnu.org/licenses/>.
import sys
from opendrop.app import OpendropApplication
from opendrop.appfw import Injector
def main(*argv) -> int:
# https://stackoverflow.com/questions/13514031/py2exe-with-multiprocessing-fails-to-run-the-processes#27547300
import multiprocessing
multiprocessing.freeze_support()
injector = Injector()
app = injector.create_object(OpendropApplication)
return app.run(argv)
if __name__ == '__main__':
sys.exit(main(*sys.argv))
| gpl-2.0 |
rx2130/Leetcode | python/37 Sudoku Solver.py | 1 | 1671 | class Solution(object):
def solveSudoku(self, board):
"""
:type board: List[List[str]]
:rtype: void Do not return anything, modify board in-place instead.
"""
if board is None or len(board) != 9 or len(board[0]) != 9:
return
for row in range(9):
board[row] = list(board[row])
self.solve(board)
for row in board:
print(row)
def solve(self, board):
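# Backtracking: find the next empty cell, try digits 1-9, recurse, and
# undo the placement if the recursion dead-ends.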
for i in range(9):
for j in range(9):
if board[i][j] == '.':
for c in range(1, 10):
if self.isValid(board, i, j, c):
board[i][j] = str(c)
if self.solve(board):
return True
else:
board[i][j] = '.'
return False
return True
def isValid(self, board, i, j, c):
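# A digit c is legal at (i, j) only if it appears nowhere in row i,
# column j, or the 3x3 sub-box containing (i, j).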
for row in range(9):
if board[row][j] == str(c):
return False
for col in range(9):
if board[i][col] == str(c):
return False
for row in range(i // 3 * 3, i // 3 * 3 + 3):
for col in range(j // 3 * 3, j // 3 * 3 + 3):
if board[row][col] == str(c):
return False
return True
board = ["..9748...", "7........", ".2.1.9...", "..7...24.", ".64.1.59.",
".98...3..", "...8.3.2.", "........6", "...2759.."]
test = Solution()
test.solveSudoku(board)
# ["519748632","783652419","426139875","357986241","264317598","198524367","975863124","832491756","641275983"]
| apache-2.0 |
ericdwang/django-analytical | analytical/tests/test_tag_spring_metrics.py | 4 | 2549 | """
Tests for the Spring Metrics template tags and filters.
"""
import re
from django.contrib.auth.models import User, AnonymousUser
from django.http import HttpRequest
from django.template import Context
from django.test.utils import override_settings
from analytical.templatetags.spring_metrics import SpringMetricsNode
from analytical.tests.utils import TagTestCase
from analytical.utils import AnalyticalException
@override_settings(SPRING_METRICS_TRACKING_ID='12345678')
class SpringMetricsTagTestCase(TagTestCase):
"""
Tests for the ``spring_metrics`` template tag.
"""
def test_tag(self):
r = self.render_tag('spring_metrics', 'spring_metrics')
self.assertTrue("_springMetq.push(['id', '12345678']);" in r, r)
def test_node(self):
r = SpringMetricsNode().render(Context({}))
self.assertTrue("_springMetq.push(['id', '12345678']);" in r, r)
@override_settings(SPRING_METRICS_TRACKING_ID=None)
def test_no_site_id(self):
self.assertRaises(AnalyticalException, SpringMetricsNode)
@override_settings(SPRING_METRICS_TRACKING_ID='123xyz')
def test_wrong_site_id(self):
self.assertRaises(AnalyticalException, SpringMetricsNode)
@override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
def test_identify(self):
r = SpringMetricsNode().render(Context({'user':
User(email='[email protected]')}))
self.assertTrue("_springMetq.push(['setdata', "
"{'email': '[email protected]'}]);" in r, r)
@override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
def test_identify_anonymous_user(self):
r = SpringMetricsNode().render(Context({'user': AnonymousUser()}))
self.assertFalse("_springMetq.push(['setdata', {'email':" in r, r)
def test_custom(self):
r = SpringMetricsNode().render(Context({'spring_metrics_var1': 'val1',
'spring_metrics_var2': 'val2'}))
self.assertTrue("_springMetq.push(['setdata', {'var1': 'val1'}]);" in r,
r)
self.assertTrue("_springMetq.push(['setdata', {'var2': 'val2'}]);" in r,
r)
@override_settings(ANALYTICAL_INTERNAL_IPS=['1.1.1.1'])
def test_render_internal_ip(self):
req = HttpRequest()
req.META['REMOTE_ADDR'] = '1.1.1.1'
context = Context({'request': req})
r = SpringMetricsNode().render(context)
self.assertTrue(r.startswith(
'<!-- Spring Metrics disabled on internal IP address'), r)
self.assertTrue(r.endswith('-->'), r)
| mit |
ceph/autotest | frontend/settings.py | 2 | 4228 | # Django settings for frontend project.
import os
import common
from autotest_lib.client.common_lib import global_config
DEBUG = True
TEMPLATE_DEBUG = DEBUG
FULL_ADMIN = False
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASE_ENGINE = 'mysql' # 'postgresql_psycopg2', 'postgresql',
# 'mysql', 'sqlite3' or 'ado_mssql'.
DATABASE_PORT = '' # Set to empty string for default.
# Not used with sqlite3.
c = global_config.global_config
_section = 'AUTOTEST_WEB'
DATABASE_HOST = c.get_config_value(_section, "host")
# Or path to database file if using sqlite3.
DATABASE_NAME = c.get_config_value(_section, "database")
# The following not used with sqlite3.
DATABASE_USER = c.get_config_value(_section, "user")
DATABASE_PASSWORD = c.get_config_value(_section, "password", default='')
DATABASE_READONLY_HOST = c.get_config_value(_section, "readonly_host",
default=DATABASE_HOST)
DATABASE_READONLY_USER = c.get_config_value(_section, "readonly_user",
default=DATABASE_USER)
if DATABASE_READONLY_USER != DATABASE_USER:
DATABASE_READONLY_PASSWORD = c.get_config_value(_section,
"readonly_password",
default='')
else:
DATABASE_READONLY_PASSWORD = DATABASE_PASSWORD
# prefix applied to all URLs - useful if requests are coming through apache,
# and you need this app to coexist with others
URL_PREFIX = 'afe/server/'
TKO_URL_PREFIX = 'new_tko/server/'
PLANNER_URL_PREFIX = 'planner/server/'
# Local time zone for this installation. Choices can be found here:
# http://www.postgresql.org/docs/8.1/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE
# although not all variations may be possible on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Los_Angeles'
# Language code for this installation. All choices can be found here:
# http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes
# http://blogs.law.harvard.edu/tech/stories/storyReader$15
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT.
# Example: "http://media.lawrence.com"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'pn-t15u(epetamdflb%dqaaxw+5u&2#0u-jah70w1l*_9*)=n7'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'frontend.apache_auth.ApacheAuthMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.middleware.doc.XViewMiddleware',
'frontend.shared.json_html_formatter.JsonToHtmlMiddleware',
)
ROOT_URLCONF = 'frontend.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.abspath(os.path.dirname(__file__) + '/templates')
)
INSTALLED_APPS = (
'frontend.afe',
'frontend.tko',
'frontend.planner',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
)
AUTHENTICATION_BACKENDS = (
'frontend.apache_auth.SimpleAuthBackend',
)
| gpl-2.0 |
40223236/40223236-50 | static/Brython3.1.1-20150328-091302/Lib/unittest/main.py | 739 | 10385 | """Unittest main program"""
import sys
import optparse
import os
from . import loader, runner
from .signals import installHandler
__unittest = True
FAILFAST = " -f, --failfast Stop on first failure\n"
CATCHBREAK = " -c, --catch Catch control-C and display results\n"
BUFFEROUTPUT = " -b, --buffer Buffer stdout and stderr during test runs\n"
USAGE_AS_MAIN = """\
Usage: %(progName)s [options] [tests]
Options:
-h, --help Show this message
-v, --verbose Verbose output
-q, --quiet Minimal output
%(failfast)s%(catchbreak)s%(buffer)s
Examples:
%(progName)s test_module - run tests from test_module
%(progName)s module.TestClass - run tests from module.TestClass
%(progName)s module.Class.test_method - run specified test method
[tests] can be a list of any number of test modules, classes and test
methods.
Alternative Usage: %(progName)s discover [options]
Options:
-v, --verbose Verbose output
%(failfast)s%(catchbreak)s%(buffer)s -s directory Directory to start discovery ('.' default)
-p pattern Pattern to match test files ('test*.py' default)
-t directory Top level directory of project (default to
start directory)
For test discovery all test modules must be importable from the top
level directory of the project.
"""
USAGE_FROM_MODULE = """\
Usage: %(progName)s [options] [test] [...]
Options:
-h, --help Show this message
-v, --verbose Verbose output
-q, --quiet Minimal output
%(failfast)s%(catchbreak)s%(buffer)s
Examples:
%(progName)s - run default set of tests
%(progName)s MyTestSuite - run suite 'MyTestSuite'
%(progName)s MyTestCase.testSomething - run MyTestCase.testSomething
%(progName)s MyTestCase - run all 'test*' test methods
in MyTestCase
"""
def _convert_name(name):
# on Linux / Mac OS X 'foo.PY' is not importable, but on
# Windows it is. Simpler to do a case insensitive match
# a better check would be to check that the name is a
# valid Python module name.
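# e.g. "pkg/test_foo.py" becomes the dotted name "pkg.test_foo".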
if os.path.isfile(name) and name.lower().endswith('.py'):
if os.path.isabs(name):
rel_path = os.path.relpath(name, os.getcwd())
if os.path.isabs(rel_path) or rel_path.startswith(os.pardir):
return name
name = rel_path
# on Windows both '\' and '/' are used as path
# separators. Better to replace both than rely on os.path.sep
return name[:-3].replace('\\', '.').replace('/', '.')
return name
def _convert_names(names):
return [_convert_name(name) for name in names]
class TestProgram(object):
"""A command-line program that runs a set of tests; this is primarily
for making test modules conveniently executable.
"""
USAGE = USAGE_FROM_MODULE
# defaults for testing
failfast = catchbreak = buffer = progName = warnings = None
def __init__(self, module='__main__', defaultTest=None, argv=None,
testRunner=None, testLoader=loader.defaultTestLoader,
exit=True, verbosity=1, failfast=None, catchbreak=None,
buffer=None, warnings=None):
if isinstance(module, str):
self.module = __import__(module)
for part in module.split('.')[1:]:
self.module = getattr(self.module, part)
else:
self.module = module
if argv is None:
argv = sys.argv
self.exit = exit
self.failfast = failfast
self.catchbreak = catchbreak
self.verbosity = verbosity
self.buffer = buffer
if warnings is None and not sys.warnoptions:
# even if DeprecationWarnings are ignored by default
# print them anyway unless other warnings settings are
# specified by the warnings arg or the -W python flag
self.warnings = 'default'
else:
# here self.warnings is set either to the value passed
# to the warnings args or to None.
# If the user didn't pass a value self.warnings will
# be None. This means that the behavior is unchanged
# and depends on the values passed to -W.
self.warnings = warnings
self.defaultTest = defaultTest
self.testRunner = testRunner
self.testLoader = testLoader
self.progName = os.path.basename(argv[0])
self.parseArgs(argv)
self.runTests()
def usageExit(self, msg=None):
if msg:
print(msg)
usage = {'progName': self.progName, 'catchbreak': '', 'failfast': '',
'buffer': ''}
if self.failfast != False:
usage['failfast'] = FAILFAST
if self.catchbreak != False:
usage['catchbreak'] = CATCHBREAK
if self.buffer != False:
usage['buffer'] = BUFFEROUTPUT
print(self.USAGE % usage)
sys.exit(2)
def parseArgs(self, argv):
if ((len(argv) > 1 and argv[1].lower() == 'discover') or
(len(argv) == 1 and self.module is None)):
self._do_discovery(argv[2:])
return
parser = self._getOptParser()
options, args = parser.parse_args(argv[1:])
self._setAttributesFromOptions(options)
if len(args) == 0 and self.module is None:
# this allows "python -m unittest -v" to still work for
# test discovery. This means -c / -b / -v / -f options will
# be handled twice, which is harmless but not ideal.
self._do_discovery(argv[1:])
return
if len(args) == 0 and self.defaultTest is None:
# createTests will load tests from self.module
self.testNames = None
elif len(args) > 0:
self.testNames = _convert_names(args)
if __name__ == '__main__':
# to support python -m unittest ...
self.module = None
else:
self.testNames = (self.defaultTest,)
self.createTests()
def createTests(self):
if self.testNames is None:
self.test = self.testLoader.loadTestsFromModule(self.module)
else:
self.test = self.testLoader.loadTestsFromNames(self.testNames,
self.module)
def _getOptParser(self):
import optparse
parser = optparse.OptionParser()
parser.prog = self.progName
parser.add_option('-v', '--verbose', dest='verbose', default=False,
help='Verbose output', action='store_true')
parser.add_option('-q', '--quiet', dest='quiet', default=False,
help='Quiet output', action='store_true')
if self.failfast != False:
parser.add_option('-f', '--failfast', dest='failfast', default=False,
help='Stop on first fail or error',
action='store_true')
if self.catchbreak != False:
parser.add_option('-c', '--catch', dest='catchbreak', default=False,
help='Catch ctrl-C and display results so far',
action='store_true')
if self.buffer != False:
parser.add_option('-b', '--buffer', dest='buffer', default=False,
help='Buffer stdout and stderr during tests',
action='store_true')
return parser
def _setAttributesFromOptions(self, options):
# only set options from the parsing here
# if they weren't set explicitly in the constructor
if self.failfast is None:
self.failfast = options.failfast
if self.catchbreak is None:
self.catchbreak = options.catchbreak
if self.buffer is None:
self.buffer = options.buffer
if options.verbose:
self.verbosity = 2
elif options.quiet:
self.verbosity = 0
def _addDiscoveryOptions(self, parser):
parser.add_option('-s', '--start-directory', dest='start', default='.',
help="Directory to start discovery ('.' default)")
parser.add_option('-p', '--pattern', dest='pattern', default='test*.py',
help="Pattern to match tests ('test*.py' default)")
parser.add_option('-t', '--top-level-directory', dest='top', default=None,
help='Top level directory of project (defaults to start directory)')
def _do_discovery(self, argv, Loader=None):
if Loader is None:
Loader = lambda: self.testLoader
# handle command line args for test discovery
self.progName = '%s discover' % self.progName
parser = self._getOptParser()
self._addDiscoveryOptions(parser)
options, args = parser.parse_args(argv)
if len(args) > 3:
self.usageExit()
for name, value in zip(('start', 'pattern', 'top'), args):
setattr(options, name, value)
self._setAttributesFromOptions(options)
start_dir = options.start
pattern = options.pattern
top_level_dir = options.top
loader = Loader()
self.test = loader.discover(start_dir, pattern, top_level_dir)
def runTests(self):
if self.catchbreak:
installHandler()
if self.testRunner is None:
self.testRunner = runner.TextTestRunner
if isinstance(self.testRunner, type):
try:
testRunner = self.testRunner(verbosity=self.verbosity,
failfast=self.failfast,
buffer=self.buffer,
warnings=self.warnings)
except TypeError:
# didn't accept the verbosity, buffer or failfast arguments
testRunner = self.testRunner()
else:
# it is assumed to be a TestRunner instance
testRunner = self.testRunner
self.result = testRunner.run(self.test)
if self.exit:
sys.exit(not self.result.wasSuccessful())
main = TestProgram
| gpl-3.0 |
drcapulet/sentry | src/sentry/migrations/0120_auto__add_grouprulestatus.py | 36 | 23983 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'GroupRuleStatus'
db.create_table('sentry_grouprulestatus', (
('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
('project', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Project'])),
('rule', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Rule'])),
('group', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Group'])),
('status', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=0)),
('date_added', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
))
db.send_create_signal('sentry', ['GroupRuleStatus'])
def backwards(self, orm):
# Deleting model 'GroupRuleStatus'
db.delete_table('sentry_grouprulestatus')
models = {
'sentry.accessgroup': {
'Meta': {'unique_together': "(('team', 'name'),)", 'object_name': 'AccessGroup'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.User']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'symmetrical': 'False'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Event']", 'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.alert': {
'Meta': {'object_name': 'Alert'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'related_groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_alerts'", 'symmetrical': 'False', 'through': "orm['sentry.AlertRelatedGroup']", 'to': "orm['sentry.Group']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.alertrelatedgroup': {
'Meta': {'unique_together': "(('group', 'alert'),)", 'object_name': 'AlertRelatedGroup'},
'alert': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Alert']"}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
},
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.grouprulestatus': {
'Meta': {'object_name': 'GroupRuleStatus'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'rule': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Rule']"}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.grouptagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'null': 'True', 'to': "orm['sentry.Project']"}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.pendingteammember': {
'Meta': {'unique_together': "(('team', 'email'),)", 'object_name': 'PendingTeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'pending_member_set'", 'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'),)", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_owned_project_set'", 'null': 'True', 'to': "orm['sentry.User']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Team']", 'null': 'True'})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
'user_added': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'keys_added_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.rule': {
'Meta': {'object_name': 'Rule'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'team_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.TeamMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_teammember_set'", 'to': "orm['sentry.User']"})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
        },
        'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
}
}
complete_apps = ['sentry']
| bsd-3-clause |
tony0924/itriqemu | scripts/tracetool/backend/stderr.py | 94 | 1162 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Stderr built-in backend.
"""
__author__ = "Lluís Vilanova <[email protected]>"
__copyright__ = "Copyright 2012, Lluís Vilanova <[email protected]>"
__license__ = "GPL version 2 or (at your option) any later version"
__maintainer__ = "Stefan Hajnoczi"
__email__ = "[email protected]"
from tracetool import out
PUBLIC = True
def c(events):
pass
def h(events):
out('#include <stdio.h>',
'#include "trace/control.h"',
'',
)
for e in events:
argnames = ", ".join(e.args.names())
if len(e.args) > 0:
argnames = ", " + argnames
out('static inline void trace_%(name)s(%(args)s)',
'{',
' bool _state = trace_event_get_state(%(event_id)s);',
' if (_state) {',
' fprintf(stderr, "%(name)s " %(fmt)s "\\n" %(argnames)s);',
' }',
'}',
name = e.name,
args = e.args,
event_id = "TRACE_" + e.name.upper(),
fmt = e.fmt.rstrip("\n"),
argnames = argnames,
)
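# Illustrative output (a sketch, assuming a hypothetical event "foo(int x)"
# with format string "x=%d"): the template above would emit roughly
#
#   static inline void trace_foo(int x)
#   {
#       bool _state = trace_event_get_state(TRACE_FOO);
#       if (_state) {
#           fprintf(stderr, "foo " "x=%d" "\n" , x);
#       }
#   }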
| gpl-2.0 |
eldipa/athena | athenaproj/athena/models.py | 1 | 7528 | from django.db import models
import json, string
from django.core.exceptions import ValidationError
class Objective(models.Model):
name = models.CharField(max_length=200)
description = models.CharField(max_length=240, blank=True)
def __unicode__(self):
return self.name
class How(models.Model):
name = models.CharField(max_length=200)
description = models.CharField(max_length=240, blank=True)
def __unicode__(self):
return self.name
class Plan(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
def __unicode__(self):
return self.name
class TestSet(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
objectives = models.ManyToManyField(Objective, null=False)
how = models.ManyToManyField(How, null=False)
plan = models.ForeignKey(Plan, null=False, blank=False)
parameters = models.TextField(blank=True)
keyword_count = 8
class TestTemplate(string.Template):
delimiter = '{{'
pattern = r'''
\{\{(?:
(?P<escaped>!)|
(?P<named>[_a-z][_a-z0-9]*)\}\}|
(?P<braced>[_a-z][_a-z0-9]*)\}\}|
(?P<invalid>)
)
'''
def required(self):
required = []
for escaped, named, braced, invalid in self.pattern.findall(self.template):
if named or braced:
required.append(named if named else braced)
return required
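    # Usage sketch for TestTemplate (illustrative values, not from the model):
    #   t = TestSet.TestTemplate("Run {{case}} on {{env}}")
    #   t.required()                                    # -> ['case', 'env']
    #   t.substitute({'case': 'login', 'env': 'prod'})  # -> 'Run login on prod'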
def __unicode__(self):
test_count = self.test_count()
is_parametric = self.is_parametric()
return "%s (%s)" % (self.name, "singleton set" if not is_parametric else ("%i test%s in the set" % (test_count, "" if test_count == 1 else "s")))
    def _get_keywords_and_rows(self):
        try:
            data = json.loads(self.parameters)
        except Exception:
            data = []
        # the first row holds the column names (keywords), the rest are the tests;
        # guard against empty data, which would otherwise raise an IndexError
        keywords = filter(None, data[0]) if data else []
        return keywords, [filter(None, row) for row in data[1:]]
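    # Example of the expected ``parameters`` payload (illustrative values):
    #   '[["case", "env"], ["login", "prod"], ["signup", "staging"]]'
    # -> keywords == ['case', 'env'], rows == [['login', 'prod'], ['signup', 'staging']]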
def _get_row_count(self):
_, rows = self._get_keywords_and_rows()
return len(rows)
def test_count(self):
row_count = self._get_row_count()
return row_count if row_count else 1
def is_parametric(self):
row_count = self._get_row_count()
return row_count > 0
def tests(self):
k = ("name", "description")
if not self.is_parametric():
return [dict(zip(k, (self.name, self.description)))]
keywords, rows = self._get_keywords_and_rows()
results = []
name_template = self.TestTemplate(self.name)
description_template = self.TestTemplate(self.description)
for r in rows:
params = dict(zip(keywords, r))
results.append(dict(zip(k, (name_template.substitute(params), description_template.substitute(params)))))
return results
def clean(self):
models.Model.clean(self)
# JSON integrity -------------------------------
try:
data = json.loads(self.parameters)
except Exception, e:
raise ValidationError("Internal Error (possible BUG): The parameters are not a valid json. '%s'" % str(e))
# Clean up --------------------------
empties = []
for i, row in enumerate(data[1:]):
if not filter(None, map(lambda s: s.strip(), row)):
empties.append(i)
for i in empties:
data[i+1] = []
        data[1:] = filter(None, data[1:])  # remove empty rows (tests)
for i, l in enumerate(data):
for j, s in enumerate(l):
data[i][j] = data[i][j].strip()
self.parameters = json.dumps(data) #restore
# Template integrity --------------------------
try:
descr_template = self.TestTemplate(self.description)
except Exception, e:
raise ValidationError("The description is an invalid template: '%s'" % str(e))
try:
name_template = self.TestTemplate(self.name)
except Exception, e:
raise ValidationError("The name is an invalid template: '%s'" % str(e))
# JSON integrity -------------------------------
try:
            # expected: a list of lists.
            # the first inner list holds the column names (keywords);
            # each subsequent list holds the values of one row.
            # each value is a string.
            # the data must contain at least the first object (the list of columns)
assert isinstance(data, list), "The data returned is not a list (expected list of objects). Found '%s'" % data.__class__
assert data, "The data cannot be empty."
column_names = data[0]
assert isinstance(column_names, list), "The first object in data must be a list of column names (keywords). Found '%s'" % column_names.__class__
rows = data[1:]
for row in rows:
assert isinstance(row, list), "The data is a list of non-lists (one object is the type '%s'). Expected a list of list." % row.__class__
assert len(row) == len(column_names), "The row size %i is distinct of the column count %i." % (len(row), len(column_names))
except Exception, e:
raise ValidationError("Internal Error (possible BUG): The parameters is a valid json but has a diferent format. '%s'" % str(e))
# Table integrity ---------------------------
empty_columns = [i for i, c in enumerate(column_names) if not c.strip()]
for j, r in enumerate(rows):
for i in empty_columns:
if r[i].strip():
raise ValidationError("The test (row) %i, keyword (column) %i has the value '%s' but that column has not name (not keyword defined)." % (j+1, i+1, r[i]))
normalized_column_names = filter(None, map(lambda s: s.strip(), column_names))
if len(set(normalized_column_names)) != len(normalized_column_names):
raise ValidationError("Two or more column names (keywords) are repeated.")
# Template - Table relationship ---------------------
column_names = set(normalized_column_names)
required = set(descr_template.required())
required_but_not_defined = required.difference(column_names)
used_in_name_but_not_defined = set(name_template.required()).difference(column_names)
defined_but_not_required = column_names.difference(required.union(set(name_template.required())))
errors = []
if required_but_not_defined:
errors.append(("Some keywords are required (in the description) to be defined: %s" % str(map(str, required_but_not_defined))))
if defined_but_not_required:
errors.append(("Some keywords were defined but they are not used: %s" % str(map(str, defined_but_not_required))))
if used_in_name_but_not_defined:
errors.append(("Some keywords are required (in the name) to be defined: %s" % str(map(str, used_in_name_but_not_defined))))
if column_names and not rows:
errors.append(("You are using keywords but you don't define any test."))
if errors:
raise ValidationError("\n".join(errors))
class Comment(models.Model):
test = models.ForeignKey(TestSet, null=True, blank=True)
responding_to = models.ForeignKey('self', blank=True, null=True)
text = models.TextField()
def clean(self):
if self.test is None and self.responding_to is None:
raise ValidationError("The comment must refer to a test or to another comment.")
models.Model.clean(self)
def __unicode__(self):
t = self.text[:64]
if len(self.text) > 64:
t += "..."
return t
| gpl-2.0 |
brianlsharp/MissionPlanner | Lib/encodings/unicode_escape.py | 103 | 1229 | """ Python 'unicode-escape' Codec
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
# Note: Binding these as C functions will result in the class not
# converting them to methods. This is intended.
encode = codecs.unicode_escape_encode
decode = codecs.unicode_escape_decode
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.unicode_escape_encode(input, self.errors)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.unicode_escape_decode(input, self.errors)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='unicode-escape',
encode=Codec.encode,
decode=Codec.decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamwriter=StreamWriter,
streamreader=StreamReader,
)
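### Usage sketch (illustrative): the codec round-trips Python escape sequences
###   u'caf\xe9'.encode('unicode_escape')   # -> 'caf\\xe9'
###   'caf\\xe9'.decode('unicode_escape')   # -> u'caf\xe9'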
| gpl-3.0 |
varunkamra/kuma | vendor/packages/pygments/lexers/algebra.py | 72 | 6167 | # -*- coding: utf-8 -*-
"""
pygments.lexers.algebra
~~~~~~~~~~~~~~~~~~~~~~~
Lexers for computer algebra systems.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['GAPLexer', 'MathematicaLexer', 'MuPADLexer']
class GAPLexer(RegexLexer):
"""
For `GAP <http://www.gap-system.org>`_ source code.
.. versionadded:: 2.0
"""
name = 'GAP'
aliases = ['gap']
filenames = ['*.g', '*.gd', '*.gi', '*.gap']
tokens = {
'root': [
(r'#.*$', Comment.Single),
(r'"(?:[^"\\]|\\.)*"', String),
(r'\(|\)|\[|\]|\{|\}', Punctuation),
(r'''(?x)\b(?:
if|then|elif|else|fi|
for|while|do|od|
repeat|until|
break|continue|
function|local|return|end|
rec|
quit|QUIT|
IsBound|Unbind|
TryNextMethod|
Info|Assert
)\b''', Keyword),
(r'''(?x)\b(?:
true|false|fail|infinity
)\b''',
Name.Constant),
(r'''(?x)\b(?:
(Declare|Install)([A-Z][A-Za-z]+)|
BindGlobal|BIND_GLOBAL
)\b''',
Name.Builtin),
(r'\.|,|:=|;|=|\+|-|\*|/|\^|>|<', Operator),
(r'''(?x)\b(?:
and|or|not|mod|in
)\b''',
Operator.Word),
(r'''(?x)
(?:\w+|`[^`]*`)
(?:::\w+|`[^`]*`)*''', Name.Variable),
(r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
(r'\.[0-9]+(?:e[0-9]+)?', Number),
(r'.', Text)
]
}
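# Usage sketch (assumes a standard pygments install; the snippet is illustrative):
#
#   from pygments import highlight
#   from pygments.formatters import TerminalFormatter
#   print(highlight('if x > 0 then Print(x); fi;', GAPLexer(), TerminalFormatter()))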
class MathematicaLexer(RegexLexer):
"""
Lexer for `Mathematica <http://www.wolfram.com/mathematica/>`_ source code.
.. versionadded:: 2.0
"""
name = 'Mathematica'
aliases = ['mathematica', 'mma', 'nb']
filenames = ['*.nb', '*.cdf', '*.nbp', '*.ma']
mimetypes = ['application/mathematica',
'application/vnd.wolfram.mathematica',
'application/vnd.wolfram.mathematica.package',
'application/vnd.wolfram.cdf']
# http://reference.wolfram.com/mathematica/guide/Syntax.html
operators = (
";;", "=", "=.", "!=" "==", ":=", "->", ":>", "/.", "+", "-", "*", "/",
"^", "&&", "||", "!", "<>", "|", "/;", "?", "@", "//", "/@", "@@",
"@@@", "~~", "===", "&", "<", ">", "<=", ">=",
)
punctuation = (",", ";", "(", ")", "[", "]", "{", "}")
def _multi_escape(entries):
return '(%s)' % ('|'.join(re.escape(entry) for entry in entries))
tokens = {
'root': [
(r'(?s)\(\*.*?\*\)', Comment),
(r'([a-zA-Z]+[A-Za-z0-9]*`)', Name.Namespace),
(r'([A-Za-z0-9]*_+[A-Za-z0-9]*)', Name.Variable),
(r'#\d*', Name.Variable),
(r'([a-zA-Z]+[a-zA-Z0-9]*)', Name),
(r'-?[0-9]+\.[0-9]*', Number.Float),
(r'-?[0-9]*\.[0-9]+', Number.Float),
(r'-?[0-9]+', Number.Integer),
(words(operators), Operator),
(words(punctuation), Punctuation),
(r'".*?"', String),
(r'\s+', Text.Whitespace),
],
}
class MuPADLexer(RegexLexer):
"""
A `MuPAD <http://www.mupad.com>`_ lexer.
Contributed by Christopher Creutzig <[email protected]>.
.. versionadded:: 0.8
"""
name = 'MuPAD'
aliases = ['mupad']
filenames = ['*.mu']
tokens = {
'root': [
(r'//.*?$', Comment.Single),
(r'/\*', Comment.Multiline, 'comment'),
(r'"(?:[^"\\]|\\.)*"', String),
(r'\(|\)|\[|\]|\{|\}', Punctuation),
(r'''(?x)\b(?:
next|break|end|
axiom|end_axiom|category|end_category|domain|end_domain|inherits|
if|%if|then|elif|else|end_if|
case|of|do|otherwise|end_case|
while|end_while|
repeat|until|end_repeat|
for|from|to|downto|step|end_for|
proc|local|option|save|begin|end_proc|
delete|frame
)\b''', Keyword),
(r'''(?x)\b(?:
DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
)\b''', Name.Class),
(r'''(?x)\b(?:
PI|EULER|E|CATALAN|
NIL|FAIL|undefined|infinity|
TRUE|FALSE|UNKNOWN
)\b''',
Name.Constant),
(r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
(r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
(r'''(?x)\b(?:
and|or|not|xor|
assuming|
div|mod|
union|minus|intersect|in|subset
)\b''',
Operator.Word),
(r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
# (r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
(r'''(?x)
((?:[a-zA-Z_#][\w#]*|`[^`]*`)
(?:::[a-zA-Z_#][\w#]*|`[^`]*`)*)(\s*)([(])''',
bygroups(Name.Function, Text, Punctuation)),
(r'''(?x)
(?:[a-zA-Z_#][\w#]*|`[^`]*`)
(?:::[a-zA-Z_#][\w#]*|`[^`]*`)*''', Name.Variable),
(r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
(r'\.[0-9]+(?:e[0-9]+)?', Number),
(r'.', Text)
],
'comment': [
(r'[^*/]', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline)
]
}
| mpl-2.0 |
dtroyer/python-openstacksdk | openstack/tests/functional/cloud/test_groups.py | 1 | 4031 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
"""
test_groups
----------------------------------
Functional tests for `shade` keystone group resource.
"""
import openstack.cloud
from openstack.tests.functional.cloud import base
class TestGroup(base.BaseFunctionalTestCase):
def setUp(self):
super(TestGroup, self).setUp()
i_ver = self.operator_cloud.config.get_api_version('identity')
if i_ver in ('2', '2.0'):
self.skipTest('Identity service does not support groups')
self.group_prefix = self.getUniqueString('group')
self.addCleanup(self._cleanup_groups)
def _cleanup_groups(self):
exception_list = list()
for group in self.operator_cloud.list_groups():
if group['name'].startswith(self.group_prefix):
try:
self.operator_cloud.delete_group(group['id'])
except Exception as e:
exception_list.append(str(e))
continue
if exception_list:
# Raise an error: we must make users aware that something went
# wrong
raise openstack.cloud.OpenStackCloudException(
'\n'.join(exception_list))
def test_create_group(self):
group_name = self.group_prefix + '_create'
group = self.operator_cloud.create_group(group_name, 'test group')
for key in ('id', 'name', 'description', 'domain_id'):
self.assertIn(key, group)
self.assertEqual(group_name, group['name'])
self.assertEqual('test group', group['description'])
def test_delete_group(self):
group_name = self.group_prefix + '_delete'
group = self.operator_cloud.create_group(group_name, 'test group')
self.assertIsNotNone(group)
self.assertTrue(self.operator_cloud.delete_group(group_name))
results = self.operator_cloud.search_groups(
filters=dict(name=group_name))
self.assertEqual(0, len(results))
def test_delete_group_not_exists(self):
self.assertFalse(self.operator_cloud.delete_group('xInvalidGroupx'))
def test_search_groups(self):
group_name = self.group_prefix + '_search'
# Shouldn't find any group with this name yet
results = self.operator_cloud.search_groups(
filters=dict(name=group_name))
self.assertEqual(0, len(results))
# Now create a new group
group = self.operator_cloud.create_group(group_name, 'test group')
self.assertEqual(group_name, group['name'])
# Now we should find only the new group
results = self.operator_cloud.search_groups(
filters=dict(name=group_name))
self.assertEqual(1, len(results))
self.assertEqual(group_name, results[0]['name'])
def test_update_group(self):
group_name = self.group_prefix + '_update'
group_desc = 'test group'
group = self.operator_cloud.create_group(group_name, group_desc)
self.assertEqual(group_name, group['name'])
self.assertEqual(group_desc, group['description'])
updated_group_name = group_name + '_xyz'
updated_group_desc = group_desc + ' updated'
updated_group = self.operator_cloud.update_group(
group_name,
name=updated_group_name,
description=updated_group_desc)
self.assertEqual(updated_group_name, updated_group['name'])
self.assertEqual(updated_group_desc, updated_group['description'])
| apache-2.0 |
SophieTh/und_Sophie_2016 | pySRU/tests/SimulationTest.py | 1 | 5980 | # coding: utf-8
# /*##########################################################################
#
# Copyright (c) 2016 European Synchrotron Radiation Facility
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# ###########################################################################*/
__authors__ = ["S Thery, M Glass, M Sanchez del Rio - ESRF ISDD Advanced Analysis and Modelling"]
__license__ = "MIT"
__date__ = "31/08/2016"
import unittest
import numpy as np
import scipy.constants as codata
from pySRU.MagneticStructureUndulatorPlane import MagneticStructureUndulatorPlane as Undulator
from pySRU.ElectronBeam import ElectronBeam
from pySRU.Source import Source
from pySRU.TrajectoryFactory import TrajectoryFactory,TRAJECTORY_METHOD_ANALYTIC, TRAJECTORY_METHOD_ODE
from pySRU.RadiationFactory import RadiationFactory , RADIATION_METHOD_APPROX_FARFIELD,RADIATION_METHOD_NEAR_FIELD
from pySRU.Simulation import Simulation,create_simulation
class UndulatorSimulationTest(unittest.TestCase):
def test_simulation(self):
electron_beam_test = ElectronBeam(Electron_energy=1.3, I_current=1.0)
beam_ESRF = ElectronBeam(Electron_energy=6.0, I_current=0.2)
undulator_test = Undulator(K=1.87, period_length=0.035, length=0.035 * 14)
ESRF18 = Undulator(K=1.68, period_length=0.018, length=2.0)
sim_test = create_simulation(magnetic_structure=undulator_test,electron_beam=electron_beam_test,
traj_method=TRAJECTORY_METHOD_ANALYTIC,rad_method=RADIATION_METHOD_NEAR_FIELD)
        self.assertIsNotNone(sim_test.radiation.distance)
source_test=sim_test.source
self.assertFalse(all(sim_test.trajectory_fact.initial_condition==
source_test.choose_initial_contidion_automatic()))
ref=sim_test.copy()
rad_max = sim_test.radiation.max()
# test change
sim_test.change_radiation_method(RADIATION_METHOD_APPROX_FARFIELD)
self.assertEqual(sim_test.radiation_fact.method, RADIATION_METHOD_APPROX_FARFIELD)
self.assertFalse(ref.radiation_fact.method==sim_test.radiation_fact.method)
self.assertFalse(np.all(ref.radiation.intensity == sim_test.radiation.intensity))
self.assertAlmostEqual(ref.radiation.intensity[0][0]/rad_max, sim_test.radiation.intensity[0][0]/rad_max, 3)
sim_test.change_trajectory_method(TRAJECTORY_METHOD_ODE)
self.assertEqual(sim_test.trajectory_fact.method, TRAJECTORY_METHOD_ODE)
self.assertFalse(ref.trajectory_fact.method==sim_test.trajectory_fact.method)
time_diff=np.abs(ref.trajectory.t - sim_test.trajectory.t)
self.assertTrue(np.all(time_diff<=1e-19))
self.assertFalse(np.all(ref.trajectory.x == sim_test.trajectory.x))
self.assertFalse(np.all(ref.radiation.intensity == sim_test.radiation.intensity))
rad_max = sim_test.radiation.max()
self.assertAlmostEqual(ref.radiation.intensity[0][0]/rad_max, sim_test.radiation.intensity[0][0]/rad_max, 1)
sim_test.change_Nb_pts_trajectory(ref.trajectory_fact.Nb_pts+1)
self.assertEqual(sim_test.trajectory_fact.Nb_pts,ref.trajectory_fact.Nb_pts+1)
self.assertEqual(sim_test.trajectory.nb_points(), ref.trajectory_fact.Nb_pts+1)
self.assertFalse(ref.trajectory_fact.Nb_pts == sim_test.trajectory_fact.Nb_pts)
self.assertAlmostEqual(ref.radiation.intensity[0][0]/rad_max,sim_test.radiation.intensity[0][0]/rad_max,1)
sim_test.change_Nb_pts_radiation(100)
self.assertEqual(sim_test.radiation_fact.Nb_pts,100)
self.assertFalse(np.all(ref.radiation.X == sim_test.radiation.X))
self.assertTrue(ref.radiation.X.min() == sim_test.radiation.X.min())
self.assertTrue(ref.radiation.X.max() == sim_test.radiation.X.max())
self.assertTrue(ref.radiation.Y.min() == sim_test.radiation.Y.min())
self.assertTrue(ref.radiation.Y.max() == sim_test.radiation.Y.max())
self.assertFalse(len(ref.radiation.X) == len(sim_test.radiation.X))
sim_test.change_distance(50)
self.assertEqual(sim_test.radiation.distance,50)
self.assertFalse(ref.radiation.distance == sim_test.radiation.distance)
sim_test.change_photon_frequency(source_test.harmonic_frequency(1) * 0.8)
self.assertEqual(sim_test.radiation_fact.photon_frequency,source_test.harmonic_frequency(1)*0.8)
self.assertFalse(ref.radiation_fact.photon_frequency == sim_test.radiation_fact.photon_frequency)
# nb_pts=np.arange(500,2001,500,dtype='int')
# err=sim_test.error_radiation_method_nb_pts_traj(RADIATION_METHOD_APPROX_FARFIELD,nb_pts=nb_pts)
# self.assertLessEqual((err.max() / rad_max), 1e-2, 1)
#
# distance=np.arange(20,101,20,dtype='int')
# err = sim_test.error_radiation_method_distance(RADIATION_METHOD_APPROX_FARFIELD, D=distance)
# self.assertLessEqual(err.max()/rad_max, 1e-2, 1)
| mit |
sanja7s/SR_Twitter | src_SR/articles_in_CVs.py | 1 | 3380 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Join article names for the popular concepts found in each community
"""
from collections import defaultdict
import glob, os
IN_DIR = "../../../DATA/CV"
#########################################################
# SR
#########################################################
X = "0.6"
working_subfolder = "SR_communities/"
# the communities we analyze (from the SR graph)
spec_users = working_subfolder + "communitiesSR_" + str(X) + ".txt"
#########################################################
#########################################################
# Mention
#########################################################
#X = "" #dummy
#working_subfolder = "mention_communities/"
# the communities we analyze (from the mention graph)
#spec_users = working_subfolder + "communitiesMent" + str(X) + ".txt"
#########################################################
# return the top communities (those with at least sizeN members)
# as a dictionary: {community_id: defaultdict{id_usr1:1, id_usr2:1, ...}}
# plus a dict (res3) that maps each user in those communities to its community id,
# and finally the full set of communities (not limited in size) with a similar map in res4
def read_in_communities(sizeN=300):
res = defaultdict(int)
res7s = defaultdict(int)
res3 = defaultdict(int)
res3 = defaultdict(lambda: -1, res3)
res4 = defaultdict(int)
res4 = defaultdict(lambda: -1, res4)
f = open(spec_users, "r")
for line in f:
line = line.split()
user_id = line[0]
com_id = line[1]
if com_id not in res:
res[com_id] = defaultdict(int)
res[com_id][user_id] = 1
for com in res:
if len(res[com]) >= sizeN:
res7s[com] = res[com]
for usr in res[com]:
res4[usr] = com
for com in res7s:
for usr in res7s[com]:
res3[usr] = com
return res7s, res3, res, res4
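# The communities file is expected to hold "user_id community_id" pairs,
# one per line, e.g. (illustrative values):
#   1234567 0
#   7654321 0
#   1112223 5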
f_in_article_IDs = "articles_selected"
#
# read in all article IDs
#
def read_article_IDs(): #TODO fin
article_IDs = defaultdict(int)
cnt = 0
with open(f_in_article_IDs) as f:
for line in f:
line = line[:-1].split('\t')
aid = line[0]
aname = line[1]
article_IDs[aid] = aname
if cnt % 10000 == 0:
print line
cnt += 1
return article_IDs
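# "articles_selected" is expected to hold tab-separated "article_id<TAB>article_name"
# lines, e.g. (illustrative values): "12345\tSome_Article_Title"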
#
# extract all concepts in a community and sort them by popularity
#
def save_popular_articles(com_id, article_IDs):
f_in = working_subfolder + "top_concepts_SR_" + str(X) + "_COM_" + com_id + ".tab"
f = open(f_in, "r")
f_out = working_subfolder + com_id + "_COM_" + str(X) + "top_articles.tab"
f2 = open(f_out, "w")
for line in f:
line = line[:-1].split('\t')
aid = line[0]
aTF = line[1]
aname = article_IDs[aid]
f2.write(str(aTF) + '\t' + str(aname) + '\t' + str(aid) + '\n')
print "Processed community %s " % (com_id)
###
### call the others
###
def main():
os.chdir(IN_DIR)
# number of nodes in a community
sizeN = 300
top_communities, com_id_map, all_communities, all_com_id_map = read_in_communities(sizeN)
N = len(top_communities)
print N, "top communities found ", "of size larger than ", sizeN
NALL = len(all_communities)
print NALL, "all communities found"
article_IDs = read_article_IDs()
#################################################################
for community in top_communities:
save_popular_articles(community, article_IDs)
#################################################################
main()
| mit |
nextgis-extra/tests | lib_gdal/gdrivers/isis2.py | 1 | 3417 | #!/usr/bin/env python
###############################################################################
# $Id: isis2.py 32163 2015-12-13 17:44:50Z goatbar $
#
# Project: GDAL/OGR Test Suite
# Purpose: Test read functionality for ISIS2 driver.
# Author: Even Rouault <even dot rouault @ mines-paris dot org>
#
###############################################################################
# Copyright (c) 2008, Even Rouault <even dot rouault at mines-paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
sys.path.append( '../pymod' )
import gdaltest
###############################################################################
# Read a truncated and modified version of arvidson_original.cub from
# ftp://ftpflag.wr.usgs.gov/dist/pigpen/venus/venustopo_download/ovda_dtm.zip
def isis2_1():
tst = gdaltest.GDALTest( 'ISIS2', 'arvidson_original_truncated.cub', 1, 382 )
expected_prj = """PROJCS["SIMPLE_CYLINDRICAL VENUS",
GEOGCS["GCS_VENUS",
DATUM["D_VENUS",
SPHEROID["VENUS",6051000,0]],
PRIMEM["Reference_Meridian",0],
UNIT["degree",0.0174532925199433]],
PROJECTION["Equirectangular"],
PARAMETER["latitude_of_origin",0],
PARAMETER["central_meridian",0],
PARAMETER["false_easting",0],
PARAMETER["false_northing",0]],
PARAMETER["standard_parallel_1",-6.5]"""
expected_gt = (10157400.403618813, 1200.0000476837158, 0.0, -585000.02324581146, 0.0, -1200.0000476837158)
return tst.testOpen( check_prj = expected_prj,
check_gt = expected_gt )
###############################################################################
# Test simple creation on disk.
def isis2_2():
tst = gdaltest.GDALTest( 'ISIS2', 'byte.tif', 1, 4672 )
return tst.testCreate()
###############################################################################
# Test a different data type with some options.
def isis2_3():
tst = gdaltest.GDALTest( 'ISIS2', 'float32.tif', 1, 4672,
options = ['LABELING_METHOD=DETACHED', 'IMAGE_EXTENSION=qub'] )
return tst.testCreateCopy( vsimem=1 )
gdaltest_list = [
isis2_1,
isis2_2,
isis2_3 ]
if __name__ == '__main__':
gdaltest.setup_run( 'isis2' )
gdaltest.run_tests( gdaltest_list )
gdaltest.summarize()
| gpl-2.0 |
selwin/Django-facebook | open_facebook/api.py | 1 | 32830 | # -*- coding: utf-8 -*-
'''
Open Facebook allows you to use Facebook's open graph API with simple python code
**Features**
* Supported and maintained
* Tested so people can contribute
* Facebook exceptions are mapped
* Logging
**Basic examples**::
facebook = OpenFacebook(access_token)
# Getting info about me
facebook.get('me')
# Learning some more about fashiolista
facebook.get('fashiolista')
# Writing your first comment
facebook.set('fashiolista/comments', message='I love Fashiolista!')
# Posting to a users wall
facebook.set('me/feed', message='check out fashiolista',
url='http://www.fashiolista.com')
# Liking a page
facebook.set('fashiolista/likes')
# Getting who likes cocacola
    facebook.get('cocacola/likes')
# Use fql to retrieve your name
facebook.fql('SELECT name FROM user WHERE uid = me()')
# Executing fql in batch
facebook.batch_fql([
'SELECT uid, name, pic_square FROM user WHERE uid = me()',
'SELECT uid, rsvp_status FROM event_member WHERE eid=12345678',
])
# Uploading pictures
photo_urls = [
'http://e.fashiocdn.com/images/entities/0/7/B/I/9/0.365x365.jpg',
'http://e.fashiocdn.com/images/entities/0/5/e/e/r/0.365x365.jpg',
]
for photo in photo_urls:
print facebook.set('me/feed', message='Check out Fashiolista',
picture=photo, url='http://www.fashiolista.com')
**Getting an access token**
Once you get your access token, Open Facebook gives you access to the Facebook API
There are 3 ways of getting a facebook access_token and these are currently
implemented by Django Facebook.
1. code is passed as request parameter and traded for an
access_token using the api
2. code is passed through a signed cookie and traded for an access_token
3. access_token is passed directly (retrieved through javascript, which
would be bad security, or through one of the mobile flows.)
If you are looking to develop your own flow for a different framework have a look at
Facebook's documentation:
http://developers.facebook.com/docs/authentication/
Also have a look at the :class:`.FacebookRequired` decorator and :func:`get_persistent_graph` function to
understand the required functionality
**Api docs**:
'''
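# Quick usage sketch (values are placeholders): trading an OAuth ``code`` for an
# access token with the helpers defined below
#
#   token_data = FacebookAuthorization.convert_code(
#       code, redirect_uri='http://example.com/facebook/connect/')
#   facebook = OpenFacebook(token_data['access_token'])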
from django.http import QueryDict
from django.utils import six
from django.utils.http import urlencode
from django_facebook import settings as facebook_settings
from open_facebook import exceptions as facebook_exceptions
from open_facebook.utils import json, encode_params, send_warning, memoized, \
stop_statsd, start_statsd
import logging
from django_facebook.utils import to_int
import ssl
import re
try:
# python 2 imports
from urlparse import urlparse
from urllib2 import build_opener, HTTPError, URLError
except ImportError:
# python 3 imports
from urllib.error import HTTPError, URLError
from urllib.parse import urlparse
from urllib.request import build_opener
logger = logging.getLogger(__name__)
# base timeout, actual timeout will increase when requests fail
REQUEST_TIMEOUT = 10
# two retries was too little, sometimes facebook is a bit flaky
REQUEST_ATTEMPTS = 3
class FacebookConnection(object):
'''
Shared utility class implementing the parsing
of Facebook API responses
'''
api_url = 'https://graph.facebook.com/'
# this older url is still used for fql requests
old_api_url = 'https://api.facebook.com/method/'
@classmethod
def request(cls, path='', post_data=None, old_api=False, **params):
'''
Main function for sending the request to facebook
**Example**::
FacebookConnection.request('me')
:param path:
The path to request, examples: /me/friends/, /me/likes/
:param post_data:
A dictionary of data to post
:param parms:
The get params to include
'''
api_base_url = cls.old_api_url if old_api else cls.api_url
if getattr(cls, 'access_token', None):
params['access_token'] = cls.access_token
url = '%s%s?%s' % (api_base_url, path, urlencode(params))
response = cls._request(url, post_data)
return response
@classmethod
def _request(cls, url, post_data=None, timeout=REQUEST_TIMEOUT,
attempts=REQUEST_ATTEMPTS):
# change fb__explicitly_shared to fb:explicitly_shared
if post_data:
post_data = dict(
(k.replace('__', ':'), v) for k, v in post_data.items())
logger.info('requesting url %s with post data %s', url, post_data)
post_request = (post_data is not None or 'method=post' in url)
if post_request and facebook_settings.FACEBOOK_READ_ONLY:
logger.info('running in readonly mode')
response = dict(id=123456789, setting_read_only=True)
return response
# nicely identify ourselves before sending the request
opener = build_opener()
opener.addheaders = [('User-agent', 'Open Facebook Python')]
# get the statsd path to track response times with
path = urlparse(url).path
statsd_path = path.replace('.', '_')
# give it a few shots, connection is buggy at times
timeout_mp = 0
while attempts:
# gradually increase the timeout upon failure
timeout_mp += 1
extended_timeout = timeout * timeout_mp
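            # e.g. with REQUEST_TIMEOUT=10 and REQUEST_ATTEMPTS=3 the
            # successive deadlines are 10s, 20s and 30s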
response_file = None
encoded_params = encode_params(post_data) if post_data else None
post_string = (urlencode(encoded_params).encode('utf-8')
if post_data else None)
try:
start_statsd('facebook.%s' % statsd_path)
try:
response_file = opener.open(
url, post_string, timeout=extended_timeout)
response = response_file.read().decode('utf8')
except (HTTPError,) as e:
response_file = e
response = response_file.read().decode('utf8')
                    # Facebook sends error codes for many of their flows
# we still want the json to allow for proper handling
msg_format = 'FB request, error type %s, code %s'
logger.warn(msg_format, type(e), getattr(e, 'code', None))
# detect if its a server or application error
server_error = cls.is_server_error(e, response)
if server_error:
# trigger a retry
raise URLError(
'Facebook is down %s' % response)
break
except (HTTPError, URLError, ssl.SSLError) as e:
# These are often temporary errors, so we will retry before
# failing
error_format = 'Facebook encountered a timeout (%ss) or error %s'
logger.warn(error_format, extended_timeout, str(e))
attempts -= 1
if not attempts:
# if we have no more attempts actually raise the error
error_instance = facebook_exceptions.convert_unreachable_exception(
e)
error_msg = 'Facebook request failed after several retries, raising error %s'
logger.warn(error_msg, error_instance)
raise error_instance
finally:
if response_file:
response_file.close()
stop_statsd('facebook.%s' % statsd_path)
        # Facebook's response is either:
        # - valid json
        # - a string which is a querydict (a=b&c=d ... etc)
        # - an HTML page stating FB is having trouble (but that shouldn't
        #   reach this part of the code)
try:
parsed_response = json.loads(response)
            logger.info('facebook sent response %s', parsed_response)
except Exception as e:
# using exception because we need to support multiple json libs :S
parsed_response = QueryDict(response, True)
            logger.info('facebook sent response %s', parsed_response)
if parsed_response and isinstance(parsed_response, dict):
# of course we have two different syntaxes
if parsed_response.get('error'):
cls.raise_error(parsed_response['error']['type'],
parsed_response['error']['message'],
parsed_response['error'].get('code'))
elif parsed_response.get('error_code'):
cls.raise_error(parsed_response['error_code'],
parsed_response['error_msg'])
return parsed_response
@classmethod
def is_server_error(cls, e, response):
'''
Checks an HTTPError to see if Facebook is down or we are using the
API in the wrong way
Facebook doesn't clearly distinquish between the two, so this is a bit
of a hack
'''
from open_facebook.utils import is_json
server_error = False
if hasattr(e, 'code') and e.code == 500:
server_error = True
# Facebook status codes are used for application logic
# http://fbdevwiki.com/wiki/Error_codes#User_Permission_Errors
# The only way I know to detect an actual server error is to check if
# it looks like their error page
# TODO: think of a better solution....
error_matchers = [
'<title>Facebook | Error</title>',
'Sorry, something went wrong.'
]
is_error_page = all(
[matcher in response for matcher in error_matchers])
if is_error_page:
server_error = True
# if it looks like json, facebook is probably not down
if is_json(response):
server_error = False
return server_error
@classmethod
def raise_error(cls, error_type, message, error_code=None):
'''
Lookup the best error class for the error and raise it
**Example**::
FacebookConnection.raise_error(10, 'OAuthException')
:param error_type:
the error type from the facebook api call
:param message:
the error message from the facebook api call
:param error_code:
            optionally the error code which facebook sends
'''
default_error_class = facebook_exceptions.OpenFacebookException
# get the error code
error_code = error_code or cls.get_code_from_message(message)
# also see http://fbdevwiki.com/wiki/Error_codes#User_Permission_Errors
logger.info('Trying to match error code %s to error class', error_code)
# lookup by error code takes precedence
error_class = cls.match_error_code(error_code)
# try to get error class by direct lookup
if not error_class:
if not isinstance(error_type, int):
error_class = getattr(facebook_exceptions, error_type, None)
if error_class and not issubclass(error_class, default_error_class):
error_class = None
# hack for missing parameters
if 'Missing' in message and 'parameter' in message:
error_class = facebook_exceptions.MissingParameter
# hack for Unsupported delete request
if 'Unsupported delete request' in message:
error_class = facebook_exceptions.UnsupportedDeleteRequest
# fallback to the default
if not error_class:
error_class = default_error_class
logger.info('Matched error to class %s', error_class)
error_message = message
if error_code:
# this is handy when adding new exceptions for facebook errors
error_message = u'%s (error code %s)' % (message, error_code)
raise error_class(error_message)
@classmethod
def get_code_from_message(cls, message):
# map error classes to facebook error codes
# find the error code
error_code = None
        error_code_re = re.compile(r'\(#(\d+)\)')
matches = error_code_re.match(message)
matching_groups = matches.groups() if matches else None
if matching_groups:
error_code = to_int(matching_groups[0]) or None
return error_code
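    # Example (sketch): get_code_from_message('(#200) Permissions error')
    # returns 200, since Facebook prefixes many error messages with "(#code)"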
@classmethod
def get_sorted_exceptions(cls):
from open_facebook.exceptions import get_exception_classes
exception_classes = get_exception_classes()
exception_classes.sort(key=lambda e: e.range())
return exception_classes
@classmethod
def match_error_code(cls, error_code):
'''
Return the right exception class for the error code
'''
exception_classes = cls.get_sorted_exceptions()
error_class = None
for class_ in exception_classes:
codes_list = class_.codes_list()
# match the error class
matching_error_class = None
for code in codes_list:
if isinstance(code, tuple):
start, stop = code
if error_code and start <= error_code <= stop:
matching_error_class = class_
logger.info('Matched error on code %s', code)
elif isinstance(code, six.integer_types):
if int(code) == error_code:
matching_error_class = class_
logger.info('Matched error on code %s', code)
else:
raise ValueError(
"Don't know how to handle %s of type %s" % (code, type(code)))
# tell about the happy news if we found something
if matching_error_class:
error_class = matching_error_class
break
return error_class
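# A minimal sketch of the lookup contract match_error_code relies on: every
# exception class exposes codes_list() with plain ints and/or (start, stop)
# ranges, and the first matching class (in range() order) wins.
# Hypothetical example:
#
# class PermissionException(OpenFacebookException):
#     @classmethod
#     def codes_list(cls):
#         return [10, (200, 299)]  # code 10 plus the 200-299 range
#
# FacebookConnection.match_error_code(250)  # -> PermissionException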
class FacebookAuthorization(FacebookConnection):
'''
Methods for getting us an access token
There are several flows we must support
* js authentication flow (signed cookie)
* facebook app authentication flow (signed cookie)
* facebook oauth redirect (code param in url)
These 3 options need to be converted to an access token
Also handles several testing scenarios
* get app access token
* create test user
* get_or_create_test_user
'''
@classmethod
def convert_code(cls, code,
redirect_uri='http://local.mellowmorning.com:8000/facebook/connect/'):
'''
Turns a code into an access token
**Example**::
FacebookAuthorization.convert_code(code)
:param code:
The code to convert
:param redirect_uri:
The redirect uri with which the code was requested
:returns: dict
'''
kwargs = cls._client_info()
kwargs['code'] = code
kwargs['redirect_uri'] = redirect_uri
response = cls.request('oauth/access_token', **kwargs)
return response
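# Note: the response is the parsed oauth/access_token payload; it typically
# contains 'access_token' and an expiry, though the exact keys depend on the
# Graph API version being targeted.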
@classmethod
def extend_access_token(cls, access_token):
'''
https://developers.facebook.com/roadmap/offline-access-removal/
We can extend the token only once per day
Normal short lived tokens last 1-2 hours
Long lived tokens (given by extending) last 60 days
**Example**::
FacebookAuthorization.extend_access_token(access_token)
:param access_token:
The access_token to extend
:returns: dict
'''
kwargs = cls._client_info()
kwargs['grant_type'] = 'fb_exchange_token'
kwargs['fb_exchange_token'] = access_token
response = cls.request('oauth/access_token', **kwargs)
return response
@classmethod
def _client_info(cls):
kwargs = dict(client_id=facebook_settings.FACEBOOK_APP_ID)
kwargs['client_secret'] = facebook_settings.FACEBOOK_APP_SECRET
return kwargs
@classmethod
def parse_signed_data(cls, signed_request,
secret=facebook_settings.FACEBOOK_APP_SECRET):
'''
Thanks to
http://stackoverflow.com/questions/3302946/how-to-base64-url-decode-in-python
and
http://sunilarora.org/parsing-signedrequest-parameter-in-python-bas
'''
from open_facebook.utils import base64_url_decode_php_style, smart_str
encoded_sig, payload = signed_request.split('.', 1)
from open_facebook.utils import json
sig = base64_url_decode_php_style(encoded_sig)
import hmac
import hashlib
data = json.loads(base64_url_decode_php_style(payload).decode('utf-8'))
algo = data.get('algorithm', '').upper()
if algo != 'HMAC-SHA256':
error_format = 'Unknown algorithm %s, we only support HMAC-SHA256'
error_message = error_format % algo
send_warning(error_message)
logger.error('Unknown algorithm')
return None
else:
expected_sig = hmac.new(smart_str(secret), msg=smart_str(payload),
digestmod=hashlib.sha256).digest()
# constant-time comparison avoids leaking timing information
if not hmac.compare_digest(sig, expected_sig):
error_format = "Signature %s didn't match the expected signature %s"
error_message = error_format % (sig, expected_sig)
send_warning(error_message)
return None
else:
logger.debug('valid signed request received..')
return data
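# A signed_request is two base64url-encoded chunks joined by a dot,
# "<signature>.<payload>", where the payload decodes to JSON such as
# {"algorithm": "HMAC-SHA256", "user_id": ..., "oauth_token": ...}.
# Example call (hypothetical token):
# FacebookAuthorization.parse_signed_data('vlXgu6...=.eyJhbGdvcml0aG0iOi...')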
@classmethod
def get_app_access_token(cls):
'''
Get the access_token for the app that can be used for
insights and creating test users
application_id = retrieved from the developer page
application_secret = retrieved from the developer page
returns the application access_token
'''
kwargs = {
'grant_type': 'client_credentials',
'client_id': facebook_settings.FACEBOOK_APP_ID,
'client_secret': facebook_settings.FACEBOOK_APP_SECRET,
}
response = cls.request('oauth/access_token', **kwargs)
return response['access_token']
# classmethod must wrap memoized, not the other way around, so the
# descriptor binding works
@classmethod
@memoized
def get_cached_app_access_token(cls):
'''
Caches the access token in memory, good for speeding up testing
'''
app_access_token = cls.get_app_access_token()
return app_access_token
@classmethod
def create_test_user(cls, app_access_token, permissions=None, name=None):
'''
Creates a test user with the given permissions and name
:param app_access_token:
The application's access token
:param permissions:
The list of permissions to request for the test user
:param name:
Optionally specify the name
'''
if not permissions:
permissions = ['read_stream', 'publish_stream',
'user_photos', 'offline_access']
if isinstance(permissions, list):
permissions = ','.join(permissions)
default_name = 'Permissions %s' % permissions.replace(
',', ' ').replace('_', '')
name = name or default_name
kwargs = {
'access_token': app_access_token,
'installed': True,
'name': name,
'method': 'post',
'permissions': permissions,
}
path = '%s/accounts/test-users' % facebook_settings.FACEBOOK_APP_ID
# add the test user data to the test user data class
test_user_data = cls.request(path, **kwargs)
test_user_data['name'] = name
test_user = TestUser(test_user_data)
return test_user
@classmethod
def get_or_create_test_user(cls, app_access_token, name=None, permissions=None, force_create=False):
'''
There is no supported way of getting or creating a test user.
However:
- creating a test user takes around 5s
- you can only create 500 test users
So creating users on every run slows your testing flow quite a bit.
This method instead looks through your existing test users and
matches on their names (the requested permissions are encoded in
the name).
'''
if not permissions:
permissions = ['read_stream', 'publish_stream', 'publish_actions',
'user_photos', 'offline_access']
if isinstance(permissions, list):
permissions = ','.join(permissions)
# hacking the permissions into the name of the test user
default_name = 'Permissions %s' % permissions.replace(
',', ' ').replace('_', '')
name = name or default_name
# retrieve all test users
test_users = cls.get_test_users(app_access_token)
user_id_dict = dict([(int(u['id']), u) for u in test_users])
user_ids = map(str, user_id_dict.keys())
# use fql to figure out their names
facebook = OpenFacebook(app_access_token)
users = facebook.fql('SELECT uid, name FROM user WHERE uid in (%s)' %
','.join(user_ids))
users_dict = dict([(u['name'], u['uid']) for u in users])
user_id = users_dict.get(name)
if force_create and user_id:
# we need the users access_token, the app access token doesn't
# always work, seems to be a bug in the Facebook api
test_user_data = user_id_dict[user_id]
cls.delete_test_user(test_user_data['access_token'], user_id)
user_id = None
if user_id:
# we found our user, extend the data a bit
test_user_data = user_id_dict[user_id]
test_user_data['name'] = name
test_user = TestUser(test_user_data)
else:
# create the user
test_user = cls.create_test_user(
app_access_token, permissions, name)
return test_user
@classmethod
def get_test_users(cls, app_access_token):
kwargs = dict(access_token=app_access_token)
path = '%s/accounts/test-users' % facebook_settings.FACEBOOK_APP_ID
# retrieve all test users
response = cls.request(path, **kwargs)
test_users = response['data']
return test_users
@classmethod
def delete_test_user(cls, app_access_token, test_user_id):
kwargs = dict(access_token=app_access_token, method='delete')
path = '%s/' % test_user_id
# retrieve all test users
response = cls.request(path, **kwargs)
return response
@classmethod
def delete_test_users(cls, app_access_token):
# retrieve all test users
test_users = cls.get_test_users(app_access_token)
test_user_ids = [u['id'] for u in test_users]
for test_user_id in test_user_ids:
cls.delete_test_user(app_access_token, test_user_id)
class OpenFacebook(FacebookConnection):
'''
The main api class, initialize using
**Example**::
graph = OpenFacebook(access_token)
print(graph.get('me'))
'''
def __init__(self, access_token=None, prefetched_data=None,
expires=None, current_user_id=None, version=None):
'''
:param access_token:
The facebook Access token
'''
self.access_token = access_token
# extra data coming from signed cookies
self.prefetched_data = prefetched_data
# store to enable detection for offline usage
self.expires = expires
# hook to store the current user id if representing the
# facebook connection to a logged in user :)
self.current_user_id = current_user_id
if version is None:
version = 'v1.0'
self.version = version
def __getstate__(self):
'''
Turns the object into something easy to serialize
'''
state = dict(
access_token=self.access_token,
prefetched_data=self.prefetched_data,
expires=self.expires,
)
return state
def __setstate__(self, state):
'''
Restores the object from the state dict
'''
self.access_token = state['access_token']
self.prefetched_data = state['prefetched_data']
self.expires = state['expires']
def is_authenticated(self):
'''
Ask facebook if we have access to the users data
:returns: bool
'''
try:
me = self.me()
except facebook_exceptions.OpenFacebookException as e:
if isinstance(e, facebook_exceptions.OAuthException):
raise
me = None
authenticated = bool(me)
return authenticated
def get(self, path, version=None, **kwargs):
'''
Make a Facebook API call
**Example**::
open_facebook.get('me')
open_facebook.get('me', fields='id,name')
:param path:
The path to use for making the API call
:returns: dict
'''
version = version or self.version
kwargs['version'] = version
response = self.request(path, **kwargs)
return response
def get_many(self, *ids, **kwargs):
'''
Make a batched Facebook API call
For multiple ids
**Example**::
open_facebook.get_many('me', 'starbucks')
open_facebook.get_many('me', 'starbucks', fields='id,name')
:param path:
The path to use for making the API call
:returns: dict
'''
kwargs['ids'] = ','.join(ids)
return self.request(**kwargs)
def set(self, path, params=None, version=None, **post_data):
'''
Write data to facebook
**Example**::
open_facebook.set('me/feed', message='testing open facebook')
:param path:
The path to use for making the API call
:param params:
A dictionary of get params
:param post_data:
The kwargs for posting to facebook
:returns: dict
'''
version = version or self.version
assert self.access_token, 'Write operations require an access token'
if not params:
params = {}
params['method'] = 'post'
params['version'] = version
response = self.request(path, post_data=post_data, **params)
return response
def delete(self, path, *args, **kwargs):
'''
Delete the given bit of data
**Example**::
graph.delete(12345)
:param path:
the id of the element to remove
'''
kwargs['method'] = 'delete'
self.request(path, *args, **kwargs)
def fql(self, query, **kwargs):
'''
Runs the specified query against the Facebook FQL API.
**Example**::
open_facebook.fql('SELECT name FROM user WHERE uid = me()')
:param query:
The query to execute
:param kwargs:
Extra options to send to facebook
:returns: dict
'''
kwargs['q'] = query
path = 'fql'
response = self.request(path, **kwargs)
# return only the data for backward compatibility
return response['data']
def batch_fql(self, queries_dict):
'''
Runs a dict of named FQL queries in a single batched call and
returns the results keyed by query name:
**Example**::
response = facebook.batch_fql({
'name': 'SELECT uid, name, pic_square FROM user WHERE uid = me()',
'rsvp': 'SELECT uid, rsvp_status FROM event_member WHERE eid=12345678',
})
# accessing the results
response['name']
response['rsvp']
:param queries_dict:
A dictionary of named queries to execute
:returns: dict
'''
query = json.dumps(queries_dict)
query_results = self.fql(query)
named_results = dict(
[(r['name'], r['fql_result_set']) for r in query_results])
return named_results
def me(self):
'''
Cached method of requesting information about me
'''
me = getattr(self, '_me', None)
if me is None:
# self._me = me = self.get('me')
self._me = me = self.get('me', fields="id,name,email,verified")
return me
def permissions(self):
'''
Shortcut for self.get('me/permissions') with some extra parsing
to turn it into a dictionary of booleans
:returns: dict
'''
permissions_dict = {}
try:
permissions = {}
permissions_response = self.get('me/permissions')
# determine whether we're dealing with graph api 1.0 or 2.0+ by
# inspecting the shape of the first permission entry
data = permissions_response.get('data', [])
if data and any(value in ['granted', 'declined']
for value in data[0].values()):
# graph api 2.0+ returns one dict per permission with
# 'status' and 'permission' keys
for perm in data:
grant = perm.get('status') == 'granted'
name = perm.get('permission')
# just in case something goes sideways
if grant and name:
permissions_dict[name] = grant
# graph api 1.0 returns a single dict as {permission: intval}
elif data and any(value in [0, 1, '0', '1']
for value in data[0].values()):
permissions = data[0]
permissions_dict = dict([(k, bool(int(v)))
for k, v in permissions.items()
if v == '1' or v == 1])
except facebook_exceptions.OAuthException:
pass
return permissions_dict
def has_permissions(self, required_permissions):
'''
Validate if all the required_permissions are currently given
by the user
**Example**::
open_facebook.has_permissions(['publish_actions','read_stream'])
:param required_permissions:
A list of required permissions
:returns: bool
'''
permissions_dict = self.permissions()
# see if we have all permissions
has_permissions = True
for permission in required_permissions:
if permission not in permissions_dict:
has_permissions = False
return has_permissions
def my_image_url(self, size='large'):
'''
Returns the image url from your profile
Shortcut for me/picture
:param size:
the type of the image to request, see facebook for available formats
:returns: string
'''
query_dict = QueryDict('', True)
query_dict['type'] = size
query_dict['access_token'] = self.access_token
url = '%sme/picture?%s' % (self.api_url, query_dict.urlencode())
return url
def request(self, path='', post_data=None, old_api=False, version=None, **params):
url = self.get_request_url(path=path, old_api=old_api, version=version,
**params)
logger.info('requesting url %s', url)
response = self._request(url, post_data)
return response
def get_request_url(self, path='', old_api=False, version=None, **params):
'''
Gets the url for the request.
'''
api_base_url = self.old_api_url if old_api else self.api_url
version = version or self.version
if getattr(self, 'access_token', None):
params['access_token'] = self.access_token
if api_base_url.endswith('/'):
api_base_url = api_base_url[:-1]
if path and path.startswith('/'):
path = path[1:]
url = '/'.join([api_base_url, version, path])
return '%s?%s' % (url, urlencode(params))
class TestUser(object):
'''
Simple wrapper around test users
'''
def __init__(self, data):
self.name = data['name']
self.id = data['id']
self.access_token = data['access_token']
self.data = data
def graph(self):
graph = OpenFacebook(self.access_token)
return graph
def __repr__(self):
return 'Test user %s' % self.name
| bsd-3-clause |
Kromey/roglick | roglick/dungeon/maps/conway_dungeon.py | 1 | 6436 | from roglick.dungeon.base import Map,Room,Tile
from roglick.dungeon import tiles
from roglick.lib import libtcod
class Cave(object):
def __init__(self):
self.cells = []
@property
def center(self):
if self.size <= 0:
return None
sum_x = 0
sum_y = 0
for cell in self.cells:
sum_x += cell[0]
sum_y += cell[1]
return (int(sum_x/self.size), int(sum_y/self.size))
@property
def size(self):
return len(self.cells)
class ConwayDungeon(Map):
def __init__(self, width, height, random, *args, **kwargs):
super().__init__(width, height, random)
self.make_map(*args, **kwargs)
def make_map(self, open_prob=45, close_neighbors=4, visits=1, smoothing_passes=2, cave_min_size=10):
super().make_map()
self._caves = []
self._open_cells(open_prob)
for x in range(int(visits * self.width * self.height)):
self._visit_random_cell(close_neighbors)
self._smooth_cave(smoothing_passes)
self._find_caves(cave_min_size)
self._connect_caves()
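# Generation pipeline: randomly open cells, run the automaton visits to
# clump them into caverns, smooth out single-cell noise, fill in caves
# smaller than cave_min_size, then tunnel the surviving caves together.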
def _open_cells(self, open_prob):
for x in range(self.width):
if x == 0 or x == self.width-1:
continue
for y in range(self.height):
if y == 0 or y == self.height-1:
continue
if self._random.get_int(0,100) < open_prob:
self.tiles[x][y] = Tile(**tiles.floor)
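# The Conway-style rule applied per random visit: a cell with more than
# close_neighbors wall neighbours becomes wall, otherwise it becomes floor.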
def _visit_random_cell(self, close_neighbors):
x = self._random.get_int(1, self.width-2)
y = self._random.get_int(1, self.height-2)
neighbors = self._count_neighbors(x, y)
if neighbors > close_neighbors:
self.tiles[x][y] = Tile(**tiles.wall)
else:
self.tiles[x][y] = Tile(**tiles.floor)
def _smooth_cave(self, smoothing_passes):
for i in range(smoothing_passes):
for x in range(self.width):
if x == 0 or x == self.width-1:
continue
for y in range(self.height):
if y == 0 or y == self.height-1:
continue
if not self.tiles[x][y].passable and self._count_orth_neighbors(x, y) <= 1:
self.tiles[x][y] = Tile(**tiles.floor)
for i in range(smoothing_passes):
for x in range(self.width):
if x == 0 or x == self.width-1:
continue
for y in range(self.height):
if y == 0 or y == self.height-1:
continue
if self.tiles[x][y].passable and self._count_orth_neighbors(x, y) >= 4:
self.tiles[x][y] = Tile(**tiles.wall)
def _find_caves(self, cave_min_size):
visited = [[False for y in range(self.height)]
for x in range(self.width)]
for x in range(self.width):
for y in range(self.height):
if visited[x][y]:
continue
if self.tiles[x][y].passable:
cave = self._map_cave(visited, x, y)
if cave.size < cave_min_size:
# Too small, fill it in
for cx,cy in cave.cells:
self.tiles[cx][cy] = Tile(**tiles.wall)
else:
self._caves.append(cave)
else:
visited[x][y] = True
def _connect_caves(self):
while len(self._caves) > 1:
cave = self._caves.pop()
cx,cy = cave.center
# Find the "nearest" cave to this one by comparing centers
other = self._caves[0]
ox,oy = other.center
d2 = self.distance_squared(cx, cy, ox, oy)
for i in range(1, len(self._caves)):
ox,oy = self._caves[i].center
i_d2 = self.distance_squared(cx, cy, ox, oy)
if i_d2 < d2:
other = self._caves[i]
d2 = i_d2
# Found nearest cave, find nearest points
# Not necessarily actually nearest two points; first we find the
# point in one cave nearest the other's center, then the point in
# the other nearest to that one.
x1,y1 = cave.center
x2,y2 = other.center
d2 = self.distance_squared(x1, y1, x2, y2)
for i_x2,i_y2 in other.cells:
i_d2 = self.distance_squared(x1, y1, i_x2, i_y2)
if i_d2 < d2:
x2,y2 = i_x2,i_y2
d2 = i_d2
for i_x1,i_y1 in cave.cells:
i_d2 = self.distance_squared(i_x1, i_y1, x2, y2)
if i_d2 < d2:
x1,y1 = i_x1,i_y1
d2 = i_d2
self.create_tunnel(x1, y1, x2, y2)
def _count_neighbors(self, x, y):
neighbors = 0
for dx in range(-1,2):
for dy in range(-1,2):
if dx or dy:
tx = x+dx
ty = y+dy
if tx >=0 and tx < self.width and ty >= 0 and ty < self.height:
if not self.tiles[tx][ty].passable:
neighbors += 1
return neighbors
def _count_orth_neighbors(self, x, y):
neighbors = 0
for dx in range(-1,2):
if dx:
tx = x+dx
ty = y
if tx >=0 and tx < self.width and ty >= 0 and ty < self.height:
if not self.tiles[tx][ty].passable:
neighbors += 1
for dy in range(-1,2):
if dy:
tx = x
ty = y+dy
if tx >=0 and tx < self.width and ty >= 0 and ty < self.height:
if not self.tiles[tx][ty].passable:
neighbors += 1
return neighbors
def _map_cave(self, visited, x, y):
if visited[x][y] or not self.tiles[x][y].passable:
raise ValueError("Cell ({x},{y}) not valid start for cave".format(
x=x, y=y))
cave = Cave()
for cell in self.flood_fill(x, y):
cave.cells.append(cell)
x,y = cell
visited[x][y] = True
return cave
| mit |
tumbl3w33d/ansible | lib/ansible/modules/network/aci/aci_rest.py | 13 | 14126 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Dag Wieers (@dagwieers) <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: aci_rest
short_description: Direct access to the Cisco APIC REST API
description:
- Enables the management of the Cisco ACI fabric through direct access to the Cisco APIC REST API.
- Thanks to the idempotent nature of the APIC, this module is idempotent and reports changes.
version_added: '2.4'
requirements:
- lxml (when using XML payload)
- xmljson >= 0.1.8 (when using XML payload)
- python 2.7+ (when using xmljson)
options:
method:
description:
- The HTTP method of the request.
- Using C(delete) is typically used for deleting objects.
- Using C(get) is typically used for querying objects.
- Using C(post) is typically used for modifying objects.
type: str
choices: [ delete, get, post ]
default: get
aliases: [ action ]
path:
description:
- URI being used to execute API calls.
- Must end in C(.xml) or C(.json).
type: str
required: yes
aliases: [ uri ]
content:
description:
- When used instead of C(src), sets the payload of the API request directly.
- This may be convenient to template simple requests.
- For anything complex use the C(template) lookup plugin (see examples)
or the M(template) module with parameter C(src).
type: raw
src:
description:
- Name of the absolute path of the filename that includes the body
of the HTTP request being sent to the ACI fabric.
- If you require a templated payload, use the C(content) parameter
together with the C(template) lookup plugin, or use M(template).
type: path
aliases: [ config_file ]
extends_documentation_fragment: aci
notes:
- Certain payloads are known not to be idempotent, so be careful when constructing payloads,
e.g. using C(status="created") will cause idempotency issues, use C(status="modified") instead.
More information in :ref:`the ACI documentation <aci_guide_known_issues>`.
- Certain payloads (and used paths) are known to report no changes happened when changes did happen.
This is a known APIC problem and has been reported to the vendor. A workaround for this issue exists.
More information in :ref:`the ACI documentation <aci_guide_known_issues>`.
- XML payloads require the C(lxml) and C(xmljson) python libraries. For JSON payloads nothing special is needed.
seealso:
- module: aci_tenant
- name: Cisco APIC REST API Configuration Guide
description: More information about the APIC REST API.
link: http://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/2-x/rest_cfg/2_1_x/b_Cisco_APIC_REST_API_Configuration_Guide.html
author:
- Dag Wieers (@dagwieers)
'''
EXAMPLES = r'''
- name: Add a tenant using certificate authentication
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
method: post
path: /api/mo/uni.xml
src: /home/cisco/ansible/aci/configs/aci_config.xml
delegate_to: localhost
- name: Add a tenant from a templated payload file from templates/
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
method: post
path: /api/mo/uni.xml
content: "{{ lookup('template', 'aci/tenant.xml.j2') }}"
delegate_to: localhost
- name: Add a tenant using inline YAML
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
validate_certs: no
path: /api/mo/uni.json
method: post
content:
fvTenant:
attributes:
name: Sales
descr: Sales department
delegate_to: localhost
- name: Add a tenant using a JSON string
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
validate_certs: no
path: /api/mo/uni.json
method: post
content:
{
"fvTenant": {
"attributes": {
"name": "Sales",
"descr": "Sales department"
}
}
}
delegate_to: localhost
- name: Add a tenant using an XML string
aci_rest:
host: apic
username: admin
private_key: pki/{{ aci_username }}.key
validate_certs: no
path: /api/mo/uni.xml
method: post
content: '<fvTenant name="Sales" descr="Sales departement"/>'
delegate_to: localhost
- name: Get tenants using password authentication
aci_rest:
host: apic
username: admin
password: SomeSecretPassword
method: get
path: /api/node/class/fvTenant.json
delegate_to: localhost
register: query_result
- name: Configure contracts
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
method: post
path: /api/mo/uni.xml
src: /home/cisco/ansible/aci/configs/contract_config.xml
delegate_to: localhost
- name: Register leaves and spines
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
validate_certs: no
method: post
path: /api/mo/uni/controller/nodeidentpol.xml
content: |
<fabricNodeIdentPol>
<fabricNodeIdentP name="{{ item.name }}" nodeId="{{ item.nodeid }}" status="{{ item.status }}" serial="{{ item.serial }}"/>
</fabricNodeIdentPol>
with_items:
- '{{ apic_leavesspines }}'
delegate_to: localhost
- name: Wait for all controllers to become ready
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
validate_certs: no
path: /api/node/class/topSystem.json?query-target-filter=eq(topSystem.role,"controller")
register: apics
until: "'totalCount' in apics and apics.totalCount|int >= groups['apic']|count"
retries: 120
delay: 30
delegate_to: localhost
run_once: yes
'''
RETURN = r'''
error_code:
description: The REST ACI return code, useful for troubleshooting on failure
returned: always
type: int
sample: 122
error_text:
description: The REST ACI descriptive text, useful for troubleshooting on failure
returned: always
type: str
sample: unknown managed object class foo
imdata:
description: Converted output returned by the APIC REST (register this for post-processing)
returned: always
type: str
sample: [{"error": {"attributes": {"code": "122", "text": "unknown managed object class foo"}}}]
payload:
description: The (templated) payload send to the APIC REST API (xml or json)
returned: always
type: str
sample: '<foo bar="boo"/>'
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: str
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
response:
description: HTTP response string
returned: always
type: str
sample: 'HTTP Error 400: Bad Request'
status:
description: HTTP status code
returned: always
type: int
sample: 400
totalCount:
description: Number of items in the imdata array
returned: always
type: str
sample: '0'
url:
description: URL used for APIC REST call
returned: success
type: str
sample: https://1.2.3.4/api/mo/uni/tn-[Dag].json?rsp-subtree=modified
'''
import json
import os
try:
from ansible.module_utils.six.moves.urllib.parse import parse_qsl, urlencode, urlparse, urlunparse
HAS_URLPARSE = True
except Exception:
HAS_URLPARSE = False
# Optional, only used for XML payload
try:
import lxml.etree # noqa
HAS_LXML_ETREE = True
except ImportError:
HAS_LXML_ETREE = False
# Optional, only used for XML payload
try:
from xmljson import cobra # noqa
HAS_XMLJSON_COBRA = True
except ImportError:
HAS_XMLJSON_COBRA = False
# Optional, only used for YAML validation
try:
import yaml
HAS_YAML = True
except Exception:
HAS_YAML = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.urls import fetch_url
from ansible.module_utils._text import to_text
def update_qsl(url, params):
''' Add or update a URL query string '''
if HAS_URLPARSE:
url_parts = list(urlparse(url))
query = dict(parse_qsl(url_parts[4]))
query.update(params)
url_parts[4] = urlencode(query)
return urlunparse(url_parts)
elif '?' in url:
return url + '&' + '&'.join(['%s=%s' % (k, v) for k, v in params.items()])
else:
return url + '?' + '&'.join(['%s=%s' % (k, v) for k, v in params.items()])
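# Example: update_qsl('https://apic/api/mo/uni.json?a=1', {'rsp-subtree': 'modified'})
# returns 'https://apic/api/mo/uni.json?a=1&rsp-subtree=modified' (parameter
# order may vary when urlparse is available, since the query is rebuilt
# from a dict).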
class ACIRESTModule(ACIModule):
def changed(self, d):
''' Check ACI response for changes '''
if isinstance(d, dict):
for k, v in d.items():
if k == 'status' and v in ('created', 'modified', 'deleted'):
return True
elif self.changed(v) is True:
return True
elif isinstance(d, list):
for i in d:
if self.changed(i) is True:
return True
return False
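# changed() recursively walks the imdata tree that the APIC returns for
# requests made with rsp-subtree=modified: any attribute dict carrying a
# status of created/modified/deleted means the call actually altered the
# fabric, which is what drives Ansible's changed flag below.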
def response_type(self, rawoutput, rest_type='xml'):
''' Handle APIC response output '''
if rest_type == 'json':
self.response_json(rawoutput)
else:
self.response_xml(rawoutput)
# Use APICs built-in idempotency
if HAS_URLPARSE:
self.result['changed'] = self.changed(self.imdata)
def main():
argument_spec = aci_argument_spec()
argument_spec.update(
path=dict(type='str', required=True, aliases=['uri']),
method=dict(type='str', default='get', choices=['delete', 'get', 'post'], aliases=['action']),
src=dict(type='path', aliases=['config_file']),
content=dict(type='raw'),
)
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive=[['content', 'src']],
)
content = module.params.get('content')
path = module.params.get('path')
src = module.params.get('src')
# Report missing file
file_exists = False
if src:
if os.path.isfile(src):
file_exists = True
else:
module.fail_json(msg="Cannot find/access src '%s'" % src)
# Find request type
if '.xml' in path:
rest_type = 'xml'
if not HAS_LXML_ETREE:
module.fail_json(msg='The lxml python library is missing, or lacks etree support.')
if not HAS_XMLJSON_COBRA:
module.fail_json(msg='The xmljson python library is missing, or lacks cobra support.')
elif '.json' in path:
rest_type = 'json'
else:
module.fail_json(msg='Failed to find REST API payload type (neither .xml nor .json).')
aci = ACIRESTModule(module)
aci.result['status'] = -1 # Ensure we always return a status
# We include the payload as it may be templated
payload = content
if file_exists:
with open(src, 'r') as config_object:
# TODO: Would be nice to template this, requires action-plugin
payload = config_object.read()
# Validate payload
if rest_type == 'json':
if content and isinstance(content, dict):
# Validate inline YAML/JSON
payload = json.dumps(payload)
elif payload and isinstance(payload, str) and HAS_YAML:
try:
# Validate YAML/JSON string
payload = json.dumps(yaml.safe_load(payload))
except Exception as e:
module.fail_json(msg='Failed to parse provided JSON/YAML payload: %s' % to_text(e), exception=to_text(e), payload=payload)
elif rest_type == 'xml' and HAS_LXML_ETREE:
if content and isinstance(content, dict) and HAS_XMLJSON_COBRA:
# Validate inline YAML/JSON
# FIXME: Converting from a dictionary to XML is unsupported at this time
# payload = etree.tostring(payload)
pass
elif payload and isinstance(payload, str):
try:
# Validate XML string
payload = lxml.etree.tostring(lxml.etree.fromstring(payload))
except Exception as e:
module.fail_json(msg='Failed to parse provided XML payload: %s' % to_text(e), payload=payload)
# Perform actual request using auth cookie (Same as aci.request(), but also supports XML)
if 'port' in aci.params and aci.params.get('port') is not None:
aci.url = '%(protocol)s://%(host)s:%(port)s/' % aci.params + path.lstrip('/')
else:
aci.url = '%(protocol)s://%(host)s/' % aci.params + path.lstrip('/')
if aci.params.get('method') != 'get':
path += '?rsp-subtree=modified'
aci.url = update_qsl(aci.url, {'rsp-subtree': 'modified'})
# Sign and encode request as to APIC's wishes
if aci.params.get('private_key') is not None:
aci.cert_auth(path=path, payload=payload)
aci.method = aci.params.get('method').upper()
# Perform request
resp, info = fetch_url(module, aci.url,
data=payload,
headers=aci.headers,
method=aci.method,
timeout=aci.params.get('timeout'),
use_proxy=aci.params.get('use_proxy'))
aci.response = info.get('msg')
aci.status = info.get('status')
# Report failure
if info.get('status') != 200:
try:
# APIC error
aci.response_type(info.get('body'), rest_type)
aci.fail_json(msg='APIC Error %(code)s: %(text)s' % aci.error)
except KeyError:
# Connection error
aci.fail_json(msg='Connection failed for %(url)s. %(msg)s' % info)
aci.response_type(resp.read(), rest_type)
aci.result['imdata'] = aci.imdata
aci.result['totalCount'] = aci.totalCount
# Report success
aci.exit_json(**aci.result)
if __name__ == '__main__':
main()
| gpl-3.0 |
hengqujushi/shadowsocks | shadowsocks/asyncdns.py | 655 | 17416 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import socket
import struct
import re
import logging
from shadowsocks import common, lru_cache, eventloop, shell
CACHE_SWEEP_INTERVAL = 30
VALID_HOSTNAME = re.compile(br"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
common.patch_socket()
# rfc1035
# format
# +---------------------+
# | Header |
# +---------------------+
# | Question | the question for the name server
# +---------------------+
# | Answer | RRs answering the question
# +---------------------+
# | Authority | RRs pointing toward an authority
# +---------------------+
# | Additional | RRs holding additional information
# +---------------------+
#
# header
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ID |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# |QR| Opcode |AA|TC|RD|RA| Z | RCODE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | QDCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ANCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | NSCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ARCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
QTYPE_ANY = 255
QTYPE_A = 1
QTYPE_AAAA = 28
QTYPE_CNAME = 5
QTYPE_NS = 2
QCLASS_IN = 1
def build_address(address):
address = address.strip(b'.')
labels = address.split(b'.')
results = []
for label in labels:
l = len(label)
if l > 63:
return None
results.append(common.chr(l))
results.append(label)
results.append(b'\0')
return b''.join(results)
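# Example: build_address(b'www.example.com') yields
# b'\x03www\x07example\x03com\x00' -- each label is prefixed with its
# length and the name is terminated by a zero octet (RFC 1035 QNAME
# encoding).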
def build_request(address, qtype):
request_id = os.urandom(2)
header = struct.pack('!BBHHHH', 1, 0, 1, 0, 0, 0)
addr = build_address(address)
qtype_qclass = struct.pack('!HH', qtype, QCLASS_IN)
return request_id + header + addr + qtype_qclass
def parse_ip(addrtype, data, length, offset):
if addrtype == QTYPE_A:
return socket.inet_ntop(socket.AF_INET, data[offset:offset + length])
elif addrtype == QTYPE_AAAA:
return socket.inet_ntop(socket.AF_INET6, data[offset:offset + length])
elif addrtype in [QTYPE_CNAME, QTYPE_NS]:
return parse_name(data, offset)[1]
else:
return data[offset:offset + length]
def parse_name(data, offset):
p = offset
labels = []
l = common.ord(data[p])
while l > 0:
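# RFC 1035 4.1.4: when the two high bits of the length octet are set,
# the remaining 14 bits are an offset to an earlier occurrence of the
# name (message compression), and the pointer ends this name.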
if (l & (128 + 64)) == (128 + 64):
# pointer
pointer = struct.unpack('!H', data[p:p + 2])[0]
pointer &= 0x3FFF
r = parse_name(data, pointer)
labels.append(r[1])
p += 2
# pointer is the end
return p - offset, b'.'.join(labels)
else:
labels.append(data[p + 1:p + 1 + l])
p += 1 + l
l = common.ord(data[p])
return p - offset + 1, b'.'.join(labels)
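# Example: parsing b'\x07example\x03com\x00' at offset 0 consumes 13 bytes
# and returns (13, b'example.com').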
# rfc1035
# record
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | |
# / /
# / NAME /
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TYPE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | CLASS |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TTL |
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | RDLENGTH |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--|
# / RDATA /
# / /
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
def parse_record(data, offset, question=False):
nlen, name = parse_name(data, offset)
if not question:
record_type, record_class, record_ttl, record_rdlength = struct.unpack(
'!HHiH', data[offset + nlen:offset + nlen + 10]
)
ip = parse_ip(record_type, data, record_rdlength, offset + nlen + 10)
return nlen + 10 + record_rdlength, \
(name, ip, record_type, record_class, record_ttl)
else:
record_type, record_class = struct.unpack(
'!HH', data[offset + nlen:offset + nlen + 4]
)
return nlen + 4, (name, None, record_type, record_class, None, None)
def parse_header(data):
if len(data) >= 12:
header = struct.unpack('!HBBHHHH', data[:12])
res_id = header[0]
res_qr = header[1] & 128
res_tc = header[1] & 2
res_ra = header[2] & 128
res_rcode = header[2] & 15
# assert res_tc == 0
# assert res_rcode in [0, 3]
res_qdcount = header[3]
res_ancount = header[4]
res_nscount = header[5]
res_arcount = header[6]
return (res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount,
res_ancount, res_nscount, res_arcount)
return None
def parse_response(data):
try:
if len(data) >= 12:
header = parse_header(data)
if not header:
return None
res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount, \
res_ancount, res_nscount, res_arcount = header
qds = []
ans = []
offset = 12
for i in range(0, res_qdcount):
l, r = parse_record(data, offset, True)
offset += l
if r:
qds.append(r)
for i in range(0, res_ancount):
l, r = parse_record(data, offset)
offset += l
if r:
ans.append(r)
for i in range(0, res_nscount):
l, r = parse_record(data, offset)
offset += l
for i in range(0, res_arcount):
l, r = parse_record(data, offset)
offset += l
response = DNSResponse()
if qds:
response.hostname = qds[0][0]
for an in qds:
response.questions.append((an[1], an[2], an[3]))
for an in ans:
response.answers.append((an[1], an[2], an[3]))
return response
except Exception as e:
shell.print_exception(e)
return None
def is_valid_hostname(hostname):
if len(hostname) > 255:
return False
# slice (rather than index) so this also works on python 3 bytes
if hostname[-1:] == b'.':
hostname = hostname[:-1]
return all(VALID_HOSTNAME.match(x) for x in hostname.split(b'.'))
class DNSResponse(object):
def __init__(self):
self.hostname = None
self.questions = [] # each: (addr, type, class)
self.answers = [] # each: (addr, type, class)
def __str__(self):
return '%s: %s' % (self.hostname, str(self.answers))
STATUS_IPV4 = 0
STATUS_IPV6 = 1
class DNSResolver(object):
def __init__(self, server_list=None):
self._loop = None
self._hosts = {}
self._hostname_status = {}
self._hostname_to_cb = {}
self._cb_to_hostname = {}
self._cache = lru_cache.LRUCache(timeout=300)
self._sock = None
if server_list is None:
self._servers = None
self._parse_resolv()
else:
self._servers = server_list
self._parse_hosts()
# TODO monitor hosts change and reload hosts
# TODO parse /etc/gai.conf and follow its rules
def _parse_resolv(self):
self._servers = []
try:
with open('/etc/resolv.conf', 'rb') as f:
content = f.readlines()
for line in content:
line = line.strip()
if line:
if line.startswith(b'nameserver'):
parts = line.split()
if len(parts) >= 2:
server = parts[1]
if common.is_ip(server) == socket.AF_INET:
if type(server) != str:
server = server.decode('utf8')
self._servers.append(server)
except IOError:
pass
if not self._servers:
self._servers = ['8.8.4.4', '8.8.8.8']
def _parse_hosts(self):
etc_path = '/etc/hosts'
if 'WINDIR' in os.environ:
etc_path = os.environ['WINDIR'] + '/system32/drivers/etc/hosts'
try:
with open(etc_path, 'rb') as f:
for line in f.readlines():
line = line.strip()
parts = line.split()
if len(parts) >= 2:
ip = parts[0]
if common.is_ip(ip):
for i in range(1, len(parts)):
hostname = parts[i]
if hostname:
self._hosts[hostname] = ip
except IOError:
self._hosts['localhost'] = '127.0.0.1'
def add_to_loop(self, loop):
if self._loop:
raise Exception('already add to loop')
self._loop = loop
# TODO when dns server is IPv6
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
self._sock.setblocking(False)
loop.add(self._sock, eventloop.POLL_IN, self)
loop.add_periodic(self.handle_periodic)
def _call_callback(self, hostname, ip, error=None):
callbacks = self._hostname_to_cb.get(hostname, [])
for callback in callbacks:
if callback in self._cb_to_hostname:
del self._cb_to_hostname[callback]
if ip or error:
callback((hostname, ip), error)
else:
callback((hostname, None),
Exception('unknown hostname %s' % hostname))
if hostname in self._hostname_to_cb:
del self._hostname_to_cb[hostname]
if hostname in self._hostname_status:
del self._hostname_status[hostname]
def _handle_data(self, data):
response = parse_response(data)
if response and response.hostname:
hostname = response.hostname
ip = None
for answer in response.answers:
if answer[1] in (QTYPE_A, QTYPE_AAAA) and \
answer[2] == QCLASS_IN:
ip = answer[0]
break
if not ip and self._hostname_status.get(hostname, STATUS_IPV6) \
== STATUS_IPV4:
self._hostname_status[hostname] = STATUS_IPV6
self._send_req(hostname, QTYPE_AAAA)
else:
if ip:
self._cache[hostname] = ip
self._call_callback(hostname, ip)
elif self._hostname_status.get(hostname, None) == STATUS_IPV6:
for question in response.questions:
if question[1] == QTYPE_AAAA:
self._call_callback(hostname, None)
break
def handle_event(self, sock, fd, event):
if sock != self._sock:
return
if event & eventloop.POLL_ERR:
logging.error('dns socket err')
self._loop.remove(self._sock)
self._sock.close()
# TODO when dns server is IPv6
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
self._sock.setblocking(False)
self._loop.add(self._sock, eventloop.POLL_IN, self)
else:
data, addr = sock.recvfrom(1024)
if addr[0] not in self._servers:
logging.warn('received a packet from an address other than our dns servers')
return
self._handle_data(data)
def handle_periodic(self):
self._cache.sweep()
def remove_callback(self, callback):
hostname = self._cb_to_hostname.get(callback)
if hostname:
del self._cb_to_hostname[callback]
arr = self._hostname_to_cb.get(hostname, None)
if arr:
arr.remove(callback)
if not arr:
del self._hostname_to_cb[hostname]
if hostname in self._hostname_status:
del self._hostname_status[hostname]
def _send_req(self, hostname, qtype):
req = build_request(hostname, qtype)
for server in self._servers:
logging.debug('resolving %s with type %d using server %s',
hostname, qtype, server)
self._sock.sendto(req, (server, 53))
def resolve(self, hostname, callback):
if type(hostname) != bytes:
hostname = hostname.encode('utf8')
if not hostname:
callback(None, Exception('empty hostname'))
elif common.is_ip(hostname):
callback((hostname, hostname), None)
elif hostname in self._hosts:
logging.debug('hit hosts: %s', hostname)
ip = self._hosts[hostname]
callback((hostname, ip), None)
elif hostname in self._cache:
logging.debug('hit cache: %s', hostname)
ip = self._cache[hostname]
callback((hostname, ip), None)
else:
if not is_valid_hostname(hostname):
callback(None, Exception('invalid hostname: %s' % hostname))
return
arr = self._hostname_to_cb.get(hostname, None)
if not arr:
self._hostname_status[hostname] = STATUS_IPV4
self._send_req(hostname, QTYPE_A)
self._hostname_to_cb[hostname] = [callback]
self._cb_to_hostname[callback] = hostname
else:
arr.append(callback)
# TODO send again only if waited too long
self._send_req(hostname, QTYPE_A)
def close(self):
if self._sock:
if self._loop:
self._loop.remove_periodic(self.handle_periodic)
self._loop.remove(self._sock)
self._sock.close()
self._sock = None
def test():
dns_resolver = DNSResolver()
loop = eventloop.EventLoop()
dns_resolver.add_to_loop(loop)
global counter
counter = 0
def make_callback():
global counter
def callback(result, error):
global counter
# TODO: what can we assert?
print(result, error)
counter += 1
if counter == 9:
dns_resolver.close()
loop.stop()
a_callback = callback
return a_callback
assert(make_callback() != make_callback())
dns_resolver.resolve(b'google.com', make_callback())
dns_resolver.resolve('google.com', make_callback())
dns_resolver.resolve('example.com', make_callback())
dns_resolver.resolve('ipv6.google.com', make_callback())
dns_resolver.resolve('www.facebook.com', make_callback())
dns_resolver.resolve('ns2.google.com', make_callback())
dns_resolver.resolve('invalid.@!#$%^&[email protected]', make_callback())
dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'long.hostname', make_callback())
dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'long.hostname', make_callback())
loop.run()
if __name__ == '__main__':
test()
| apache-2.0 |
sanjayankur31/nest-simulator | pynest/examples/balancedneuron.py | 8 | 7344 | # -*- coding: utf-8 -*-
#
# balancedneuron.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Balanced neuron example
-----------------------
This script simulates a neuron driven by an excitatory and an
inhibitory population of neurons firing Poisson spike trains. The aim
is to find a firing rate for the inhibitory population that will make
the neuron fire at the same rate as the excitatory population.
Optimization is performed using the ``bisection`` method from Scipy,
simulating the network repeatedly.
This example is also shown in the article [1]_
References
~~~~~~~~~~
.. [1] Eppler JM, Helias M, Muller E, Diesmann M, Gewaltig MO (2009). PyNEST: A convenient interface to the NEST
simulator, Front. Neuroinform.
http://dx.doi.org/10.3389/neuro.11.012.2008
"""
###############################################################################
# First, we import all necessary modules for simulation, analysis and
# plotting. Scipy should be imported before nest.
from scipy.optimize import bisect
import nest
import nest.voltage_trace
import matplotlib.pyplot as plt
###############################################################################
# Additionally, we set the verbosity using ``set_verbosity`` to
# suppress info messages.
nest.set_verbosity("M_WARNING")
nest.ResetKernel()
###############################################################################
# Second, the simulation parameters are assigned to variables.
t_sim = 25000.0 # how long we simulate
n_ex = 16000 # size of the excitatory population
n_in = 4000 # size of the inhibitory population
r_ex = 5.0 # mean rate of the excitatory population
r_in = 20.5 # initial rate of the inhibitory population
epsc = 45.0 # peak amplitude of excitatory synaptic currents
ipsc = -45.0 # peak amplitude of inhibitory synaptic currents
d = 1.0 # synaptic delay
lower = 15.0 # lower bound of the search interval
upper = 25.0 # upper bound of the search interval
prec = 0.01 # how close need the excitatory rates be
###############################################################################
# Third, the nodes are created using ``Create``. We store the returned
# handles in variables for later reference.
neuron = nest.Create("iaf_psc_alpha")
noise = nest.Create("poisson_generator", 2)
voltmeter = nest.Create("voltmeter")
spikerecorder = nest.Create("spike_recorder")
###################################################################################
# Fourth, the ``poisson_generator`` (`noise`) is configured.
# Note that we need not set parameters for the neuron, the spike recorder, and
# the voltmeter, since they have satisfactory defaults.
noise.rate = [n_ex * r_ex, n_in * r_in]
###############################################################################
# Fifth, the ``iaf_psc_alpha`` is connected to the ``spike_recorder`` and the
# ``voltmeter``, as are the two Poisson generators to the neuron. The command
# ``Connect`` has different variants. Plain `Connect` just takes the handles of
# pre- and postsynaptic nodes and uses the default values for weight and
# delay. It can also be called with a list of weights, as in the connection
# of the noise below.
# Note that the connection direction for the ``voltmeter`` is reversed compared
# to the ``spike_recorder``, because it observes the neuron instead of
# receiving events from it. Thus, ``Connect`` reflects the direction of signal
# flow in the simulation kernel rather than the physical process of inserting
# an electrode into the neuron. The latter semantics is presently not
# available in NEST.
nest.Connect(neuron, spikerecorder)
nest.Connect(voltmeter, neuron)
nest.Connect(noise, neuron, syn_spec={'weight': [[epsc, ipsc]], 'delay': 1.0})
###############################################################################
# To determine the optimal rate of the neurons in the inhibitory population,
# the network is simulated several times for different values of the
# inhibitory rate while measuring the rate of the target neuron. This is done
# by calling ``Simulate`` until the rate of the target neuron matches the rate
# of the neurons in the excitatory population with a certain accuracy. The
# algorithm is implemented in two steps:
#
# First, the function ``output_rate`` is defined to measure the firing rate
# of the target neuron for a given rate of the inhibitory neurons.
def output_rate(guess):
print("Inhibitory rate estimate: %5.2f Hz" % guess)
rate = float(abs(n_in * guess))
noise[1].rate = rate
spikerecorder.n_events = 0
nest.Simulate(t_sim)
out = spikerecorder.n_events * 1000.0 / t_sim
print(" -> Neuron rate: %6.2f Hz (goal: %4.2f Hz)" % (out, r_ex))
return out
###############################################################################
# The function takes the firing rate of the inhibitory neurons as an
# argument. It scales the rate with the size of the inhibitory population and
# configures the inhibitory Poisson generator (`noise[1]`) accordingly.
# Then, the spike counter of the ``spike_recorder`` is reset to zero. The
# network is simulated using ``Simulate``, which takes the desired simulation
# time in milliseconds and advances the network state by this amount of time.
# During simulation, the ``spike_recorder`` counts the spikes of the target
# neuron and the total number is read out at the end of the simulation
# period. The return value of ``output_rate()`` is the firing rate of the
# target neuron in Hz.
#
# Second, the scipy function ``bisect`` is used to determine the optimal
# firing rate of the neurons of the inhibitory population.
in_rate = bisect(lambda x: output_rate(x) - r_ex, lower, upper, xtol=prec)
print("Optimal rate for the inhibitory population: %.2f Hz" % in_rate)
###############################################################################
# The function ``bisect`` takes four arguments: first a function whose
# zero crossing is to be determined. Here, the firing rate of the target
# neuron should equal the firing rate of the neurons of the excitatory
# population. Thus we define an anonymous function (using `lambda`) that
# returns the difference between the actual rate of the target neuron and the
# rate of the excitatory Poisson generator, given a rate for the inhibitory
# neurons. The next two arguments are the lower and upper bound of the
# interval in which to search for the zero crossing. The fourth argument of
# ``bisect`` is the desired relative precision of the zero crossing.
#
# Finally, we plot the target neuron's membrane potential as a function of
# time.
nest.voltage_trace.from_device(voltmeter)
plt.show()
| gpl-2.0 |
memkeytm/p2pool | p2pool/test/test_node.py | 198 | 10503 | from __future__ import division
import base64
import random
import tempfile
from twisted.internet import defer, reactor
from twisted.python import failure
from twisted.trial import unittest
from twisted.web import client, resource, server
from p2pool import data, node, work
from p2pool.bitcoin import data as bitcoin_data, networks, worker_interface
from p2pool.util import deferral, jsonrpc, math, variable
class bitcoind(object): # can be used as p2p factory, p2p protocol, or rpc jsonrpc proxy
def __init__(self):
self.blocks = [0x000000000000016c169477c25421250ec5d32cf9c6d38538b5de970a2355fd89]
self.headers = {0x16c169477c25421250ec5d32cf9c6d38538b5de970a2355fd89: {
'nonce': 1853158954,
'timestamp': 1351658517,
'merkle_root': 2282849479936278423916707524932131168473430114569971665822757638339486597658L,
'version': 1,
'previous_block': 1048610514577342396345362905164852351970507722694242579238530L,
'bits': bitcoin_data.FloatingInteger(bits=0x1a0513c5, target=0x513c50000000000000000000000000000000000000000000000L),
}}
self.conn = variable.Variable(self)
self.new_headers = variable.Event()
self.new_block = variable.Event()
self.new_tx = variable.Event()
# p2p factory
def getProtocol(self):
return self
# p2p protocol
def send_block(self, block):
pass
def send_tx(self, tx):
pass
def get_block_header(self, block_hash):
return self.headers[block_hash]
# rpc jsonrpc proxy
def rpc_help(self):
return '\ngetblock '
def rpc_getblock(self, block_hash_hex):
block_hash = int(block_hash_hex, 16)
return dict(height=self.blocks.index(block_hash))
def __getattr__(self, name):
if name.startswith('rpc_'):
return lambda *args, **kwargs: failure.Failure(jsonrpc.Error_for_code(-32601)('Method not found'))
def rpc_getblocktemplate(self, param):
if param['mode'] == 'template':
pass
elif param['mode'] == 'submit':
result = param['data']
block = bitcoin_data.block_type.unpack(result.decode('hex'))
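# sanity check only: a mismatched coinbase value is reported but the
# block is still accepted by this stub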
if sum(tx_out['value'] for tx_out in block['txs'][0]['tx_outs']) != sum(tx['tx_outs'][0]['value'] for tx in block['txs'][1:]) + 5000000000:
print 'invalid fee'
if block['header']['previous_block'] != self.blocks[-1]:
return False
if bitcoin_data.hash256(result.decode('hex')) > block['header']['bits'].target:
return False
header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header']))
self.blocks.append(header_hash)
self.headers[header_hash] = block['header']
reactor.callLater(0, self.new_block.happened)
return True
else:
raise jsonrpc.Error_for_code(-1)('invalid request')
txs = []
for i in xrange(100):
fee = i
txs.append(dict(
data=bitcoin_data.tx_type.pack(dict(version=1, tx_ins=[], tx_outs=[dict(value=fee, script='hello!'*100)], lock_time=0)).encode('hex'),
fee=fee,
))
return {
"version" : 2,
"previousblockhash" : '%064x' % (self.blocks[-1],),
"transactions" : txs,
"coinbaseaux" : {
"flags" : "062f503253482f"
},
"coinbasevalue" : 5000000000 + sum(tx['fee'] for tx in txs),
"target" : "0000000000000513c50000000000000000000000000000000000000000000000",
"mintime" : 1351655621,
"mutable" : [
"time",
"transactions",
"prevblock"
],
"noncerange" : "00000000ffffffff",
"sigoplimit" : 20000,
"sizelimit" : 1000000,
"curtime" : 1351659940,
"bits" : "21008000",
"height" : len(self.blocks),
}
@apply
class mm_provider(object):
def __getattr__(self, name):
print '>>>>>>>', name
def rpc_getauxblock(self, request, result1=None, result2=None):
if result1 is not None:
print result1, result2
return True
return {
"target" : "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", # 2**256*2/3
"hash" : "2756ea0315d46dc3d8d974f34380873fc88863845ac01a658ef11bc3b368af52",
"chainid" : 1
}
mynet = math.Object(
NAME='mynet',
PARENT=networks.nets['litecoin_testnet'],
SHARE_PERIOD=5, # seconds
CHAIN_LENGTH=20*60//3, # shares
REAL_CHAIN_LENGTH=20*60//3, # shares
TARGET_LOOKBEHIND=200, # shares
SPREAD=3, # blocks
IDENTIFIER='cca5e24ec6408b1e'.decode('hex'),
PREFIX='ad9614f6466a39cf'.decode('hex'),
P2P_PORT=19338,
MIN_TARGET=2**256 - 1,
MAX_TARGET=2**256 - 1,
PERSIST=False,
WORKER_PORT=19327,
BOOTSTRAP_ADDRS='72.14.191.28'.split(' '),
ANNOUNCE_CHANNEL='#p2pool-alt',
VERSION_CHECK=lambda v: True,
)
class MiniNode(object):
@classmethod
@defer.inlineCallbacks
def start(cls, net, factory, bitcoind, peer_ports, merged_urls):
self = cls()
self.n = node.Node(factory, bitcoind, [], [], net)
yield self.n.start()
self.n.p2p_node = node.P2PNode(self.n, port=0, max_incoming_conns=1000000, addr_store={}, connect_addrs=[('127.0.0.1', peer_port) for peer_port in peer_ports])
self.n.p2p_node.start()
wb = work.WorkerBridge(node=self.n, my_pubkey_hash=random.randrange(2**160), donation_percentage=random.uniform(0, 10), merged_urls=merged_urls, worker_fee=3)
self.wb = wb
web_root = resource.Resource()
worker_interface.WorkerInterface(wb).attach_to(web_root)
self.web_port = reactor.listenTCP(0, server.Site(web_root))
defer.returnValue(self)
@defer.inlineCallbacks
def stop(self):
yield self.web_port.stopListening()
yield self.n.p2p_node.stop()
yield self.n.stop()
del self.web_port, self.n
class Test(unittest.TestCase):
@defer.inlineCallbacks
def test_node(self):
bitd = bitcoind()
mm_root = resource.Resource()
mm_root.putChild('', jsonrpc.HTTPServer(mm_provider))
mm_port = reactor.listenTCP(0, server.Site(mm_root))
n = node.Node(bitd, bitd, [], [], mynet)
yield n.start()
wb = work.WorkerBridge(node=n, my_pubkey_hash=42, donation_percentage=2, merged_urls=[('http://127.0.0.1:%i' % (mm_port.getHost().port,), '')], worker_fee=3)
web_root = resource.Resource()
worker_interface.WorkerInterface(wb).attach_to(web_root)
port = reactor.listenTCP(0, server.Site(web_root))
proxy = jsonrpc.HTTPProxy('http://127.0.0.1:' + str(port.getHost().port),
headers=dict(Authorization='Basic ' + base64.b64encode('user/0:password')))
yield deferral.sleep(3)
for i in xrange(100):
blah = yield proxy.rpc_getwork()
yield proxy.rpc_getwork(blah['data'])
yield deferral.sleep(3)
assert len(n.tracker.items) == 100
assert n.tracker.verified.get_height(n.best_share_var.value) == 100
wb.stop()
n.stop()
yield port.stopListening()
del n, wb, web_root, port, proxy
import gc
gc.collect()
gc.collect()
gc.collect()
yield deferral.sleep(20) # waiting for work_poller to exit
yield mm_port.stopListening()
#test_node.timeout = 15
@defer.inlineCallbacks
def test_nodes(self):
N = 3
SHARES = 600
bitd = bitcoind()
nodes = []
for i in xrange(N):
nodes.append((yield MiniNode.start(mynet, bitd, bitd, [mn.n.p2p_node.serverfactory.listen_port.getHost().port for mn in nodes], [])))
yield deferral.sleep(3)
for i in xrange(SHARES):
proxy = jsonrpc.HTTPProxy('http://127.0.0.1:' + str(random.choice(nodes).web_port.getHost().port),
headers=dict(Authorization='Basic ' + base64.b64encode('user/0:password')))
blah = yield proxy.rpc_getwork()
yield proxy.rpc_getwork(blah['data'])
yield deferral.sleep(.05)
print i
print type(nodes[0].n.tracker.items[nodes[0].n.best_share_var.value])
# crawl web pages
from p2pool import web
stop_event = variable.Event()
web2_root = web.get_web_root(nodes[0].wb, tempfile.mkdtemp(), variable.Variable(None), stop_event)
web2_port = reactor.listenTCP(0, server.Site(web2_root))
for name in web2_root.listNames() + ['web/' + x for x in web2_root.getChildWithDefault('web', None).listNames()]:
if name in ['web/graph_data', 'web/share', 'web/share_data']: continue
print
print name
try:
res = yield client.getPage('http://127.0.0.1:%i/%s' % (web2_port.getHost().port, name))
except:
import traceback
traceback.print_exc()
else:
print repr(res)[:100]
print
yield web2_port.stopListening()
stop_event.happened()
del web2_root
yield deferral.sleep(3)
for i, n in enumerate(nodes):
assert len(n.n.tracker.items) == SHARES, (i, len(n.n.tracker.items))
assert n.n.tracker.verified.get_height(n.n.best_share_var.value) == SHARES, (i, n.n.tracker.verified.get_height(n.n.best_share_var.value))
assert type(n.n.tracker.items[nodes[0].n.best_share_var.value]) is (data.Share.SUCCESSOR if data.Share.SUCCESSOR is not None else data.Share)
assert type(n.n.tracker.items[n.n.tracker.get_nth_parent_hash(nodes[0].n.best_share_var.value, SHARES - 5)]) is data.Share
for n in nodes:
yield n.stop()
del nodes, n
import gc
gc.collect()
gc.collect()
gc.collect()
yield deferral.sleep(20) # waiting for work_poller to exit
test_nodes.timeout = 300
| gpl-3.0 |
dulems/hue | desktop/core/src/desktop/lib/view_util.py | 38 | 2507 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for views (text and number formatting, etc)"""
import math
import datetime
def big_filesizeformat(bytes):
  if bytes is None or bytes == "":
return "N/A"
assert bytes >= 0
# Special case small numbers (including 0), because they're exact.
if bytes < 1024:
return "%d B" % bytes
units = ["B", "KB", "MB", "GB", "TB", "PB"]
index = int(math.floor(math.log(bytes, 1024)))
index = min(len(units) - 1, index)
return( "%.1f %s" % (bytes / math.pow(1024, index), units[index]) )
def format_time_diff(start=None, end=None):
"""
formats the difference between two times as Xd:Xh:Xm:Xs
"""
if (end is None):
end = datetime.datetime.now()
diff = end - start
minutes, seconds = divmod(diff.seconds, 60)
hours, minutes = divmod(minutes, 60)
days = diff.days
output = []
written = False
if days:
written = True
output.append("%dd" % days)
if written or hours:
written = True
output.append("%dh" % hours)
if written or minutes:
output.append("%dm" % minutes)
output.append("%ds" % seconds)
return ":".join(output)
def format_duration_in_millis(duration=0):
"""
formats the difference between two times in millis as Xd:Xh:Xm:Xs
"""
  seconds, millis = divmod(duration, 1000) # the sub-second remainder is discarded
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
output = []
written = False
if days:
written = True
output.append("%dd" % days)
if written or hours:
written = True
output.append("%dh" % hours)
if written or minutes:
output.append("%dm" % minutes)
output.append("%ds" % seconds)
return ":".join(output)
| apache-2.0 |
malayaleecoder/servo | tests/wpt/harness/wptrunner/metadata.py | 78 | 12836 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import shutil
import sys
import tempfile
import types
import uuid
from collections import defaultdict
from mozlog import reader
from mozlog import structuredlog
import expected
import manifestupdate
import testloader
import wptmanifest
import wpttest
from vcs import git
manifest = None # Module that will be imported relative to test_root
logger = structuredlog.StructuredLogger("web-platform-tests")
def load_test_manifests(serve_root, test_paths):
do_delayed_imports(serve_root)
manifest_loader = testloader.ManifestLoader(test_paths, False)
return manifest_loader.load()
def update_expected(test_paths, serve_root, log_file_names,
rev_old=None, rev_new="HEAD", ignore_existing=False,
sync_root=None, property_order=None, boolean_properties=None):
"""Update the metadata files for web-platform-tests based on
the results obtained in a previous run"""
manifests = load_test_manifests(serve_root, test_paths)
change_data = {}
if sync_root is not None:
if rev_old is not None:
rev_old = git("rev-parse", rev_old, repo=sync_root).strip()
rev_new = git("rev-parse", rev_new, repo=sync_root).strip()
if rev_old is not None:
change_data = load_change_data(rev_old, rev_new, repo=sync_root)
expected_map_by_manifest = update_from_logs(manifests,
*log_file_names,
ignore_existing=ignore_existing,
property_order=property_order,
boolean_properties=boolean_properties)
for test_manifest, expected_map in expected_map_by_manifest.iteritems():
url_base = manifests[test_manifest]["url_base"]
metadata_path = test_paths[url_base]["metadata_path"]
write_changes(metadata_path, expected_map)
results_changed = [item.test_path for item in expected_map.itervalues() if item.modified]
return unexpected_changes(manifests, change_data, results_changed)
def do_delayed_imports(serve_root):
global manifest
from manifest import manifest
def files_in_repo(repo_root):
return git("ls-tree", "-r", "--name-only", "HEAD").split("\n")
def rev_range(rev_old, rev_new, symmetric=False):
joiner = ".." if not symmetric else "..."
return "".join([rev_old, joiner, rev_new])
def paths_changed(rev_old, rev_new, repo):
data = git("diff", "--name-status", rev_range(rev_old, rev_new), repo=repo)
lines = [tuple(item.strip() for item in line.strip().split("\t", 1))
for line in data.split("\n") if line.strip()]
output = set(lines)
return output
def load_change_data(rev_old, rev_new, repo):
changes = paths_changed(rev_old, rev_new, repo)
rv = {}
status_keys = {"M": "modified",
"A": "new",
"D": "deleted"}
# TODO: deal with renames
for item in changes:
rv[item[1]] = status_keys[item[0]]
return rv
def unexpected_changes(manifests, change_data, files_changed):
files_changed = set(files_changed)
root_manifest = None
for manifest, paths in manifests.iteritems():
if paths["url_base"] == "/":
root_manifest = manifest
break
else:
return []
return [fn for fn, tests in root_manifest if fn in files_changed and change_data.get(fn) != "M"]
# For each testrun
# Load all files and scan for the suite_start entry
# Build a hash of filename: properties
# For each different set of properties, gather all chunks
# For each chunk in the set of chunks, go through all tests
# for each test, make a map of {conditionals: [(platform, new_value)]}
# Repeat for each platform
# For each test in the list of tests:
# for each conditional:
# If all the new values match (or there aren't any) retain that conditional
# If any new values mismatch mark the test as needing human attention
# Check if all the RHS values are the same; if so collapse the conditionals
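# A standalone sketch (hypothetical; not used by this module) of the collapse
# step described above: for {conditional: [(platform, new_value), ...]}, a
# conditional survives only if every platform agrees on the new value, and a
# disagreement flags the test as needing human attention.
def _collapse_sketch(cond_values):
    collapsed = {}
    needs_human_attention = []
    for cond, updates in cond_values.items():
        values = set(value for _, value in updates)
        if len(values) <= 1:
            # All the new values match (or there aren't any): retain it.
            if values:
                collapsed[cond] = values.pop()
        else:
            needs_human_attention.append(cond)
    return collapsed, needs_human_attention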
def update_from_logs(manifests, *log_filenames, **kwargs):
ignore_existing = kwargs.get("ignore_existing", False)
property_order = kwargs.get("property_order")
boolean_properties = kwargs.get("boolean_properties")
expected_map = {}
id_test_map = {}
for test_manifest, paths in manifests.iteritems():
expected_map_manifest, id_path_map_manifest = create_test_tree(
paths["metadata_path"],
test_manifest,
property_order=property_order,
boolean_properties=boolean_properties)
expected_map[test_manifest] = expected_map_manifest
id_test_map.update(id_path_map_manifest)
updater = ExpectedUpdater(manifests, expected_map, id_test_map,
ignore_existing=ignore_existing)
for log_filename in log_filenames:
with open(log_filename) as f:
updater.update_from_log(f)
for manifest_expected in expected_map.itervalues():
for tree in manifest_expected.itervalues():
for test in tree.iterchildren():
for subtest in test.iterchildren():
subtest.coalesce_expected()
test.coalesce_expected()
return expected_map
def directory_manifests(metadata_path):
rv = []
    for dirpath, dirnames, filenames in os.walk(metadata_path):
if "__dir__.ini" in filenames:
rel_path = os.path.relpath(dirpath, metadata_path)
rv.append(os.path.join(rel_path, "__dir__.ini"))
return rv
def write_changes(metadata_path, expected_map):
# First write the new manifest files to a temporary directory
temp_path = tempfile.mkdtemp(dir=os.path.split(metadata_path)[0])
write_new_expected(temp_path, expected_map)
# Keep all __dir__.ini files (these are not in expected_map because they
# aren't associated with a specific test)
keep_files = directory_manifests(metadata_path)
# Copy all files in the root to the temporary location since
# these cannot be ini files
keep_files.extend(item for item in os.listdir(metadata_path) if
not os.path.isdir(os.path.join(metadata_path, item)))
for item in keep_files:
dest_dir = os.path.dirname(os.path.join(temp_path, item))
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
shutil.copyfile(os.path.join(metadata_path, item),
os.path.join(temp_path, item))
# Then move the old manifest files to a new location
temp_path_2 = metadata_path + str(uuid.uuid4())
os.rename(metadata_path, temp_path_2)
# Move the new files to the destination location and remove the old files
os.rename(temp_path, metadata_path)
shutil.rmtree(temp_path_2)
def write_new_expected(metadata_path, expected_map):
# Serialize the data back to a file
for tree in expected_map.itervalues():
if not tree.is_empty:
manifest_str = wptmanifest.serialize(tree.node, skip_empty_data=True)
assert manifest_str != ""
path = expected.expected_path(metadata_path, tree.test_path)
dir = os.path.split(path)[0]
if not os.path.exists(dir):
os.makedirs(dir)
with open(path, "w") as f:
f.write(manifest_str)
class ExpectedUpdater(object):
def __init__(self, test_manifests, expected_tree, id_path_map, ignore_existing=False):
self.test_manifests = test_manifests
self.expected_tree = expected_tree
self.id_path_map = id_path_map
self.ignore_existing = ignore_existing
self.run_info = None
self.action_map = {"suite_start": self.suite_start,
"test_start": self.test_start,
"test_status": self.test_status,
"test_end": self.test_end}
self.tests_visited = {}
self.test_cache = {}
def update_from_log(self, log_file):
self.run_info = None
log_reader = reader.read(log_file)
reader.each_log(log_reader, self.action_map)
def suite_start(self, data):
self.run_info = data["run_info"]
def test_id(self, id):
if type(id) in types.StringTypes:
return id
else:
return tuple(id)
def test_start(self, data):
test_id = self.test_id(data["test"])
try:
test_manifest, test = self.id_path_map[test_id]
expected_node = self.expected_tree[test_manifest][test].get_test(test_id)
except KeyError:
print "Test not found %s, skipping" % test_id
return
self.test_cache[test_id] = expected_node
if test_id not in self.tests_visited:
if self.ignore_existing:
expected_node.clear_expected()
self.tests_visited[test_id] = set()
def test_status(self, data):
test_id = self.test_id(data["test"])
test = self.test_cache.get(test_id)
if test is None:
return
test_cls = wpttest.manifest_test_cls[test.test_type]
subtest = test.get_subtest(data["subtest"])
self.tests_visited[test.id].add(data["subtest"])
result = test_cls.subtest_result_cls(
data["subtest"],
data["status"],
data.get("message"))
subtest.set_result(self.run_info, result)
def test_end(self, data):
test_id = self.test_id(data["test"])
test = self.test_cache.get(test_id)
if test is None:
return
test_cls = wpttest.manifest_test_cls[test.test_type]
if data["status"] == "SKIP":
return
result = test_cls.result_cls(
data["status"],
data.get("message"))
test.set_result(self.run_info, result)
del self.test_cache[test_id]
def create_test_tree(metadata_path, test_manifest, property_order=None,
boolean_properties=None):
expected_map = {}
id_test_map = {}
exclude_types = frozenset(["stub", "helper", "manual"])
include_types = set(manifest.item_types) - exclude_types
for test_path, tests in test_manifest.itertypes(*include_types):
expected_data = load_expected(test_manifest, metadata_path, test_path, tests,
property_order=property_order,
boolean_properties=boolean_properties)
if expected_data is None:
expected_data = create_expected(test_manifest,
test_path,
tests,
property_order=property_order,
boolean_properties=boolean_properties)
for test in tests:
id_test_map[test.id] = (test_manifest, test)
expected_map[test] = expected_data
return expected_map, id_test_map
def create_expected(test_manifest, test_path, tests, property_order=None,
boolean_properties=None):
expected = manifestupdate.ExpectedManifest(None, test_path, test_manifest.url_base,
property_order=property_order,
boolean_properties=boolean_properties)
for test in tests:
expected.append(manifestupdate.TestNode.create(test.item_type, test.id))
return expected
def load_expected(test_manifest, metadata_path, test_path, tests, property_order=None,
boolean_properties=None):
expected_manifest = manifestupdate.get_manifest(metadata_path,
test_path,
test_manifest.url_base,
property_order=property_order,
boolean_properties=boolean_properties)
if expected_manifest is None:
return
tests_by_id = {item.id: item for item in tests}
# Remove expected data for tests that no longer exist
for test in expected_manifest.iterchildren():
        if test.id not in tests_by_id:
test.remove()
# Add tests that don't have expected data
for test in tests:
if not expected_manifest.has_test(test.id):
expected_manifest.append(manifestupdate.TestNode.create(test.item_type, test.id))
return expected_manifest
| mpl-2.0 |
40223246/w16b_test | static/Brython3.1.3-20150514-095342/Lib/pydoc.py | 637 | 102017 | #!/usr/bin/env python3
"""Generate Python documentation in HTML or text for interactive use.
In the Python interpreter, do "from pydoc import help" to provide
help. Calling help(thing) on a Python object documents the object.
Or, at the shell command line outside of Python:
Run "pydoc <name>" to show documentation on something. <name> may be
the name of a function, module, package, or a dotted reference to a
class or function within a module or module in a package. If the
argument contains a path segment delimiter (e.g. slash on Unix,
backslash on Windows) it is treated as the path to a Python source file.
Run "pydoc -k <keyword>" to search for a keyword in the synopsis lines
of all available modules.
Run "pydoc -p <port>" to start an HTTP server on the given port on the
local machine. Port number 0 can be used to get an arbitrary unused port.
Run "pydoc -b" to start an HTTP server on an arbitrary unused port and
open a Web browser to interactively browse documentation. The -p option
can be used with the -b option to explicitly specify the server port.
Run "pydoc -w <name>" to write out the HTML documentation for a module
to a file named "<name>.html".
Module docs for core modules are assumed to be in
http://docs.python.org/X.Y/library/
This can be overridden by setting the PYTHONDOCS environment variable
to a different URL or to a local directory containing the Library
Reference Manual pages.
"""
__all__ = ['help']
__author__ = "Ka-Ping Yee <[email protected]>"
__date__ = "26 February 2001"
__credits__ = """Guido van Rossum, for an excellent programming language.
Tommy Burnette, the original creator of manpy.
Paul Prescod, for all his work on onlinehelp.
Richard Chamberlain, for the first implementation of textdoc.
"""
# Known bugs that can't be fixed here:
# - imp.load_module() cannot be prevented from clobbering existing
# loaded modules, so calling synopsis() on a binary module file
# changes the contents of any existing module with the same name.
# - If the __file__ attribute on a module is a relative path and
# the current directory is changed with os.chdir(), an incorrect
# path will be displayed.
import builtins
import imp
import importlib.machinery
#brython fix me
import inspect
import io
import os
#brython fix me
#import pkgutil
import platform
import re
import sys
import time
import tokenize
import warnings
from collections import deque
from reprlib import Repr
#fix me brython
#from traceback import extract_tb, format_exception_only
# --------------------------------------------------------- common routines
def pathdirs():
"""Convert sys.path into a list of absolute, existing, unique paths."""
dirs = []
normdirs = []
for dir in sys.path:
dir = os.path.abspath(dir or '.')
normdir = os.path.normcase(dir)
if normdir not in normdirs and os.path.isdir(dir):
dirs.append(dir)
normdirs.append(normdir)
return dirs
def getdoc(object):
"""Get the doc string or comments for an object."""
result = inspect.getdoc(object) or inspect.getcomments(object)
return result and re.sub('^ *\n', '', result.rstrip()) or ''
def splitdoc(doc):
"""Split a doc string into a synopsis line (if any) and the rest."""
lines = doc.strip().split('\n')
if len(lines) == 1:
return lines[0], ''
elif len(lines) >= 2 and not lines[1].rstrip():
return lines[0], '\n'.join(lines[2:])
return '', '\n'.join(lines)
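# Illustrative (a sketch, not a doctest): splitdoc("Summary.\n\nBody text.")
# returns ("Summary.", "Body text."), while a one-line docstring yields
# (that_line, '').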
def classname(object, modname):
"""Get a class name and qualify it with a module name if necessary."""
name = object.__name__
if object.__module__ != modname:
name = object.__module__ + '.' + name
return name
def isdata(object):
"""Check if an object is of a type that probably means it's data."""
return not (inspect.ismodule(object) or inspect.isclass(object) or
inspect.isroutine(object) or inspect.isframe(object) or
inspect.istraceback(object) or inspect.iscode(object))
def replace(text, *pairs):
"""Do a series of global replacements on a string."""
while pairs:
text = pairs[1].join(text.split(pairs[0]))
pairs = pairs[2:]
return text
def cram(text, maxlen):
"""Omit part of a string if needed to make it fit in a maximum length."""
if len(text) > maxlen:
pre = max(0, (maxlen-3)//2)
post = max(0, maxlen-3-pre)
return text[:pre] + '...' + text[len(text)-post:]
return text
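# Illustrative: cram('abcdefghij', 7) keeps roughly half the budget from each
# end and elides the middle, giving 'ab...ij'; text already within maxlen is
# returned unchanged.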
_re_stripid = re.compile(r' at 0x[0-9a-f]{6,16}(>+)$', re.IGNORECASE)
def stripid(text):
"""Remove the hexadecimal id from a Python object representation."""
# The behaviour of %p is implementation-dependent in terms of case.
#fix me brython
#return _re_stripid.sub(r'\1', text)
return text
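# Illustrative: with the substitution enabled, stripid('<function f at 0x03fbbd50>')
# would yield '<function f>'; this port returns the text unchanged (see the
# "fix me brython" note above).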
def _is_some_method(obj):
return (inspect.isfunction(obj) or
inspect.ismethod(obj) or
inspect.isbuiltin(obj) or
inspect.ismethoddescriptor(obj))
def allmethods(cl):
methods = {}
for key, value in inspect.getmembers(cl, _is_some_method):
methods[key] = 1
for base in cl.__bases__:
methods.update(allmethods(base)) # all your base are belong to us
for key in methods.keys():
methods[key] = getattr(cl, key)
return methods
def _split_list(s, predicate):
"""Split sequence s via predicate, and return pair ([true], [false]).
The return value is a 2-tuple of lists,
([x for x in s if predicate(x)],
[x for x in s if not predicate(x)])
"""
yes = []
no = []
for x in s:
if predicate(x):
yes.append(x)
else:
no.append(x)
return yes, no
def visiblename(name, all=None, obj=None):
"""Decide whether to show documentation on a variable."""
# Certain special names are redundant or internal.
if name in {'__author__', '__builtins__', '__cached__', '__credits__',
'__date__', '__doc__', '__file__', '__initializing__',
'__loader__', '__module__', '__name__', '__package__',
'__path__', '__qualname__', '__slots__', '__version__'}:
return 0
# Private names are hidden, but special names are displayed.
if name.startswith('__') and name.endswith('__'): return 1
# Namedtuples have public fields and methods with a single leading underscore
if name.startswith('_') and hasattr(obj, '_fields'):
return True
if all is not None:
# only document that which the programmer exported in __all__
return name in all
else:
return not name.startswith('_')
def classify_class_attrs(object):
"""Wrap inspect.classify_class_attrs, with fixup for data descriptors."""
results = []
for (name, kind, cls, value) in inspect.classify_class_attrs(object):
if inspect.isdatadescriptor(value):
kind = 'data descriptor'
results.append((name, kind, cls, value))
return results
# ----------------------------------------------------- module manipulation
def ispackage(path):
"""Guess whether a path refers to a package directory."""
if os.path.isdir(path):
for ext in ('.py', '.pyc', '.pyo'):
if os.path.isfile(os.path.join(path, '__init__' + ext)):
return True
return False
def source_synopsis(file):
line = file.readline()
while line[:1] == '#' or not line.strip():
line = file.readline()
if not line: break
line = line.strip()
if line[:4] == 'r"""': line = line[1:]
if line[:3] == '"""':
line = line[3:]
if line[-1:] == '\\': line = line[:-1]
while not line.strip():
line = file.readline()
if not line: break
result = line.split('"""')[0].strip()
else: result = None
return result
def synopsis(filename, cache={}):
"""Get the one-line summary out of a module file."""
mtime = os.stat(filename).st_mtime
lastupdate, result = cache.get(filename, (None, None))
if lastupdate is None or lastupdate < mtime:
try:
file = tokenize.open(filename)
except IOError:
# module can't be opened, so skip it
return None
binary_suffixes = importlib.machinery.BYTECODE_SUFFIXES[:]
binary_suffixes += importlib.machinery.EXTENSION_SUFFIXES[:]
if any(filename.endswith(x) for x in binary_suffixes):
# binary modules have to be imported
file.close()
if any(filename.endswith(x) for x in
importlib.machinery.BYTECODE_SUFFIXES):
loader = importlib.machinery.SourcelessFileLoader('__temp__',
filename)
else:
loader = importlib.machinery.ExtensionFileLoader('__temp__',
filename)
try:
module = loader.load_module('__temp__')
except:
return None
result = (module.__doc__ or '').splitlines()[0]
del sys.modules['__temp__']
else:
# text modules can be directly examined
result = source_synopsis(file)
file.close()
cache[filename] = (mtime, result)
return result
class ErrorDuringImport(Exception):
"""Errors that occurred while trying to import something to document it."""
def __init__(self, filename, exc_info):
self.filename = filename
self.exc, self.value, self.tb = exc_info
def __str__(self):
exc = self.exc.__name__
return 'problem in %s - %s: %s' % (self.filename, exc, self.value)
def importfile(path):
"""Import a Python source file or compiled file given its path."""
magic = imp.get_magic()
with open(path, 'rb') as file:
if file.read(len(magic)) == magic:
kind = imp.PY_COMPILED
else:
kind = imp.PY_SOURCE
file.seek(0)
filename = os.path.basename(path)
name, ext = os.path.splitext(filename)
try:
module = imp.load_module(name, file, path, (ext, 'r', kind))
except:
raise ErrorDuringImport(path, sys.exc_info())
return module
def safeimport(path, forceload=0, cache={}):
"""Import a module; handle errors; return None if the module isn't found.
If the module *is* found but an exception occurs, it's wrapped in an
ErrorDuringImport exception and reraised. Unlike __import__, if a
package path is specified, the module at the end of the path is returned,
not the package at the beginning. If the optional 'forceload' argument
is 1, we reload the module from disk (unless it's a dynamic extension)."""
try:
# If forceload is 1 and the module has been previously loaded from
# disk, we always have to reload the module. Checking the file's
# mtime isn't good enough (e.g. the module could contain a class
# that inherits from another module that has changed).
if forceload and path in sys.modules:
if path not in sys.builtin_module_names:
# Remove the module from sys.modules and re-import to try
# and avoid problems with partially loaded modules.
# Also remove any submodules because they won't appear
# in the newly loaded module's namespace if they're already
# in sys.modules.
subs = [m for m in sys.modules if m.startswith(path + '.')]
for key in [path] + subs:
# Prevent garbage collection.
cache[key] = sys.modules[key]
del sys.modules[key]
module = __import__(path)
except:
# Did the error occur before or after the module was found?
(exc, value, tb) = info = sys.exc_info()
if path in sys.modules:
# An error occurred while executing the imported module.
raise ErrorDuringImport(sys.modules[path].__file__, info)
elif exc is SyntaxError:
# A SyntaxError occurred before we could execute the module.
raise ErrorDuringImport(value.filename, info)
#fix me brython
#elif exc is ImportError and value.name == path:
elif exc is ImportError and str(value) == str(path):
# No such module in the path.
return None
else:
# Some other error occurred during the importing process.
raise ErrorDuringImport(path, sys.exc_info())
for part in path.split('.')[1:]:
try: module = getattr(module, part)
except AttributeError: return None
return module
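# Illustrative usage (hypothetical module name):
#   mod = safeimport('email.mime.text')  # -> the leaf module, or None if absent
# Errors raised while importing are re-raised wrapped in ErrorDuringImport,
# and forceload=1 first drops a previously loaded module (plus its submodules)
# from sys.modules so that it is re-read from disk.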
# ---------------------------------------------------- formatter base class
class Doc:
PYTHONDOCS = os.environ.get("PYTHONDOCS",
"http://docs.python.org/%d.%d/library"
% sys.version_info[:2])
def document(self, object, name=None, *args):
"""Generate documentation for an object."""
args = (object, name) + args
# 'try' clause is to attempt to handle the possibility that inspect
# identifies something in a way that pydoc itself has issues handling;
# think 'super' and how it is a descriptor (which raises the exception
# by lacking a __name__ attribute) and an instance.
if inspect.isgetsetdescriptor(object): return self.docdata(*args)
if inspect.ismemberdescriptor(object): return self.docdata(*args)
try:
if inspect.ismodule(object): return self.docmodule(*args)
if inspect.isclass(object): return self.docclass(*args)
if inspect.isroutine(object): return self.docroutine(*args)
except AttributeError:
pass
if isinstance(object, property): return self.docproperty(*args)
return self.docother(*args)
def fail(self, object, name=None, *args):
"""Raise an exception for unimplemented types."""
message = "don't know how to document object%s of type %s" % (
name and ' ' + repr(name), type(object).__name__)
raise TypeError(message)
docmodule = docclass = docroutine = docother = docproperty = docdata = fail
def getdocloc(self, object):
"""Return the location of module docs or None"""
try:
file = inspect.getabsfile(object)
except TypeError:
file = '(built-in)'
docloc = os.environ.get("PYTHONDOCS", self.PYTHONDOCS)
basedir = os.path.join(sys.base_exec_prefix, "lib",
"python%d.%d" % sys.version_info[:2])
if (isinstance(object, type(os)) and
(object.__name__ in ('errno', 'exceptions', 'gc', 'imp',
'marshal', 'posix', 'signal', 'sys',
'_thread', 'zipimport') or
(file.startswith(basedir) and
not file.startswith(os.path.join(basedir, 'site-packages')))) and
object.__name__ not in ('xml.etree', 'test.pydoc_mod')):
if docloc.startswith("http://"):
docloc = "%s/%s" % (docloc.rstrip("/"), object.__name__)
else:
docloc = os.path.join(docloc, object.__name__ + ".html")
else:
docloc = None
return docloc
# -------------------------------------------- HTML documentation generator
class HTMLRepr(Repr):
"""Class for safely making an HTML representation of a Python object."""
def __init__(self):
Repr.__init__(self)
self.maxlist = self.maxtuple = 20
self.maxdict = 10
self.maxstring = self.maxother = 100
def escape(self, text):
return replace(text, '&', '&', '<', '<', '>', '>')
def repr(self, object):
return Repr.repr(self, object)
def repr1(self, x, level):
if hasattr(type(x), '__name__'):
methodname = 'repr_' + '_'.join(type(x).__name__.split())
if hasattr(self, methodname):
return getattr(self, methodname)(x, level)
return self.escape(cram(stripid(repr(x)), self.maxother))
def repr_string(self, x, level):
test = cram(x, self.maxstring)
testrepr = repr(test)
if '\\' in test and '\\' not in replace(testrepr, r'\\', ''):
# Backslashes are only literal in the string and are never
# needed to make any special characters, so show a raw string.
return 'r' + testrepr[0] + self.escape(test) + testrepr[0]
return re.sub(r'((\\[\\abfnrtv\'"]|\\[0-9]..|\\x..|\\u....)+)',
r'<font color="#c040c0">\1</font>',
self.escape(testrepr))
repr_str = repr_string
def repr_instance(self, x, level):
try:
return self.escape(cram(stripid(repr(x)), self.maxstring))
except:
return self.escape('<%s instance>' % x.__class__.__name__)
repr_unicode = repr_string
class HTMLDoc(Doc):
"""Formatter class for HTML documentation."""
# ------------------------------------------- HTML formatting utilities
_repr_instance = HTMLRepr()
repr = _repr_instance.repr
escape = _repr_instance.escape
def page(self, title, contents):
"""Format an HTML page."""
return '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html><head><title>Python: %s</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
</head><body bgcolor="#f0f0f8">
%s
</body></html>''' % (title, contents)
def heading(self, title, fgcol, bgcol, extras=''):
"""Format a page heading."""
return '''
<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="heading">
<tr bgcolor="%s">
<td valign=bottom>&nbsp;<br>
<font color="%s" face="helvetica, arial">&nbsp;<br>%s</font></td
><td align=right valign=bottom
><font color="%s" face="helvetica, arial">%s</font></td></tr></table>
    ''' % (bgcol, fgcol, title, fgcol, extras or '&nbsp;')
def section(self, title, fgcol, bgcol, contents, width=6,
                prelude='', marginalia=None, gap='&nbsp;'):
"""Format a section with a heading."""
if marginalia is None:
            marginalia = '<tt>' + '&nbsp;' * width + '</tt>'
result = '''<p>
<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="section">
<tr bgcolor="%s">
<td colspan=3 valign=bottom>&nbsp;<br>
<font color="%s" face="helvetica, arial">%s</font></td></tr>
''' % (bgcol, fgcol, title)
if prelude:
result = result + '''
<tr bgcolor="%s"><td rowspan=2>%s</td>
<td colspan=2>%s</td></tr>
<tr><td>%s</td>''' % (bgcol, marginalia, prelude, gap)
else:
result = result + '''
<tr><td bgcolor="%s">%s</td><td>%s</td>''' % (bgcol, marginalia, gap)
return result + '\n<td width="100%%">%s</td></tr></table>' % contents
def bigsection(self, title, *args):
"""Format a section with a big heading."""
title = '<big><strong>%s</strong></big>' % title
return self.section(title, *args)
def preformat(self, text):
"""Format literal preformatted text."""
text = self.escape(text.expandtabs())
return replace(text, '\n\n', '\n \n', '\n\n', '\n \n',
                             ' ', '&nbsp;', '\n', '<br>\n')
def multicolumn(self, list, format, cols=4):
"""Format a list of items into a multi-column list."""
result = ''
rows = (len(list)+cols-1)//cols
for col in range(cols):
result = result + '<td width="%d%%" valign=top>' % (100//cols)
for i in range(rows*col, rows*col+rows):
if i < len(list):
result = result + format(list[i]) + '<br>\n'
result = result + '</td>'
return '<table width="100%%" summary="list"><tr>%s</tr></table>' % result
def grey(self, text): return '<font color="#909090">%s</font>' % text
def namelink(self, name, *dicts):
"""Make a link for an identifier, given name-to-URL mappings."""
for dict in dicts:
if name in dict:
return '<a href="%s">%s</a>' % (dict[name], name)
return name
def classlink(self, object, modname):
"""Make a link for a class."""
name, module = object.__name__, sys.modules.get(object.__module__)
if hasattr(module, name) and getattr(module, name) is object:
return '<a href="%s.html#%s">%s</a>' % (
module.__name__, name, classname(object, modname))
return classname(object, modname)
def modulelink(self, object):
"""Make a link for a module."""
return '<a href="%s.html">%s</a>' % (object.__name__, object.__name__)
def modpkglink(self, modpkginfo):
"""Make a link for a module or package to display in an index."""
name, path, ispackage, shadowed = modpkginfo
if shadowed:
return self.grey(name)
if path:
url = '%s.%s.html' % (path, name)
else:
url = '%s.html' % name
if ispackage:
text = '<strong>%s</strong> (package)' % name
else:
text = name
return '<a href="%s">%s</a>' % (url, text)
def filelink(self, url, path):
"""Make a link to source file."""
return '<a href="file:%s">%s</a>' % (url, path)
def markup(self, text, escape=None, funcs={}, classes={}, methods={}):
"""Mark up some plain text, given a context of symbols to look for.
Each context dictionary maps object names to anchor names."""
escape = escape or self.escape
results = []
here = 0
pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
r'RFC[- ]?(\d+)|'
r'PEP[- ]?(\d+)|'
r'(self\.)?(\w+))')
while True:
match = pattern.search(text, here)
if not match: break
start, end = match.span()
results.append(escape(text[here:start]))
all, scheme, rfc, pep, selfdot, name = match.groups()
if scheme:
                url = escape(all).replace('"', '&quot;')
results.append('<a href="%s">%s</a>' % (url, url))
elif rfc:
url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif pep:
url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif text[end:end+1] == '(':
results.append(self.namelink(name, methods, funcs, classes))
elif selfdot:
results.append('self.<strong>%s</strong>' % name)
else:
results.append(self.namelink(name, classes))
here = end
results.append(escape(text[here:]))
return ''.join(results)
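    # Illustrative (a sketch): markup('See RFC 2822 at http://example.org.')
    # links the RFC number to rfc-editor.org, wraps the bare URL in an <a>
    # tag, and HTML-escapes the surrounding text.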
# ---------------------------------------------- type-specific routines
def formattree(self, tree, modname, parent=None):
"""Produce HTML for a class tree as given by inspect.getclasstree()."""
result = ''
for entry in tree:
if type(entry) is type(()):
c, bases = entry
result = result + '<dt><font face="helvetica, arial">'
result = result + self.classlink(c, modname)
if bases and bases != (parent,):
parents = []
for base in bases:
parents.append(self.classlink(base, modname))
result = result + '(' + ', '.join(parents) + ')'
result = result + '\n</font></dt>'
elif type(entry) is type([]):
result = result + '<dd>\n%s</dd>\n' % self.formattree(
entry, modname, c)
return '<dl>\n%s</dl>\n' % result
def docmodule(self, object, name=None, mod=None, *ignored):
"""Produce HTML documentation for a module object."""
name = object.__name__ # ignore the passed-in name
try:
all = object.__all__
except AttributeError:
all = None
parts = name.split('.')
links = []
for i in range(len(parts)-1):
links.append(
'<a href="%s.html"><font color="#ffffff">%s</font></a>' %
('.'.join(parts[:i+1]), parts[i]))
linkedname = '.'.join(links + parts[-1:])
head = '<big><big><strong>%s</strong></big></big>' % linkedname
try:
path = inspect.getabsfile(object)
url = path
if sys.platform == 'win32':
import nturl2path
url = nturl2path.pathname2url(path)
filelink = self.filelink(url, path)
except TypeError:
filelink = '(built-in)'
info = []
if hasattr(object, '__version__'):
version = str(object.__version__)
if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
version = version[11:-1].strip()
info.append('version %s' % self.escape(version))
if hasattr(object, '__date__'):
info.append(self.escape(str(object.__date__)))
if info:
head = head + ' (%s)' % ', '.join(info)
docloc = self.getdocloc(object)
if docloc is not None:
docloc = '<br><a href="%(docloc)s">Module Reference</a>' % locals()
else:
docloc = ''
result = self.heading(
head, '#ffffff', '#7799ee',
'<a href=".">index</a><br>' + filelink + docloc)
modules = inspect.getmembers(object, inspect.ismodule)
classes, cdict = [], {}
for key, value in inspect.getmembers(object, inspect.isclass):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None or
(inspect.getmodule(value) or object) is object):
if visiblename(key, all, object):
classes.append((key, value))
cdict[key] = cdict[value] = '#' + key
for key, value in classes:
for base in value.__bases__:
key, modname = base.__name__, base.__module__
module = sys.modules.get(modname)
if modname != name and module and hasattr(module, key):
if getattr(module, key) is base:
if not key in cdict:
cdict[key] = cdict[base] = modname + '.html#' + key
funcs, fdict = [], {}
for key, value in inspect.getmembers(object, inspect.isroutine):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None or
inspect.isbuiltin(value) or inspect.getmodule(value) is object):
if visiblename(key, all, object):
funcs.append((key, value))
fdict[key] = '#-' + key
if inspect.isfunction(value): fdict[value] = fdict[key]
data = []
for key, value in inspect.getmembers(object, isdata):
if visiblename(key, all, object):
data.append((key, value))
doc = self.markup(getdoc(object), self.preformat, fdict, cdict)
doc = doc and '<tt>%s</tt>' % doc
result = result + '<p>%s</p>\n' % doc
if hasattr(object, '__path__'):
modpkgs = []
for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
modpkgs.append((modname, name, ispkg, 0))
modpkgs.sort()
contents = self.multicolumn(modpkgs, self.modpkglink)
result = result + self.bigsection(
'Package Contents', '#ffffff', '#aa55cc', contents)
elif modules:
contents = self.multicolumn(
modules, lambda t: self.modulelink(t[1]))
result = result + self.bigsection(
'Modules', '#ffffff', '#aa55cc', contents)
if classes:
classlist = [value for (key, value) in classes]
contents = [
self.formattree(inspect.getclasstree(classlist, 1), name)]
for key, value in classes:
contents.append(self.document(value, key, name, fdict, cdict))
result = result + self.bigsection(
'Classes', '#ffffff', '#ee77aa', ' '.join(contents))
if funcs:
contents = []
for key, value in funcs:
contents.append(self.document(value, key, name, fdict, cdict))
result = result + self.bigsection(
'Functions', '#ffffff', '#eeaa77', ' '.join(contents))
if data:
contents = []
for key, value in data:
contents.append(self.document(value, key))
result = result + self.bigsection(
'Data', '#ffffff', '#55aa55', '<br>\n'.join(contents))
if hasattr(object, '__author__'):
contents = self.markup(str(object.__author__), self.preformat)
result = result + self.bigsection(
'Author', '#ffffff', '#7799ee', contents)
if hasattr(object, '__credits__'):
contents = self.markup(str(object.__credits__), self.preformat)
result = result + self.bigsection(
'Credits', '#ffffff', '#7799ee', contents)
return result
def docclass(self, object, name=None, mod=None, funcs={}, classes={},
*ignored):
"""Produce HTML documentation for a class object."""
realname = object.__name__
name = name or realname
bases = object.__bases__
contents = []
push = contents.append
# Cute little class to pump out a horizontal rule between sections.
class HorizontalRule:
def __init__(self):
self.needone = 0
def maybe(self):
if self.needone:
push('<hr>\n')
self.needone = 1
hr = HorizontalRule()
# List the mro, if non-trivial.
mro = deque(inspect.getmro(object))
if len(mro) > 2:
hr.maybe()
push('<dl><dt>Method resolution order:</dt>\n')
for base in mro:
push('<dd>%s</dd>\n' % self.classlink(base,
object.__module__))
push('</dl>\n')
def spill(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
try:
value = getattr(object, name)
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
push(self._docdescriptor(name, value, mod))
else:
push(self.document(value, name, mod,
funcs, classes, mdict, object))
push('\n')
return attrs
def spilldescriptors(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
push(self._docdescriptor(name, value, mod))
return attrs
def spilldata(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
base = self.docother(getattr(object, name), name, mod)
if callable(value) or inspect.isdatadescriptor(value):
doc = getattr(value, "__doc__", None)
else:
doc = None
if doc is None:
push('<dl><dt>%s</dl>\n' % base)
else:
doc = self.markup(getdoc(value), self.preformat,
funcs, classes, mdict)
doc = '<dd><tt>%s</tt>' % doc
push('<dl><dt>%s%s</dl>\n' % (base, doc))
push('\n')
return attrs
attrs = [(name, kind, cls, value)
for name, kind, cls, value in classify_class_attrs(object)
if visiblename(name, obj=object)]
mdict = {}
for key, kind, homecls, value in attrs:
mdict[key] = anchor = '#' + name + '-' + key
try:
value = getattr(object, name)
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
pass
try:
# The value may not be hashable (e.g., a data attr with
# a dict or list value).
mdict[value] = anchor
except TypeError:
pass
while attrs:
if mro:
thisclass = mro.popleft()
else:
thisclass = attrs[0][2]
attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)
if thisclass is builtins.object:
attrs = inherited
continue
elif thisclass is object:
tag = 'defined here'
else:
tag = 'inherited from %s' % self.classlink(thisclass,
object.__module__)
tag += ':<br>\n'
# Sort attrs by name.
attrs.sort(key=lambda t: t[0])
# Pump out the attrs, segregated by kind.
attrs = spill('Methods %s' % tag, attrs,
lambda t: t[1] == 'method')
attrs = spill('Class methods %s' % tag, attrs,
lambda t: t[1] == 'class method')
attrs = spill('Static methods %s' % tag, attrs,
lambda t: t[1] == 'static method')
attrs = spilldescriptors('Data descriptors %s' % tag, attrs,
lambda t: t[1] == 'data descriptor')
attrs = spilldata('Data and other attributes %s' % tag, attrs,
lambda t: t[1] == 'data')
assert attrs == []
attrs = inherited
contents = ''.join(contents)
if name == realname:
title = '<a name="%s">class <strong>%s</strong></a>' % (
name, realname)
else:
title = '<strong>%s</strong> = <a name="%s">class %s</a>' % (
name, name, realname)
if bases:
parents = []
for base in bases:
parents.append(self.classlink(base, object.__module__))
title = title + '(%s)' % ', '.join(parents)
doc = self.markup(getdoc(object), self.preformat, funcs, classes, mdict)
        doc = doc and '<tt>%s<br>&nbsp;</tt>' % doc
return self.section(title, '#000000', '#ffc8d8', contents, 3, doc)
def formatvalue(self, object):
"""Format an argument default value as text."""
return self.grey('=' + self.repr(object))
def docroutine(self, object, name=None, mod=None,
funcs={}, classes={}, methods={}, cl=None):
"""Produce HTML documentation for a function or method object."""
realname = object.__name__
name = name or realname
anchor = (cl and cl.__name__ or '') + '-' + name
note = ''
skipdocs = 0
if inspect.ismethod(object):
imclass = object.__self__.__class__
if cl:
if imclass is not cl:
note = ' from ' + self.classlink(imclass, mod)
else:
if object.__self__ is not None:
note = ' method of %s instance' % self.classlink(
object.__self__.__class__, mod)
else:
note = ' unbound %s method' % self.classlink(imclass,mod)
object = object.__func__
if name == realname:
title = '<a name="%s"><strong>%s</strong></a>' % (anchor, realname)
else:
if (cl and realname in cl.__dict__ and
cl.__dict__[realname] is object):
reallink = '<a href="#%s">%s</a>' % (
cl.__name__ + '-' + realname, realname)
skipdocs = 1
else:
reallink = realname
title = '<a name="%s"><strong>%s</strong></a> = %s' % (
anchor, name, reallink)
if inspect.isfunction(object):
            args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann = \
                inspect.getfullargspec(object)
            argspec = inspect.formatargspec(
                args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann,
formatvalue=self.formatvalue,
formatannotation=inspect.formatannotationrelativeto(object))
if realname == '<lambda>':
title = '<strong>%s</strong> <em>lambda</em> ' % name
                # XXX lambdas won't usually have func_annotations['return']
                # since the syntax doesn't support it, but it is possible.
# So removing parentheses isn't truly safe.
argspec = argspec[1:-1] # remove parentheses
else:
argspec = '(...)'
decl = title + argspec + (note and self.grey(
'<font face="helvetica, arial">%s</font>' % note))
if skipdocs:
return '<dl><dt>%s</dt></dl>\n' % decl
else:
doc = self.markup(
getdoc(object), self.preformat, funcs, classes, methods)
doc = doc and '<dd><tt>%s</tt></dd>' % doc
return '<dl><dt>%s</dt>%s</dl>\n' % (decl, doc)
def _docdescriptor(self, name, value, mod):
results = []
push = results.append
if name:
push('<dl><dt><strong>%s</strong></dt>\n' % name)
if value.__doc__ is not None:
doc = self.markup(getdoc(value), self.preformat)
push('<dd><tt>%s</tt></dd>\n' % doc)
push('</dl>\n')
return ''.join(results)
def docproperty(self, object, name=None, mod=None, cl=None):
"""Produce html documentation for a property."""
return self._docdescriptor(name, object, mod)
def docother(self, object, name=None, mod=None, *ignored):
"""Produce HTML documentation for a data object."""
lhs = name and '<strong>%s</strong> = ' % name or ''
return lhs + self.repr(object)
def docdata(self, object, name=None, mod=None, cl=None):
"""Produce html documentation for a data descriptor."""
return self._docdescriptor(name, object, mod)
def index(self, dir, shadowed=None):
"""Generate an HTML index for a directory of modules."""
modpkgs = []
if shadowed is None: shadowed = {}
for importer, name, ispkg in pkgutil.iter_modules([dir]):
if any((0xD800 <= ord(ch) <= 0xDFFF) for ch in name):
# ignore a module if its name contains a surrogate character
continue
modpkgs.append((name, '', ispkg, name in shadowed))
shadowed[name] = 1
modpkgs.sort()
contents = self.multicolumn(modpkgs, self.modpkglink)
return self.bigsection(dir, '#ffffff', '#ee77aa', contents)
# -------------------------------------------- text documentation generator
class TextRepr(Repr):
"""Class for safely making a text representation of a Python object."""
def __init__(self):
Repr.__init__(self)
self.maxlist = self.maxtuple = 20
self.maxdict = 10
self.maxstring = self.maxother = 100
#def repr1(self, x, level):
# if hasattr(type(x), '__name__'):
# methodname = 'repr_' + '_'.join(type(x).__name__.split())
# if hasattr(self, methodname):
# return getattr(self, methodname)(x, level)
# return cram(stripid(repr(x)), self.maxother)
def repr_string(self, x, level):
test = cram(x, self.maxstring)
testrepr = repr(test)
if '\\' in test and '\\' not in replace(testrepr, r'\\', ''):
# Backslashes are only literal in the string and are never
# needed to make any special characters, so show a raw string.
return 'r' + testrepr[0] + test + testrepr[0]
return testrepr
repr_str = repr_string
def repr_instance(self, x, level):
try:
return cram(stripid(repr(x)), self.maxstring)
except:
return '<%s instance>' % x.__class__.__name__
class TextDoc(Doc):
"""Formatter class for text documentation."""
# ------------------------------------------- text formatting utilities
_repr_instance = TextRepr()
repr = _repr_instance.repr
def bold(self, text):
"""Format a string in bold by overstriking."""
return ''.join(ch + '\b' + ch for ch in text)
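    # Illustrative: bold('hi') yields 'h\bhi\bi'; pagers such as "less" render
    # character/backspace/character overstrikes as bold, and plain() later in
    # this module strips them again for terminals that cannot.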
def indent(self, text, prefix=' '):
"""Indent text by prepending a given prefix to each line."""
if not text: return ''
lines = [prefix + line for line in text.split('\n')]
if lines: lines[-1] = lines[-1].rstrip()
return '\n'.join(lines)
def section(self, title, contents):
"""Format a section with a given heading."""
clean_contents = self.indent(contents).rstrip()
return self.bold(title) + '\n' + clean_contents + '\n\n'
# ---------------------------------------------- type-specific routines
def formattree(self, tree, modname, parent=None, prefix=''):
"""Render in text a class tree as returned by inspect.getclasstree()."""
result = ''
for entry in tree:
if type(entry) is type(()):
c, bases = entry
result = result + prefix + classname(c, modname)
if bases and bases != (parent,):
parents = (classname(c, modname) for c in bases)
result = result + '(%s)' % ', '.join(parents)
result = result + '\n'
elif type(entry) is type([]):
result = result + self.formattree(
entry, modname, c, prefix + ' ')
return result
def docmodule(self, object, name=None, mod=None):
"""Produce text documentation for a given module object."""
name = object.__name__ # ignore the passed-in name
synop, desc = splitdoc(getdoc(object))
result = self.section('NAME', name + (synop and ' - ' + synop))
all = getattr(object, '__all__', None)
docloc = self.getdocloc(object)
if docloc is not None:
result = result + self.section('MODULE REFERENCE', docloc + """
The following documentation is automatically generated from the Python
source files. It may be incomplete, incorrect or include features that
are considered implementation detail and may vary between Python
implementations. When in doubt, consult the module reference at the
location listed above.
""")
if desc:
result = result + self.section('DESCRIPTION', desc)
classes = []
for key, value in inspect.getmembers(object, inspect.isclass):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None
or (inspect.getmodule(value) or object) is object):
if visiblename(key, all, object):
classes.append((key, value))
funcs = []
for key, value in inspect.getmembers(object, inspect.isroutine):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None or
inspect.isbuiltin(value) or inspect.getmodule(value) is object):
if visiblename(key, all, object):
funcs.append((key, value))
data = []
for key, value in inspect.getmembers(object, isdata):
if visiblename(key, all, object):
data.append((key, value))
modpkgs = []
modpkgs_names = set()
if hasattr(object, '__path__'):
for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
modpkgs_names.add(modname)
if ispkg:
modpkgs.append(modname + ' (package)')
else:
modpkgs.append(modname)
modpkgs.sort()
result = result + self.section(
'PACKAGE CONTENTS', '\n'.join(modpkgs))
# Detect submodules as sometimes created by C extensions
submodules = []
for key, value in inspect.getmembers(object, inspect.ismodule):
if value.__name__.startswith(name + '.') and key not in modpkgs_names:
submodules.append(key)
if submodules:
submodules.sort()
result = result + self.section(
'SUBMODULES', '\n'.join(submodules))
if classes:
classlist = [value for key, value in classes]
contents = [self.formattree(
inspect.getclasstree(classlist, 1), name)]
for key, value in classes:
contents.append(self.document(value, key, name))
result = result + self.section('CLASSES', '\n'.join(contents))
if funcs:
contents = []
for key, value in funcs:
contents.append(self.document(value, key, name))
result = result + self.section('FUNCTIONS', '\n'.join(contents))
if data:
contents = []
for key, value in data:
contents.append(self.docother(value, key, name, maxlen=70))
result = result + self.section('DATA', '\n'.join(contents))
if hasattr(object, '__version__'):
version = str(object.__version__)
if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
version = version[11:-1].strip()
result = result + self.section('VERSION', version)
if hasattr(object, '__date__'):
result = result + self.section('DATE', str(object.__date__))
if hasattr(object, '__author__'):
result = result + self.section('AUTHOR', str(object.__author__))
if hasattr(object, '__credits__'):
result = result + self.section('CREDITS', str(object.__credits__))
try:
file = inspect.getabsfile(object)
except TypeError:
file = '(built-in)'
result = result + self.section('FILE', file)
return result
def docclass(self, object, name=None, mod=None, *ignored):
"""Produce text documentation for a given class object."""
realname = object.__name__
name = name or realname
bases = object.__bases__
def makename(c, m=object.__module__):
return classname(c, m)
if name == realname:
title = 'class ' + self.bold(realname)
else:
title = self.bold(name) + ' = class ' + realname
if bases:
parents = map(makename, bases)
title = title + '(%s)' % ', '.join(parents)
doc = getdoc(object)
contents = doc and [doc + '\n'] or []
push = contents.append
# List the mro, if non-trivial.
mro = deque(inspect.getmro(object))
if len(mro) > 2:
push("Method resolution order:")
for base in mro:
push(' ' + makename(base))
push('')
# Cute little class to pump out a horizontal rule between sections.
class HorizontalRule:
def __init__(self):
self.needone = 0
def maybe(self):
if self.needone:
push('-' * 70)
self.needone = 1
hr = HorizontalRule()
def spill(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
try:
value = getattr(object, name)
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
push(self._docdescriptor(name, value, mod))
else:
push(self.document(value,
name, mod, object))
return attrs
def spilldescriptors(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
push(self._docdescriptor(name, value, mod))
return attrs
def spilldata(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
if callable(value) or inspect.isdatadescriptor(value):
doc = getdoc(value)
else:
doc = None
push(self.docother(getattr(object, name),
name, mod, maxlen=70, doc=doc) + '\n')
return attrs
attrs = [(name, kind, cls, value)
for name, kind, cls, value in classify_class_attrs(object)
if visiblename(name, obj=object)]
while attrs:
if mro:
thisclass = mro.popleft()
else:
thisclass = attrs[0][2]
attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)
if thisclass is builtins.object:
attrs = inherited
continue
elif thisclass is object:
tag = "defined here"
else:
tag = "inherited from %s" % classname(thisclass,
object.__module__)
# Sort attrs by name.
attrs.sort()
# Pump out the attrs, segregated by kind.
attrs = spill("Methods %s:\n" % tag, attrs,
lambda t: t[1] == 'method')
attrs = spill("Class methods %s:\n" % tag, attrs,
lambda t: t[1] == 'class method')
attrs = spill("Static methods %s:\n" % tag, attrs,
lambda t: t[1] == 'static method')
attrs = spilldescriptors("Data descriptors %s:\n" % tag, attrs,
lambda t: t[1] == 'data descriptor')
attrs = spilldata("Data and other attributes %s:\n" % tag, attrs,
lambda t: t[1] == 'data')
assert attrs == []
attrs = inherited
contents = '\n'.join(contents)
if not contents:
return title + '\n'
return title + '\n' + self.indent(contents.rstrip(), ' | ') + '\n'
def formatvalue(self, object):
"""Format an argument default value as text."""
return '=' + self.repr(object)
def docroutine(self, object, name=None, mod=None, cl=None):
"""Produce text documentation for a function or method object."""
realname = object.__name__
name = name or realname
note = ''
skipdocs = 0
if inspect.ismethod(object):
imclass = object.__self__.__class__
if cl:
if imclass is not cl:
note = ' from ' + classname(imclass, mod)
else:
if object.__self__ is not None:
note = ' method of %s instance' % classname(
object.__self__.__class__, mod)
else:
note = ' unbound %s method' % classname(imclass,mod)
object = object.__func__
if name == realname:
title = self.bold(realname)
else:
if (cl and realname in cl.__dict__ and
cl.__dict__[realname] is object):
skipdocs = 1
title = self.bold(name) + ' = ' + realname
if inspect.isfunction(object):
args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann = \
inspect.getfullargspec(object)
argspec = inspect.formatargspec(
args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann,
formatvalue=self.formatvalue,
formatannotation=inspect.formatannotationrelativeto(object))
if realname == '<lambda>':
title = self.bold(name) + ' lambda '
# XXX lambdas won't usually have func_annotations['return']
# since the syntax doesn't support it, but it is possible.
# So removing parentheses isn't truly safe.
argspec = argspec[1:-1] # remove parentheses
else:
argspec = '(...)'
decl = title + argspec + note
if skipdocs:
return decl + '\n'
else:
doc = getdoc(object) or ''
return decl + '\n' + (doc and self.indent(doc).rstrip() + '\n')
def _docdescriptor(self, name, value, mod):
results = []
push = results.append
if name:
push(self.bold(name))
push('\n')
doc = getdoc(value) or ''
if doc:
push(self.indent(doc))
push('\n')
return ''.join(results)
def docproperty(self, object, name=None, mod=None, cl=None):
"""Produce text documentation for a property."""
return self._docdescriptor(name, object, mod)
def docdata(self, object, name=None, mod=None, cl=None):
"""Produce text documentation for a data descriptor."""
return self._docdescriptor(name, object, mod)
def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None):
"""Produce text documentation for a data object."""
repr = self.repr(object)
if maxlen:
line = (name and name + ' = ' or '') + repr
chop = maxlen - len(line)
if chop < 0: repr = repr[:chop] + '...'
line = (name and self.bold(name) + ' = ' or '') + repr
if doc is not None:
line += '\n' + self.indent(str(doc))
return line
class _PlainTextDoc(TextDoc):
"""Subclass of TextDoc which overrides string styling"""
def bold(self, text):
return text
# --------------------------------------------------------- user interfaces
def pager(text):
"""The first time this is called, determine what kind of pager to use."""
global pager
pager = getpager()
pager(text)
def getpager():
"""Decide what method to use for paging through text."""
if not hasattr(sys.stdout, "isatty"):
return plainpager
if not sys.stdin.isatty() or not sys.stdout.isatty():
return plainpager
if 'PAGER' in os.environ:
if sys.platform == 'win32': # pipes completely broken in Windows
return lambda text: tempfilepager(plain(text), os.environ['PAGER'])
elif os.environ.get('TERM') in ('dumb', 'emacs'):
return lambda text: pipepager(plain(text), os.environ['PAGER'])
else:
return lambda text: pipepager(text, os.environ['PAGER'])
if os.environ.get('TERM') in ('dumb', 'emacs'):
return plainpager
if sys.platform == 'win32' or sys.platform.startswith('os2'):
return lambda text: tempfilepager(plain(text), 'more <')
if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
return lambda text: pipepager(text, 'less')
import tempfile
(fd, filename) = tempfile.mkstemp()
os.close(fd)
try:
if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0:
return lambda text: pipepager(text, 'more')
else:
return ttypager
finally:
os.unlink(filename)
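# Summary of the resolution order implemented above: non-tty stdio gets
# plainpager; $PAGER is honored if set (via a temp file on Windows, with
# boldface stripped for dumb terminals); dumb terminals get plainpager;
# Windows/OS2 use 'more <'; then 'less' or 'more' when the probe succeeds;
# otherwise the built-in ttypager. The pager() wrapper above rebinds the
# global name to the chosen function, so this probe runs only once.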
def plain(text):
"""Remove boldface formatting from text."""
return re.sub('.\b', '', text)
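# ('.\b' matches any character followed by a backspace, the overstrike
# convention used by TextDoc.bold(); e.g. 'H\bHi\bi' collapses back to 'Hi'.)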
def pipepager(text, cmd):
"""Page through text by feeding it to another program."""
pipe = os.popen(cmd, 'w')
try:
pipe.write(text)
pipe.close()
except IOError:
pass # Ignore broken pipes caused by quitting the pager program.
def tempfilepager(text, cmd):
"""Page through text by invoking a program on a temporary file."""
import tempfile
filename = tempfile.mktemp()
file = open(filename, 'w')
file.write(text)
file.close()
try:
os.system(cmd + ' "' + filename + '"')
finally:
os.unlink(filename)
def ttypager(text):
"""Page through text on a text terminal."""
lines = plain(text).split('\n')
try:
import tty
fd = sys.stdin.fileno()
old = tty.tcgetattr(fd)
tty.setcbreak(fd)
getchar = lambda: sys.stdin.read(1)
except (ImportError, AttributeError):
tty = None
getchar = lambda: sys.stdin.readline()[:-1][:1]
try:
r = inc = int(os.environ.get('LINES', 25)) - 1  # env values arrive as strings
sys.stdout.write('\n'.join(lines[:inc]) + '\n')
while lines[r:]:
sys.stdout.write('-- more --')
sys.stdout.flush()
c = getchar()
if c in ('q', 'Q'):
sys.stdout.write('\r \r')
break
elif c in ('\r', '\n'):
sys.stdout.write('\r \r' + lines[r] + '\n')
r = r + 1
continue
if c in ('b', 'B', '\x1b'):
r = r - inc - inc
if r < 0: r = 0
sys.stdout.write('\n' + '\n'.join(lines[r:r+inc]) + '\n')
r = r + inc
finally:
if tty:
tty.tcsetattr(fd, tty.TCSAFLUSH, old)
def plainpager(text):
"""Simply print unformatted text. This is the ultimate fallback."""
sys.stdout.write(plain(text))
def describe(thing):
"""Produce a short description of the given thing."""
if inspect.ismodule(thing):
if thing.__name__ in sys.builtin_module_names:
return 'built-in module ' + thing.__name__
if hasattr(thing, '__path__'):
return 'package ' + thing.__name__
else:
return 'module ' + thing.__name__
if inspect.isbuiltin(thing):
return 'built-in function ' + thing.__name__
if inspect.isgetsetdescriptor(thing):
return 'getset descriptor %s.%s.%s' % (
thing.__objclass__.__module__, thing.__objclass__.__name__,
thing.__name__)
if inspect.ismemberdescriptor(thing):
return 'member descriptor %s.%s.%s' % (
thing.__objclass__.__module__, thing.__objclass__.__name__,
thing.__name__)
if inspect.isclass(thing):
return 'class ' + thing.__name__
if inspect.isfunction(thing):
return 'function ' + thing.__name__
if inspect.ismethod(thing):
return 'method ' + thing.__name__
return type(thing).__name__
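# Illustrative results (assuming a standard CPython runtime):
#   describe(len)     -> 'built-in function len'
#   describe(inspect) -> 'module inspect'
#   describe(dict)    -> 'class dict'
#   describe(42)      -> 'int'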
def locate(path, forceload=0):
"""Locate an object by name or dotted path, importing as necessary."""
parts = [part for part in path.split('.') if part]
module, n = None, 0
while n < len(parts):
nextmodule = safeimport('.'.join(parts[:n+1]), forceload)
if nextmodule: module, n = nextmodule, n + 1
else: break
if module:
object = module
else:
object = builtins
for part in parts[n:]:
try:
object = getattr(object, part)
except AttributeError:
return None
return object
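# Lookup sketch (hypothetical path): locate('json.decoder.JSONDecoder')
# imports 'json', then 'json.decoder', then getattr()s 'JSONDecoder' off the
# deepest importable module; locate('len') imports nothing and walks
# getattr() from the builtins module instead.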
# --------------------------------------- interactive interpreter interface
text = TextDoc()
plaintext = _PlainTextDoc()
html = HTMLDoc()
def resolve(thing, forceload=0):
"""Given an object or a path to an object, get the object and its name."""
if isinstance(thing, str):
object = locate(thing, forceload)
if not object:
raise ImportError('no Python documentation found for %r' % thing)
return object, thing
else:
name = getattr(thing, '__name__', None)
return thing, name if isinstance(name, str) else None
def render_doc(thing, title='Python Library Documentation: %s', forceload=0,
renderer=None):
"""Render text documentation, given an object or a path to an object."""
if renderer is None:
renderer = text
object, name = resolve(thing, forceload)
desc = describe(object)
module = inspect.getmodule(object)
if name and '.' in name:
desc += ' in ' + name[:name.rfind('.')]
elif module and module is not object:
desc += ' in module ' + module.__name__
if not (inspect.ismodule(object) or
inspect.isclass(object) or
inspect.isroutine(object) or
inspect.isgetsetdescriptor(object) or
inspect.ismemberdescriptor(object) or
isinstance(object, property)):
# If the passed object is a piece of data or an instance,
# document its available methods instead of its value.
object = type(object)
desc += ' object'
return title % desc + '\n\n' + renderer.document(object, name)
def doc(thing, title='Python Library Documentation: %s', forceload=0,
output=None):
"""Display text documentation, given an object or a path to an object."""
try:
if output is None:
pager(render_doc(thing, title, forceload))
else:
output.write(render_doc(thing, title, forceload, plaintext))
except (ImportError, ErrorDuringImport) as value:
print(value)
def writedoc(thing, forceload=0):
"""Write HTML documentation to a file in the current directory."""
try:
object, name = resolve(thing, forceload)
page = html.page(describe(object), html.document(object, name))
file = open(name + '.html', 'w', encoding='utf-8')
file.write(page)
file.close()
print('wrote', name + '.html')
except (ImportError, ErrorDuringImport) as value:
print(value)
def writedocs(dir, pkgpath='', done=None):
"""Write out HTML documentation for all modules in a directory tree."""
if done is None: done = {}
for importer, modname, ispkg in pkgutil.walk_packages([dir], pkgpath):
writedoc(modname)
return
class Helper:
# These dictionaries map a topic name to either an alias, or a tuple
# (label, seealso-items). The "label" is the label of the corresponding
# section in the .rst file under Doc/ and an index into the dictionary
# in pydoc_data/topics.py.
#
# CAUTION: if you change one of these dictionaries, be sure to adapt the
# list of needed labels in Doc/tools/sphinxext/pyspecific.py and
# regenerate the pydoc_data/topics.py file by running
# make pydoc-topics
# in Doc/ and copying the output file into the Lib/ directory.
keywords = {
'False': '',
'None': '',
'True': '',
'and': 'BOOLEAN',
'as': 'with',
'assert': ('assert', ''),
'break': ('break', 'while for'),
'class': ('class', 'CLASSES SPECIALMETHODS'),
'continue': ('continue', 'while for'),
'def': ('function', ''),
'del': ('del', 'BASICMETHODS'),
'elif': 'if',
'else': ('else', 'while for'),
'except': 'try',
'finally': 'try',
'for': ('for', 'break continue while'),
'from': 'import',
'global': ('global', 'nonlocal NAMESPACES'),
'if': ('if', 'TRUTHVALUE'),
'import': ('import', 'MODULES'),
'in': ('in', 'SEQUENCEMETHODS'),
'is': 'COMPARISON',
'lambda': ('lambda', 'FUNCTIONS'),
'nonlocal': ('nonlocal', 'global NAMESPACES'),
'not': 'BOOLEAN',
'or': 'BOOLEAN',
'pass': ('pass', ''),
'raise': ('raise', 'EXCEPTIONS'),
'return': ('return', 'FUNCTIONS'),
'try': ('try', 'EXCEPTIONS'),
'while': ('while', 'break continue if TRUTHVALUE'),
'with': ('with', 'CONTEXTMANAGERS EXCEPTIONS yield'),
'yield': ('yield', ''),
}
# Either add symbols to this dictionary or to the symbols dictionary
# directly: Whichever is easier. They are merged later.
_symbols_inverse = {
'STRINGS' : ("'", "'''", "r'", "b'", '"""', '"', 'r"', 'b"'),
'OPERATORS' : ('+', '-', '*', '**', '/', '//', '%', '<<', '>>', '&',
'|', '^', '~', '<', '>', '<=', '>=', '==', '!=', '<>'),
'COMPARISON' : ('<', '>', '<=', '>=', '==', '!=', '<>'),
'UNARY' : ('-', '~'),
'AUGMENTEDASSIGNMENT' : ('+=', '-=', '*=', '/=', '%=', '&=', '|=',
'^=', '<<=', '>>=', '**=', '//='),
'BITWISE' : ('<<', '>>', '&', '|', '^', '~'),
'COMPLEX' : ('j', 'J')
}
symbols = {
'%': 'OPERATORS FORMATTING',
'**': 'POWER',
',': 'TUPLES LISTS FUNCTIONS',
'.': 'ATTRIBUTES FLOAT MODULES OBJECTS',
'...': 'ELLIPSIS',
':': 'SLICINGS DICTIONARYLITERALS',
'@': 'def class',
'\\': 'STRINGS',
'_': 'PRIVATENAMES',
'__': 'PRIVATENAMES SPECIALMETHODS',
'`': 'BACKQUOTES',
'(': 'TUPLES FUNCTIONS CALLS',
')': 'TUPLES FUNCTIONS CALLS',
'[': 'LISTS SUBSCRIPTS SLICINGS',
']': 'LISTS SUBSCRIPTS SLICINGS'
}
for topic, symbols_ in _symbols_inverse.items():
for symbol in symbols_:
topics = symbols.get(symbol, topic)
if topic not in topics:
topics = topics + ' ' + topic
symbols[symbol] = topics
topics = {
'TYPES': ('types', 'STRINGS UNICODE NUMBERS SEQUENCES MAPPINGS '
'FUNCTIONS CLASSES MODULES FILES inspect'),
'STRINGS': ('strings', 'str UNICODE SEQUENCES STRINGMETHODS '
'FORMATTING TYPES'),
'STRINGMETHODS': ('string-methods', 'STRINGS FORMATTING'),
'FORMATTING': ('formatstrings', 'OPERATORS'),
'UNICODE': ('strings', 'encodings unicode SEQUENCES STRINGMETHODS '
'FORMATTING TYPES'),
'NUMBERS': ('numbers', 'INTEGER FLOAT COMPLEX TYPES'),
'INTEGER': ('integers', 'int range'),
'FLOAT': ('floating', 'float math'),
'COMPLEX': ('imaginary', 'complex cmath'),
'SEQUENCES': ('typesseq', 'STRINGMETHODS FORMATTING range LISTS'),
'MAPPINGS': 'DICTIONARIES',
'FUNCTIONS': ('typesfunctions', 'def TYPES'),
'METHODS': ('typesmethods', 'class def CLASSES TYPES'),
'CODEOBJECTS': ('bltin-code-objects', 'compile FUNCTIONS TYPES'),
'TYPEOBJECTS': ('bltin-type-objects', 'types TYPES'),
'FRAMEOBJECTS': 'TYPES',
'TRACEBACKS': 'TYPES',
'NONE': ('bltin-null-object', ''),
'ELLIPSIS': ('bltin-ellipsis-object', 'SLICINGS'),
'FILES': ('bltin-file-objects', ''),
'SPECIALATTRIBUTES': ('specialattrs', ''),
'CLASSES': ('types', 'class SPECIALMETHODS PRIVATENAMES'),
'MODULES': ('typesmodules', 'import'),
'PACKAGES': 'import',
'EXPRESSIONS': ('operator-summary', 'lambda or and not in is BOOLEAN '
'COMPARISON BITWISE SHIFTING BINARY FORMATTING POWER '
'UNARY ATTRIBUTES SUBSCRIPTS SLICINGS CALLS TUPLES '
'LISTS DICTIONARIES'),
'OPERATORS': 'EXPRESSIONS',
'PRECEDENCE': 'EXPRESSIONS',
'OBJECTS': ('objects', 'TYPES'),
'SPECIALMETHODS': ('specialnames', 'BASICMETHODS ATTRIBUTEMETHODS '
'CALLABLEMETHODS SEQUENCEMETHODS MAPPINGMETHODS '
'NUMBERMETHODS CLASSES'),
'BASICMETHODS': ('customization', 'hash repr str SPECIALMETHODS'),
'ATTRIBUTEMETHODS': ('attribute-access', 'ATTRIBUTES SPECIALMETHODS'),
'CALLABLEMETHODS': ('callable-types', 'CALLS SPECIALMETHODS'),
'SEQUENCEMETHODS': ('sequence-types', 'SEQUENCES SEQUENCEMETHODS '
'SPECIALMETHODS'),
'MAPPINGMETHODS': ('sequence-types', 'MAPPINGS SPECIALMETHODS'),
'NUMBERMETHODS': ('numeric-types', 'NUMBERS AUGMENTEDASSIGNMENT '
'SPECIALMETHODS'),
'EXECUTION': ('execmodel', 'NAMESPACES DYNAMICFEATURES EXCEPTIONS'),
'NAMESPACES': ('naming', 'global nonlocal ASSIGNMENT DELETION DYNAMICFEATURES'),
'DYNAMICFEATURES': ('dynamic-features', ''),
'SCOPING': 'NAMESPACES',
'FRAMES': 'NAMESPACES',
'EXCEPTIONS': ('exceptions', 'try except finally raise'),
'CONVERSIONS': ('conversions', ''),
'IDENTIFIERS': ('identifiers', 'keywords SPECIALIDENTIFIERS'),
'SPECIALIDENTIFIERS': ('id-classes', ''),
'PRIVATENAMES': ('atom-identifiers', ''),
'LITERALS': ('atom-literals', 'STRINGS NUMBERS TUPLELITERALS '
'LISTLITERALS DICTIONARYLITERALS'),
'TUPLES': 'SEQUENCES',
'TUPLELITERALS': ('exprlists', 'TUPLES LITERALS'),
'LISTS': ('typesseq-mutable', 'LISTLITERALS'),
'LISTLITERALS': ('lists', 'LISTS LITERALS'),
'DICTIONARIES': ('typesmapping', 'DICTIONARYLITERALS'),
'DICTIONARYLITERALS': ('dict', 'DICTIONARIES LITERALS'),
'ATTRIBUTES': ('attribute-references', 'getattr hasattr setattr ATTRIBUTEMETHODS'),
'SUBSCRIPTS': ('subscriptions', 'SEQUENCEMETHODS'),
'SLICINGS': ('slicings', 'SEQUENCEMETHODS'),
'CALLS': ('calls', 'EXPRESSIONS'),
'POWER': ('power', 'EXPRESSIONS'),
'UNARY': ('unary', 'EXPRESSIONS'),
'BINARY': ('binary', 'EXPRESSIONS'),
'SHIFTING': ('shifting', 'EXPRESSIONS'),
'BITWISE': ('bitwise', 'EXPRESSIONS'),
'COMPARISON': ('comparisons', 'EXPRESSIONS BASICMETHODS'),
'BOOLEAN': ('booleans', 'EXPRESSIONS TRUTHVALUE'),
'ASSERTION': 'assert',
'ASSIGNMENT': ('assignment', 'AUGMENTEDASSIGNMENT'),
'AUGMENTEDASSIGNMENT': ('augassign', 'NUMBERMETHODS'),
'DELETION': 'del',
'RETURNING': 'return',
'IMPORTING': 'import',
'CONDITIONAL': 'if',
'LOOPING': ('compound', 'for while break continue'),
'TRUTHVALUE': ('truth', 'if while and or not BASICMETHODS'),
'DEBUGGING': ('debugger', 'pdb'),
'CONTEXTMANAGERS': ('context-managers', 'with'),
}
def __init__(self, input=None, output=None):
self._input = input
self._output = output
#fix me brython
self.input = self._input or sys.stdin
self.output = self._output or sys.stdout
#fix me brython
#input = property(lambda self: self._input or sys.stdin)
#output = property(lambda self: self._output or sys.stdout)
def __repr__(self):
if inspect.stack()[1][3] == '?':
self()
return ''
return '<pydoc.Helper instance>'
_GoInteractive = object()
def __call__(self, request=_GoInteractive):
if request is not self._GoInteractive:
self.help(request)
else:
self.intro()
self.interact()
self.output.write('''
You are now leaving help and returning to the Python interpreter.
If you want to ask for help on a particular object directly from the
interpreter, you can type "help(object)". Executing "help('string')"
has the same effect as typing a particular string at the help> prompt.
''')
def interact(self):
self.output.write('\n')
while True:
try:
request = self.getline('help> ')
if not request: break
except (KeyboardInterrupt, EOFError):
break
request = replace(request, '"', '', "'", '').strip()
if request.lower() in ('q', 'quit'): break
self.help(request)
def getline(self, prompt):
"""Read one line, using input() when appropriate."""
if self.input is sys.stdin:
return input(prompt)
else:
self.output.write(prompt)
self.output.flush()
return self.input.readline()
def help(self, request):
if type(request) is type(''):
request = request.strip()
if request == 'help': self.intro()
elif request == 'keywords': self.listkeywords()
elif request == 'symbols': self.listsymbols()
elif request == 'topics': self.listtopics()
elif request == 'modules': self.listmodules()
elif request[:8] == 'modules ':
self.listmodules(request.split()[1])
elif request in self.symbols: self.showsymbol(request)
elif request in ['True', 'False', 'None']:
# special case these keywords since they are objects too
doc(eval(request), 'Help on %s:')
elif request in self.keywords: self.showtopic(request)
elif request in self.topics: self.showtopic(request)
elif request: doc(request, 'Help on %s:', output=self._output)
elif isinstance(request, Helper): self()
else: doc(request, 'Help on %s:', output=self._output)
self.output.write('\n')
def intro(self):
self.output.write('''
Welcome to Python %s! This is the interactive help utility.
If this is your first time using Python, you should definitely check out
the tutorial on the Internet at http://docs.python.org/%s/tutorial/.
Enter the name of any module, keyword, or topic to get help on writing
Python programs and using Python modules. To quit this help utility and
return to the interpreter, just type "quit".
To get a list of available modules, keywords, or topics, type "modules",
"keywords", or "topics". Each module also comes with a one-line summary
of what it does; to list the modules whose summaries contain a given word
such as "spam", type "modules spam".
''' % tuple([sys.version[:3]]*2))
def list(self, items, columns=4, width=80):
items = list(sorted(items))
colw = width // columns
rows = (len(items) + columns - 1) // columns
for row in range(rows):
for col in range(columns):
i = col * rows + row
if i < len(items):
self.output.write(items[i])
if col < columns - 1:
self.output.write(' ' + ' ' * (colw - 1 - len(items[i])))
self.output.write('\n')
def listkeywords(self):
self.output.write('''
Here is a list of the Python keywords. Enter any keyword to get more help.
''')
self.list(self.keywords.keys())
def listsymbols(self):
self.output.write('''
Here is a list of the punctuation symbols which Python assigns special meaning
to. Enter any symbol to get more help.
''')
self.list(self.symbols.keys())
def listtopics(self):
self.output.write('''
Here is a list of available topics. Enter any topic name to get more help.
''')
self.list(self.topics.keys())
def showtopic(self, topic, more_xrefs=''):
try:
import pydoc_data.topics
except ImportError:
self.output.write('''
Sorry, topic and keyword documentation is not available because the
module "pydoc_data.topics" could not be found.
''')
return
target = self.topics.get(topic, self.keywords.get(topic))
if not target:
self.output.write('no documentation found for %s\n' % repr(topic))
return
if type(target) is type(''):
return self.showtopic(target, more_xrefs)
label, xrefs = target
try:
doc = pydoc_data.topics.topics[label]
except KeyError:
self.output.write('no documentation found for %s\n' % repr(topic))
return
pager(doc.strip() + '\n')
if more_xrefs:
xrefs = (xrefs or '') + ' ' + more_xrefs
if xrefs:
import formatter
buffer = io.StringIO()
formatter.DumbWriter(buffer).send_flowing_data(
'Related help topics: ' + ', '.join(xrefs.split()) + '\n')
self.output.write('\n%s\n' % buffer.getvalue())
def _gettopic(self, topic, more_xrefs=''):
"""Return unbuffered tuple of (topic, xrefs).
If an error occurs here, the exception is caught and displayed by
the url handler.
This function duplicates the showtopic method but returns its
result directly so it can be formatted for display in an html page.
"""
try:
import pydoc_data.topics
except ImportError:
return('''
Sorry, topic and keyword documentation is not available because the
module "pydoc_data.topics" could not be found.
''' , '')
target = self.topics.get(topic, self.keywords.get(topic))
if not target:
raise ValueError('could not find topic')
if isinstance(target, str):
return self._gettopic(target, more_xrefs)
label, xrefs = target
doc = pydoc_data.topics.topics[label]
if more_xrefs:
xrefs = (xrefs or '') + ' ' + more_xrefs
return doc, xrefs
def showsymbol(self, symbol):
target = self.symbols[symbol]
topic, _, xrefs = target.partition(' ')
self.showtopic(topic, xrefs)
def listmodules(self, key=''):
if key:
self.output.write('''
Here is a list of matching modules. Enter any module name to get more help.
''')
apropos(key)
else:
self.output.write('''
Please wait a moment while I gather a list of all available modules...
''')
modules = {}
def callback(path, modname, desc, modules=modules):
if modname and modname[-9:] == '.__init__':
modname = modname[:-9] + ' (package)'
if modname.find('.') < 0:
modules[modname] = 1
def onerror(modname):
callback(None, modname, None)
ModuleScanner().run(callback, onerror=onerror)
self.list(modules.keys())
self.output.write('''
Enter any module name to get more help. Or, type "modules spam" to search
for modules whose descriptions contain the word "spam".
''')
help = Helper()
class Scanner:
"""A generic tree iterator."""
def __init__(self, roots, children, descendp):
self.roots = roots[:]
self.state = []
self.children = children
self.descendp = descendp
def next(self):
if not self.state:
if not self.roots:
return None
root = self.roots.pop(0)
self.state = [(root, self.children(root))]
node, children = self.state[-1]
if not children:
self.state.pop()
return self.next()
child = children.pop(0)
if self.descendp(child):
self.state.append((child, self.children(child)))
return child
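# Usage sketch (hypothetical node type with a '.kids' list). next() yields
# the descendants of each root in depth-first order (the roots themselves
# are never returned) and None once the tree is exhausted. The list returned
# by children() is consumed via pop(0), so hand back a fresh copy:
#   scan = Scanner([root], children=lambda n: list(n.kids),
#                  descendp=lambda n: bool(n.kids))
#   node = scan.next()
#   while node is not None:
#       visit(node)   # 'visit' is a placeholder
#       node = scan.next()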
class ModuleScanner:
"""An interruptible scanner that searches module synopses."""
def run(self, callback, key=None, completer=None, onerror=None):
if key: key = key.lower()
self.quit = False
seen = {}
for modname in sys.builtin_module_names:
if modname != '__main__':
seen[modname] = 1
if key is None:
callback(None, modname, '')
else:
name = __import__(modname).__doc__ or ''
desc = name.split('\n')[0]
name = modname + ' - ' + desc
if name.lower().find(key) >= 0:
callback(None, modname, desc)
for importer, modname, ispkg in pkgutil.walk_packages(onerror=onerror):
if self.quit:
break
if key is None:
callback(None, modname, '')
else:
try:
loader = importer.find_module(modname)
except SyntaxError:
# raised by tests for bad coding cookies or BOM
continue
if hasattr(loader, 'get_source'):
try:
source = loader.get_source(modname)
except Exception:
if onerror:
onerror(modname)
continue
desc = source_synopsis(io.StringIO(source)) or ''
if hasattr(loader, 'get_filename'):
path = loader.get_filename(modname)
else:
path = None
else:
try:
module = loader.load_module(modname)
except ImportError:
if onerror:
onerror(modname)
continue
desc = module.__doc__.splitlines()[0] if module.__doc__ else ''  # ''.splitlines() is empty; avoid IndexError
path = getattr(module,'__file__',None)
name = modname + ' - ' + desc
if name.lower().find(key) >= 0:
callback(path, modname, desc)
if completer:
completer()
def apropos(key):
"""Print all the one-line module summaries that contain a substring."""
def callback(path, modname, desc):
if modname[-9:] == '.__init__':
modname = modname[:-9] + ' (package)'
print(modname, desc and '- ' + desc)
def onerror(modname):
pass
with warnings.catch_warnings():
warnings.filterwarnings('ignore') # ignore problems during import
ModuleScanner().run(callback, key, onerror=onerror)
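# e.g. apropos('xml') prints one 'modname - synopsis' line for every
# importable module whose name or one-line description contains 'xml'
# (matching is case-insensitive).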
# --------------------------------------- enhanced Web browser interface
def _start_server(urlhandler, port):
"""Start an HTTP server thread on a specific port.
Start an HTML/text server thread, so HTML or text documents can be
browsed dynamically and interactively with a Web browser. Example use:
>>> import time
>>> import pydoc
Define a URL handler. To determine what the client is asking
for, check the URL and content_type.
Then get or generate some text or HTML code and return it.
>>> def my_url_handler(url, content_type):
... text = 'the URL sent was: (%s, %s)' % (url, content_type)
... return text
Start server thread on port 0.
If you use port 0, the server will pick a random port number.
You can then use serverthread.port to get the port number.
>>> port = 0
>>> serverthread = pydoc._start_server(my_url_handler, port)
Check that the server is really started. If it is, open browser
and get first page. Use serverthread.url as the starting page.
>>> if serverthread.serving:
... import webbrowser
The next two lines are commented out so a browser doesn't open if
doctest is run on this module.
#... webbrowser.open(serverthread.url)
#True
Let the server do its thing. We just need to monitor its status.
Use time.sleep so the loop doesn't hog the CPU.
>>> starttime = time.time()
>>> timeout = 1 #seconds
This is a short timeout for testing purposes.
>>> while serverthread.serving:
... time.sleep(.01)
... if serverthread.serving and time.time() - starttime > timeout:
... serverthread.stop()
... break
Print any errors that may have occurred.
>>> print(serverthread.error)
None
"""
import http.server
import email.message
import select
import threading
class DocHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
"""Process a request from an HTML browser.
The URL received is in self.path.
Get an HTML page from self.urlhandler and send it.
"""
if self.path.endswith('.css'):
content_type = 'text/css'
else:
content_type = 'text/html'
self.send_response(200)
self.send_header('Content-Type', '%s; charset=UTF-8' % content_type)
self.end_headers()
self.wfile.write(self.urlhandler(
self.path, content_type).encode('utf-8'))
def log_message(self, *args):
# Don't log messages.
pass
class DocServer(http.server.HTTPServer):
def __init__(self, port, callback):
self.host = (sys.platform == 'mac') and '127.0.0.1' or 'localhost'
self.address = ('', port)
self.callback = callback
self.base.__init__(self, self.address, self.handler)
self.quit = False
def serve_until_quit(self):
while not self.quit:
rd, wr, ex = select.select([self.socket.fileno()], [], [], 1)
if rd:
self.handle_request()
self.server_close()
def server_activate(self):
self.base.server_activate(self)
if self.callback:
self.callback(self)
class ServerThread(threading.Thread):
def __init__(self, urlhandler, port):
self.urlhandler = urlhandler
self.port = int(port)
threading.Thread.__init__(self)
self.serving = False
self.error = None
def run(self):
"""Start the server."""
try:
DocServer.base = http.server.HTTPServer
DocServer.handler = DocHandler
DocHandler.MessageClass = email.message.Message
DocHandler.urlhandler = staticmethod(self.urlhandler)
docsvr = DocServer(self.port, self.ready)
self.docserver = docsvr
docsvr.serve_until_quit()
except Exception as e:
self.error = e
def ready(self, server):
self.serving = True
self.host = server.host
self.port = server.server_port
self.url = 'http://%s:%d/' % (self.host, self.port)
def stop(self):
"""Stop the server and this thread nicely"""
self.docserver.quit = True
self.serving = False
self.url = None
thread = ServerThread(urlhandler, port)
thread.start()
# Wait until thread.serving is True to make sure we are
# really up before returning.
while not thread.error and not thread.serving:
time.sleep(.01)
return thread
def _url_handler(url, content_type="text/html"):
"""The pydoc url handler for use with the pydoc server.
If the content_type is 'text/css', the _pydoc.css style
sheet is read and returned if it exists.
If the content_type is 'text/html', then the result of
get_html_page(url) is returned.
"""
class _HTMLDoc(HTMLDoc):
def page(self, title, contents):
"""Format an HTML page."""
css_path = "pydoc_data/_pydoc.css"
css_link = (
'<link rel="stylesheet" type="text/css" href="%s">' %
css_path)
return '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html><head><title>Pydoc: %s</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
%s</head><body bgcolor="#f0f0f8">%s<div style="clear:both;padding-top:.5em;">%s</div>
</body></html>''' % (title, css_link, html_navbar(), contents)
def filelink(self, url, path):
return '<a href="getfile?key=%s">%s</a>' % (url, path)
html = _HTMLDoc()
def html_navbar():
version = html.escape("%s [%s, %s]" % (platform.python_version(),
platform.python_build()[0],
platform.python_compiler()))
return """
<div style='float:left'>
Python %s<br>%s
</div>
<div style='float:right'>
<div style='text-align:center'>
<a href="index.html">Module Index</a>
: <a href="topics.html">Topics</a>
: <a href="keywords.html">Keywords</a>
</div>
<div>
<form action="get" style='display:inline;'>
<input type=text name=key size=15>
<input type=submit value="Get">
</form>
<form action="search" style='display:inline;'>
<input type=text name=key size=15>
<input type=submit value="Search">
</form>
</div>
</div>
""" % (version, html.escape(platform.platform(terse=True)))
def html_index():
"""Module Index page."""
def bltinlink(name):
return '<a href="%s.html">%s</a>' % (name, name)
heading = html.heading(
'<big><big><strong>Index of Modules</strong></big></big>',
'#ffffff', '#7799ee')
names = [name for name in sys.builtin_module_names
if name != '__main__']
contents = html.multicolumn(names, bltinlink)
contents = [heading, '<p>' + html.bigsection(
'Built-in Modules', '#ffffff', '#ee77aa', contents)]
seen = {}
for dir in sys.path:
contents.append(html.index(dir, seen))
contents.append(
'<p align=right><font color="#909090" face="helvetica,'
'arial"><strong>pydoc</strong> by Ka-Ping Yee'
'<[email protected]></font>')
return 'Index of Modules', ''.join(contents)
def html_search(key):
"""Search results page."""
# scan for modules
search_result = []
def callback(path, modname, desc):
if modname[-9:] == '.__init__':
modname = modname[:-9] + ' (package)'
search_result.append((modname, desc and '- ' + desc))
with warnings.catch_warnings():
warnings.filterwarnings('ignore') # ignore problems during import
ModuleScanner().run(callback, key)
# format page
def bltinlink(name):
return '<a href="%s.html">%s</a>' % (name, name)
results = []
heading = html.heading(
'<big><big><strong>Search Results</strong></big></big>',
'#ffffff', '#7799ee')
for name, desc in search_result:
results.append(bltinlink(name) + desc)
contents = heading + html.bigsection(
'key = %s' % key, '#ffffff', '#ee77aa', '<br>'.join(results))
return 'Search Results', contents
def html_getfile(path):
"""Get and display a source file listing safely."""
path = path.replace('%20', ' ')
with tokenize.open(path) as fp:
lines = html.escape(fp.read())
body = '<pre>%s</pre>' % lines
heading = html.heading(
'<big><big><strong>File Listing</strong></big></big>',
'#ffffff', '#7799ee')
contents = heading + html.bigsection(
'File: %s' % path, '#ffffff', '#ee77aa', body)
return 'getfile %s' % path, contents
def html_topics():
"""Index of topic texts available."""
def bltinlink(name):
return '<a href="topic?key=%s">%s</a>' % (name, name)
heading = html.heading(
'<big><big><strong>INDEX</strong></big></big>',
'#ffffff', '#7799ee')
names = sorted(Helper.topics.keys())
contents = html.multicolumn(names, bltinlink)
contents = heading + html.bigsection(
'Topics', '#ffffff', '#ee77aa', contents)
return 'Topics', contents
def html_keywords():
"""Index of keywords."""
heading = html.heading(
'<big><big><strong>INDEX</strong></big></big>',
'#ffffff', '#7799ee')
names = sorted(Helper.keywords.keys())
def bltinlink(name):
return '<a href="topic?key=%s">%s</a>' % (name, name)
contents = html.multicolumn(names, bltinlink)
contents = heading + html.bigsection(
'Keywords', '#ffffff', '#ee77aa', contents)
return 'Keywords', contents
def html_topicpage(topic):
"""Topic or keyword help page."""
buf = io.StringIO()
htmlhelp = Helper(buf, buf)
contents, xrefs = htmlhelp._gettopic(topic)
if topic in htmlhelp.keywords:
title = 'KEYWORD'
else:
title = 'TOPIC'
heading = html.heading(
'<big><big><strong>%s</strong></big></big>' % title,
'#ffffff', '#7799ee')
contents = '<pre>%s</pre>' % html.markup(contents)
contents = html.bigsection(topic , '#ffffff','#ee77aa', contents)
if xrefs:
xrefs = sorted(xrefs.split())
def bltinlink(name):
return '<a href="topic?key=%s">%s</a>' % (name, name)
xrefs = html.multicolumn(xrefs, bltinlink)
xrefs = html.section('Related help topics: ',
'#ffffff', '#ee77aa', xrefs)
return ('%s %s' % (title, topic),
''.join((heading, contents, xrefs)))
def html_getobj(url):
obj = locate(url, forceload=1)
if obj is None and url != 'None':
raise ValueError('could not find object')
title = describe(obj)
content = html.document(obj, url)
return title, content
def html_error(url, exc):
heading = html.heading(
'<big><big><strong>Error</strong></big></big>',
'#ffffff', '#7799ee')
contents = '<br>'.join(html.escape(line) for line in
format_exception_only(type(exc), exc))
contents = heading + html.bigsection(url, '#ffffff', '#bb0000',
contents)
return "Error - %s" % url, contents
def get_html_page(url):
"""Generate an HTML page for url."""
complete_url = url
if url.endswith('.html'):
url = url[:-5]
try:
if url in ("", "index"):
title, content = html_index()
elif url == "topics":
title, content = html_topics()
elif url == "keywords":
title, content = html_keywords()
elif '=' in url:
op, _, url = url.partition('=')
if op == "search?key":
title, content = html_search(url)
elif op == "getfile?key":
title, content = html_getfile(url)
elif op == "topic?key":
# try topics first, then objects.
try:
title, content = html_topicpage(url)
except ValueError:
title, content = html_getobj(url)
elif op == "get?key":
# try objects first, then topics.
if url in ("", "index"):
title, content = html_index()
else:
try:
title, content = html_getobj(url)
except ValueError:
title, content = html_topicpage(url)
else:
raise ValueError('bad pydoc url')
else:
title, content = html_getobj(url)
except Exception as exc:
# Catch any errors and display them in an error page.
title, content = html_error(complete_url, exc)
return html.page(title, content)
if url.startswith('/'):
url = url[1:]
if content_type == 'text/css':
path_here = os.path.dirname(os.path.realpath(__file__))
css_path = os.path.join(path_here, url)
with open(css_path) as fp:
return ''.join(fp.readlines())
elif content_type == 'text/html':
return get_html_page(url)
# Errors outside the url handler are caught by the server.
raise TypeError('unknown content type %r for url %s' % (content_type, url))
def browse(port=0, *, open_browser=True):
"""Start the enhanced pydoc Web server and open a Web browser.
Use port '0' to start the server on an arbitrary port.
Set open_browser to False to suppress opening a browser.
"""
import webbrowser
serverthread = _start_server(_url_handler, port)
if serverthread.error:
print(serverthread.error)
return
if serverthread.serving:
server_help_msg = 'Server commands: [b]rowser, [q]uit'
if open_browser:
webbrowser.open(serverthread.url)
try:
print('Server ready at', serverthread.url)
print(server_help_msg)
while serverthread.serving:
cmd = input('server> ')
cmd = cmd.lower()
if cmd == 'q':
break
elif cmd == 'b':
webbrowser.open(serverthread.url)
else:
print(server_help_msg)
except (KeyboardInterrupt, EOFError):
print()
finally:
if serverthread.serving:
serverthread.stop()
print('Server stopped')
# -------------------------------------------------- command-line interface
def ispath(x):
return isinstance(x, str) and x.find(os.sep) >= 0
def cli():
"""Command-line interface (looks at sys.argv to decide what to do)."""
import getopt
class BadUsage(Exception): pass
# Scripts don't get the current directory in their path by default
# unless they are run with the '-m' switch
if '' not in sys.path:
scriptdir = os.path.dirname(sys.argv[0])
if scriptdir in sys.path:
sys.path.remove(scriptdir)
sys.path.insert(0, '.')
try:
opts, args = getopt.getopt(sys.argv[1:], 'bk:p:w')
writing = False
start_server = False
open_browser = False
port = None
for opt, val in opts:
if opt == '-b':
start_server = True
open_browser = True
if opt == '-k':
apropos(val)
return
if opt == '-p':
start_server = True
port = val
if opt == '-w':
writing = True
if start_server:
if port is None:
port = 0
browse(port, open_browser=open_browser)
return
if not args: raise BadUsage
for arg in args:
if ispath(arg) and not os.path.exists(arg):
print('file %r does not exist' % arg)
break
try:
if ispath(arg) and os.path.isfile(arg):
arg = importfile(arg)
if writing:
if ispath(arg) and os.path.isdir(arg):
writedocs(arg)
else:
writedoc(arg)
else:
help.help(arg)
except ErrorDuringImport as value:
print(value)
except (getopt.error, BadUsage):
cmd = os.path.splitext(os.path.basename(sys.argv[0]))[0]
print("""pydoc - the Python documentation tool
{cmd} <name> ...
Show text documentation on something. <name> may be the name of a
Python keyword, topic, function, module, or package, or a dotted
reference to a class or function within a module or module in a
package. If <name> contains a '{sep}', it is used as the path to a
Python source file to document. If name is 'keywords', 'topics',
or 'modules', a listing of these things is displayed.
{cmd} -k <keyword>
Search for a keyword in the synopsis lines of all available modules.
{cmd} -p <port>
Start an HTTP server on the given port on the local machine. Port
number 0 can be used to get an arbitrary unused port.
{cmd} -b
Start an HTTP server on an arbitrary unused port and open a Web browser
to interactively browse documentation. The -p option can be used with
the -b option to explicitly specify the server port.
{cmd} -w <name> ...
Write out the HTML documentation for a module to a file in the current
directory. If <name> contains a '{sep}', it is treated as a filename; if
it names a directory, documentation is written for all the contents.
""".format(cmd=cmd, sep=os.sep))
if __name__ == '__main__':
cli()
| agpl-3.0 |
ProjectSWGCore/NGECore2 | scripts/mobiles/talus/lost_aqualish_scout.py | 2 | 1650 | import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('lost_aqualish_scout')
mobileTemplate.setLevel(36)
mobileTemplate.setDifficulty(Difficulty.NORMAL)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(False)
mobileTemplate.setScale(1)
mobileTemplate.setSocialGroup("lost aqualish")
mobileTemplate.setAssistRange(6)
mobileTemplate.setStalker(True)
mobileTemplate.setOptionsBitmask(Options.AGGRESSIVE | Options.ATTACKABLE)
templates = Vector()
templates.add('object/mobile/shared_dressed_lost_aqualish_scout_male_01.iff')
templates.add('object/mobile/shared_dressed_lost_aqualish_scout_female_01.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/ranged/rifle/shared_rifle_t21.iff', WeaponType.CARBINE, 1.0, 15, 'energy')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
mobileTemplate.setDefaultAttack('rangedShot')
mobileTemplate.setAttacks(attacks)
lootPoolNames_1 = ['Junk']
lootPoolChances_1 = [100]
lootGroupChance_1 = 100
mobileTemplate.addToLootGroups(lootPoolNames_1,lootPoolChances_1,lootGroupChance_1)
core.spawnService.addMobileTemplate('lost_aqualish_scout', mobileTemplate)
return | lgpl-3.0 |
djgagne/scikit-learn | sklearn/covariance/__init__.py | 389 | 1157 | """
The :mod:`sklearn.covariance` module includes methods and algorithms to
robustly estimate the covariance of features given a set of points. The
precision matrix defined as the inverse of the covariance is also estimated.
Covariance estimation is closely related to the theory of Gaussian Graphical
Models.
"""
from .empirical_covariance_ import empirical_covariance, EmpiricalCovariance, \
log_likelihood
from .shrunk_covariance_ import shrunk_covariance, ShrunkCovariance, \
ledoit_wolf, ledoit_wolf_shrinkage, \
LedoitWolf, oas, OAS
from .robust_covariance import fast_mcd, MinCovDet
from .graph_lasso_ import graph_lasso, GraphLasso, GraphLassoCV
from .outlier_detection import EllipticEnvelope
__all__ = ['EllipticEnvelope',
'EmpiricalCovariance',
'GraphLasso',
'GraphLassoCV',
'LedoitWolf',
'MinCovDet',
'OAS',
'ShrunkCovariance',
'empirical_covariance',
'fast_mcd',
'graph_lasso',
'ledoit_wolf',
'ledoit_wolf_shrinkage',
'log_likelihood',
'oas',
'shrunk_covariance']
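# A minimal usage sketch, following scikit-learn's standard estimator API
# (X is a placeholder for your data matrix):
#   from sklearn.covariance import LedoitWolf
#   lw = LedoitWolf().fit(X)   # X: array-like, shape (n_samples, n_features)
#   cov, prec = lw.covariance_, lw.precision_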
| bsd-3-clause |
xbot/alfred-pushbullet | lib/websocket/_logging.py | 49 | 1865 | """
websocket - WebSocket client library for Python
Copyright (C) 2010 Hiroki Ohtani(liris)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1335 USA
"""
import logging
_logger = logging.getLogger('websocket')
_traceEnabled = False
__all__ = ["enableTrace", "dump", "error", "debug", "trace",
"isEnabledForError", "isEnabledForDebug"]
def enableTrace(tracable):
"""
Turn tracing on or off.
tracable: boolean value. If set to True, tracing is enabled.
"""
global _traceEnabled
_traceEnabled = tracable
if tracable:
if not _logger.handlers:
_logger.addHandler(logging.StreamHandler())
_logger.setLevel(logging.DEBUG)
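# Typical use, matching the library's documented pattern:
#   import websocket
#   websocket.enableTrace(True)   # frames are then echoed at DEBUG level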
def dump(title, message):
if _traceEnabled:
_logger.debug("--- " + title + " ---")
_logger.debug(message)
_logger.debug("-----------------------")
def error(msg):
_logger.error(msg)
def debug(msg):
_logger.debug(msg)
def trace(msg):
if _traceEnabled:
_logger.debug(msg)
def isEnabledForError():
return _logger.isEnabledFor(logging.ERROR)
def isEnabledForDebug():
return _logger.isEnabledFor(logging.DEBUG)
| mit |
crystalhaohua0408/principles-of-computing | homework7.py | 5 | 6428 | # Homework 7 for Principles of Computing class, by k., 08/01/2014
# class Puzzle from program template at http://www.codeskulptor.org/#poc_fifteen_template.py
'''
Loyd's Fifteen puzzle - solver and visualizer
Note that solved configuration has the blank (zero) tile in upper left
Use the arrows key to swap this tile with its neighbors
'''
#import poc_fifteen_gui
class Puzzle:
"""
Class representation for the Fifteen puzzle
"""
def __init__(self, puzzle_height, puzzle_width, initial_grid=None):
"""
Initialize puzzle with default height and width
Returns a Puzzle object
"""
self._height = puzzle_height
self._width = puzzle_width
self._grid = [[col + puzzle_width * row
for col in range(self._width)]
for row in range(self._height)]
if initial_grid != None:
for row in range(puzzle_height):
for col in range(puzzle_width):
self._grid[row][col] = initial_grid[row][col]
def __str__(self):
"""
Generate string representaion for puzzle
Returns a string
"""
ans = ""
for row in range(self._height):
ans += str(self._grid[row])
ans += "\n"
return ans
#####################################
# GUI methods
def get_height(self):
"""
Getter for puzzle height
Returns an integer
"""
return self._height
def get_width(self):
"""
Getter for puzzle width
Returns an integer
"""
return self._width
def get_number(self, row, col):
"""
Getter for the number at tile position pos
Returns an integer
"""
return self._grid[row][col]
def set_number(self, row, col, value):
"""
Setter for the number at tile position pos
"""
self._grid[row][col] = value
def clone(self):
"""
Make a copy of the puzzle to update during solving
Returns a Puzzle object
"""
new_puzzle = Puzzle(self._height, self._width, self._grid)
return new_puzzle
########################################################
# Core puzzle methods
def current_position(self, solved_row, solved_col):
"""
Locate the current position of the tile that will be at
position (solved_row, solved_col) when the puzzle is solved
Returns a tuple of two integers
"""
solved_value = (solved_col + self._width * solved_row)
for row in range(self._height):
for col in range(self._width):
if self._grid[row][col] == solved_value:
return (row, col)
assert False, "Value " + str(solved_value) + " not found"
def update_puzzle(self, move_string):
"""
Updates the puzzle state based on the provided move string
"""
zero_row, zero_col = self.current_position(0, 0)
for direction in move_string:
if direction == "l":
assert zero_col > 0, "move off grid: " + direction
self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col - 1]
self._grid[zero_row][zero_col - 1] = 0
zero_col -= 1
elif direction == "r":
assert zero_col < self._width - 1, "move off grid: " + direction
self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col + 1]
self._grid[zero_row][zero_col + 1] = 0
zero_col += 1
elif direction == "u":
assert zero_row > 0, "move off grid: " + direction
self._grid[zero_row][zero_col] = self._grid[zero_row - 1][zero_col]
self._grid[zero_row - 1][zero_col] = 0
zero_row -= 1
elif direction == "d":
assert zero_row < self._height - 1, "move off grid: " + direction
self._grid[zero_row][zero_col] = self._grid[zero_row + 1][zero_col]
self._grid[zero_row + 1][zero_col] = 0
zero_row += 1
else:
assert False, "invalid direction: " + direction
# start interactive simulation
#poc_fifteen_gui.FifteenGUI(Puzzle(4, 4))
# Question 1
question1 = Puzzle(4, 4)
print 'Prep for Question 1 follows...\n', question1
question1.update_puzzle('drdr')
print 'Configuration after move \'drdr\':\n', question1
# Question 2
question2 = Puzzle(4, 4)
question2.update_puzzle('ddrdrudlulurrrlldluurrrdllldr')
print '\nPrep for Question 2 follows...\n', question2
question2.update_puzzle('urullddruld')
print 'Configuration after move \'urullddruld\':\n', question2
# Question 3
question3 = Puzzle(2, 2)
print '\nPrep for Question 3 follows...\n', question3
question3.update_puzzle('rdlu')
print 'First move:\n', question3
question3.update_puzzle('rdlu')
print 'Second move:\n', question3
question3.update_puzzle('rdlu')
print 'Configuration after third move:\n', question3
# Question 4
question4 = Puzzle(2, 2, [[0, 2], [3, 1]])
print '\nPrep for Question 4 follows...\n', question4
question4.update_puzzle('rdlu')
print 'Configuration after move \'rdlu\':\n', question4
# Question 5
question5 = Puzzle(2, 2, [[0, 3], [1, 2]])
print '\nPrep for Question 5 follows...\n', question5
question5.update_puzzle('drul')
print 'Configuration after move \'drul\':\n', question5
# Question 8
question8 = Puzzle(4, 4, [[4, 13, 1, 3], [5, 10, 2, 7], [8, 12, 6, 11], [9, 0, 14, 16]])
print '\nPrep for Question 8 follows...\n', question8
question8.update_puzzle('uuu')
print 'Configuration after move \'uuu\':\n', question8
question8.update_puzzle('lddru')
print 'Configuration after move \'lddru\':\n', question8
question8.update_puzzle('lddruld')
print 'Configuration after move \'lddruld\':\n', question8
# Question 9
question9 = Puzzle(3, 2, [[1, 2], [0, 4], [3, 5]])
print '\nPrep for Question 9 follows...\n', question9
question9.update_puzzle('ruldrdlurdluurddlur')
print 'Configuration after move \'ruldrdlurdluurddlur\':\n', question9
# Question 10
question10 = Puzzle(2, 3, [[3, 4, 1], [0, 2, 5]])
print '\nPrep for Question 10 follows...\n', question10
question10.update_puzzle('urdlurrdluldrruld')
print 'Configuration after move \'urdlurrdluldrruld\':\n', question10
| mit |
jindongh/boto | boto/ec2/elb/listener.py | 152 | 3377 | # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.ec2.elb.listelement import ListElement
class Listener(object):
"""
Represents an EC2 Load Balancer Listener tuple
"""
def __init__(self, load_balancer=None, load_balancer_port=0,
instance_port=0, protocol='', ssl_certificate_id=None, instance_protocol=None):
self.load_balancer = load_balancer
self.load_balancer_port = load_balancer_port
self.instance_port = instance_port
self.protocol = protocol
self.instance_protocol = instance_protocol
self.ssl_certificate_id = ssl_certificate_id
self.policy_names = ListElement()
def __repr__(self):
r = "(%d, %d, '%s'" % (self.load_balancer_port, self.instance_port, self.protocol)
if self.instance_protocol:
r += ", '%s'" % self.instance_protocol
if self.ssl_certificate_id:
r += ', %s' % (self.ssl_certificate_id)
r += ')'
return r
def startElement(self, name, attrs, connection):
if name == 'PolicyNames':
return self.policy_names
return None
def endElement(self, name, value, connection):
if name == 'LoadBalancerPort':
self.load_balancer_port = int(value)
elif name == 'InstancePort':
self.instance_port = int(value)
elif name == 'InstanceProtocol':
self.instance_protocol = value
elif name == 'Protocol':
self.protocol = value
elif name == 'SSLCertificateId':
self.ssl_certificate_id = value
else:
setattr(self, name, value)
def get_tuple(self):
return self.load_balancer_port, self.instance_port, self.protocol
def get_complex_tuple(self):
return self.load_balancer_port, self.instance_port, self.protocol, self.instance_protocol
def __getitem__(self, key):
if key == 0:
return self.load_balancer_port
if key == 1:
return self.instance_port
if key == 2:
return self.protocol
if key == 3:
return self.instance_protocol
if key == 4:
return self.ssl_certificate_id
raise KeyError
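# __getitem__ above emulates the legacy tuple form, so callers can keep
# indexing a Listener like the (lb_port, instance_port, protocol, ...) tuple
# it replaced; e.g. listener[2] == listener.protocol.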
| mit |
dago/ansible-modules-core | cloud/amazon/cloudformation.py | 17 | 10939 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: cloudformation
short_description: create a AWS CloudFormation stack
description:
- Launches an AWS CloudFormation stack and waits for it complete.
version_added: "1.1"
options:
stack_name:
description:
- name of the cloudformation stack
required: true
default: null
aliases: []
disable_rollback:
description:
- If a stacks fails to form, rollback will remove the stack
required: false
default: "false"
choices: [ "true", "false" ]
aliases: []
template_parameters:
description:
- a list of hashes of all the template variables for the stack
required: false
default: {}
aliases: []
state:
description:
- If state is "present", stack will be created. If state is "present" and if stack exists and template has changed, it will be updated.
If state is "absent", stack will be removed.
required: true
default: null
aliases: []
template:
description:
- the path of the cloudformation template
required: true
default: null
aliases: []
stack_policy:
description:
- the path of the cloudformation stack policy
required: false
default: null
aliases: []
version_added: "x.x"
tags:
description:
- Dictionary of tags to associate with the stack and its resources during stack creation. Cannot be updated later.
Requires at least Boto version 2.6.0.
required: false
default: null
aliases: []
version_added: "1.4"
aws_secret_key:
description:
- AWS secret key. If not set then the value of the AWS_SECRET_KEY environment variable is used.
required: false
default: null
aliases: [ 'ec2_secret_key', 'secret_key' ]
version_added: "1.5"
aws_access_key:
description:
- AWS access key. If not set then the value of the AWS_ACCESS_KEY environment variable is used.
required: false
default: null
aliases: [ 'ec2_access_key', 'access_key' ]
version_added: "1.5"
region:
description:
- The AWS region to use. If not specified then the value of the EC2_REGION environment variable, if any, is used.
required: false
aliases: ['aws_region', 'ec2_region']
version_added: "1.5"
requirements: [ "boto" ]
author: James S. Martin
'''
EXAMPLES = '''
# Basic task example
tasks:
- name: launch ansible cloudformation example
cloudformation:
stack_name: "ansible-cloudformation"
state: "present"
region: "us-east-1"
disable_rollback: true
template: "files/cloudformation-example.json"
template_parameters:
KeyName: "jmartin"
DiskType: "ephemeral"
InstanceType: "m1.small"
ClusterSize: 3
tags:
Stack: "ansible-cloudformation"
'''
import json
import time
try:
import boto
import boto.cloudformation.connection
except ImportError:
print "failed=True msg='boto required for this module'"
sys.exit(1)
def boto_exception(err):
'''generic error message handler'''
if hasattr(err, 'error_message'):
error = err.error_message
elif hasattr(err, 'message'):
error = err.message
else:
error = '%s: %s' % (Exception, err)
return error
def boto_version_required(version_tuple):
parts = boto.Version.split('.')
boto_version = []
try:
for part in parts:
boto_version.append(int(part))
except:
boto_version.append(-1)
return tuple(boto_version) >= tuple(version_tuple)
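# e.g. boto_version_required((2, 6, 0)) is True only when the installed boto
# reports version 2.6.0 or newer; a non-numeric version component ends the
# parse with a trailing -1, which biases the comparison toward False.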
def stack_operation(cfn, stack_name, operation):
'''gets the status of a stack while it is created/updated/deleted'''
existed = []
result = {}
operation_complete = False
while not operation_complete:
try:
stack = cfn.describe_stacks(stack_name)[0]
existed.append('yes')
except:
if 'yes' in existed:
result = dict(changed=True,
output='Stack Deleted',
events=map(str, list(stack.describe_events())))
else:
result = dict(changed= True, output='Stack Not Found')
break
if '%s_COMPLETE' % operation == stack.stack_status:
result = dict(changed=True,
events = map(str, list(stack.describe_events())),
output = 'Stack %s complete' % operation)
break
if 'ROLLBACK_COMPLETE' == stack.stack_status or '%s_ROLLBACK_COMPLETE' % operation == stack.stack_status:
result = dict(changed=True, failed=True,
events = map(str, list(stack.describe_events())),
output = 'Problem with %s. Rollback complete' % operation)
break
elif '%s_FAILED' % operation == stack.stack_status:
result = dict(changed=True, failed=True,
events = map(str, list(stack.describe_events())),
output = 'Stack %s failed' % operation)
break
else:
time.sleep(5)
return result
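# Polling sketch: every 5 seconds the stack is re-described until its status
# reaches <OPERATION>_COMPLETE, <OPERATION>_FAILED, or a ROLLBACK_COMPLETE
# variant; if describe_stacks() itself starts failing after the stack had
# been seen at least once, the loop treats a DELETE as having finished.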
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
stack_name=dict(required=True),
template_parameters=dict(required=False, type='dict', default={}),
state=dict(default='present', choices=['present', 'absent']),
template=dict(default=None, required=True),
stack_policy=dict(default=None, required=False),
disable_rollback=dict(default=False, type='bool'),
tags=dict(default=None)
)
)
module = AnsibleModule(
argument_spec=argument_spec,
)
state = module.params['state']
stack_name = module.params['stack_name']
template_body = open(module.params['template'], 'r').read()
if module.params['stack_policy'] is not None:
stack_policy_body = open(module.params['stack_policy'], 'r').read()
else:
stack_policy_body = None
disable_rollback = module.params['disable_rollback']
template_parameters = module.params['template_parameters']
tags = module.params['tags']
ec2_url, aws_access_key, aws_secret_key, region = get_ec2_creds(module)
kwargs = dict()
if tags is not None:
if not boto_version_required((2,6,0)):
module.fail_json(msg='Module parameter "tags" requires at least Boto version 2.6.0')
kwargs['tags'] = tags
# convert the template parameters ansible passes into a tuple for boto
template_parameters_tup = [(k, v) for k, v in template_parameters.items()]
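# e.g. {'KeyName': 'jmartin', 'ClusterSize': 3} becomes
# [('KeyName', 'jmartin'), ('ClusterSize', 3)] (dict order not guaranteed)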
stack_outputs = {}
try:
cfn = boto.cloudformation.connect_to_region(
region,
aws_access_key_id=aws_access_key,
aws_secret_access_key=aws_secret_key,
)
except boto.exception.NoAuthHandlerFound, e:
module.fail_json(msg=str(e))
update = False
result = {}
operation = None
# if state is present we are going to ensure that the stack is either
# created or updated
if state == 'present':
try:
cfn.create_stack(stack_name, parameters=template_parameters_tup,
template_body=template_body,
stack_policy_body=stack_policy_body,
disable_rollback=disable_rollback,
capabilities=['CAPABILITY_IAM'],
**kwargs)
operation = 'CREATE'
except Exception, err:
error_msg = boto_exception(err)
if 'AlreadyExistsException' in error_msg or 'already exists' in error_msg:
update = True
else:
module.fail_json(msg=error_msg)
if not update:
result = stack_operation(cfn, stack_name, operation)
# if the state is present and the stack already exists, we try to update it
# AWS will tell us if the stack template and parameters are the same and
# don't need to be updated.
if update:
try:
cfn.update_stack(stack_name, parameters=template_parameters_tup,
template_body=template_body,
stack_policy_body=stack_policy_body,
disable_rollback=disable_rollback,
capabilities=['CAPABILITY_IAM'])
operation = 'UPDATE'
except Exception, err:
error_msg = boto_exception(err)
if 'No updates are to be performed.' in error_msg:
result = dict(changed=False, output='Stack is already up-to-date.')
else:
module.fail_json(msg=error_msg)
if operation == 'UPDATE':
result = stack_operation(cfn, stack_name, operation)
# check the status of the stack while we are creating/updating it.
# and get the outputs of the stack
if state == 'present' or update:
stack = cfn.describe_stacks(stack_name)[0]
for output in stack.outputs:
stack_outputs[output.key] = output.value
result['stack_outputs'] = stack_outputs
# absent state is different because of the way delete_stack works.
# the problem is that it doesn't give an error if the stack isn't found
# so must describe the stack first
if state == 'absent':
try:
cfn.describe_stacks(stack_name)
operation = 'DELETE'
except Exception, err:
error_msg = boto_exception(err)
if 'Stack:%s does not exist' % stack_name in error_msg:
result = dict(changed=False, output='Stack not found.')
else:
module.fail_json(msg=error_msg)
if operation == 'DELETE':
cfn.delete_stack(stack_name)
result = stack_operation(cfn, stack_name, operation)
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
main()
| gpl-3.0 |
cbandera/hometop_HT3 | HT3/sw/HT3_Logger.py | 1 | 1502 | #! /usr/bin/python3
#
#################################################################
## Copyright (c) 2013 Norbert S. <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#################################################################
# Ver:0.1.5 / Datum 25.05.2014
# Ver:0.1.6 / Datum 10.01.2015 'reading configuration changed'
# Ver:0.1.7.1/ Datum 04.03.2015 'socket option' activated
# logging from ht_utils added
#################################################################
import sys, time
sys.path.append('lib')
import ht3_worker
configurationfilename='./etc/config/HT3_db_cfg.xml'
logfilename="ht_logger.log"
#### reconfiguration has to be done in configuration-file ####
HT3_logger=ht3_worker.ht3_cworker(configurationfilename, hexdump_window=False, gui_active=False, logfilename_in=logfilename)
HT3_logger.run()
while True:
time.sleep(2)
| gpl-3.0 |
brave/browser-ios | Client/Assets/scrape_plugins.py | 3 | 1503 | #!/usr/bin/env python
from lxml import html
import os
import requests
import shutil
import urllib
def main():
if not os.path.exists("SearchPlugins"):
os.makedirs("SearchPlugins")
locales = getLocaleList()
for locale in locales:
files = getFileList(locale)
if files == None:
continue
print("found searchplugins")
for file in files:
downloadedFile = getFile(locale, file)
directory = os.path.join("SearchPlugins", locale)
if not os.path.exists(directory):
os.makedirs(directory)
shutil.move(downloadedFile, os.path.join(directory, file))
def getLocaleList():
response = requests.get('http://hg.mozilla.org/releases/mozilla-aurora/raw-file/default/mobile/android/locales/all-locales')
return response.text.strip().split("\n")
def getFileList(locale):
print("scraping: %s..." % locale)
url = "https://hg.mozilla.org/releases/l10n/mozilla-aurora/%s/file/default/mobile/searchplugins" % locale
response = requests.get(url)
if not response.ok:
return
tree = html.fromstring(response.content)
return tree.xpath('//a[@class="list"]/text()')
def getFile(locale, file):
print(" downloading: %s..." % file)
url = "https://hg.mozilla.org/releases/l10n/mozilla-aurora/%s/raw-file/default/mobile/searchplugins/%s" % (locale, file)
result = urllib.urlretrieve(url)
return result[0]
if __name__ == "__main__":
main()
| mpl-2.0 |
jpaalasm/pyglet | pyglet/media/drivers/openal/lib_openal.py | 45 | 27846 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''Wrapper for openal
Generated with:
../tools/wraptypes/wrap.py /usr/include/AL/al.h -lopenal -olib_openal.py
.. Hacked to remove non-existent library functions.
TODO add alGetError check.
.. alListener3i and alListeneriv are present in my OS X 10.4 but not another
10.4 user's installation. They've also been removed for compatibility.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import ctypes
from ctypes import *
import sys
import pyglet.lib
_lib = pyglet.lib.load_library('openal', win32='openal32',
framework='/System/Library/Frameworks/OpenAL.framework')
_int_types = (c_int16, c_int32)
if hasattr(ctypes, 'c_int64'):
# Some builds of ctypes apparently do not have c_int64
# defined; it's a pretty good bet that these builds do not
# have 64-bit pointers.
_int_types += (ctypes.c_int64,)
for t in _int_types:
if sizeof(t) == sizeof(c_size_t):
c_ptrdiff_t = t
class c_void(Structure):
# c_void_p is a buggy return type, converting to int, so
# POINTER(None) == c_void_p is actually written as
# POINTER(c_void), so it can be treated as a real pointer.
_fields_ = [('dummy', c_int)]
AL_API = 0 # /usr/include/AL/al.h:39
ALAPI = 0 # /usr/include/AL/al.h:59
AL_INVALID = -1 # /usr/include/AL/al.h:61
AL_ILLEGAL_ENUM = 0 # /usr/include/AL/al.h:62
AL_ILLEGAL_COMMAND = 0 # /usr/include/AL/al.h:63
ALboolean = c_int # Better return type than c_char, as generated
ALchar = c_char # /usr/include/AL/al.h:73
ALbyte = c_char # /usr/include/AL/al.h:76
ALubyte = c_ubyte # /usr/include/AL/al.h:79
ALshort = c_short # /usr/include/AL/al.h:82
ALushort = c_ushort # /usr/include/AL/al.h:85
ALint = c_int # /usr/include/AL/al.h:88
ALuint = c_uint # /usr/include/AL/al.h:91
ALsizei = c_int # /usr/include/AL/al.h:94
ALenum = c_int # /usr/include/AL/al.h:97
ALfloat = c_float # /usr/include/AL/al.h:100
ALdouble = c_double # /usr/include/AL/al.h:103
ALvoid = None # /usr/include/AL/al.h:106
AL_NONE = 0 # /usr/include/AL/al.h:112
AL_FALSE = 0 # /usr/include/AL/al.h:115
AL_TRUE = 1 # /usr/include/AL/al.h:118
AL_SOURCE_RELATIVE = 514 # /usr/include/AL/al.h:121
AL_CONE_INNER_ANGLE = 4097 # /usr/include/AL/al.h:130
AL_CONE_OUTER_ANGLE = 4098 # /usr/include/AL/al.h:137
AL_PITCH = 4099 # /usr/include/AL/al.h:145
AL_POSITION = 4100 # /usr/include/AL/al.h:157
AL_DIRECTION = 4101 # /usr/include/AL/al.h:160
AL_VELOCITY = 4102 # /usr/include/AL/al.h:163
AL_LOOPING = 4103 # /usr/include/AL/al.h:171
AL_BUFFER = 4105 # /usr/include/AL/al.h:178
AL_GAIN = 4106 # /usr/include/AL/al.h:191
AL_MIN_GAIN = 4109 # /usr/include/AL/al.h:200
AL_MAX_GAIN = 4110 # /usr/include/AL/al.h:209
AL_ORIENTATION = 4111 # /usr/include/AL/al.h:216
AL_SOURCE_STATE = 4112 # /usr/include/AL/al.h:221
AL_INITIAL = 4113 # /usr/include/AL/al.h:222
AL_PLAYING = 4114 # /usr/include/AL/al.h:223
AL_PAUSED = 4115 # /usr/include/AL/al.h:224
AL_STOPPED = 4116 # /usr/include/AL/al.h:225
AL_BUFFERS_QUEUED = 4117 # /usr/include/AL/al.h:230
AL_BUFFERS_PROCESSED = 4118 # /usr/include/AL/al.h:231
AL_SEC_OFFSET = 4132 # /usr/include/AL/al.h:236
AL_SAMPLE_OFFSET = 4133 # /usr/include/AL/al.h:237
AL_BYTE_OFFSET = 4134 # /usr/include/AL/al.h:238
AL_SOURCE_TYPE = 4135 # /usr/include/AL/al.h:246
AL_STATIC = 4136 # /usr/include/AL/al.h:247
AL_STREAMING = 4137 # /usr/include/AL/al.h:248
AL_UNDETERMINED = 4144 # /usr/include/AL/al.h:249
AL_FORMAT_MONO8 = 4352 # /usr/include/AL/al.h:252
AL_FORMAT_MONO16 = 4353 # /usr/include/AL/al.h:253
AL_FORMAT_STEREO8 = 4354 # /usr/include/AL/al.h:254
AL_FORMAT_STEREO16 = 4355 # /usr/include/AL/al.h:255
AL_REFERENCE_DISTANCE = 4128 # /usr/include/AL/al.h:265
AL_ROLLOFF_FACTOR = 4129 # /usr/include/AL/al.h:273
AL_CONE_OUTER_GAIN = 4130 # /usr/include/AL/al.h:282
AL_MAX_DISTANCE = 4131 # /usr/include/AL/al.h:292
AL_FREQUENCY = 8193 # /usr/include/AL/al.h:300
AL_BITS = 8194 # /usr/include/AL/al.h:301
AL_CHANNELS = 8195 # /usr/include/AL/al.h:302
AL_SIZE = 8196 # /usr/include/AL/al.h:303
AL_UNUSED = 8208 # /usr/include/AL/al.h:310
AL_PENDING = 8209 # /usr/include/AL/al.h:311
AL_PROCESSED = 8210 # /usr/include/AL/al.h:312
AL_NO_ERROR = 0 # /usr/include/AL/al.h:316
AL_INVALID_NAME = 40961 # /usr/include/AL/al.h:321
AL_INVALID_ENUM = 40962 # /usr/include/AL/al.h:326
AL_INVALID_VALUE = 40963 # /usr/include/AL/al.h:331
AL_INVALID_OPERATION = 40964 # /usr/include/AL/al.h:336
AL_OUT_OF_MEMORY = 40965 # /usr/include/AL/al.h:342
AL_VENDOR = 45057 # /usr/include/AL/al.h:346
AL_VERSION = 45058 # /usr/include/AL/al.h:347
AL_RENDERER = 45059 # /usr/include/AL/al.h:348
AL_EXTENSIONS = 45060 # /usr/include/AL/al.h:349
AL_DOPPLER_FACTOR = 49152 # /usr/include/AL/al.h:356
AL_DOPPLER_VELOCITY = 49153 # /usr/include/AL/al.h:361
AL_SPEED_OF_SOUND = 49155 # /usr/include/AL/al.h:366
AL_DISTANCE_MODEL = 53248 # /usr/include/AL/al.h:375
AL_INVERSE_DISTANCE = 53249 # /usr/include/AL/al.h:376
AL_INVERSE_DISTANCE_CLAMPED = 53250 # /usr/include/AL/al.h:377
AL_LINEAR_DISTANCE = 53251 # /usr/include/AL/al.h:378
AL_LINEAR_DISTANCE_CLAMPED = 53252 # /usr/include/AL/al.h:379
AL_EXPONENT_DISTANCE = 53253 # /usr/include/AL/al.h:380
AL_EXPONENT_DISTANCE_CLAMPED = 53254 # /usr/include/AL/al.h:381
# /usr/include/AL/al.h:386
alEnable = _lib.alEnable
alEnable.restype = None
alEnable.argtypes = [ALenum]
# /usr/include/AL/al.h:388
alDisable = _lib.alDisable
alDisable.restype = None
alDisable.argtypes = [ALenum]
# /usr/include/AL/al.h:390
alIsEnabled = _lib.alIsEnabled
alIsEnabled.restype = ALboolean
alIsEnabled.argtypes = [ALenum]
# /usr/include/AL/al.h:396
alGetString = _lib.alGetString
alGetString.restype = POINTER(ALchar)
alGetString.argtypes = [ALenum]
# /usr/include/AL/al.h:398
alGetBooleanv = _lib.alGetBooleanv
alGetBooleanv.restype = None
alGetBooleanv.argtypes = [ALenum, POINTER(ALboolean)]
# /usr/include/AL/al.h:400
alGetIntegerv = _lib.alGetIntegerv
alGetIntegerv.restype = None
alGetIntegerv.argtypes = [ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:402
alGetFloatv = _lib.alGetFloatv
alGetFloatv.restype = None
alGetFloatv.argtypes = [ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:404
alGetDoublev = _lib.alGetDoublev
alGetDoublev.restype = None
alGetDoublev.argtypes = [ALenum, POINTER(ALdouble)]
# /usr/include/AL/al.h:406
alGetBoolean = _lib.alGetBoolean
alGetBoolean.restype = ALboolean
alGetBoolean.argtypes = [ALenum]
# /usr/include/AL/al.h:408
alGetInteger = _lib.alGetInteger
alGetInteger.restype = ALint
alGetInteger.argtypes = [ALenum]
# /usr/include/AL/al.h:410
alGetFloat = _lib.alGetFloat
alGetFloat.restype = ALfloat
alGetFloat.argtypes = [ALenum]
# /usr/include/AL/al.h:412
alGetDouble = _lib.alGetDouble
alGetDouble.restype = ALdouble
alGetDouble.argtypes = [ALenum]
# /usr/include/AL/al.h:419
alGetError = _lib.alGetError
alGetError.restype = ALenum
alGetError.argtypes = []
# /usr/include/AL/al.h:427
alIsExtensionPresent = _lib.alIsExtensionPresent
alIsExtensionPresent.restype = ALboolean
alIsExtensionPresent.argtypes = [POINTER(ALchar)]
# /usr/include/AL/al.h:429
alGetProcAddress = _lib.alGetProcAddress
alGetProcAddress.restype = POINTER(c_void)
alGetProcAddress.argtypes = [POINTER(ALchar)]
# /usr/include/AL/al.h:431
alGetEnumValue = _lib.alGetEnumValue
alGetEnumValue.restype = ALenum
alGetEnumValue.argtypes = [POINTER(ALchar)]
# /usr/include/AL/al.h:450
alListenerf = _lib.alListenerf
alListenerf.restype = None
alListenerf.argtypes = [ALenum, ALfloat]
# /usr/include/AL/al.h:452
alListener3f = _lib.alListener3f
alListener3f.restype = None
alListener3f.argtypes = [ALenum, ALfloat, ALfloat, ALfloat]
# /usr/include/AL/al.h:454
alListenerfv = _lib.alListenerfv
alListenerfv.restype = None
alListenerfv.argtypes = [ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:456
alListeneri = _lib.alListeneri
alListeneri.restype = None
alListeneri.argtypes = [ALenum, ALint]
# /usr/include/AL/al.h:458
#alListener3i = _lib.alListener3i
#alListener3i.restype = None
#alListener3i.argtypes = [ALenum, ALint, ALint, ALint]
# /usr/include/AL/al.h:460
#alListeneriv = _lib.alListeneriv
#alListeneriv.restype = None
#alListeneriv.argtypes = [ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:465
alGetListenerf = _lib.alGetListenerf
alGetListenerf.restype = None
alGetListenerf.argtypes = [ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:467
alGetListener3f = _lib.alGetListener3f
alGetListener3f.restype = None
alGetListener3f.argtypes = [ALenum, POINTER(ALfloat), POINTER(ALfloat), POINTER(ALfloat)]
# /usr/include/AL/al.h:469
alGetListenerfv = _lib.alGetListenerfv
alGetListenerfv.restype = None
alGetListenerfv.argtypes = [ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:471
alGetListeneri = _lib.alGetListeneri
alGetListeneri.restype = None
alGetListeneri.argtypes = [ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:473
alGetListener3i = _lib.alGetListener3i
alGetListener3i.restype = None
alGetListener3i.argtypes = [ALenum, POINTER(ALint), POINTER(ALint), POINTER(ALint)]
# /usr/include/AL/al.h:475
alGetListeneriv = _lib.alGetListeneriv
alGetListeneriv.restype = None
alGetListeneriv.argtypes = [ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:512
alGenSources = _lib.alGenSources
alGenSources.restype = None
alGenSources.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:515
alDeleteSources = _lib.alDeleteSources
alDeleteSources.restype = None
alDeleteSources.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:518
alIsSource = _lib.alIsSource
alIsSource.restype = ALboolean
alIsSource.argtypes = [ALuint]
# /usr/include/AL/al.h:523
alSourcef = _lib.alSourcef
alSourcef.restype = None
alSourcef.argtypes = [ALuint, ALenum, ALfloat]
# /usr/include/AL/al.h:525
alSource3f = _lib.alSource3f
alSource3f.restype = None
alSource3f.argtypes = [ALuint, ALenum, ALfloat, ALfloat, ALfloat]
# /usr/include/AL/al.h:527
alSourcefv = _lib.alSourcefv
alSourcefv.restype = None
alSourcefv.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:529
alSourcei = _lib.alSourcei
alSourcei.restype = None
alSourcei.argtypes = [ALuint, ALenum, ALint]
# /usr/include/AL/al.h:531
#alSource3i = _lib.alSource3i
#alSource3i.restype = None
#alSource3i.argtypes = [ALuint, ALenum, ALint, ALint, ALint]
# /usr/include/AL/al.h:533
#alSourceiv = _lib.alSourceiv
#alSourceiv.restype = None
#alSourceiv.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:538
alGetSourcef = _lib.alGetSourcef
alGetSourcef.restype = None
alGetSourcef.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:540
alGetSource3f = _lib.alGetSource3f
alGetSource3f.restype = None
alGetSource3f.argtypes = [ALuint, ALenum, POINTER(ALfloat), POINTER(ALfloat), POINTER(ALfloat)]
# /usr/include/AL/al.h:542
alGetSourcefv = _lib.alGetSourcefv
alGetSourcefv.restype = None
alGetSourcefv.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:544
alGetSourcei = _lib.alGetSourcei
alGetSourcei.restype = None
alGetSourcei.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:546
#alGetSource3i = _lib.alGetSource3i
#alGetSource3i.restype = None
#alGetSource3i.argtypes = [ALuint, ALenum, POINTER(ALint), POINTER(ALint), POINTER(ALint)]
# /usr/include/AL/al.h:548
alGetSourceiv = _lib.alGetSourceiv
alGetSourceiv.restype = None
alGetSourceiv.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:556
alSourcePlayv = _lib.alSourcePlayv
alSourcePlayv.restype = None
alSourcePlayv.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:559
alSourceStopv = _lib.alSourceStopv
alSourceStopv.restype = None
alSourceStopv.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:562
alSourceRewindv = _lib.alSourceRewindv
alSourceRewindv.restype = None
alSourceRewindv.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:565
alSourcePausev = _lib.alSourcePausev
alSourcePausev.restype = None
alSourcePausev.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:572
alSourcePlay = _lib.alSourcePlay
alSourcePlay.restype = None
alSourcePlay.argtypes = [ALuint]
# /usr/include/AL/al.h:575
alSourceStop = _lib.alSourceStop
alSourceStop.restype = None
alSourceStop.argtypes = [ALuint]
# /usr/include/AL/al.h:578
alSourceRewind = _lib.alSourceRewind
alSourceRewind.restype = None
alSourceRewind.argtypes = [ALuint]
# /usr/include/AL/al.h:581
alSourcePause = _lib.alSourcePause
alSourcePause.restype = None
alSourcePause.argtypes = [ALuint]
# /usr/include/AL/al.h:586
alSourceQueueBuffers = _lib.alSourceQueueBuffers
alSourceQueueBuffers.restype = None
alSourceQueueBuffers.argtypes = [ALuint, ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:588
alSourceUnqueueBuffers = _lib.alSourceUnqueueBuffers
alSourceUnqueueBuffers.restype = None
alSourceUnqueueBuffers.argtypes = [ALuint, ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:606
alGenBuffers = _lib.alGenBuffers
alGenBuffers.restype = None
alGenBuffers.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:609
alDeleteBuffers = _lib.alDeleteBuffers
alDeleteBuffers.restype = None
alDeleteBuffers.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:612
alIsBuffer = _lib.alIsBuffer
alIsBuffer.restype = ALboolean
alIsBuffer.argtypes = [ALuint]
# /usr/include/AL/al.h:615
alBufferData = _lib.alBufferData
alBufferData.restype = None
alBufferData.argtypes = [ALuint, ALenum, POINTER(ALvoid), ALsizei, ALsizei]
# /usr/include/AL/al.h:620
alBufferf = _lib.alBufferf
alBufferf.restype = None
alBufferf.argtypes = [ALuint, ALenum, ALfloat]
# /usr/include/AL/al.h:622
alBuffer3f = _lib.alBuffer3f
alBuffer3f.restype = None
alBuffer3f.argtypes = [ALuint, ALenum, ALfloat, ALfloat, ALfloat]
# /usr/include/AL/al.h:624
alBufferfv = _lib.alBufferfv
alBufferfv.restype = None
alBufferfv.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:626
alBufferi = _lib.alBufferi
alBufferi.restype = None
alBufferi.argtypes = [ALuint, ALenum, ALint]
# /usr/include/AL/al.h:628
alBuffer3i = _lib.alBuffer3i
alBuffer3i.restype = None
alBuffer3i.argtypes = [ALuint, ALenum, ALint, ALint, ALint]
# /usr/include/AL/al.h:630
alBufferiv = _lib.alBufferiv
alBufferiv.restype = None
alBufferiv.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:635
alGetBufferf = _lib.alGetBufferf
alGetBufferf.restype = None
alGetBufferf.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:637
alGetBuffer3f = _lib.alGetBuffer3f
alGetBuffer3f.restype = None
alGetBuffer3f.argtypes = [ALuint, ALenum, POINTER(ALfloat), POINTER(ALfloat), POINTER(ALfloat)]
# /usr/include/AL/al.h:639
alGetBufferfv = _lib.alGetBufferfv
alGetBufferfv.restype = None
alGetBufferfv.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:641
alGetBufferi = _lib.alGetBufferi
alGetBufferi.restype = None
alGetBufferi.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:643
alGetBuffer3i = _lib.alGetBuffer3i
alGetBuffer3i.restype = None
alGetBuffer3i.argtypes = [ALuint, ALenum, POINTER(ALint), POINTER(ALint), POINTER(ALint)]
# /usr/include/AL/al.h:645
alGetBufferiv = _lib.alGetBufferiv
alGetBufferiv.restype = None
alGetBufferiv.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:651
alDopplerFactor = _lib.alDopplerFactor
alDopplerFactor.restype = None
alDopplerFactor.argtypes = [ALfloat]
# /usr/include/AL/al.h:653
alDopplerVelocity = _lib.alDopplerVelocity
alDopplerVelocity.restype = None
alDopplerVelocity.argtypes = [ALfloat]
# /usr/include/AL/al.h:655
alSpeedOfSound = _lib.alSpeedOfSound
alSpeedOfSound.restype = None
alSpeedOfSound.argtypes = [ALfloat]
# /usr/include/AL/al.h:657
alDistanceModel = _lib.alDistanceModel
alDistanceModel.restype = None
alDistanceModel.argtypes = [ALenum]
LPALENABLE = CFUNCTYPE(None, ALenum) # /usr/include/AL/al.h:662
LPALDISABLE = CFUNCTYPE(None, ALenum) # /usr/include/AL/al.h:663
LPALISENABLED = CFUNCTYPE(ALboolean, ALenum) # /usr/include/AL/al.h:664
LPALGETSTRING = CFUNCTYPE(POINTER(ALchar), ALenum) # /usr/include/AL/al.h:665
LPALGETBOOLEANV = CFUNCTYPE(None, ALenum, POINTER(ALboolean)) # /usr/include/AL/al.h:666
LPALGETINTEGERV = CFUNCTYPE(None, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:667
LPALGETFLOATV = CFUNCTYPE(None, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:668
LPALGETDOUBLEV = CFUNCTYPE(None, ALenum, POINTER(ALdouble)) # /usr/include/AL/al.h:669
LPALGETBOOLEAN = CFUNCTYPE(ALboolean, ALenum) # /usr/include/AL/al.h:670
LPALGETINTEGER = CFUNCTYPE(ALint, ALenum) # /usr/include/AL/al.h:671
LPALGETFLOAT = CFUNCTYPE(ALfloat, ALenum) # /usr/include/AL/al.h:672
LPALGETDOUBLE = CFUNCTYPE(ALdouble, ALenum) # /usr/include/AL/al.h:673
LPALGETERROR = CFUNCTYPE(ALenum) # /usr/include/AL/al.h:674
LPALISEXTENSIONPRESENT = CFUNCTYPE(ALboolean, POINTER(ALchar)) # /usr/include/AL/al.h:675
LPALGETPROCADDRESS = CFUNCTYPE(POINTER(c_void), POINTER(ALchar)) # /usr/include/AL/al.h:676
LPALGETENUMVALUE = CFUNCTYPE(ALenum, POINTER(ALchar)) # /usr/include/AL/al.h:677
LPALLISTENERF = CFUNCTYPE(None, ALenum, ALfloat) # /usr/include/AL/al.h:678
LPALLISTENER3F = CFUNCTYPE(None, ALenum, ALfloat, ALfloat, ALfloat) # /usr/include/AL/al.h:679
LPALLISTENERFV = CFUNCTYPE(None, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:680
LPALLISTENERI = CFUNCTYPE(None, ALenum, ALint) # /usr/include/AL/al.h:681
LPALLISTENER3I = CFUNCTYPE(None, ALenum, ALint, ALint, ALint) # /usr/include/AL/al.h:682
LPALLISTENERIV = CFUNCTYPE(None, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:683
LPALGETLISTENERF = CFUNCTYPE(None, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:684
LPALGETLISTENER3F = CFUNCTYPE(None, ALenum, POINTER(ALfloat), POINTER(ALfloat), POINTER(ALfloat)) # /usr/include/AL/al.h:685
LPALGETLISTENERFV = CFUNCTYPE(None, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:686
LPALGETLISTENERI = CFUNCTYPE(None, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:687
LPALGETLISTENER3I = CFUNCTYPE(None, ALenum, POINTER(ALint), POINTER(ALint), POINTER(ALint)) # /usr/include/AL/al.h:688
LPALGETLISTENERIV = CFUNCTYPE(None, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:689
LPALGENSOURCES = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:690
LPALDELETESOURCES = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:691
LPALISSOURCE = CFUNCTYPE(ALboolean, ALuint) # /usr/include/AL/al.h:692
LPALSOURCEF = CFUNCTYPE(None, ALuint, ALenum, ALfloat) # /usr/include/AL/al.h:693
LPALSOURCE3F = CFUNCTYPE(None, ALuint, ALenum, ALfloat, ALfloat, ALfloat) # /usr/include/AL/al.h:694
LPALSOURCEFV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:695
LPALSOURCEI = CFUNCTYPE(None, ALuint, ALenum, ALint) # /usr/include/AL/al.h:696
LPALSOURCE3I = CFUNCTYPE(None, ALuint, ALenum, ALint, ALint, ALint) # /usr/include/AL/al.h:697
LPALSOURCEIV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:698
LPALGETSOURCEF = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:699
LPALGETSOURCE3F = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat), POINTER(ALfloat), POINTER(ALfloat)) # /usr/include/AL/al.h:700
LPALGETSOURCEFV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:701
LPALGETSOURCEI = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:702
LPALGETSOURCE3I = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint), POINTER(ALint), POINTER(ALint)) # /usr/include/AL/al.h:703
LPALGETSOURCEIV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:704
LPALSOURCEPLAYV = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:705
LPALSOURCESTOPV = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:706
LPALSOURCEREWINDV = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:707
LPALSOURCEPAUSEV = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:708
LPALSOURCEPLAY = CFUNCTYPE(None, ALuint) # /usr/include/AL/al.h:709
LPALSOURCESTOP = CFUNCTYPE(None, ALuint) # /usr/include/AL/al.h:710
LPALSOURCEREWIND = CFUNCTYPE(None, ALuint) # /usr/include/AL/al.h:711
LPALSOURCEPAUSE = CFUNCTYPE(None, ALuint) # /usr/include/AL/al.h:712
LPALSOURCEQUEUEBUFFERS = CFUNCTYPE(None, ALuint, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:713
LPALSOURCEUNQUEUEBUFFERS = CFUNCTYPE(None, ALuint, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:714
LPALGENBUFFERS = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:715
LPALDELETEBUFFERS = CFUNCTYPE(None, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:716
LPALISBUFFER = CFUNCTYPE(ALboolean, ALuint) # /usr/include/AL/al.h:717
LPALBUFFERDATA = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALvoid), ALsizei, ALsizei) # /usr/include/AL/al.h:718
LPALBUFFERF = CFUNCTYPE(None, ALuint, ALenum, ALfloat) # /usr/include/AL/al.h:719
LPALBUFFER3F = CFUNCTYPE(None, ALuint, ALenum, ALfloat, ALfloat, ALfloat) # /usr/include/AL/al.h:720
LPALBUFFERFV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:721
LPALBUFFERI = CFUNCTYPE(None, ALuint, ALenum, ALint) # /usr/include/AL/al.h:722
LPALBUFFER3I = CFUNCTYPE(None, ALuint, ALenum, ALint, ALint, ALint) # /usr/include/AL/al.h:723
LPALBUFFERIV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:724
LPALGETBUFFERF = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:725
LPALGETBUFFER3F = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat), POINTER(ALfloat), POINTER(ALfloat)) # /usr/include/AL/al.h:726
LPALGETBUFFERFV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat)) # /usr/include/AL/al.h:727
LPALGETBUFFERI = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:728
LPALGETBUFFER3I = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint), POINTER(ALint), POINTER(ALint)) # /usr/include/AL/al.h:729
LPALGETBUFFERIV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint)) # /usr/include/AL/al.h:730
LPALDOPPLERFACTOR = CFUNCTYPE(None, ALfloat) # /usr/include/AL/al.h:731
LPALDOPPLERVELOCITY = CFUNCTYPE(None, ALfloat) # /usr/include/AL/al.h:732
LPALSPEEDOFSOUND = CFUNCTYPE(None, ALfloat) # /usr/include/AL/al.h:733
LPALDISTANCEMODEL = CFUNCTYPE(None, ALenum) # /usr/include/AL/al.h:734
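# Illustrative usage sketch (not part of the generated wrapper); it assumes a
# working OpenAL device/context set up elsewhere and omits the alGetError()
# checks the TODO above mentions:
#   source = ALuint()
#   alGenSources(1, byref(source))
#   alSourcei(source, AL_LOOPING, AL_TRUE)
#   alSourcePlay(source)
#   alDeleteSources(1, byref(source))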
__all__ = ['AL_API', 'ALAPI', 'AL_INVALID', 'AL_ILLEGAL_ENUM',
'AL_ILLEGAL_COMMAND', 'ALboolean', 'ALchar', 'ALbyte', 'ALubyte', 'ALshort',
'ALushort', 'ALint', 'ALuint', 'ALsizei', 'ALenum', 'ALfloat', 'ALdouble',
'ALvoid', 'AL_NONE', 'AL_FALSE', 'AL_TRUE', 'AL_SOURCE_RELATIVE',
'AL_CONE_INNER_ANGLE', 'AL_CONE_OUTER_ANGLE', 'AL_PITCH', 'AL_POSITION',
'AL_DIRECTION', 'AL_VELOCITY', 'AL_LOOPING', 'AL_BUFFER', 'AL_GAIN',
'AL_MIN_GAIN', 'AL_MAX_GAIN', 'AL_ORIENTATION', 'AL_SOURCE_STATE',
'AL_INITIAL', 'AL_PLAYING', 'AL_PAUSED', 'AL_STOPPED', 'AL_BUFFERS_QUEUED',
'AL_BUFFERS_PROCESSED', 'AL_SEC_OFFSET', 'AL_SAMPLE_OFFSET', 'AL_BYTE_OFFSET',
'AL_SOURCE_TYPE', 'AL_STATIC', 'AL_STREAMING', 'AL_UNDETERMINED',
'AL_FORMAT_MONO8', 'AL_FORMAT_MONO16', 'AL_FORMAT_STEREO8',
'AL_FORMAT_STEREO16', 'AL_REFERENCE_DISTANCE', 'AL_ROLLOFF_FACTOR',
'AL_CONE_OUTER_GAIN', 'AL_MAX_DISTANCE', 'AL_FREQUENCY', 'AL_BITS',
'AL_CHANNELS', 'AL_SIZE', 'AL_UNUSED', 'AL_PENDING', 'AL_PROCESSED',
'AL_NO_ERROR', 'AL_INVALID_NAME', 'AL_INVALID_ENUM', 'AL_INVALID_VALUE',
'AL_INVALID_OPERATION', 'AL_OUT_OF_MEMORY', 'AL_VENDOR', 'AL_VERSION',
'AL_RENDERER', 'AL_EXTENSIONS', 'AL_DOPPLER_FACTOR', 'AL_DOPPLER_VELOCITY',
'AL_SPEED_OF_SOUND', 'AL_DISTANCE_MODEL', 'AL_INVERSE_DISTANCE',
'AL_INVERSE_DISTANCE_CLAMPED', 'AL_LINEAR_DISTANCE',
'AL_LINEAR_DISTANCE_CLAMPED', 'AL_EXPONENT_DISTANCE',
'AL_EXPONENT_DISTANCE_CLAMPED', 'alEnable', 'alDisable', 'alIsEnabled',
'alGetString', 'alGetBooleanv', 'alGetIntegerv', 'alGetFloatv',
'alGetDoublev', 'alGetBoolean', 'alGetInteger', 'alGetFloat', 'alGetDouble',
'alGetError', 'alIsExtensionPresent', 'alGetProcAddress', 'alGetEnumValue',
'alListenerf', 'alListener3f', 'alListenerfv', 'alListeneri', 'alListener3i',
'alListeneriv', 'alGetListenerf', 'alGetListener3f', 'alGetListenerfv',
'alGetListeneri', 'alGetListener3i', 'alGetListeneriv', 'alGenSources',
'alDeleteSources', 'alIsSource', 'alSourcef', 'alSource3f', 'alSourcefv',
'alSourcei', 'alSource3i', 'alSourceiv', 'alGetSourcef', 'alGetSource3f',
'alGetSourcefv', 'alGetSourcei', 'alGetSource3i', 'alGetSourceiv',
'alSourcePlayv', 'alSourceStopv', 'alSourceRewindv', 'alSourcePausev',
'alSourcePlay', 'alSourceStop', 'alSourceRewind', 'alSourcePause',
'alSourceQueueBuffers', 'alSourceUnqueueBuffers', 'alGenBuffers',
'alDeleteBuffers', 'alIsBuffer', 'alBufferData', 'alBufferf', 'alBuffer3f',
'alBufferfv', 'alBufferi', 'alBuffer3i', 'alBufferiv', 'alGetBufferf',
'alGetBuffer3f', 'alGetBufferfv', 'alGetBufferi', 'alGetBuffer3i',
'alGetBufferiv', 'alDopplerFactor', 'alDopplerVelocity', 'alSpeedOfSound',
'alDistanceModel', 'LPALENABLE', 'LPALDISABLE', 'LPALISENABLED',
'LPALGETSTRING', 'LPALGETBOOLEANV', 'LPALGETINTEGERV', 'LPALGETFLOATV',
'LPALGETDOUBLEV', 'LPALGETBOOLEAN', 'LPALGETINTEGER', 'LPALGETFLOAT',
'LPALGETDOUBLE', 'LPALGETERROR', 'LPALISEXTENSIONPRESENT',
'LPALGETPROCADDRESS', 'LPALGETENUMVALUE', 'LPALLISTENERF', 'LPALLISTENER3F',
'LPALLISTENERFV', 'LPALLISTENERI', 'LPALLISTENER3I', 'LPALLISTENERIV',
'LPALGETLISTENERF', 'LPALGETLISTENER3F', 'LPALGETLISTENERFV',
'LPALGETLISTENERI', 'LPALGETLISTENER3I', 'LPALGETLISTENERIV',
'LPALGENSOURCES', 'LPALDELETESOURCES', 'LPALISSOURCE', 'LPALSOURCEF',
'LPALSOURCE3F', 'LPALSOURCEFV', 'LPALSOURCEI', 'LPALSOURCE3I', 'LPALSOURCEIV',
'LPALGETSOURCEF', 'LPALGETSOURCE3F', 'LPALGETSOURCEFV', 'LPALGETSOURCEI',
'LPALGETSOURCE3I', 'LPALGETSOURCEIV', 'LPALSOURCEPLAYV', 'LPALSOURCESTOPV',
'LPALSOURCEREWINDV', 'LPALSOURCEPAUSEV', 'LPALSOURCEPLAY', 'LPALSOURCESTOP',
'LPALSOURCEREWIND', 'LPALSOURCEPAUSE', 'LPALSOURCEQUEUEBUFFERS',
'LPALSOURCEUNQUEUEBUFFERS', 'LPALGENBUFFERS', 'LPALDELETEBUFFERS',
'LPALISBUFFER', 'LPALBUFFERDATA', 'LPALBUFFERF', 'LPALBUFFER3F',
'LPALBUFFERFV', 'LPALBUFFERI', 'LPALBUFFER3I', 'LPALBUFFERIV',
'LPALGETBUFFERF', 'LPALGETBUFFER3F', 'LPALGETBUFFERFV', 'LPALGETBUFFERI',
'LPALGETBUFFER3I', 'LPALGETBUFFERIV', 'LPALDOPPLERFACTOR',
'LPALDOPPLERVELOCITY', 'LPALSPEEDOFSOUND', 'LPALDISTANCEMODEL']
| bsd-3-clause |
sirex/Misago | misago/threads/views/generic/threads/actions.py | 8 | 1141 | from django.contrib import messages
from django.core.paginator import Paginator
from django.shortcuts import redirect
from django.utils.translation import ungettext, ugettext_lazy, ugettext as _
from misago.threads import moderation
from misago.threads.views.generic.actions import ActionsBase, ReloadAfterDelete
__all__ = ['Actions', 'ReloadAfterDelete']
class Actions(ActionsBase):
select_items_message = ugettext_lazy(
"You have to select at least one thread.")
is_mass_action = True
def redirect_after_deletion(self, request, queryset):
paginator = Paginator(queryset, 20, 10)
current_page = int(request.resolver_match.kwargs.get('page', 0))
if paginator.num_pages < current_page:
namespace = request.resolver_match.namespace
url_name = request.resolver_match.url_name
kwargs = request.resolver_match.kwargs
kwargs['page'] = paginator.num_pages
if kwargs['page'] == 1:
del kwargs['page']
return redirect('%s:%s' % (namespace, url_name), **kwargs)
else:
return redirect(request.path)
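# Illustrative behaviour (not spelled out in the original file): if a mass
# deletion on page 5 leaves only 3 pages, the user is redirected to page 3;
# if only one page remains, the 'page' kwarg is dropped so the canonical
# unpaginated URL is used instead.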
| gpl-2.0 |
TheBoegl/letsencrypt | letsencrypt-apache/letsencrypt_apache/augeas_configurator.py | 6 | 7387 | """Class of Augeas Configurators."""
import logging
import augeas
from letsencrypt import errors
from letsencrypt import reverter
from letsencrypt.plugins import common
from letsencrypt_apache import constants
logger = logging.getLogger(__name__)
class AugeasConfigurator(common.Plugin):
"""Base Augeas Configurator class.
:ivar config: Configuration.
:type config: :class:`~letsencrypt.interfaces.IConfig`
:ivar aug: Augeas object
:type aug: :class:`augeas.Augeas`
:ivar str save_notes: Human-readable configuration change notes
:ivar reverter: saves and reverts checkpoints
:type reverter: :class:`letsencrypt.reverter.Reverter`
"""
def __init__(self, *args, **kwargs):
super(AugeasConfigurator, self).__init__(*args, **kwargs)
self.aug = augeas.Augeas(
# specify a directory to load our preferred lens from
loadpath=constants.AUGEAS_LENS_DIR,
# Do not save backup (we do it ourselves), do not load
# anything by default
flags=(augeas.Augeas.NONE | augeas.Augeas.NO_MODL_AUTOLOAD))
self.save_notes = ""
# See if any temporary changes need to be recovered
# This needs to occur before VirtualHost objects are setup...
# because this will change the underlying configuration and potential
# vhosts
self.reverter = reverter.Reverter(self.config)
self.recovery_routine()
def check_parsing_errors(self, lens):
"""Verify Augeas can parse all of the lens files.
:param str lens: lens to check for errors
:raises .errors.PluginError: If there has been an error in parsing with
the specified lens.
"""
error_files = self.aug.match("/augeas//error")
for path in error_files:
# Check to see if it was an error resulting from the use of
# the httpd lens
lens_path = self.aug.get(path + "/lens")
# As aug.get may return null
if lens_path and lens in lens_path:
msg = (
"There has been an error in parsing the file (%s): %s",
# Strip off /augeas/files and /error
path[13:len(path) - 6], self.aug.get(path + "/message"))
raise errors.PluginError(msg)
# TODO: Cleanup this function
def save(self, title=None, temporary=False):
"""Saves all changes to the configuration files.
This function first checks for save errors; if none are found,
all configuration changes are saved according to the function
parameters. If an exception is raised, no new checkpoint is
created.
:param str title: The title of the save. If a title is given, the
configuration will be saved as a new checkpoint and put in a
timestamped directory.
:param bool temporary: Indicates whether the changes made will
be quickly reversed in the future (ie. challenges)
:raises .errors.PluginError: If there was an error in Augeas, in
an attempt to save the configuration, or an error creating a
checkpoint
"""
save_state = self.aug.get("/augeas/save")
self.aug.set("/augeas/save", "noop")
# Existing Errors
ex_errs = self.aug.match("/augeas//error")
try:
# This is a noop save
self.aug.save()
except (RuntimeError, IOError):
self._log_save_errors(ex_errs)
# Erase Save Notes
self.save_notes = ""
raise errors.PluginError(
"Error saving files, check logs for more info.")
# Retrieve list of modified files
# Note: Noop saves can cause the file to be listed twice, I used a
# set to remove this possibility. This is a known augeas 0.10 error.
save_paths = self.aug.match("/augeas/events/saved")
# If the augeas tree didn't change, no files were saved and a backup
# should not be created
if save_paths:
save_files = set()
for path in save_paths:
save_files.add(self.aug.get(path)[6:])
try:
# Create Checkpoint
if temporary:
self.reverter.add_to_temp_checkpoint(
save_files, self.save_notes)
else:
self.reverter.add_to_checkpoint(save_files,
self.save_notes)
except errors.ReverterError as err:
raise errors.PluginError(str(err))
self.aug.set("/augeas/save", save_state)
self.save_notes = ""
self.aug.save()
if title and not temporary:
try:
self.reverter.finalize_checkpoint(title)
except errors.ReverterError as err:
raise errors.PluginError(str(err))
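# Illustrative call sites from a subclass (assumed usage, not part of this
# module):
#   self.save_notes += "Enabled ssl vhost\n"
#   self.save("Enable SSL")       # finalized, titled checkpoint
#   self.save(temporary=True)     # challenge change, reverted later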
def _log_save_errors(self, ex_errs):
"""Log errors due to bad Augeas save.
:param list ex_errs: Existing errors before save
"""
# Check for the root of save problems
new_errs = self.aug.match("/augeas//error")
# logger.error("During Save - %s", mod_conf)
logger.error("Unable to save files: %s. Attempted Save Notes: %s",
", ".join(err[13:len(err) - 6] for err in new_errs
# Only new errors caused by recent save
if err not in ex_errs), self.save_notes)
# Wrapper functions for Reverter class
def recovery_routine(self):
"""Revert all previously modified files.
Reverts all modified files that have not been saved as a checkpoint
:raises .errors.PluginError: If unable to recover the configuration
"""
try:
self.reverter.recovery_routine()
except errors.ReverterError as err:
raise errors.PluginError(str(err))
# Need to reload configuration after these changes take effect
self.aug.load()
def revert_challenge_config(self):
"""Used to cleanup challenge configurations.
:raises .errors.PluginError: If unable to revert the challenge config.
"""
try:
self.reverter.revert_temporary_config()
except errors.ReverterError as err:
raise errors.PluginError(str(err))
self.aug.load()
def rollback_checkpoints(self, rollback=1):
"""Rollback saved checkpoints.
:param int rollback: Number of checkpoints to revert
:raises .errors.PluginError: If there is a problem with the input or
the function is unable to correctly revert the configuration
"""
try:
self.reverter.rollback_checkpoints(rollback)
except errors.ReverterError as err:
raise errors.PluginError(str(err))
self.aug.load()
def view_config_changes(self):
"""Show all of the configuration changes that have taken place.
:raises .errors.PluginError: If there is a problem while processing
the checkpoints directories.
"""
try:
self.reverter.view_config_changes()
except errors.ReverterError as err:
raise errors.PluginError(str(err))
| apache-2.0 |
cedricporter/everlost | frameworks/cocos2d-x/tools/bindings-generator/backup/clang-llvm-3.3-pybinding/enumerations.py | 307 | 1077 | #===- enumerations.py - Python Enumerations ------------------*- python -*--===#
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
"""
Clang Enumerations
==================
This module provides static definitions of enumerations that exist in libclang.
Enumerations are typically defined as a list of tuples. The exported values are
typically munged into other types or classes at module load time.
All enumerations are centrally defined in this file so they are all grouped
together and easier to audit. And, maybe even one day this file will be
automatically generated by scanning the libclang headers!
"""
# Maps to CXTokenKind. Note that libclang maintains a separate set of token
# enumerations from the C++ API.
TokenKinds = [
('PUNCTUATION', 0),
('KEYWORD', 1),
('IDENTIFIER', 2),
('LITERAL', 3),
('COMMENT', 4),
]
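# Illustrative note (not part of the original file): clang.cindex consumes
# this table at module load time, roughly as
#   for name, value in enumerations.TokenKinds:
#       TokenKind.register(value, name)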
__all__ = ['TokenKinds']
| apache-2.0 |
sebastic/QGIS | python/plugins/processing/algs/qgis/ZonalStatistics.py | 2 | 10813 | # -*- coding: utf-8 -*-
"""
***************************************************************************
ZonalStatistics.py
---------------------
Date : August 2013
Copyright : (C) 2013 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'August 2013'
__copyright__ = '(C) 2013, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import numpy
try:
from scipy.stats.mstats import mode
hasSciPy = True
except ImportError:
hasSciPy = False
from osgeo import gdal, ogr, osr
from qgis.core import QgsRectangle, QgsGeometry, QgsFeature
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterRaster
from processing.core.parameters import ParameterString
from processing.core.parameters import ParameterNumber
from processing.core.parameters import ParameterBoolean
from processing.core.outputs import OutputVector
from processing.tools.raster import mapToPixel
from processing.tools import dataobjects, vector
class ZonalStatistics(GeoAlgorithm):
INPUT_RASTER = 'INPUT_RASTER'
RASTER_BAND = 'RASTER_BAND'
INPUT_VECTOR = 'INPUT_VECTOR'
COLUMN_PREFIX = 'COLUMN_PREFIX'
GLOBAL_EXTENT = 'GLOBAL_EXTENT'
OUTPUT_LAYER = 'OUTPUT_LAYER'
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('Zonal Statistics')
self.group, self.i18n_group = self.trAlgorithm('Raster tools')
self.addParameter(ParameterRaster(self.INPUT_RASTER,
self.tr('Raster layer')))
self.addParameter(ParameterNumber(self.RASTER_BAND,
self.tr('Raster band'), 1, 999, 1))
self.addParameter(ParameterVector(self.INPUT_VECTOR,
self.tr('Vector layer containing zones'),
[ParameterVector.VECTOR_TYPE_POLYGON]))
self.addParameter(ParameterString(self.COLUMN_PREFIX,
self.tr('Output column prefix'), '_'))
self.addParameter(ParameterBoolean(self.GLOBAL_EXTENT,
self.tr('Load whole raster in memory')))
self.addOutput(OutputVector(self.OUTPUT_LAYER, self.tr('Zonal statistics')))
def processAlgorithm(self, progress):
""" Based on code by Matthew Perry
https://gist.github.com/perrygeo/5667173
"""
layer = dataobjects.getObjectFromUri(self.getParameterValue(self.INPUT_VECTOR))
rasterPath = unicode(self.getParameterValue(self.INPUT_RASTER))
bandNumber = self.getParameterValue(self.RASTER_BAND)
columnPrefix = self.getParameterValue(self.COLUMN_PREFIX)
useGlobalExtent = self.getParameterValue(self.GLOBAL_EXTENT)
rasterDS = gdal.Open(rasterPath, gdal.GA_ReadOnly)
geoTransform = rasterDS.GetGeoTransform()
rasterBand = rasterDS.GetRasterBand(bandNumber)
noData = rasterBand.GetNoDataValue()
cellXSize = abs(geoTransform[1])
cellYSize = abs(geoTransform[5])
rasterXSize = rasterDS.RasterXSize
rasterYSize = rasterDS.RasterYSize
rasterBBox = QgsRectangle(geoTransform[0], geoTransform[3] - cellYSize
* rasterYSize, geoTransform[0] + cellXSize
* rasterXSize, geoTransform[3])
rasterGeom = QgsGeometry.fromRect(rasterBBox)
crs = osr.SpatialReference()
crs.ImportFromProj4(str(layer.crs().toProj4()))
if useGlobalExtent:
xMin = rasterBBox.xMinimum()
xMax = rasterBBox.xMaximum()
yMin = rasterBBox.yMinimum()
yMax = rasterBBox.yMaximum()
(startColumn, startRow) = mapToPixel(xMin, yMax, geoTransform)
(endColumn, endRow) = mapToPixel(xMax, yMin, geoTransform)
width = endColumn - startColumn
height = endRow - startRow
srcOffset = (startColumn, startRow, width, height)
srcArray = rasterBand.ReadAsArray(*srcOffset)
srcArray = srcArray * rasterBand.GetScale() + rasterBand.GetOffset()
newGeoTransform = (
geoTransform[0] + srcOffset[0] * geoTransform[1],
geoTransform[1],
0.0,
geoTransform[3] + srcOffset[1] * geoTransform[5],
0.0,
geoTransform[5],
)
memVectorDriver = ogr.GetDriverByName('Memory')
memRasterDriver = gdal.GetDriverByName('MEM')
fields = layer.pendingFields()
(idxMin, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'min', 21, 6)
(idxMax, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'max', 21, 6)
(idxSum, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'sum', 21, 6)
(idxCount, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'count', 21, 6)
(idxMean, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'mean', 21, 6)
(idxStd, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'std', 21, 6)
(idxUnique, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'unique', 21, 6)
(idxRange, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'range', 21, 6)
(idxVar, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'var', 21, 6)
(idxMedian, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'median', 21, 6)
if hasSciPy:
(idxMode, fields) = vector.findOrCreateField(layer, fields,
columnPrefix + 'mode', 21, 6)
writer = self.getOutputFromName(self.OUTPUT_LAYER).getVectorWriter(
fields.toList(), layer.dataProvider().geometryType(), layer.crs())
outFeat = QgsFeature()
outFeat.initAttributes(len(fields))
outFeat.setFields(fields)
features = vector.features(layer)
total = 100.0 / len(features)
for current, f in enumerate(features):
geom = f.geometry()
intersectedGeom = rasterGeom.intersection(geom)
ogrGeom = ogr.CreateGeometryFromWkt(intersectedGeom.exportToWkt())
if not useGlobalExtent:
bbox = intersectedGeom.boundingBox()
xMin = bbox.xMinimum()
xMax = bbox.xMaximum()
yMin = bbox.yMinimum()
yMax = bbox.yMaximum()
(startColumn, startRow) = mapToPixel(xMin, yMax, geoTransform)
(endColumn, endRow) = mapToPixel(xMax, yMin, geoTransform)
width = endColumn - startColumn
height = endRow - startRow
if width == 0 or height == 0:
continue
srcOffset = (startColumn, startRow, width, height)
srcArray = rasterBand.ReadAsArray(*srcOffset)
srcArray = srcArray * rasterBand.GetScale() + rasterBand.GetOffset()
newGeoTransform = (
geoTransform[0] + srcOffset[0] * geoTransform[1],
geoTransform[1],
0.0,
geoTransform[3] + srcOffset[1] * geoTransform[5],
0.0,
geoTransform[5],
)
# Create a temporary vector layer in memory
memVDS = memVectorDriver.CreateDataSource('out')
memLayer = memVDS.CreateLayer('poly', crs, ogr.wkbPolygon)
ft = ogr.Feature(memLayer.GetLayerDefn())
ft.SetGeometry(ogrGeom)
memLayer.CreateFeature(ft)
ft.Destroy()
# Rasterize it
rasterizedDS = memRasterDriver.Create('', srcOffset[2],
srcOffset[3], 1, gdal.GDT_Byte)
rasterizedDS.SetGeoTransform(newGeoTransform)
gdal.RasterizeLayer(rasterizedDS, [1], memLayer, burn_values=[1])
rasterizedArray = rasterizedDS.ReadAsArray()
srcArray = numpy.nan_to_num(srcArray)
masked = numpy.ma.MaskedArray(srcArray,
mask=numpy.logical_or(srcArray == noData,
numpy.logical_not(rasterizedArray)))
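# The combined mask keeps only pixels that fall inside the rasterized
# polygon (rasterizedArray == 1) and are not the raster's nodata value,
# so every statistic below is computed over the zone's valid cells only.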
outFeat.setGeometry(geom)
attrs = f.attributes()
attrs.insert(idxMin, float(masked.min()))
attrs.insert(idxMax, float(masked.max()))
attrs.insert(idxSum, float(masked.sum()))
attrs.insert(idxCount, int(masked.count()))
attrs.insert(idxMean, float(masked.mean()))
attrs.insert(idxStd, float(masked.std()))
attrs.insert(idxUnique, numpy.unique(masked.compressed()).size)
attrs.insert(idxRange, float(masked.max()) - float(masked.min()))
attrs.insert(idxVar, float(masked.var()))
attrs.insert(idxMedian, float(numpy.ma.median(masked)))
if hasSciPy:
attrs.insert(idxMode, float(mode(masked, axis=None)[0][0]))
outFeat.setAttributes(attrs)
writer.addFeature(outFeat)
memVDS = None
rasterizedDS = None
progress.setPercentage(int(current * total))
rasterDS = None
del writer
| gpl-2.0 |
cognitiveclass/edx-platform | common/djangoapps/config_models/models.py | 38 | 7489 | """
Django Model baseclass for database-backed configuration.
"""
from django.db import connection, models
from django.contrib.auth.models import User
from django.core.cache import caches, InvalidCacheBackendError
from django.utils.translation import ugettext_lazy as _
try:
cache = caches['configuration'] # pylint: disable=invalid-name
except InvalidCacheBackendError:
from django.core.cache import cache
class ConfigurationModelManager(models.Manager):
"""
Query manager for ConfigurationModel
"""
def _current_ids_subquery(self):
"""
Internal helper method to return an SQL string that will get the IDs of
all the current entries (i.e. the most recent entry for each unique set
of key values). Only useful if KEY_FIELDS is set.
"""
key_fields_escaped = [connection.ops.quote_name(name) for name in self.model.KEY_FIELDS]
# The following assumes that the rows with the most recent date also have the highest IDs
return "SELECT MAX(id) FROM {table_name} GROUP BY {key_fields}".format(
key_fields=', '.join(key_fields_escaped),
table_name=self.model._meta.db_table # pylint: disable=protected-access
)
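# For a model with KEY_FIELDS = ('country', 'course'), the subquery is
# along the lines of (illustrative; table name assumed):
#   SELECT MAX(id) FROM embargo_config GROUP BY `country`, `course`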
def current_set(self):
"""
A queryset for the active configuration entries only. Only useful if KEY_FIELDS is set.
Active means the most recent entries for each unique combination of keys. It does not
necessarily mean enabled.
"""
assert self.model.KEY_FIELDS != (), "Just use model.current() if there are no KEY_FIELDS"
return self.get_queryset().extra( # pylint: disable=no-member
where=["id IN ({subquery})".format(subquery=self._current_ids_subquery())],
select={'is_active': 1}, # This annotation is used by the admin changelist. sqlite requires '1', not 'True'
)
def with_active_flag(self):
"""
A query set where each result is annotated with an 'is_active' field that indicates
if it's the most recent entry for that combination of keys.
"""
if self.model.KEY_FIELDS:
subquery = self._current_ids_subquery()
return self.get_queryset().extra( # pylint: disable=no-member
select={'is_active': "id IN ({subquery})".format(subquery=subquery)}
)
else:
return self.get_queryset().extra( # pylint: disable=no-member
select={'is_active': "id = {pk}".format(pk=self.model.current().pk)}
)
class ConfigurationModel(models.Model):
"""
Abstract base class for model-based configuration
Properties:
cache_timeout (int): The number of seconds that this configuration
should be cached
"""
class Meta(object):
abstract = True
ordering = ("-change_date", )
objects = ConfigurationModelManager()
KEY_FIELDS = ()
# The number of seconds a cached configuration entry remains valid
cache_timeout = 600
change_date = models.DateTimeField(auto_now_add=True, verbose_name=_("Change date"))
changed_by = models.ForeignKey(
User,
editable=False,
null=True,
on_delete=models.PROTECT,
# Translators: this label indicates the name of the user who made this change:
verbose_name=_("Changed by"),
)
enabled = models.BooleanField(default=False, verbose_name=_("Enabled"))
def save(self, *args, **kwargs):
"""
Clear the cached value when saving a new configuration entry
"""
# Always create a new entry, instead of updating an existing model
self.pk = None # pylint: disable=invalid-name
super(ConfigurationModel, self).save(*args, **kwargs)
cache.delete(self.cache_key_name(*[getattr(self, key) for key in self.KEY_FIELDS]))
if self.KEY_FIELDS:
cache.delete(self.key_values_cache_key_name())
@classmethod
def cache_key_name(cls, *args):
"""Return the name of the key to use to cache the current configuration"""
if cls.KEY_FIELDS != ():
if len(args) != len(cls.KEY_FIELDS):
raise TypeError(
"cache_key_name() takes exactly {} arguments ({} given)".format(len(cls.KEY_FIELDS), len(args))
)
return u'configuration/{}/current/{}'.format(cls.__name__, u','.join(unicode(arg) for arg in args))
else:
return 'configuration/{}/current'.format(cls.__name__)
@classmethod
def current(cls, *args):
"""
Return the active configuration entry, either from cache,
from the database, or by creating a new empty entry (which is not
persisted).
"""
cached = cache.get(cls.cache_key_name(*args))
if cached is not None:
return cached
key_dict = dict(zip(cls.KEY_FIELDS, args))
try:
current = cls.objects.filter(**key_dict).order_by('-change_date')[0]
except IndexError:
current = cls(**key_dict)
cache.set(cls.cache_key_name(*args), current, cls.cache_timeout)
return current
@classmethod
def is_enabled(cls):
"""Returns True if this feature is configured as enabled, else False."""
return cls.current().enabled
@classmethod
def key_values_cache_key_name(cls, *key_fields):
""" Key for fetching unique key values from the cache """
key_fields = key_fields or cls.KEY_FIELDS
return 'configuration/{}/key_values/{}'.format(cls.__name__, ','.join(key_fields))
@classmethod
def key_values(cls, *key_fields, **kwargs):
"""
Get the set of unique values in the configuration table for the given
key[s]. Calling cls.current(*value) for each value in the resulting
list should always produce an entry, though any such entry may have
enabled=False.
Arguments:
key_fields: The positional arguments are the KEY_FIELDS to return. For example if
you had a course embargo configuration where each entry was keyed on (country,
course), then you might want to know "What countries have embargoes configured?"
with cls.key_values('country'), or "Which courses have country restrictions?"
with cls.key_values('course'). You can also leave this unspecified for the
default, which returns the distinct combinations of all keys.
flat: If you pass flat=True as a kwarg, it has the same effect as in Django's
'values_list' method: Instead of returning a list of lists, you'll get one list
of values. This makes sense to use whenever there is only one key being queried.
Return value:
List of lists of each combination of keys found in the database.
e.g. [("Italy", "course-v1:SomeX+some+2015"), ...] for the course embargo example
"""
flat = kwargs.pop('flat', False)
assert not kwargs, "'flat' is the only kwarg accepted"
key_fields = key_fields or cls.KEY_FIELDS
cache_key = cls.key_values_cache_key_name(*key_fields)
cached = cache.get(cache_key)
if cached is not None:
return cached
values = list(cls.objects.values_list(*key_fields, flat=flat).order_by().distinct())
cache.set(cache_key, values, cls.cache_timeout)
return values
| agpl-3.0 |
telefonicaid/selenium | py/test/selenium/webdriver/common/window_tests.py | 15 | 1750 | # Copyright 2008-2009 WebDriver committers
# Copyright 2008-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import pytest
from selenium.webdriver.support.wait import WebDriverWait
class WindowTests(unittest.TestCase):
@pytest.mark.ignore_chrome
@pytest.mark.ignore_opera
@pytest.mark.ignore_ie
def testShouldMaximizeTheWindow(self):
resize_timeout = 5
wait = WebDriverWait(self.driver, resize_timeout)
old_size = self.driver.get_window_size()
self.driver.set_window_size(200, 200)
wait.until(
lambda dr: dr.get_window_size() != old_size if old_size["width"] != 200 and old_size["height"] != 200 \
else True)
size = self.driver.get_window_size()
self.driver.maximize_window()
wait.until(lambda dr: dr.get_window_size() != size)
new_size = self.driver.get_window_size()
assert new_size["width"] > size["width"]
assert new_size["height"] > size["height"]
def _pageURL(self, name):
return self.webserver.where_is(name + '.html')
def _loadSimplePage(self):
self._loadPage("simpleTest")
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
| apache-2.0 |
gavin-feng/odoo | addons/contacts/__openerp__.py | 260 | 1594 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Address Book',
'version': '1.0',
'category': 'Tools',
'description': """
This module gives you a quick view of your address book, accessible from your home page.
You can track your suppliers, customers and other contacts.
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com/page/crm',
'summary': 'Contacts, People and Companies',
'depends': [
'mail',
],
'data': [
'contacts_view.xml',
],
'installable': True,
'application': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.