repo_name (string, 5-92 chars) | path (string, 4-232 chars) | copies (19 classes) | size (string, 4-7 chars) | content (string, 721-1.04M chars) | license (15 classes) | hash (int64) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
motord/elbowgrease | settings.py | 1 | 2021 | import logging
import tornado
import tornado.template
import os
from tornado.options import define, options
# import environment
# import logconfig
# Make filepaths relative to settings.
path = lambda root,*a: os.path.join(root, *a)
ROOT = os.path.dirname(os.path.abspath(__file__))
define("port", default=8888, help="run on the given port", type=int)
define("config", default=None, help="tornado config file")
define("debug", default=False, help="debug mode")
tornado.options.parse_command_line()
MEDIA_ROOT = path(ROOT, 'media')
TEMPLATE_ROOT = path(ROOT, 'templates')
# Deployment Configuration
class DeploymentType:
PRODUCTION = "PRODUCTION"
DEV = "DEV"
SOLO = "SOLO"
STAGING = "STAGING"
dict = {
SOLO: 1,
PRODUCTION: 2,
DEV: 3,
STAGING: 4
}
if 'DEPLOYMENT_TYPE' in os.environ:
DEPLOYMENT = os.environ['DEPLOYMENT_TYPE'].upper()
else:
DEPLOYMENT = DeploymentType.SOLO
settings = {}
settings['debug'] = DEPLOYMENT != DeploymentType.PRODUCTION or options.debug
# settings['static_path'] = MEDIA_ROOT
settings['cookie_secret'] = "your-cookie-secret"
# settings['xsrf_cookies'] = True
settings['template_loader'] = tornado.template.Loader(TEMPLATE_ROOT)
SYSLOG_TAG = "boilerplate"
SYSLOG_FACILITY = logging.handlers.SysLogHandler.LOG_LOCAL2
# See PEP 391 and logconfig for formatting help. Each section of LOGGERS
# will get merged into the corresponding section of log_settings.py.
# Handlers and log levels are set up automatically based on LOG_LEVEL and DEBUG
# unless you set them here. Messages will not propagate through a logger
# unless propagate: True is set.
LOGGERS = {
'loggers': {
'boilerplate': {},
},
}
if settings['debug']:
LOG_LEVEL = logging.DEBUG
else:
LOG_LEVEL = logging.INFO
USE_SYSLOG = DEPLOYMENT != DeploymentType.SOLO
# logconfig.initialize_logging(SYSLOG_TAG, SYSLOG_FACILITY, LOGGERS,
# LOG_LEVEL, USE_SYSLOG)
if options.config:
tornado.options.parse_config_file(options.config)
| mit | 4,620,776,737,570,405,000 | 27.069444 | 79 | 0.711529 | false |
iosonofabio/haplotree | src/treeviewer.py | 1 | 16230 | # vim: fdm=marker
'''
author: Fabio Zanini
date: 08/12/14
content: Plot tree of haplotypes.
'''
# Modules
from __future__ import print_function
import os
import argparse
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
# Functions
def tree_from_json(json_file):
'''Convert JSON into a Biopython tree'''
from Bio import Phylo
import json
def node_from_json(json_data, node):
'''Biopython Clade from json (for recursive call)'''
for attr in json_data:
val = json_data[attr]
if attr == 'children':
for sub_json in val:
child = Phylo.BaseTree.Clade()
node.clades.append(child)
node_from_json(sub_json, child)
else:
if attr == 'name':
node.__setattr__(attr, str(val))
continue
try:
node.__setattr__(attr, float(val))
except:
node.__setattr__(attr, val)
try:
with open(json_file, 'r') as infile:
json_data = json.load(infile)
except IOError:
raise IOError("Cannot open "+json_file)
tree = Phylo.BaseTree.Tree()
node_from_json(json_data, tree.root)
tree.root.branch_length=0.01
return tree
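# Example of the JSON layout tree_from_json() expects (a sketch inferred from
# the parser above, not a file shipped with the project). Every attribute other
# than 'children' is copied onto the Biopython clade; 'name' is kept as a string
# and other values are cast to float when possible. The 'frequency' attribute is
# only needed by plot_haplotype_trees(), which reads it from each terminal node.
#
# {
#   "name": "root",
#   "branch_length": 0.001,
#   "children": [
#     {"name": "hap1", "branch_length": 0.01, "frequency": 0.7},
#     {"name": "hap2", "branch_length": 0.02, "frequency": 0.3}
#   ]
# }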
def draw_tree(tree, label_func=str, do_show=True, show_confidence=True,
# For power users
x_offset=0, y_offset=0,
axes=None, branch_labels=None, *args, **kwargs):
"""Plot the given tree using matplotlib (or pylab).
The graphic is a rooted tree, drawn with roughly the same algorithm as
draw_ascii.
Additional keyword arguments passed into this function are used as pyplot
options. The input format should be in the form of:
pyplot_option_name=(tuple), pyplot_option_name=(tuple, dict), or
pyplot_option_name=(dict).
Example using the pyplot options 'axhspan' and 'axvline':
>>> Phylo.draw(tree, axhspan=((0.25, 7.75), {'facecolor':'0.5'}),
... axvline={'x':'0', 'ymin':'0', 'ymax':'1'})
Visual aspects of the plot can also be modified using pyplot's own functions
and objects (via pylab or matplotlib). In particular, the pyplot.rcParams
object can be used to scale the font size (rcParams["font.size"]) and line
width (rcParams["lines.linewidth"]).
:Parameters:
label_func : callable
A function to extract a label from a node. By default this is str(),
but you can use a different function to select another string
associated with each node. If this function returns None for a node,
no label will be shown for that node.
do_show : bool
Whether to show() the plot automatically.
show_confidence : bool
Whether to display confidence values, if present on the tree.
axes : matplotlib/pylab axes
If a valid matplotlib.axes.Axes instance, the phylogram is plotted
in that Axes. By default (None), a new figure is created.
branch_labels : dict or callable
A mapping of each clade to the label that will be shown along the
branch leading to it. By default this is the confidence value(s) of
the clade, taken from the ``confidence`` attribute, and can be
easily toggled off with this function's ``show_confidence`` option.
But if you would like to alter the formatting of confidence values,
or label the branches with something other than confidence, then use
this option.
"""
try:
import matplotlib.pyplot as plt
except ImportError:
try:
import pylab as plt
except ImportError:
from Bio import MissingPythonDependencyError
raise MissingPythonDependencyError(
"Install matplotlib or pylab if you want to use draw.")
import matplotlib.collections as mpcollections
# Arrays that store lines for the plot of clades
horizontal_linecollections = []
vertical_linecollections = []
# Options for displaying branch labels / confidence
def conf2str(conf):
if int(conf) == conf:
return str(int(conf))
return str(conf)
if not branch_labels:
if show_confidence:
def format_branch_label(clade):
if hasattr(clade, 'confidences'):
# phyloXML supports multiple confidences
return '/'.join(conf2str(cnf.value)
for cnf in clade.confidences)
if clade.confidence:
return conf2str(clade.confidence)
return None
else:
def format_branch_label(clade):
return None
elif isinstance(branch_labels, dict):
def format_branch_label(clade):
return branch_labels.get(clade)
else:
assert callable(branch_labels), \
"branch_labels must be either a dict or a callable (function)"
format_branch_label = branch_labels
# Layout
def get_x_positions(tree):
"""Create a mapping of each clade to its horizontal position.
Dict of {clade: x-coord}
"""
depths = tree.depths()
# If there are no branch lengths, assume unit branch lengths
if not max(depths.values()):
depths = [x_offset + depth
for depth in tree.depths(unit_branch_lengths=True)]
return depths
def get_y_positions(tree):
"""Create a mapping of each clade to its vertical position.
Dict of {clade: y-coord}.
Coordinates are negative, and integers for tips.
"""
maxheight = tree.count_terminals()
# Rows are defined by the tips
heights = dict((tip, maxheight - i + y_offset)
for i, tip in enumerate(reversed(tree.get_terminals())))
# Internal nodes: place at midpoint of children
def calc_row(clade):
for subclade in clade:
if subclade not in heights:
calc_row(subclade)
# Closure over heights
heights[clade] = (heights[clade.clades[0]] +
heights[clade.clades[-1]]) / 2.0
if tree.root.clades:
calc_row(tree.root)
return heights
x_posns = get_x_positions(tree)
y_posns = get_y_positions(tree)
# The function draw_clade closes over the axes object
if axes is None:
fig = plt.figure()
axes = fig.add_subplot(1, 1, 1)
elif not isinstance(axes, plt.matplotlib.axes.Axes):
raise ValueError("Invalid argument for axes: %s" % axes)
def draw_clade_lines(use_linecollection=False, orientation='horizontal',
y_here=0, x_start=0, x_here=0, y_bot=0, y_top=0,
color='black', lw='.1'):
"""Create a line with or without a line collection object.
Graphical formatting of the lines representing clades in the plot can be
customized by altering this function.
"""
if (use_linecollection is False and orientation == 'horizontal'):
axes.hlines(y_here, x_start, x_here, color=color, lw=lw)
elif (use_linecollection is True and orientation == 'horizontal'):
horizontal_linecollections.append(mpcollections.LineCollection(
[[(x_start, y_here), (x_here, y_here)]], color=color, lw=lw),)
elif (use_linecollection is False and orientation == 'vertical'):
axes.vlines(x_here, y_bot, y_top, color=color)
elif (use_linecollection is True and orientation == 'vertical'):
vertical_linecollections.append(mpcollections.LineCollection(
[[(x_here, y_bot), (x_here, y_top)]], color=color, lw=lw),)
def draw_clade(clade, x_start, color, lw):
"""Recursively draw a tree, down from the given clade."""
x_here = x_posns[clade]
y_here = y_posns[clade]
# phyloXML-only graphics annotations
if hasattr(clade, 'color') and clade.color is not None:
color = clade.color.to_hex()
if hasattr(clade, 'width') and clade.width is not None:
lw = clade.width * plt.rcParams['lines.linewidth']
# Draw a horizontal line from start to here
draw_clade_lines(use_linecollection=True, orientation='horizontal',
y_here=y_here, x_start=x_start, x_here=x_here, color=color, lw=lw)
# Add node/taxon labels
label = label_func(clade)
if label not in (None, clade.__class__.__name__):
axes.text(x_here, y_here, ' %s' %
label, verticalalignment='center')
# Add label above the branch (optional)
conf_label = format_branch_label(clade)
if conf_label:
axes.text(0.5 * (x_start + x_here), y_here, conf_label,
fontsize='small', horizontalalignment='center')
if clade.clades:
# Draw a vertical line connecting all children
y_top = y_posns[clade.clades[0]]
y_bot = y_posns[clade.clades[-1]]
# Only apply widths to horizontal lines, like Archaeopteryx
draw_clade_lines(use_linecollection=True, orientation='vertical',
x_here=x_here, y_bot=y_bot, y_top=y_top, color=color, lw=lw)
# Draw descendents
for child in clade:
draw_clade(child, x_here, color, lw)
draw_clade(tree.root, 0, 'k', plt.rcParams['lines.linewidth'])
# If line collections were used to create clade lines, here they are added
# to the pyplot plot.
for i in horizontal_linecollections:
axes.add_collection(i)
for i in vertical_linecollections:
axes.add_collection(i)
# Aesthetics
if hasattr(tree, 'name') and tree.name:
axes.set_title(tree.name)
axes.set_xlabel('branch length')
axes.set_ylabel('taxa')
# Add margins around the tree to prevent overlapping the axes
xmax = max(x_posns.values())
axes.set_xlim(-0.05 * xmax, 1.25 * xmax)
# Also invert the y-axis (origin at the top)
# Add a small vertical margin, but avoid including 0 and N+1 on the y axis
axes.set_ylim(max(y_posns.values()) + 0.8, 0.2)
# Parse and process key word arguments as pyplot options
for key, value in kwargs.items():
try:
# Check that the pyplot option input is iterable, as required
[i for i in value]
except TypeError:
raise ValueError('Keyword argument "%s=%s" is not in the format '
'pyplot_option_name=(tuple), pyplot_option_name=(tuple, dict),'
' or pyplot_option_name=(dict) '
% (key, value))
if isinstance(value, dict):
getattr(plt, str(key))(**dict(value))
elif not (isinstance(value[0], tuple)):
getattr(plt, str(key))(*value)
elif (isinstance(value[0], tuple)):
getattr(plt, str(key))(*value[0], **dict(value[1]))
if do_show:
plt.show()
def load_tree(filename, fmt=None):
'''Load a tree from file'''
from Bio import Phylo
if fmt is None:
fmt = filename.split('.')[-1].lower()
if fmt == 'json':
tree = tree_from_json(filename)
elif fmt == 'newick':
def set_frequency(node):
if node.name is not None:
try:
frequency = float(node.name.split(':')[-1])
except ValueError:
pass
else:
node.frequency = frequency
for child in node.clades:
set_frequency(child)
tree = Phylo.read(filename, 'newick')
set_frequency(tree.root)
else:
raise NotImplementedError(fmt)
return tree
def plot_haplotype_trees(datum,
VERBOSE=0,
tree_label='root',
draw_legend_sizes=True,
draw_scale_bar=True,
fig_filename=None):
'''Plot tree of minor haplotypes in a typical patient'''
from operator import attrgetter
import seaborn as sns
from matplotlib import pyplot as plt
plt.ioff()
if VERBOSE:
print('Plot haplotype tree')
fig, ax = plt.subplots(1, 1, figsize=(7, 5))
sns.set_style('white')
ax.grid(False)
x_offset = 0
y_offset = 35
y_padding = 15
tree = getattr(datum, tree_label)
tree.root.branch_length = 0.01
depths = tree.depths()
maxdepth = max(depths.values())
mindepth = min(depths.values())
# Normalize frequencies
freqsum = sum(leaf.frequency for leaf in tree.get_terminals())
for leaf in tree.get_terminals():
leaf.frequency = 1.0 * leaf.frequency / freqsum
# Collect data for circle plot
rmin = 5
rmax = 150
rfun = lambda hf: rmin + (rmax - rmin) * (hf**(0.5))
data_circles = []
for il, leaf in enumerate(tree.get_terminals(), 1):
hf = leaf.frequency
r = rfun(hf)
y = il + y_offset
x = depths[leaf] + x_offset
data_circles.append((x, y, 2 * r, 'grey', 'black'))
# Draw the tree
draw_tree(tree,
show_confidence=False,
label_func=lambda x: '',
axes=ax,
x_offset=x_offset,
y_offset=y_offset,
do_show=False)
# Add circles to the leaves
(x, y, s, c,cs) = zip(*data_circles)
ax.scatter(x, y, s=s, c=c, edgecolor=cs, zorder=2)
ax.set_xlim(-0.04 * maxdepth, 1.04 * maxdepth)
y_offset += tree.count_terminals() + y_padding
ax.set_ylim((y_offset + y_padding, 0))
ax.set_ylabel('')
ax.set_yticklabels([])
ax.set_axis_off()
ax.xaxis.set_tick_params(labelsize=16)
ax.set_xlabel('Genetic distance [changes / site]',
fontsize=16,
labelpad=10)
# Draw a "legend" for sizes
if draw_legend_sizes:
datal = [{'hf': 0.05, 'label': '5%'},
{'hf': 0.20, 'label': '20%'},
{'hf': 1.00, 'label': '100%'}]
ax.text(0.98 * maxdepth,
0.03 * ax.get_ylim()[0],
'Haplotype frequency:', fontsize=16, ha='right')
for idl, datuml in enumerate(datal):
r = rfun(datuml['hf'])
y = (0.07 + 0.07 * idl) * ax.get_ylim()[0]
ax.scatter(0.85 * maxdepth, y,
s=r,
facecolor='grey',
edgecolor='black')
ax.text(0.98 * maxdepth, y + 0.02 * ax.get_ylim()[0],
datuml['label'], ha='right',
fontsize=14)
# Draw scale bar
if draw_scale_bar:
xbar = (0.01 + 0.3 * (len(datal) >= 9)) * maxdepth
ybar = 0.02 * ax.get_ylim()[0]
lbar = 0.05 * maxdepth
lbar_label = '{:.1G}'.format(lbar)
lbar = float(lbar_label)
ax.plot([xbar, xbar + lbar], [ybar, ybar], lw=4, c='k')
ax.text(xbar + 0.5 * lbar, ybar + 0.08 * ax.get_ylim()[0],
lbar_label, fontsize=14,
ha='center')
plt.tight_layout(rect=(0, -0.32, 1, 1))
if fig_filename:
fig.savefig(fig_filename)
plt.close(fig)
else:
plt.show()
# Script
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Plot a haplotype tree',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('filename',
help='Filename with the tree in JSON format')
parser.add_argument('--verbose', type=int, default=2,
help='Verbosity level [0-4]')
parser.add_argument('--outputfile', default='',
help='Output file for the figure')
args = parser.parse_args()
tree = load_tree(args.filename)
plot_haplotype_trees(tree,
VERBOSE=args.verbose,
fig_filename=args.outputfile)
| mit | -1,316,847,662,179,091,000 | 34.986696 | 92 | 0.567344 | false |
GuLinux/PlanetaryImager | scripting_client/planetaryimager/network/driver_protocol.py | 1 | 3043 | from .protocol import *
import PyQt5.QtCore
class Camera:
def __init__(self, camera_dict):
self.name = camera_dict['n']
self.address = camera_dict['a']
def __str__(self):
return '{} [{}]'.format(self.name, self.address)
def __repr__(self):
return self.__str__()
@protocol(area='Driver', packets=['CameraList', 'CameraListReply', 'GetCameraName', 'GetCameraNameReply', 'ConnectCamera', 'ConnectCameraReply', \
'CloseCamera', 'signalDisconnected', 'signalCameraConnected', 'signalFPS', 'signalTemperature', 'signalControlChanged', \
'GetControls', 'GetControlsReply', 'GetProperties', 'GetPropertiesReply', 'StartLive', 'StartLiveReply', 'SetControl', \
'SetROI', 'ClearROI'])
class DriverProtocol:
def camera_list(self):
return [Camera(x) for x in self.client.round_trip(self.packet_cameralist.packet(), self.packet_cameralistreply).variant]
def connect_camera(self, camera):
self.client.send(self.packet_connectcamera.packet(variant=camera.address))
def close_camera(self):
self.client.send(self.packet_closecamera.packet())
def get_camera_name(self):
return self.client.round_trip(self.packet_getcameraname.packet(), self.packet_getcameranamereply).variant
def get_controls(self):
return self.client.round_trip(self.packet_getcontrols.packet(), self.packet_getcontrolsreply).variant
def set_control(self, control):
self.client.send(self.packet_setcontrol.packet(variant=control))
def set_roi(self, x, y, width, height):
self.client.send(self.packet_setroi.packet(variant=PyQt5.QtCore.QRect(x, y, width, height)))
def clear_roi(self):
self.client.send(self.packet_clearroi.packet())
def get_properties(self):
return self.client.round_trip(self.packet_getproperties.packet(), self.packet_getpropertiesreply).variant
def start_live(self):
return self.client.round_trip(self.packet_startlive.packet(), self.packet_startlivereply)
def on_signal_fps(self, callback):
def dispatch(packet): callback(packet.variant)
Protocol.register_packet_handler(self.client, self.packet_signalfps, dispatch)
def on_camera_connected(self, callback):
def dispatch(_): callback()
Protocol.register_packet_handler(self.client, self.packet_signalcameraconnected, dispatch)
def on_camera_disconnected(self, callback):
def dispatch(_): callback()
Protocol.register_packet_handler(self.client, self.packet_signaldisconnected, dispatch)
def on_signal_temperature(self, callback):
def dispatch(packet): callback(packet.variant)
Protocol.register_packet_handler(self.client, self.packet_signaltemperature, dispatch)
def on_control_changed(self, callback):
def dispatch(packet): callback(packet.variant)
Protocol.register_packet_handler(self.client, self.packet_signalcontrolchanged, dispatch)
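# Hypothetical usage sketch (not part of this module): the @protocol decorator is
# assumed to attach the network client as `self.client` and to generate the
# `packet_*` attributes from the packet names listed above. Given a protocol
# instance `driver` built elsewhere in this package, typical calls would be:
#
# cameras = driver.camera_list()                        # -> [Camera, ...]
# driver.connect_camera(cameras[0])                     # connect by address
# driver.on_signal_fps(lambda fps: print('FPS:', fps))  # register FPS callback
# driver.set_roi(0, 0, 640, 480)                        # sent as a QRect
# driver.close_camera()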
| gpl-3.0 | 8,043,753,285,607,263,000 | 41.263889 | 155 | 0.683536 | false |
ssadedin/seqr | seqr/management/tests/reload_saved_variant_json_tests.py | 1 | 4155 | #-*- coding: utf-8 -*-
import mock
from django.core.management import call_command
from django.test import TestCase
from seqr.models import Family
PROJECT_NAME = '1kg project n\u00e5me with uni\u00e7\u00f8de'
PROJECT_GUID = 'R0001_1kg'
FAMILY_ID = '1'
class ReloadSavedVariantJsonTest(TestCase):
fixtures = ['users', '1kg_project']
@mock.patch('logging.getLogger')
@mock.patch('seqr.views.utils.variant_utils.get_es_variants_for_variant_ids')
def test_with_param_command(self, mock_get_variants, mock_get_logger):
mock_get_variants.side_effect = lambda families, variant_ids: \
[{'variantId': variant_id, 'familyGuids': [family.guid for family in families]}
for variant_id in variant_ids]
mock_logger = mock_get_logger.return_value
# Test with a specific project and a family id.
call_command('reload_saved_variant_json',
PROJECT_NAME,
'--family-id={}'.format(FAMILY_ID))
family_1 = Family.objects.get(id=1)
mock_get_variants.assert_called_with(
[family_1], ['1-1562437-G-C', '1-46859832-G-A','21-3343353-GAGA-G'])
logger_info_calls = [
mock.call('Project: 1kg project n\xe5me with uni\xe7\xf8de'),
mock.call('Updated 3 variants for project 1kg project n\xe5me with uni\xe7\xf8de'),
mock.call('Done'),
mock.call('Summary: '),
mock.call(' 1kg project n\xe5me with uni\xe7\xf8de: Updated 3 variants')
]
mock_logger.info.assert_has_calls(logger_info_calls)
mock_get_variants.reset_mock()
mock_logger.reset_mock()
# Test for all projects and no specific family ids
call_command('reload_saved_variant_json')
self.assertEqual(mock_get_variants.call_count, 3)
family_2 = Family.objects.get(id=2)
mock_get_variants.assert_has_calls([
mock.call(
[family_1, family_2], ['1-1562437-G-C', '1-46859832-G-A', '12-48367227-TC-T', '21-3343353-GAGA-G'],
),
mock.call([Family.objects.get(id=11)], ['12-48367227-TC-T', 'prefix_19107_DEL']),
mock.call([Family.objects.get(id=14)], ['12-48367227-TC-T'])
], any_order=True)
logger_info_calls = [
mock.call('Project: 1kg project n\xe5me with uni\xe7\xf8de'),
mock.call('Updated 4 variants for project 1kg project n\xe5me with uni\xe7\xf8de'),
mock.call('Project: Empty Project'),
mock.call('Updated 0 variants for project Empty Project'),
mock.call('Project: Test Reprocessed Project'),
mock.call('Updated 2 variants for project Test Reprocessed Project'),
mock.call('Project: Non-Analyst Project'),
mock.call('Updated 1 variants for project Non-Analyst Project'),
mock.call('Done'),
mock.call('Summary: '),
mock.call(' 1kg project n\xe5me with uni\xe7\xf8de: Updated 4 variants'),
mock.call(' Test Reprocessed Project: Updated 2 variants'),
mock.call(' Non-Analyst Project: Updated 1 variants'),
]
mock_logger.info.assert_has_calls(logger_info_calls)
mock_get_variants.reset_mock()
mock_logger.reset_mock()
# Test with an exception.
mock_get_variants.side_effect = Exception("Database error.")
call_command('reload_saved_variant_json',
PROJECT_GUID,
'--family-id={}'.format(FAMILY_ID))
mock_get_variants.assert_called_with([family_1], ['1-1562437-G-C', '1-46859832-G-A', '21-3343353-GAGA-G'])
logger_info_calls = [
mock.call('Project: 1kg project n\xe5me with uni\xe7\xf8de'),
mock.call('Done'),
mock.call('Summary: '),
mock.call('1 failed projects'),
mock.call(' 1kg project n\xe5me with uni\xe7\xf8de: Database error.')
]
mock_logger.info.assert_has_calls(logger_info_calls)
mock_logger.error.assert_called_with('Error in project 1kg project n\xe5me with uni\xe7\xf8de: Database error.')
| agpl-3.0 | 7,825,958,456,046,299,000 | 43.677419 | 120 | 0.608905 | false |
bioinf-jku/SNNs | figure1/utils.py | 1 | 4576 | # -*- coding: utf-8 -*-
'''
Tensorflow Implementation of the Scaled ELU function and Dropout
'''
import numbers
import numpy as np
import tensorflow as tf
from tensorflow.contrib import layers
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.layers import utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
# (1) scale inputs to zero mean and unit variance
# (2) use SELUs
def selu(x):
with ops.name_scope('elu') as scope:
alpha = 1.6732632423543772848170429916717
scale = 1.0507009873554804934193349852946
return scale*tf.where(x>=0.0, x, alpha*tf.nn.elu(x))
# (3) initialize weights with stddev sqrt(1/n)
# e.g. use:
initializer = layers.variance_scaling_initializer(factor=1.0, mode='FAN_IN')
# (4) use this dropout
def dropout_selu(x, rate, alpha= -1.7580993408473766, fixedPointMean=0.0, fixedPointVar=1.0,
noise_shape=None, seed=None, name=None, training=False):
"""Dropout to a value with rescaling."""
def dropout_selu_impl(x, rate, alpha, noise_shape, seed, name):
keep_prob = 1.0 - rate
x = ops.convert_to_tensor(x, name="x")
if isinstance(keep_prob, numbers.Real) and not 0 < keep_prob <= 1:
raise ValueError("keep_prob must be a scalar tensor or a float in the "
"range (0, 1], got %g" % keep_prob)
keep_prob = ops.convert_to_tensor(keep_prob, dtype=x.dtype, name="keep_prob")
keep_prob.get_shape().assert_is_compatible_with(tensor_shape.scalar())
alpha = ops.convert_to_tensor(alpha, dtype=x.dtype, name="alpha")
alpha.get_shape().assert_is_compatible_with(tensor_shape.scalar())
if tensor_util.constant_value(keep_prob) == 1:
return x
noise_shape = noise_shape if noise_shape is not None else array_ops.shape(x)
random_tensor = keep_prob
random_tensor += random_ops.random_uniform(noise_shape, seed=seed, dtype=x.dtype)
binary_tensor = math_ops.floor(random_tensor)
ret = x * binary_tensor + alpha * (1-binary_tensor)
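# The affine transform below rescales the result so the activations keep the
# requested fixed-point mean and variance (fixedPointMean, fixedPointVar)
# after units have been dropped to the saturation value alpha.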
a = math_ops.sqrt(fixedPointVar / (keep_prob *((1-keep_prob) * math_ops.pow(alpha-fixedPointMean,2) + fixedPointVar)))
b = fixedPointMean - a * (keep_prob * fixedPointMean + (1 - keep_prob) * alpha)
ret = a * ret + b
ret.set_shape(x.get_shape())
return ret
with ops.name_scope(name, "dropout", [x]) as name:
return utils.smart_cond(training,
lambda: dropout_selu_impl(x, rate, alpha, noise_shape, seed, name),
lambda: array_ops.identity(x))
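# Minimal sketch of how the four ingredients above fit together in a TF1-style
# graph. Illustrative only: the placeholder shapes and layer sizes are made up
# and are not part of this repository.
#
# x = tf.placeholder(tf.float32, [None, 784])     # inputs standardized per step (1)
# is_training = tf.placeholder(tf.bool)
# h = tf.layers.dense(x, 256,
#                     activation=selu,                 # step (2)
#                     kernel_initializer=initializer)  # step (3)
# h = dropout_selu(h, rate=0.05, training=is_training) # step (4)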
def get_timestamp(fmt='%y%m%d_%H%M'):
'''Returns a string that contains the current date and time.
Suggested formats:
short_format=%y%m%d_%H%M (default)
long format=%Y%m%d_%H%M%S
'''
import datetime
now = datetime.datetime.now()
return datetime.datetime.strftime(now, fmt)
def generate_slices(n, slice_size, allow_smaller_final_batch=True):
"""Generates slices of given slice_size up to n"""
start, end = 0, 0
for pack_num in range(int(n / slice_size)):
end = start + slice_size
yield slice(start, end, None)
start = end
# last slice might not be a full batch
if allow_smaller_final_batch:
if end < n:
yield slice(end, n, None)
def generate_minibatches(batch_size, ph_list, data_list, n_epochs=1,
allow_smaller_final_batch=False, shuffle=True,
feed_dict=None):
cnt_epochs = 0
assert len(ph_list) == len(data_list), "Passed different number of data and placeholders"
assert len(data_list) >= 0, "Passed empty lists"
n_samples = data_list[0].shape[0]
n_items = len(data_list)
while True:
if shuffle:
idx = np.arange(n_samples)
np.random.shuffle(idx)
for i in range(n_items):
data_list[i] = data_list[i][idx]
if feed_dict is None:
feed_dict = {}
for s in generate_slices(n_samples, batch_size, allow_smaller_final_batch):
for i in range(n_items):
ph = ph_list[i]
d = data_list[i][s]
feed_dict[ph] = d
yield feed_dict
cnt_epochs += 1
if n_epochs is not None and cnt_epochs >= n_epochs:
break
| gpl-3.0 | -7,715,505,530,584,231,000 | 34.75 | 126 | 0.621941 | false |
elin-moco/metrics | metrics/etl/management/commands/load.py | 1 | 1229 | from django.core.management.base import BaseCommand
from metrics.etl.tools import related_posts_extract, fx_extract, log_extract, pd_transform, main_extract, moztech_extract, mozblog_extract, newsletter_extract, moztech_load, browser_survey_extract
from metrics.settings import LOG_PATH
class Command(BaseCommand):
help = 'Perform ETL cycle'
def handle(self, *args, **options):
if args is None:
return
if args[0] == 'log':
log_extract.main((LOG_PATH, args[1]))
if args[0] == 'fx':
fx_extract.main()
pd_transform.main()
if args[0] == 'main':
main_extract.main()
if args[0] == 'moztech':
moztech_extract.main()
moztech_load.main()
if args[0] == 'mozblog':
mozblog_extract.main()
if args[0] == 'newsletter':
newsletter_extract.main()
if args[0] == 'related_posts':
if len(args) > 1:
related_posts_extract.main(args[1])
else:
related_posts_extract.main('blog')
related_posts_extract.main('tech')
if args[0] == 'browser_survey':
browser_survey_extract.main()
| bsd-3-clause | 2,593,122,512,009,476,600 | 36.242424 | 196 | 0.567128 | false |
briandorsey/partisci | clients/python/pypartisci.py | 1 | 1390 | import httplib
import json
import time
import random
import socket
__version__ = "1.0"
def serialize(app, ver, host, instance):
update = dict(
app=app,
ver=ver,
host=host,
instance=instance)
data = json.dumps(update)
return data
def send_udp(server, port, app, ver, host="", instance=0):
if not host:
try:
host = socket.gethostname()
except StandardError:
pass
data = serialize(app, ver, host, instance)
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect((server, port))
s.send(data)
s.close()
return
def send_http(server, port, app, ver, host="", instance=0):
conn = httplib.HTTPConnection(server, port)
body = serialize(app, ver, host, instance)
conn.request("POST", "/api/v1/update/", body)
response = conn.getresponse()
data = response.read()
conn.close()
return response.status, data
if __name__ == '__main__':
versions = ["1.0", "2.0", "3.0"]
hosts = ["abc", "def", "ghi"]
instances = [0, 1, 2, 3]
while True:
print "%-14s Sending update" % time.time()
send_udp('localhost', 7777, 'Python Client demo',
random.choice(versions),
random.choice(hosts),
random.choice(instances))
time.sleep(2)
| bsd-2-clause | 4,227,259,765,084,404,000 | 24.272727 | 59 | 0.558993 | false |
SFPD/rlreloaded | 3rdparty/climin/cg.py | 1 | 10960 | # -*- coding: utf-8 -*-
"""Module containing functionality for conjugate gradients.
Conjugate gradients is motivated by a first-order Taylor expansion of the
objective:
.. math::
f(\\theta_t + \\alpha_t d_t) \\approx f(\\theta_t) + \\alpha_td_t^Tf'(\\theta_t).
To locally decrease the objective, it is optimal to set
:math:`d_t \propto -f'(\\theta_t)` and find :math:`\\alpha_t` with a line search
algorithm, which is known as steepest descent. Yet, a well known disadvantage
of this approach is that directions found at :math:`t` will often interfere with
directions found for :math:`t' < t`.
The solution to this problem is to chose :math:`d_t` in a way that it does not
interfere with previous updates. If the dimensions of our problem were
independent, we could just move along these dimensions. If they were independent
up to rotation, we would have to chose directions which are orthogonal to each
other. This is exactly the case when the Hessian of the problem, :math:`A` is
diagonal. If it is not diagonal, we have to move along directions which are
called *conjugate* to each other with respect to the matrix :math:`A`.
The conjugate gradients algorithms provide methods to do so efficiently. The
linear conjugate gradients algorithm assumes that the objective is a quadratic
and can thus determine :math:`\\alpha` exactly. Nonlinear conjugate gradients
works on arbitrary functions (yet, the Taylor expansion assumption above has to
be reasonable). Since the Hessian :math:`A` is not constant in this case, the
previous directions (to which a new direction has to be conjugate) have to be
reset from time to time. Additionally, we need to perform a line search to solve
for :math:`\\alpha_t`.
"""
import warnings
import scipy
import numpy as np
import scipy.linalg
import scipy.optimize
from base import Minimizer, is_nonzerofinite
from linesearch import WolfeLineSearch
class ConjugateGradient(Minimizer):
"""ConjugateGradient class.
Minimize a quadratic objective of the form
.. math::
f(\\theta) = {1 \over 2} \\theta^TA\\theta + \\theta^Tb + c.
The minimization will take place by moving along conjugate directions of
steepest decrease in the error. This will take at most as many steps as
the dimensionality of the problem.
.. note::
In most cases it is better to use ``scipy.optimize.solve``. Only use
this function if you want to monitor intermediate quantities and are
not entirely interested in optimization of a quadratic objective, but in
a different error measure. E.g. as in Hessian free optimization.
Attributes
----------
wrt : array_like
Parameters of the problem.
H : array_like, 2 dimensional, square
Curvature term of the quadratic, the Hessian.
b : array_like
Linear term of the quadratic.
f_Hp : callable
Function to calculate the dot product of a Hessian with an
arbitrary vector.
min_grad : float, optional, default: 1e-14
If all components of the gradient fall below this threshold,
stop optimization.
precond : array_like
Matrix to precondition the problem. If a vector, is taken to
represent a diagonal matrix.
"""
def __init__(self, wrt, H=None, b=None, f_Hp=None, min_grad=1e-14,
precond=None):
"""Create a ConjugateGradient object.
Parameters
----------
wrt : array_like
Parameters of the problem.
H : array_like, 2 dimensional, square
Curvature term of the quadratic, the Hessian.
b : array_like
Linear term of the quadratic.
f_Hp : callable
Function to calculate the dot product of a Hessian with an
arbitrary vector.
min_grad : float, optional, default: 1e-14
If all components of the gradient fall below this threshold,
stop optimization.
precond : array_like
Matrix to precondition the problem. If a vector, is taken to
represent a diagonal matrix.
"""
super(ConjugateGradient, self).__init__(
wrt, args=None)
self.f_Hp = f_Hp if f_Hp is not None else lambda p: np.dot(H, p)
self.b = b
self.min_grad = min_grad
self.precond = precond
def solve(self, r):
if self.precond is None:
return r
elif self.precond.ndim == 1:
#if the preconditioning matrix is diagonal,
#then it is supposedly given as a vector
return r / self.precond
else:
return scipy.linalg.solve(self.precond, r)
def __iter__(self):
grad = self.f_Hp(self.wrt) - self.b
y = self.solve(grad)
direction = -y
# If the gradient is exactly zero, we stop. Otherwise, the
# updates will lead to NaN errors because the direction will
# be zero.
if (grad == 0).all():
warnings.warn('gradient is 0')
return
for i in range(self.wrt.size):
Hp = self.f_Hp(direction)
if not np.isfinite(Hp).all():
print "hessian vector product is not finite. aborting cg"
break
ry = np.dot(grad, y)
pHp = np.inner(direction, Hp)
step_length = ry / pHp
self.wrt += step_length * direction
# We do this every few iterations to compensate for possible
# numerical errors due to additions.
if i % 10 == 0:
grad = self.f_Hp(self.wrt) - self.b
else:
grad += step_length * Hp
y = self.solve(grad)
beta = np.dot(grad, y) / ry
direction = - y + beta * direction
# If we don't bail out here, we will enter regions of numerical
# instability.
if (abs(grad) < self.min_grad).all():
warnings.warn('gradient is below threshold')
break
yield {
'ry': ry,
'Hp': Hp,
'pHp': pHp,
'step_length': step_length,
'n_iter': i,
}
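# Minimal usage sketch for the linear conjugate gradient solver above
# (illustrative; assumes the climin base classes import as in this module).
# Minimizing 1/2 x^T H x - b^T x is equivalent to solving H x = b:
#
# x = np.zeros(2)                      # parameters, updated in place
# H = np.array([[3.0, 1.0],
#               [1.0, 2.0]])
# b = np.array([1.0, 2.0])
# for info in ConjugateGradient(x, H=H, b=b):
#     pass                             # at most x.size iterations
# # x is now close to np.linalg.solve(H, b)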
class NonlinearConjugateGradient(Minimizer):
"""
NonlinearConjugateGradient optimizer.
NCG minimizes functions by following directions which are conjugate to each
other with respect to the Hessian. Since the curvature changes if the
objective is nonquadratic, the Hessian will not be accurate and thus the
conjugacy of successive search directions as well. Furthermore, the optimal
step length cannot be found in closed form and has to be obtained with a
line search.
During optimization, we sometimes perform a restart. That means we give up
on trying to find conjugate directions and use the gradient as a new search
direction. This is done whenever two successive directions are far from
orthogonal, an indicator that the quadratic assumption is either inaccurate
or the Hessian has changed too much lately.
Attributes
----------
wrt : array_like
Array of parameters of the problem.
f : callable
Objective function.
fprime : callable
First derivative of the objective function.
min_grad : float
If all components of the gradient fall below this value, stop
minimization.
line_search : LineSearch object.
Line search object to perform line searches with.
args : iterable
Iterable of arguments passed on to the objective function and its
derivatives.
"""
def __init__(self, wrt, f, fprime, min_grad=1e-6, args=None):
"""Create a NonlinearConjugateGradient object.
Parameters
----------
wrt : array_like
Array of parameters of the problem.
f : callable
Objective function.
fprime : callable
First derivative of the objective function.
min_grad : float
If all components of the gradient fall below this value, stop
minimization.
args : iterable, optional
Iterable of arguments passed on to the objective function and its
derivatives.
"""
super(NonlinearConjugateGradient, self).__init__(wrt, args=args)
self.f = f
self.fprime = fprime
self.line_search = WolfeLineSearch(wrt, self.f, self.fprime, c2=0.2)
self.min_grad = min_grad
def find_direction(self, grad_m1, grad, direction_m1):
# Computation of beta as a compromise between Fletcher-Reeves
# and Polak-Ribiere.
grad_norm_m1 = np.dot(grad_m1, grad_m1)
grad_diff = grad - grad_m1
betaFR = np.dot(grad, grad) / grad_norm_m1
betaPR = np.dot(grad, grad_diff) / grad_norm_m1
betaHS = np.dot(grad, grad_diff) / np.dot(direction_m1, grad_diff)
beta = max(-betaFR, min(betaPR, betaFR))
# Restart if not a direction of sufficient descent, ie if two
# consecutive gradients are far from orthogonal.
if np.dot(grad, grad_m1) / grad_norm_m1 > 0.1:
beta = 0
direction = -grad + beta * direction_m1
return direction, {}
def __iter__(self):
args, kwargs = self.args.next()
grad = self.fprime(self.wrt, *args, **kwargs)
grad_m1 = np.zeros(grad.shape)
loss = self.f(self.wrt, *args, **kwargs)
loss_m1 = 0
for i, (next_args, next_kwargs) in enumerate(self.args):
if i == 0:
direction, info = -grad, {}
else:
direction, info = self.find_direction(grad_m1, grad, direction)
if not is_nonzerofinite(direction):
warnings.warn('gradient is either zero, nan or inf')
break
# Line search minimization.
initialization = 2 * (loss - loss_m1) / np.dot(grad, direction)
initialization = min(1, initialization)
step_length = self.line_search.search(
direction, initialization, args, kwargs)
self.wrt += step_length * direction
# If we don't bail out here, we will enter regions of numerical
# instability.
if (abs(grad) < self.min_grad).all():
warnings.warn('gradient is too small')
break
# Prepare everything for the next loop.
args, kwargs = next_args, next_kwargs
grad_m1[:], grad[:] = grad, self.line_search.grad
loss_m1, loss = loss, self.line_search.val
info.update({
'n_iter': i,
'args': args,
'kwargs': kwargs,
'loss': loss,
'gradient': grad,
'gradient_m1': grad_m1,
'step_length': step_length,
})
yield info
| mit | 1,880,162,668,656,026,400 | 33.25 | 84 | 0.610584 | false |
gjcarneiro/pybindgen | pybindgen/typehandlers/codesink.py | 1 | 4693 | """
Objects that receive generated C/C++ code lines, reindents them, and
writes them to a file, memory, or another code sink object.
"""
import sys
PY3 = (sys.version_info[0] >= 3)
if PY3:
string_types = str,
else:
string_types = basestring,
DEBUG = 0
if DEBUG:
import traceback
import sys
class CodeSink(object):
"""Abstract base class for code sinks"""
def __init__(self):
r'''Constructor
>>> sink = MemoryCodeSink()
>>> sink.writeln("foo();")
>>> sink.writeln("if (true) {")
>>> sink.indent()
>>> sink.writeln("bar();")
>>> sink.unindent()
>>> sink.writeln("zbr();")
>>> print sink.flush().rstrip()
foo();
if (true) {
bar();
zbr();
>>> sink = MemoryCodeSink()
>>> sink.writeln("foo();")
>>> sink.writeln()
>>> sink.writeln("bar();")
>>> print len(sink.flush().split("\n"))
4
'''
self.indent_level = 0 # current indent level
self.indent_stack = [] # previous indent levels
if DEBUG:
self._last_unindent_stack = None # for debugging
def _format_code(self, code):
"""Utility method for subclasses to use for formatting code
(splits lines and indents them)"""
assert isinstance(code, string_types)
l = []
for line in code.split('\n'):
l.append(' '*self.indent_level + line)
return l
def writeln(self, line=''):
"""Write one or more lines of code"""
raise NotImplementedError
def indent(self, level=4):
'''Add a certain amount of indentation to all lines written
from now on and until unindent() is called'''
self.indent_stack.append(self.indent_level)
self.indent_level += level
def unindent(self):
'''Revert indentation level to the value before last indent() call'''
if DEBUG:
try:
self.indent_level = self.indent_stack.pop()
except IndexError:
if self._last_unindent_stack is not None:
for line in traceback.format_list(self._last_unindent_stack):
sys.stderr.write(line)
raise
self._last_unindent_stack = traceback.extract_stack()
else:
self.indent_level = self.indent_stack.pop()
class FileCodeSink(CodeSink):
"""A code sink that writes to a file-like object"""
def __init__(self, file_):
"""
:param file_: a file like object
"""
CodeSink.__init__(self)
self.file = file_
def __repr__(self):
return "<pybindgen.typehandlers.codesink.FileCodeSink %r>" % (self.file.name,)
def writeln(self, line=''):
"""Write one or more lines of code"""
self.file.write('\n'.join(self._format_code(line)))
self.file.write('\n')
def __lt__(self, other):
if isinstance(other, FileCodeSink):
return self.file.name < other.file.name
class MemoryCodeSink(CodeSink):
"""A code sink that keeps the code in memory,
and can later flush the code to another code sink"""
def __init__(self):
"Constructor"
CodeSink.__init__(self)
self.lines = []
def writeln(self, line=''):
"""Write one or more lines of code"""
self.lines.extend(self._format_code(line))
def flush_to(self, sink):
"""Flushes code to another code sink
:param sink: another CodeSink instance
"""
assert isinstance(sink, CodeSink)
for line in self.lines:
sink.writeln(line.rstrip())
self.lines = []
def flush(self):
"Flushes the code and returns the formatted output as a return value string"
l = []
for line in self.lines:
l.extend(self._format_code(line))
self.lines = []
return "\n".join(l) + '\n'
class NullCodeSink(CodeSink):
"""A code sink that discards all content. Useful to 'test' if code
generation would work without actually generating anything."""
def __init__(self):
"Constructor"
CodeSink.__init__(self)
def writeln(self, line=''):
"""Write one or more lines of code"""
pass
def flush_to(self, sink):
"""Flushes code to another code sink
:param sink: another CodeSink instance
"""
raise TypeError("Cannot flush a NullCodeSink; it has no content!")
def flush(self):
"Flushes the code and returns the formatted output as a return value string"
raise TypeError("Cannot flush a NullCodeSink; it has no content!")
| lgpl-2.1 | -1,145,504,394,182,056,200 | 29.474026 | 86 | 0.568506 | false |
maljac/odoo-addons | project_task_desc_html/__openerp__.py | 1 | 1587 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Project Task Description in HTML',
'version': '1.0',
'category': 'Projects & Services',
'sequence': 14,
'summary': '',
'description': """
Project Task Description in HTML
================================
Changes description type on tasks to html
""",
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'images': [
],
'depends': [
'project',
],
'data': [
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -3,380,056,052,953,167,000 | 31.387755 | 78 | 0.561437 | false |
aswolf/xmeos | xmeos/models/compress.py | 1 | 34752 | # -*- coding: utf-8 -*-
# from __future__ import absolute_import, print_function, division
from future.utils import with_metaclass
import numpy as np
import scipy as sp
from abc import ABCMeta, abstractmethod
from scipy import integrate
import scipy.interpolate as interpolate
from . import core
__all__ = ['CompressEos','CompressCalc']
#====================================================================
# Models
#====================================================================
def set_calculator(eos_mod, kind, kind_opts, path_const, order=3):
assert kind in kind_opts, (
kind + ' is not a valid compress calculator. '+
'You must select one of: ' + str(kind_opts))
if kind=='Vinet':
calc = _Vinet(eos_mod, path_const=path_const, order=order)
elif kind=='BirchMurn3':
calc = _BirchMurn3(eos_mod, path_const=path_const, order=order)
elif kind=='BirchMurn4':
calc = _BirchMurn4(eos_mod, path_const=path_const, order=order)
elif kind=='GenFiniteStrain':
calc = _GenFiniteStrain(eos_mod, path_const=path_const, order=order)
elif kind=='Tait':
calc = _Tait(eos_mod, path_const=path_const, order=order)
elif kind=='PolyRho':
calc = _PolyRho(eos_mod, path_const=path_const, order=order)
else:
raise NotImplementedError(kind+' is not a valid '+\
'CompressEos Calculator.')
eos_mod._add_calculator(calc, calc_type='compress')
pass
#====================================================================
class CompressEos(with_metaclass(ABCMeta, core.Eos)):
"""
EOS model for reference compression path.
Parameters
----------
Path can either be isothermal (T=const) or adiabatic (S=const)
For this restricted path, thermodynamic properties depend only on volume.
"""
_path_opts = ['T','S','0K']
_kind_opts = ['Vinet','BirchMurn3','BirchMurn4','GenFiniteStrain',
'Tait','PolyRho']
def __init__(self, kind='Vinet', natom=1, molar_mass=100, path_const='T',
order=3, model_state={}):
self._pre_init(natom=natom, molar_mass=molar_mass)
set_calculator(self, kind, self._kind_opts, path_const, order=order)
self._set_ref_state()
self._post_init(model_state=model_state)
pass
def __repr__(self):
calc = self.calculators['compress']
return ("CompressEos(kind={kind}, natom={natom}, "
"molar_mass={molar_mass},"
"path_const={path_const}, order={order}, "
"model_state={model_state}, "
")"
.format(kind=repr(calc.name),
natom=repr(self.natom),
molar_mass=repr(self.molar_mass),
path_const=repr(self.path_const),
order=repr(calc.order),
model_state=self.model_state
)
)
def _set_ref_state(self):
calc = self.calculators['compress']
path_const = calc.path_const
energy_scale = calc.get_energy_scale()
T0 = 300
# Add needed extra parameters (depending on path_const)
if path_const=='T':
param_ref_names = ['T0','F0']
param_ref_units = ['K','eV']
param_ref_defaults = [T0, 0.0]
param_ref_scales = [T0, energy_scale]
elif path_const=='S':
param_ref_names = ['T0','E0']
param_ref_units = ['K','eV']
param_ref_defaults = [T0, 0.0]
param_ref_scales = [T0, energy_scale]
elif path_const=='0K':
param_ref_names = []
param_ref_units = []
param_ref_defaults = []
param_ref_scales = []
pass
else:
raise NotImplementedError(
'path_const '+path_const+' is not valid for CompressEos.')
self._path_const = path_const
self._param_ref_names = param_ref_names
self._param_ref_units = param_ref_units
self._param_ref_defaults = param_ref_defaults
self._param_ref_scales = param_ref_scales
pass
@property
def path_opts(self):
return self._path_opts
@property
def path_const(self):
return self._path_const
def press(self, V_a, apply_expand_adj=True):
press_a = self.calculators['compress']._calc_press(V_a)
return press_a
def energy( self, V_a, apply_expand_adj=True ):
energy0 = 0.0
try:
energy0 = self.get_param_values(param_names=['F0'])
except:
pass
try:
energy0 = self.get_param_values(param_names=['E0'])
except:
pass
energy_a = energy0 + self.calculators['compress']._calc_energy(V_a)
# if self.expand_adj and apply_expand_adj:
# ind_exp = self.get_ind_expand(V_a, eos_d)
# if apply_expand_adj and (ind_exp.size>0):
# energy_a[ind_exp] = self.expand_adj_mod._calc_energy( V_a[ind_exp], eos_d )
return energy_a
def bulk_mod( self, V_a, apply_expand_adj=True ):
try:
bulk_mod_a = self.calculators['compress']._calc_bulk_mod(V_a)
# if self.expand_adj and apply_expand_adj:
# ind_exp = self.get_ind_expand(V_a)
# if apply_expand_adj and (ind_exp.size>0):
# bulk_mod_a[ind_exp] =
# self.expand_adj_mod._calc_bulk_mod(V_a[ind_exp])
except:
TOL=1e-4
P_lo_a = self.press(V_a*np.exp(-TOL/2))
P_hi_a = self.press(V_a*np.exp(+TOL/2))
bulk_mod_a = -(P_hi_a-P_lo_a)/TOL
return bulk_mod_a
def bulk_mod_deriv( self,V_a, apply_expand_adj=True ):
bulk_mod_deriv_a = self.calculators['compress']._calc_bulk_mod_deriv(V_a)
if self.expand_adj and apply_expand_adj:
ind_exp = self.get_ind_expand(V_a)
if apply_expand_adj and (ind_exp.size>0):
bulk_mod_deriv_a[ind_exp] = self.expand_adj_mod_deriv._calc_bulk_mod_deriv(V_a[ind_exp])
return bulk_mod_deriv_a
def energy_perturb( self, V_a, apply_expand_adj=True ):
# Eval positive press values
Eperturb_pos_a, scale_a, paramkey_a = self.calculators['compress']._calc_energy_perturb(V_a)
if (self.expand_adj==False) or (apply_expand_adj==False):
return Eperturb_pos_a, scale_a, paramkey_a
else:
Nparam_pos = Eperturb_pos_a.shape[0]
scale_a, paramkey_a, ind_pos = \
self.get_param_scale(apply_expand_adj=True,
output_ind=True)
Eperturb_a = np.zeros((paramkey_a.size, V_a.size))
Eperturb_a[ind_pos,:] = Eperturb_pos_a
# Overwrite negative pressure Expansion regions
ind_exp = self.get_ind_expand(V_a)
if ind_exp.size>0:
Eperturb_adj_a = \
self.expand_adj_mod._calc_energy_perturb(V_a[ind_exp])[0]
Eperturb_a[:,ind_exp] = Eperturb_adj_a
return Eperturb_a, scale_a, paramkey_a
# Standard methods must be overridden (as needed) by the implementation model
def get_param_scale_sub(self):
raise NotImplementedError("'get_param_scale_sub' function not implemented for this model")
####################
# Required Methods #
####################
####################
# Optional Methods #
####################
def _calc_energy_perturb(self, V_a):
"""Returns Energy pertubation basis functions resulting from fractional changes to EOS params."""
fname = 'energy'
scale_a, paramkey_a = self.get_param_scale(
apply_expand_adj=self.expand_adj)
Eperturb_a = []
for paramname in paramkey_a:
iEperturb_a = self.param_deriv(fname, paramname, V_a)
Eperturb_a.append(iEperturb_a)
Eperturb_a = np.array(Eperturb_a)
return Eperturb_a, scale_a, paramkey_a
#====================================================================
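# Illustrative usage sketch (assumes the core.Eos machinery used above is
# available; the numbers are just the calculator defaults, not fitted values):
# build a Vinet EOS along an isotherm and evaluate it on a compression grid.
#
# import numpy as np
# eos = CompressEos(kind='Vinet', path_const='T')
# V_a = np.linspace(0.7, 1.0, 5) * 100   # volumes in ang^3 (V0 defaults to 100)
# P_a = eos.press(V_a)                   # pressures in GPa
# E_a = eos.energy(V_a)                  # energies in eV
# K_a = eos.bulk_mod(V_a)                # bulk moduli in GPa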
#====================================================================
# Calculators
#====================================================================
class CompressCalc(with_metaclass(ABCMeta, core.Calculator)):
"""
Abstract Equation of State class for a reference Compression Path
Path can either be isothermal (T=const) or adiabatic (S=const)
For this restricted path, thermodynamic properties depend only on volume.
"""
_path_opts = ['T','S','0K']
supress_energy = False
supress_press = False
def __init__( self, eos_mod, path_const='T', order=None,
expand_adj_mod=None, expand_adj=None,
supress_energy=False, supress_press=False ):
assert path_const in self.path_opts, path_const + ' is not a valid ' + \
'path const. You must select one of: ' + str(self.path_opts)
assert (np.isscalar(order))&(order>0)&(np.mod(order,1)==0), (
'order must be a positive integer.')
self._eos_mod = eos_mod
self._init_params(order)
self._required_calculators = None
self._path_const = path_const
self.supress_energy = supress_energy
self.supress_press = supress_press
# Use Expansion Adjustment for negative pressure region?
if expand_adj is None:
self.expand_adj = False
else:
self.expand_adj = expand_adj
if expand_adj_mod is None:
self.expand_adj = False
self.expand_adj_mod = None
else:
self.expand_adj = True
self.expand_adj_mod = expand_adj_mod
pass
@property
def path_opts(self):
return self._path_opts
def get_ind_expand(self, V_a):
V0, = core.get_params(['V0'])
ind_exp = np.where( V_a > V0 )[0]
return ind_exp
@property
def path_const(self):
return self._path_const
@property
def order(self):
return self._order
# NEED to write infer volume function
# Standard methods must be overridden (as needed) by the implementation model
def get_energy_scale(self):
V0, K0 = self.get_param_defaults(['V0','K0'])
energy_scale = np.round(V0*K0/core.CONSTS['PV_ratio'],decimals=2)
return energy_scale
def get_param_scale_sub(self):
raise NotImplementedError("'get_param_scale_sub' function not implimented for this model")
####################
# Required Methods #
####################
@abstractmethod
def _init_params(self, order):
"""Initialize list of calculator parameter names."""
pass
@abstractmethod
def _calc_press(self, V_a):
"""Returns Press variation along compression curve."""
pass
@abstractmethod
def _calc_energy(self, V_a):
"""Returns Energy along compression curve."""
pass
####################
# Optional Methods #
####################
# EOS property functions
def _calc_param_deriv(self, fname, paramname, V_a, dxfrac=1e-6):
scale_a, paramkey_a = self.get_param_scale(apply_expand_adj=True )
scale = scale_a[paramkey_a==paramname][0]
# print 'scale: ' + np.str(scale)
#if (paramname is 'E0') and (fname is 'energy'):
# return np.ones(V_a.shape)
try:
fun = getattr(self, fname)
# Note that self is implicitly included
val0_a = fun(V_a)
except:
assert False, 'That is not a valid function name ' + \
'(e.g. it should be press or energy)'
try:
param = core.get_params([paramname])[0]
dparam = scale*dxfrac
# print 'param: ' + np.str(param)
# print 'dparam: ' + np.str(dparam)
except:
assert False, 'This is not a valid parameter name'
# set param value in eos_d dict
core.set_params([paramname,], [param+dparam,])
# Note that self is implicitly included
dval_a = fun(V_a) - val0_a
# reset param to original value
core.set_params([paramname], [param])
deriv_a = dval_a/dxfrac
return deriv_a
def _calc_energy_perturb(self, V_a):
"""Returns Energy pertubation basis functions resulting from fractional changes to EOS params."""
fname = 'energy'
scale_a, paramkey_a = self.get_param_scale(
apply_expand_adj=self.expand_adj)
Eperturb_a = []
for paramname in paramkey_a:
iEperturb_a = self._calc_param_deriv(fname, paramname, V_a)
Eperturb_a.append(iEperturb_a)
Eperturb_a = np.array(Eperturb_a)
return Eperturb_a, scale_a, paramkey_a
def _calc_bulk_mod(self, V_a):
"""Returns Bulk Modulus variation along compression curve."""
raise NotImplementedError("'bulk_mod' function not implimented for this model")
def _calc_bulk_mod_deriv(self, V_a):
"""Returns Bulk Modulus Deriv (K') variation along compression curve."""
raise NotImplementedError("'bulk_mod_deriv' function not implimented for this model")
#====================================================================
#====================================================================
# Implementations
#====================================================================
class _Vinet(CompressCalc):
_name='Vinet'
def _init_params(self, order):
"""Initialize list of calculator parameter names."""
order = 3 # ignore order input
V0, K0, KP0 = 100, 150, 4
param_names = ['V0','K0','KP0']
param_units = ['ang^3','GPa','1']
param_defaults = [V0,K0,KP0]
param_scales = [V0,K0,KP0]
self._set_params(param_names, param_units,
param_defaults, param_scales, order=order)
pass
def _calc_press(self, V_a):
V0, K0, KP0 = self.eos_mod.get_param_values(
param_names=['V0','K0','KP0'])
eta = 3/2*(KP0-1)
vratio_a = V_a/V0
x_a = vratio_a**(1/3)
press_a = 3*K0*(1-x_a)*x_a**(-2)*np.exp(eta*(1-x_a))
return press_a
def _calc_energy(self, V_a):
V0, K0, KP0 = self.eos_mod.get_param_values(
param_names=['V0','K0','KP0'])
PV_ratio, = core.get_consts(['PV_ratio'])
eta = 3/2*(KP0-1)
vratio_a = V_a/V0
x_a = vratio_a**(1/3)
energy_a = 9*K0*V0/PV_ratio/eta**2*\
(1 + (eta*(1-x_a)-1)*np.exp(eta*(1-x_a)))
return energy_a
# def get_param_scale_sub(self):
# """Return scale values for each parameter"""
# V0, K0, KP0 = core.get_params(['V0','K0','KP0'])
# PV_ratio, = core.get_consts(['PV_ratio'])
# paramkey_a = np.array(['V0','K0','KP0','E0'])
# scale_a = np.array([V0,K0,KP0,K0*V0/PV_ratio])
# return scale_a, paramkey_a
# def _calc_energy_perturb(self, V_a):
# """Returns Energy pertubation basis functions resulting from fractional changes to EOS params."""
# V0, K0, KP0, E0 = core.get_params(['V0','K0','KP0','E0'])
# PV_ratio, = core.get_consts(['PV_ratio'])
# eta = 3/2*(KP0-1)
# vratio_a = V_a/V0
# x = vratio_a**(1/3)
# scale_a, paramkey_a = self.get_param_scale_sub()
# # NOTE: CHECK UNITS (PV_RATIO) here
# dEdp_a = 1/PV_ratio*np.vstack\
# ([-3*K0*(eta**2*x*(x-1) + 3*eta*(x-1) - 3*np.exp(eta*(x-1)) + 3)\
# *np.exp(-eta*(x-1))/eta**2,
# -9*V0*(eta*(x-1) - np.exp(eta*(x-1)) + 1)*np.exp(-eta*(x-1))/eta**2,
# 27*K0*V0*(2*eta*(x-1) + eta*(-x + (x-1)*(eta*(x-1) + 1) + 1)
# -2*np.exp(eta*(x-1)) + 2)*np.exp(-eta*(x-1))/(2*eta**3),
# PV_ratio*np.ones(V_a.shape)])
# Eperturb_a = np.expand_dims(scale_a,1)*dEdp_a
# #Eperturb_a = np.expand_dims(scale_a)*dEdp_a
# return Eperturb_a, scale_a, paramkey_a
#====================================================================
class _BirchMurn3(CompressCalc):
def _calc_press(self, V_a):
V0, K0, KP0 = self.eos_mod.get_param_values(param_names=['V0','K0','KP0'])
vratio_a = V_a/V0
press_a = 3/2*K0 * (vratio_a**(-7/3) - vratio_a**(-5/3)) * \
(1 + 3/4*(KP0-4)*(vratio_a**(-2/3)-1))
return press_a
def _calc_energy(self, V_a):
V0, K0, KP0 = self.eos_mod.get_param_values(
param_names=['V0','K0','KP0'])
PV_ratio = core.CONSTS['PV_ratio']
vratio_a = V_a/V0
fstrain_a = 1/2*(vratio_a**(-2/3) - 1)
energy_a = 9/2*(V0*K0/PV_ratio)*\
( KP0*fstrain_a**3 + fstrain_a**2*(1-4*fstrain_a) )
return energy_a
def _init_params(self, order):
"""Initialize list of calculator parameter names."""
order = 3 # ignore order input
V0, K0, KP0 = 100, 150, 4
param_names = ['V0','K0','KP0']
param_units = ['ang^3','GPa','1']
param_defaults = [V0,K0,KP0]
param_scales = [V0,K0,KP0]
self._set_params(param_names, param_units,
param_defaults, param_scales, order=order)
pass
#====================================================================
class _BirchMurn4(CompressCalc):
# def get_param_scale_sub(self):
# """Return scale values for each parameter"""
# V0, K0, KP0, KP20 = core.get_params(['V0','K0','KP0','KP20'])
# PV_ratio = core.CONSTS['PV_ratio']
# paramkey_a = np.array(['V0','K0','KP0','KP20','E0'])
# scale_a = np.array([V0,K0,KP0,KP0/K0,K0*V0/PV_ratio])
# return scale_a, paramkey_a
def _calc_strain_energy_coeffs(self, nexp, K0, KP0, KP20):
a1 = 3/2*(KP0-nexp-2)
a2 = 3/2*(K0*KP20 + KP0*(KP0-2*nexp-3)+3+4*nexp+11/9*nexp**2)
return a1,a2
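    # a1 and a2 are the cubic- and quartic-order coefficients of the
    # finite-strain energy expansion used in _calc_energy below
    # (here with Eulerian strain, nexp = +2).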
def _calc_press(self, V_a):
V0, K0, KP0, KP20 = self.eos_mod.get_param_values(
param_names=['V0','K0','KP0','KP20'])
nexp = +2
vratio_a = V_a/V0
fstrain_a = 1/nexp*(vratio_a**(-nexp/3) - 1)
a1,a2 = self._calc_strain_energy_coeffs(nexp,K0,KP0,KP20)
press_a = 3*K0*(1+a1*fstrain_a + a2*fstrain_a**2)*\
fstrain_a*(nexp*fstrain_a+1)**((nexp+3)/nexp)
return press_a
def _calc_energy(self, V_a):
V0, K0, KP0, KP20 = self.eos_mod.get_param_values(
param_names=['V0','K0','KP0','KP20'])
nexp = +2
PV_ratio = core.CONSTS['PV_ratio']
vratio_a = V_a/V0
fstrain_a = 1/nexp*(vratio_a**(-nexp/3) - 1)
a1,a2 = self._calc_strain_energy_coeffs(nexp,K0,KP0,KP20)
energy_a = 9*(V0*K0/PV_ratio)*\
( 1/2*fstrain_a**2 + a1/3*fstrain_a**3 + a2/4*fstrain_a**4)
return energy_a
def _init_params(self, order):
"""Initialize list of calculator parameter names."""
order = 4 # ignore order input
V0, K0, KP0 = 100, 150, 4
KP20 = -KP0/K0
KP20_scale = np.abs(KP20)
param_names = ['V0','K0','KP0','KP20']
param_units = ['ang^3','GPa','1','GPa^-1']
param_defaults = [V0,K0,KP0,KP20]
param_scales = [V0,K0,KP0,KP20_scale]
self._set_params(param_names, param_units,
param_defaults, param_scales, order=order)
pass
#====================================================================
class _GenFiniteStrain(CompressCalc):
"""
Generalized Finite Strain EOS from Jeanloz1989b
    Note: nexp=2 yields Birch-Murnaghan (Eulerian strain) EOS
          nexp=-2 yields Lagrangian strain EOS
"""
def _calc_strain_energy_coeffs(self, nexp, K0, KP0, KP20=None, KP30=None):
a1 = 3/2*(KP0-nexp-2)
if KP20 is None:
return a1
else:
a2 = 3/2*(K0*KP20 + KP0*(KP0-2*nexp-3)+3+4*nexp+11/9*nexp**2)
if KP30 is None:
return a1,a2
else:
a3 = 1/8*(9*K0**2*KP30 + 6*(6*KP0-5*nexp-6)*K0*KP20
+((3*KP0-5*nexp-6)**2 +10*nexp**2 + 30*nexp + 18)*KP0
-(50/3*nexp**3 + 70*nexp**2 + 90*nexp + 36))
return a1,a2,a3
def _calc_press(self, V_a):
V0, K0, KP0, KP20, nexp = self.eos_mod.get_param_values(
param_names=['V0','K0','KP0','KP20','nexp'])
vratio_a = V_a/V0
fstrain_a = 1/nexp*(vratio_a**(-nexp/3) - 1)
a1,a2 = self._calc_strain_energy_coeffs(nexp,K0,KP0,KP20=KP20)
press_a = 3*K0*(1+a1*fstrain_a + a2*fstrain_a**2)*\
fstrain_a*(nexp*fstrain_a+1)**((nexp+3)/nexp)
return press_a
def _calc_energy(self, V_a):
V0, K0, KP0, KP20, nexp = self.eos_mod.get_param_values(
param_names=['V0','K0','KP0','KP20','nexp'])
PV_ratio = core.CONSTS['PV_ratio']
vratio_a = V_a/V0
fstrain_a = 1/nexp*(vratio_a**(-nexp/3) - 1)
a1,a2 = self._calc_strain_energy_coeffs(nexp,K0,KP0,KP20=KP20)
energy_a = 9*(V0*K0/PV_ratio)*\
( 1/2*fstrain_a**2 + a1/3*fstrain_a**3 + a2/4*fstrain_a**4)
return energy_a
def _init_params(self, order):
"""Initialize list of calculator parameter names."""
order = 4 #ignore input order
V0, K0, KP0, nexp = 100, 150, 4, 2
nexp_scale = 1
KP20 = -KP0/K0
KP20_scale = np.abs(KP20)
param_names = ['V0','K0','KP0','KP20','nexp']
param_units = ['ang^3','GPa','1','GPa^-1','1']
param_defaults = [V0,K0,KP0,KP20,nexp]
param_scales = [V0,K0,KP0,KP20_scale,nexp_scale]
self._set_params(param_names, param_units,
param_defaults, param_scales, order=order)
pass
#====================================================================
class _Tait(CompressCalc):
# def __init__( self, setlogPmin=False,
# path_const='T', level_const=300, expand_adj_mod=None,
# expand_adj=None, supress_energy=False, supress_press=False ):
# super(Tait, self).__init__( expand_adj=None )
# self.setlogPmin = setlogPmin
# pass
# def __init__( self, setlogPmin=False, expand_adj=False ):
# self.setlogPmin = setlogPmin
# self.expand_adj = expand_adj
# pass
def _get_eos_params(self):
V0, K0, KP0 = self.eos_mod.get_param_values(
param_names=['V0','K0','KP0'])
if self.setlogPmin:
logPmin, = self.eos_mod.get_param_values(
param_names=['logPmin'])
Pmin = np.exp(logPmin)
# assert Pmin>0, 'Pmin must be positive.'
KP20 = (KP0+1)*(KP0/K0 - 1/Pmin)
else:
KP20, = self.eos_mod.get_param_values(
param_names=['KP20'])
return V0,K0,KP0,KP20
# def get_param_scale_sub(self):
# """Return scale values for each parameter"""
# V0, K0, KP0, KP20 = self.eos_mod.get_param_values(
# param_names=['V0','K0','KP0','KP20'])
# PV_ratio = core.CONSTS['PV_ratio']
# if self.setlogPmin:
# # [V0,K0,KP0,E0]
# paramkey_a = np.array(['V0','K0','KP0','E0'])
# scale_a = np.array([V0,K0,KP0,K0*V0/PV_ratio])
# else:
# # [V0,K0,KP0,KP20,E0]
# paramkey_a = np.array(['V0','K0','KP0','KP20','E0'])
# scale_a = np.array([V0,K0,KP0,KP0/K0,K0*V0/PV_ratio])
# return scale_a, paramkey_a
def _eos_to_abc_params(self, K0, KP0, KP20):
a = (KP0 + 1)/(K0*KP20 + KP0 + 1)
b = -KP20/(KP0+1) + KP0/K0
c = (K0*KP20 + KP0 + 1)/(-K0*KP20 + KP0**2 + KP0)
return a,b,c
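    # With these a, b, c the Tait relation used in _calc_press is
    # P(V) = (1/b)*(((V/V0 + a - 1)/a)**(-1/c) - 1).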
def _calc_press(self, V_a):
V0, K0, KP0, KP20 = self.eos_mod.get_param_values(
param_names=['V0','K0','KP0','KP20'])
a,b,c = self._eos_to_abc_params(K0,KP0,KP20)
vratio_a = V_a/V0
press_a = 1/b*(((vratio_a + a - 1)/a)**(-1/c) - 1)
# from IPython import embed; import pdb; embed(); pdb.set_trace()
return press_a
def _calc_energy(self, V_a):
V0, K0, KP0, KP20 = self.eos_mod.get_param_values(
param_names=['V0','K0','KP0','KP20'])
a,b,c = self._eos_to_abc_params(K0,KP0,KP20)
PV_ratio = core.CONSTS['PV_ratio']
vratio_a = V_a/V0
press_a = self._calc_press(V_a)
eta_a = b*press_a + 1
eta_pow_a = eta_a**(-c)
# NOTE: Need to simplify energy expression here
energy_a = (V0/b)/PV_ratio*(a*c/(c-1)-1)\
- (V0/b)/PV_ratio*( a*c/(c-1)*eta_a*eta_pow_a - a*eta_pow_a + a - 1)
return energy_a
# def _calc_energy_perturb_deprecate(self, V_a):
# """
# Returns Energy pertubation basis functions resulting from
# fractional changes to EOS params.
# """
# V0, K0, KP0, KP20 = self._get_eos_params()
# E0, = core.get_params(['E0'])
# a,b,c = self._eos_to_abc_params(K0,KP0,KP20)
# PV_ratio = core.CONSTS['PV_ratio']
# vratio_a = V_a/V0
# press_a = self._calc_press(V_a)
# eta_a = b*press_a + 1
# eta_pow_a = eta_a**(-c)
# scale_a, paramkey_a = self.get_param_scale_sub()
# # [V0,K0,KP0,KP20,E0]
# dEdp_a = np.ones((4, V_a.size))
# # dEdp_a[0,:] = 1/(PV_ratio*b*(c-1))*eta_a*(-a*eta_pow_a -1 + (1-a)*(a+c))
# dEdp_a[0,:] = 1/(PV_ratio*b*(c-1))*eta_a*(-a*eta_pow_a +a -1 -a*c+c) \
# + 1/(PV_ratio*b)*(a*c/(c-1)-1)
# dEdp_a[-1,:] = 1
# # from IPython import embed; embed(); import ipdb; ipdb.set_trace()
# # 1x3
# dEdabc_a = np.vstack\
# ([V0*eta_a/(a*b*(c-1))*(-a*eta_pow_a + a*(1-c))+c*V0/(b*(c-1)),
# V0/(b**2*(c-1))*((-a*eta_pow_a+a-1)*(c-1) + c*a*eta_a*eta_pow_a) \
# - V0/b**2*(a*c/(c-1) - 1),
# -a*V0/(b*(c-1)**2)*eta_a*eta_pow_a*(-c+(c-1)*(1-np.log(eta_a)))\
# +a*V0/(b*(c-1))*(1-c/(c-1))])
# # 3x3
# abc_jac = np.array([[-KP20*(KP0+1)/(K0*KP20+KP0+1)**2,
# K0*KP20/(K0*KP20+KP0+1)**2,
# -K0*(KP0+1)/(K0*KP20+KP0+1)**2],
# [-KP0/K0**2, KP20/(KP0+1)**2 + 1/K0, -1/(KP0+1)],
# [KP20*(KP0**2+2*KP0+1)/(-K0*KP20+KP0**2+KP0)**2,
# (-K0*KP20+KP0**2+KP0-(2*KP0+1)*(K0*KP20+KP0+1))/\
# (-K0*KP20+KP0**2+KP0)**2,
# K0*(KP0**2+2*KP0+1)/(-K0*KP20+KP0**2+KP0)**2]])
# dEdp_a[1:4,:] = 1/PV_ratio*np.dot(abc_jac.T,dEdabc_a)
# print(dEdp_a.shape)
# if self.setlogPmin:
# # [V0,K0,KP0,E0]
# print(dEdp_a.shape)
# dEdp_a = dEdp_a[[0,1,2,4],:]
# Eperturb_a = np.expand_dims(scale_a,1)*dEdp_a
# #Eperturb_a = np.expand_dims(scale_a)*dEdp_a
# return Eperturb_a, scale_a, paramkey_a
def _init_params(self, order):
"""Initialize list of calculator parameter names."""
order = 4 # ignore input order
V0, K0, KP0 = 100, 150, 4
KP20 = -KP0/K0
KP20_scale = np.abs(KP20)
param_names = ['V0','K0','KP0','KP20']
param_units = ['ang^3','GPa','1','GPa^-1']
param_defaults = [V0,K0,KP0,KP20]
param_scales = [V0,K0,KP0,KP20_scale]
self._set_params(param_names, param_units,
param_defaults, param_scales, order=order)
pass
#====================================================================
class _PolyRho(CompressCalc):
"""
Needed for Spera 2011
"""
# def __init__(self, eos_mod, path_const='T', order=5, mass=100 ):
# def _get_coef_array(self):
# basename = 'Pcoef'
# param_names = core.make_array_param_defaults(basename, self.order)
# param_values = np.array(self.eos_mod.get_param_values(
# param_names=param_names))
# coef_index = core.get_array_param_index(param_names)
# order = np.max(coef_index)
# param_full = np.zeros(order)
# param_full[coef_index] = param_values
def _vol_to_rho(self, V):
rho = (self.eos_mod.molar_mass/V)*(core.CONSTS['ang3percc']/core.CONSTS['Nmol'])
return rho
def _rho_to_vol(self, rho):
V = (self.eos_mod.molar_mass/rho)*(core.CONSTS['ang3percc']/core.CONSTS['Nmol'])
return V
def _get_poly_coef(self):
param_names = self.eos_mod.get_array_param_names('Pcoef')
param_values = self.eos_mod.get_param_values(param_names=param_names)
V0, = self.eos_mod.get_param_values(param_names=['V0'])
rho0 = self._vol_to_rho(V0)
coef_index = core.get_array_param_index(param_names)
order = np.max(coef_index)+1
param_full = np.zeros(order)
param_full[coef_index] = param_values
coef_a = np.flipud(param_full)
return coef_a, rho0
def _get_unshifted_poly_coef(self):
coef_a, rho0 = self._get_poly_coef()
order = coef_a.size
Pcoef_a = coef_a*rho0**np.flipud(np.arange(order))
core.simplify_poly(coef_a)
def _calc_press(self, V_a):
V_a = core.fill_array(V_a)
coef_a, rho0 = self._get_poly_coef()
rho_a = self._vol_to_rho(V_a)
order = coef_a.size
Pcoef_a = coef_a*rho0**np.flipud(np.arange(order))
x = rho_a/rho0
press_a = np.polyval(Pcoef_a, x-1)
return press_a
def _calc_energy(self, V_a):
V_a = core.fill_array(V_a)
PV_ratio = core.CONSTS['PV_ratio']
coef_a, rho0 = self._get_poly_coef()
rho_a = self._vol_to_rho(V_a)
order = coef_a.size
Pcoef_a = coef_a*rho0**np.flipud(np.arange(order))
x = rho_a/rho0
press_a = np.polyval(Pcoef_a, x-1)
core.simplify_poly(Pcoef_a)
V0, = self.eos_mod.get_param_values(param_names=['V0'])
coef_a, rho0 = self._get_poly_coef()
coef_rev_a = np.flipud(coef_a)
order = coef_a.size
coef_exp_a = np.flipud(np.arange(0,order))
energy_a = np.zeros(V_a.shape)
energy_a += coef_rev_a[0]*(V_a-V0)*PV_ratio
energy_a += coef_rev_a[1]*np.log(V_a/V0)*PV_ratio
for deg in range(2,order):
            # NOTE: the deg >= 2 terms are completed here by the same pattern as
            # the deg = 0 and deg = 1 terms above (analytic integral of V**(-deg));
            # verify against the intended PolyRho energy convention.
            energy_a += coef_rev_a[deg]*(V_a**(1-deg) - V0**(1-deg))/(1-deg)*PV_ratio
return energy_a
def get_energy_scale(self):
V0, dPdrho = self.get_param_defaults(['V0','_Pcoef_1'])
rho0 = self._vol_to_rho(V0)
K0 = rho0*dPdrho
energy_scale = np.round(V0*K0/core.CONSTS['PV_ratio'],decimals=2)
return energy_scale
def _init_params(self, order):
"""Initialize list of calculator parameter names."""
rho0 = 2.58516
coef_basename = 'Pcoef'
param_names = core.make_array_param_names(coef_basename, order,
skipzero=True)
param_values_sio2 = [8.78411, 12.08481, -5.5986, 4.92863, -0.90499]
if order>6:
param_defaults = [0 for ind in range(1,order)]
param_defaults[0:5] = param_values_sio2
else:
param_defaults = param_values_sio2[0:order-1]
param_scales = [1 for ind in range(1,order)]
param_units = core.make_array_param_units(param_names, base_unit='GPa',
deriv_unit='(g/cc)')
V0 = self._rho_to_vol(rho0)
param_names.append('V0')
param_scales.append(V0)
param_units.append('ang^3')
param_defaults.append(V0)
self._set_params(param_names, param_units,
param_defaults, param_scales, order=order)
pass
#====================================================================
class _PolyV(CompressCalc):
_name='PolyV'
def _init_params(self, order):
"""Initialize list of calculator parameter names."""
order = 3 # ignore order input
V0, K0, KP0 = 100, 150, 4
param_names = ['V0','K0','KP0']
param_units = ['ang^3','GPa','1']
param_defaults = [V0,K0,KP0]
param_scales = [V0,K0,KP0]
self._set_params(param_names, param_units,
param_defaults, param_scales, order=order)
pass
def _calc_press(self, V_a):
V0, K0, KP0 = self.eos_mod.get_param_values(
param_names=['V0','K0','KP0'])
eta = 3/2*(KP0-1)
vratio_a = V_a/V0
x_a = vratio_a**(1/3)
press_a = 3*K0*(1-x_a)*x_a**(-2)*np.exp(eta*(1-x_a))
return press_a
def _calc_energy(self, V_a):
V0, K0, KP0 = self.eos_mod.get_param_values(
param_names=['V0','K0','KP0'])
PV_ratio, = core.get_consts(['PV_ratio'])
eta = 3/2*(KP0-1)
vratio_a = V_a/V0
x_a = vratio_a**(1/3)
energy_a = 9*K0*V0/PV_ratio/eta**2*\
(1 + (eta*(1-x_a)-1)*np.exp(eta*(1-x_a)))
return energy_a
# def get_param_scale_sub(self):
# """Return scale values for each parameter"""
# V0, K0, KP0 = core.get_params(['V0','K0','KP0'])
# PV_ratio, = core.get_consts(['PV_ratio'])
# paramkey_a = np.array(['V0','K0','KP0','E0'])
# scale_a = np.array([V0,K0,KP0,K0*V0/PV_ratio])
# return scale_a, paramkey_a
# def _calc_energy_perturb(self, V_a):
# """Returns Energy pertubation basis functions resulting from fractional changes to EOS params."""
# V0, K0, KP0, E0 = core.get_params(['V0','K0','KP0','E0'])
# PV_ratio, = core.get_consts(['PV_ratio'])
# eta = 3/2*(KP0-1)
# vratio_a = V_a/V0
# x = vratio_a**(1/3)
# scale_a, paramkey_a = self.get_param_scale_sub()
# # NOTE: CHECK UNITS (PV_RATIO) here
# dEdp_a = 1/PV_ratio*np.vstack\
# ([-3*K0*(eta**2*x*(x-1) + 3*eta*(x-1) - 3*np.exp(eta*(x-1)) + 3)\
# *np.exp(-eta*(x-1))/eta**2,
# -9*V0*(eta*(x-1) - np.exp(eta*(x-1)) + 1)*np.exp(-eta*(x-1))/eta**2,
# 27*K0*V0*(2*eta*(x-1) + eta*(-x + (x-1)*(eta*(x-1) + 1) + 1)
# -2*np.exp(eta*(x-1)) + 2)*np.exp(-eta*(x-1))/(2*eta**3),
# PV_ratio*np.ones(V_a.shape)])
# Eperturb_a = np.expand_dims(scale_a,1)*dEdp_a
# #Eperturb_a = np.expand_dims(scale_a)*dEdp_a
# return Eperturb_a, scale_a, paramkey_a
#====================================================================
| mit | 6,094,169,553,511,042,000 | 32.871345 | 107 | 0.512978 | false |
pointhi/searx | searx/plugins/tracker_url_remover.py | 1 | 1397 | '''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2015 by Adam Tauber, <[email protected]>
'''
from flask_babel import gettext
import re
from urlparse import urlunparse
regexes = {re.compile(r'utm_[^&]+&?'),
re.compile(r'(wkey|wemail)[^&]+&?'),
re.compile(r'&$')}
name = gettext('Tracker URL remover')
description = gettext('Remove trackers arguments from the returned URL')
default_on = True
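# Example: a result URL query 'utm_source=rss&utm_medium=feed&id=1' is reduced
# to 'id=1' below -- the first rule strips utm_* arguments, the second strips
# wkey/wemail, and the last removes any trailing '&'.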
def on_result(request, ctx):
query = ctx['result']['parsed_url'].query
if query == "":
return True
for reg in regexes:
query = reg.sub('', query)
if query != ctx['result']['parsed_url'].query:
ctx['result']['parsed_url'] = ctx['result']['parsed_url']._replace(query=query)
ctx['result']['url'] = urlunparse(ctx['result']['parsed_url'])
return True
| agpl-3.0 | -4,441,933,707,822,547,500 | 30.75 | 87 | 0.688618 | false |
teagles/teagles-pc | 24/24.py | 1 | 3371 | #!/usr/bin/env python
# http://butter:[email protected]/pc/hex/ambiguity.html
import os
import sys
import Image
import requests
import collections
import ImageDraw
IMAGE_URL = 'http://butter:[email protected]/pc/hex/maze.png'
FNAME = '24/maze.png'
Point = collections.namedtuple('Point', ['x', 'y'])
class MazeTurtle:
DIRECTIONS = {'north': lambda p: Point(p.x, p.y - 1),
'east': lambda p: Point(p.x + 1, p.y),
'south': lambda p: Point(p.x, p.y + 1),
'west': lambda p: Point(p.x - 1, p.y)}
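    # Image coordinates: y grows downward, so 'north' decreases y and 'south'
    # increases it; each lambda maps a Point to the neighbouring pixel.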
def __init__(self, img, start_point, goal_point, wall_colour):
self.img = img
self.pix = img.load()
self.point = start_point
self.goal_point = goal_point
self.wall_colour = wall_colour
self.visited = set()
self.path = []
self.branches = []
self.dead_ends = []
self.im_num = 0
def valid_point(self, p):
return (p.x >= 0 and p.y >= 0 and p.x < self.img.size[0]
and p.y < self.img.size[1])
def scout(self):
possibilities = []
for fp in MazeTurtle.DIRECTIONS.values():
pp = fp(self.point)
if self.valid_point(pp):
if self.pix[pp] != self.wall_colour:
if pp not in self.visited:
possibilities.append(pp)
return possibilities
def victory(self):
return (self.goal_point.x == self.point.x
and
self.goal_point.y == self.point.y)
def find_path(self):
while (not self.victory()):
self.path.append(self.point)
self.visited.add(self.point)
possibilities = self.scout()
if len(possibilities) > 0:
if len(possibilities) > 1:
self.branches.append((len(self.path), possibilities))
self.point = possibilities[0]
else:
#print self.path
#print self.branches
self.dead_ends.append(self.path[self.branches[-1][0]:])
del self.path[self.branches[-1][0]:]
del self.branches[-1][1][0]
#self.show_path()
#raw_input('Continue:')
self.point = self.branches[-1][1][0]
                if len(self.branches[-1][1]) == 1:
del self.branches[-1]
self.path.append(self.point)
return self.path
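    # find_path is a depth-first search: 'branches' records the path length and
    # the untried neighbours at each junction, so hitting a dead end rewinds the
    # path to the most recent junction and tries the next possibility.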
def show_path(self):
temp = self.img.copy()
draw = ImageDraw.Draw(temp)
for dp in self.path:
draw.point(dp, fill='green')
for de in self.dead_ends[:-1]:
for dp in de:
#draw.point(dp, fill='blue')
pass
for dp in self.dead_ends[-1]:
#draw.point(dp, fill='purple')
pass
temp.save('24/img%d.png' % self.im_num, quality=100)
self.im_num = self.im_num + 1
# 24/24.py
def main(args=None):
if args is None:
args = sys.argv[1:]
if not os.path.isfile(FNAME):
with open(FNAME, 'wb') as f:
f.write(requests.get(IMAGE_URL).content)
img = Image.open(FNAME)
turtle = MazeTurtle(img, Point(639, 0), Point(1, 640), 25)
print turtle.find_path()
turtle.show_path()
if __name__ == '__main__':
main()
| mit | 9,179,901,707,781,394,000 | 30.801887 | 73 | 0.5221 | false |
MJB47/Jokusoramame | joku/cogs/mod.py | 1 | 13299 | """
Non-generic moderation cog.
"""
import asyncio
import collections
import random
import discord
from discord.ext import commands
from joku.cogs._common import Cog
from joku.core import checks
from joku.core.bot import Context
from joku.core.checks import mod_command, bot_has_permissions
from joku.core.utils import get_role
class Moderation(Cog):
"""
Non-generic moderation cog.
"""
async def on_member_remove(self, member: discord.Member):
# Rolestate
await self.bot.database.save_rolestate(member)
async def on_member_join(self, member: discord.Member):
# Rolestate
setting = await self.bot.database.get_setting(member.guild, "rolestate", {})
if setting.get("status") == 1:
rolestate = await self.bot.database.get_rolestate_for_member(member)
if rolestate is None:
return
roles = [discord.utils.get(member.guild.roles, id=r_id) for r_id in rolestate.roles]
roles = [r for r in roles if r is not None]
await member.edit(roles=roles)
if rolestate.nick:
await member.edit(nick=rolestate.nick)
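    # Rolestate restore: when a member rejoins, the roles and nickname saved on
    # their last departure are re-applied (roles that no longer exist are skipped).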
async def on_message(self, message: discord.Message):
# Anti mention spam
        # Use sets here as we only want to count unique mentions; user and role
        # mentions are combined (their intersection would always be empty).
        mentions = len(set(message.mentions) | set(message.role_mentions))
if mentions < 3:
# dont bother loading from DB
return
if message.guild is None:
return
if message.author.top_role >= message.guild.me.top_role or message.author == message.guild.owner:
# can't ban anyway
return
c = await self.bot.database.get_setting(message.guild, "mention_spam", {
"enabled": False,
"threshold": 5
})
if c["enabled"] is True:
if mentions == c["threshold"]:
guild = message.guild # type: discord.Guild
await guild.ban(message.author)
await message.channel.send("Member **{}** was automatically banned for going over the mention spam "
"limit.".format(message.author))
# anti mention spam
@commands.group(pass_context=True, invoke_without_command=True)
@checks.has_permissions(ban_members=True)
@mod_command()
async def antimention(self, ctx: Context, *, status: str = None):
"""
Toggles the antimention status in this server.
"""
previous = await ctx.bot.database.get_setting(ctx.guild, "mention_spam", {
"enabled": False,
"threshold": 5
})
if status is None or status not in ["on", "off"]:
current_status = previous.get("enabled", False)
if current_status:
await ctx.send("Anti-mention spam is currently **on**.")
else:
await ctx.send("Anti-mention spam is currently **off**.")
return
if status == "on":
await ctx.bot.database.set_setting(ctx.guild, "mention_spam",
**{
"enabled": True,
"threshold": previous["threshold"]
})
await ctx.send(":heavy_check_mark: Enabled anti-mention spam.")
elif status == "off":
await ctx.bot.database.set_setting(ctx.guild, "mention_spam",
**{
"enabled": False,
"threshold": previous["threshold"]
})
await ctx.send(":heavy_check_mark: Disabled anti-mention spam.")
@antimention.command()
@checks.has_permissions(ban_members=True)
@mod_command()
async def threshold(self, ctx: Context, threshold: int):
"""
Changes the threshold for anti-mention spam to ban at.
"""
if threshold < 3:
await ctx.send(":x: Cannot set a threshold lower than 3.")
return
previous = await ctx.bot.database.get_setting(ctx.guild, "mention_spam", {
"enabled": False,
"threshold": 5
})
await ctx.bot.database.set_setting(ctx.guild, "mention_spam", enabled=previous["enabled"], threshold=threshold)
await ctx.send(":heavy_check_mark: Set anti-mention spam threshold to {}.".format(threshold))
@commands.command(pass_context=True)
@checks.has_permissions(ban_members=True)
@bot_has_permissions(ban_members=True)
@mod_command()
async def xban(self, ctx: Context, user_id: int):
"""
Cross-bans a user.
"""
if user_id in [m.id for m in ctx.message.guild.members]:
await ctx.channel.send(":x: This command is used for banning members not in the server.")
return
try:
user = await ctx.bot.get_user_info(user_id)
await ctx.bot.http.ban(user_id, ctx.message.guild.id, 0)
except discord.Forbidden:
await ctx.channel.send(":x: 403 FORBIDDEN")
except discord.NotFound:
await ctx.channel.send(":x: User not found.")
else:
await ctx.channel.send(":heavy_check_mark: Banned user {}.".format(user.name))
@commands.group(pass_context=True, invoke_without_command=True)
@checks.has_permissions(manage_guild=True, manage_roles=True)
@mod_command()
async def rolestate(self, ctx: Context, *, status: str = None):
"""
Manages rolestate.
This will automatically save roles for users who have left the server.
"""
if status is None:
# Check the status.
setting = await ctx.bot.database.get_setting(ctx.message.guild, "rolestate", {})
if setting.get("status") == 1:
await ctx.channel.send("Rolestate is currently **on.**")
else:
await ctx.channel.send("Rolestate is currently **off.**")
else:
if status.lower() == "on":
await ctx.bot.database.set_setting(ctx.message.guild, "rolestate", status=1)
await ctx.channel.send(":heavy_check_mark: Turned Rolestate on.")
return
elif status.lower() == "off":
await ctx.bot.database.set_setting(ctx.message.guild, "rolestate", status=0)
await ctx.channel.send(":heavy_check_mark: Turned Rolestate off.")
return
else:
await ctx.channel.send(":x: No.")
@rolestate.command()
@checks.has_permissions(manage_guild=True, manage_roles=True)
@mod_command()
async def view(self, ctx: Context, *, user_id: int = None):
"""
Views the current rolestate of a member.
"""
if user_id is None:
user_id = ctx.author.id
rolestate = await self.bot.database.get_rolestate_for_id(ctx.guild.id, user_id)
user = await ctx.bot.get_user_info(user_id) # type: discord.User
em = discord.Embed(title="Rolestate viewer")
if rolestate is None:
em.description = "**No rolestate found for this user here.**"
em.colour = discord.Colour.red()
else:
em.description = "This shows the most recent rolestate for a user ID. This is **not accurate** if they " \
"haven't left before, or are still in the guild."
em.add_field(name="Username", value=user.name)
em.add_field(name="Nick", value=rolestate.nick, inline=False)
roles = ", ".join([get_role(ctx.guild, r_id).mention for r_id in rolestate.roles if r_id != ctx.guild.id])
em.add_field(name="Roles", value=roles, inline=False)
em.colour = discord.Colour.light_grey()
em.set_thumbnail(url=user.avatar_url)
em.set_footer(text="Rolestate for guild {}".format(ctx.guild.name))
await ctx.send(embed=em)
@commands.command(pass_context=True)
@commands.cooldown(rate=1, per=5 * 60, type=commands.BucketType.guild)
@checks.has_permissions(kick_members=True)
@mod_command()
async def islandbot(self, ctx: Context):
"""
Who will be voted off of the island?
"""
message = ctx.message # type: discord.Message
channel = message.channel
# Begin the raffle!
timeout = random.randrange(30, 60)
await ctx.channel.send(":warning: :warning: :warning: Raffle ends in **{}** seconds!".format(timeout))
# messages to collect
messages = []
async def _inner():
# inner closure point - this is killed by asyncio.wait()
while True:
next_message = await ctx.bot.wait_for("message", check=lambda m: m.channel == channel)
if next_message.author == message.guild.me:
continue
# Do some checks on the user to make sure we can kick them.
if next_message.author.guild_permissions.administrator:
continue
if next_message.author.top_role >= message.guild.me.top_role:
continue
messages.append(next_message)
try:
# wait for the waiter, but discard it when we're done
await asyncio.wait_for(_inner(), timeout=timeout)
except asyncio.TimeoutError:
pass
# gather all the users in the messages
authors = list({m.author for m in messages})
# Choose some random people from the authors.
chosen = []
for x in range(0, min(len(authors), 5)):
r = random.choice(authors)
chosen.append(r)
authors.remove(r)
if not chosen:
await ctx.channel.send(":x: Nobody entered the raffle")
return
fmt = ":island: These people are up for vote:\n\n{}\n\nMention to vote.".format(
"\n".join(m.mention for m in chosen)
)
await ctx.channel.send(fmt)
votes = []
voted = []
async def _inner2():
while True:
next_message = await ctx.bot.wait_for("message", check=lambda m: m.channel == channel)
# Ignore bots.
if next_message.author.bot:
continue
# No double voting.
if next_message.author in voted:
continue
# They didn't mention anyone.
if not next_message.mentions:
continue
# Check the first mention.
m = next_message.mentions[0]
# You can't vote for somebody not in the raffle!
if m not in chosen:
continue
if m == next_message.author:
await ctx.send("I am not a tool for assisted suicide")
continue
# Add them to the votes, and add the author to the voted count.
votes.append(m)
voted.append(next_message.author)
try:
# wait for the waiter, but discard it when we're done
await asyncio.wait_for(_inner2(), timeout=timeout)
except asyncio.TimeoutError:
pass
# Count the votes.
counted = collections.Counter(votes)
try:
winner = counted.most_common()[0]
except IndexError:
await ctx.channel.send(":bomb: Nobody voted")
return
await ctx.channel.send(":medal: The winner is {}, with `{}` votes!".format(winner[0].mention, winner[1]))
try:
await winner[0].send("You have been voted off the island.")
except discord.HTTPException:
pass
try:
await ctx.guild.kick(winner[0])
except discord.HTTPException:
            await ctx.send("The island is rigged")
@commands.command(pass_context=True)
@checks.has_permissions(manage_nicknames=True)
@mod_command()
async def massnick(self, ctx: Context, prefix: str = "", suffix: str = ""):
"""
Mass-nicknames an entire server.
"""
coros = []
for member in ctx.message.guild.members:
coros.append(member.edit(nick=prefix + member.name + suffix))
fut = asyncio.gather(*coros, return_exceptions=True, loop=ctx.bot.loop)
async with ctx.channel.typing():
await fut
count = sum(1 for i in fut.result() if not isinstance(i, Exception))
forbidden = sum(1 for i in fut.result() if isinstance(i, discord.Forbidden))
httperror = sum(1 for i in fut.result() if isinstance(i, discord.HTTPException)) - forbidden
failed = ctx.message.guild.member_count - count
await ctx.channel.send(
":heavy_check_mark: Updated `{}` nicknames - failed to change `{}` nicknames. "
"(`{}` forbidden, `{}` too long/other)".format(count, failed, forbidden, httperror)
)
setup = Moderation.setup
| mit | -5,663,790,421,347,314,000 | 36.78125 | 119 | 0.560117 | false |
Godley/Music-Library | implementation/primaries/GUI/PlaylistDialog.py | 1 | 2929 | from PyQt4 import QtCore, QtGui, uic
import os
from implementation.primaries.GUI.helpers import get_base_dir
from implementation.primaries.GUI import themedWindow
class PlaylistDialog(QtGui.QDialog, themedWindow.ThemedWindow):
def __init__(self, app, theme, themes, design_folder):
QtGui.QDialog.__init__(self)
themedWindow.ThemedWindow.__init__(self, theme, themes)
self.qApp = app
self.theme = theme
self.design_folder = design_folder
def load(self):
path_to_file = os.path.join(self.design_folder, "NewPlaylist.ui")
uic.loadUi(path_to_file, self)
self.autoCompleteFrame.hide()
self.buttonBox.accepted.connect(self.newPlaylistOk)
self.autoCompleteBox.itemDoubleClicked.connect(self.itemClicked)
self.piecesLineEdit.editingFinished.connect(self.onInactiveSearchBar)
self.deleteItem.clicked.connect(self.removeItem)
self.piecesLineEdit.textChanged.connect(self.updateOptions)
self.piecesLineEdit.editingFinished.connect(self.onInactiveSearchBar)
self.applyTheme()
def removeItem(self):
listItems = self.listWidget.selectedItems()
if not listItems:
return
for item in listItems:
self.listWidget.takeItem(self.listWidget.row(item))
self.listWidget.show()
def newPlaylistOk(self):
data = {"name": self.playlistNameLineEdit.text(), "pieces": []}
for i in range(self.listWidget.count()):
item = self.listWidget.item(i)
fname = item.data(2)
data["pieces"].append(fname)
self.qApp.addPlaylist(data)
def updateOptions(self):
text = self.piecesLineEdit.text()
results = self.qApp.queryNotThreaded(text)
self.autoCompleteBox.clear()
for key in results:
item = QtGui.QTreeWidgetItem(key)
item.setData(0, 0, key)
self.autoCompleteBox.addTopLevelItem(item)
for file in results[key]:
fitem = QtGui.QTreeWidgetItem(file[0])
fitem.setData(0, 0, file[1])
item.addChild(fitem)
if len(results) == 0:
pass
else:
pass
self.autoCompleteBox.show()
self.autoCompleteFrame.show()
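    # updateOptions rebuilds the autocomplete tree: one top-level item per result
    # key with the matching files as children; double-clicking an entry adds it
    # to the playlist list widget (see itemClicked).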
def onInactiveSearchBar(self):
if self.piecesLineEdit.text() == "" or self.piecesLineEdit.text(
) == " " or self.autoCompleteBox.topLevelItemCount() == 0 or self.focusWidget() != self.autoCompleteBox:
self.autoCompleteBox.clear()
self.autoCompleteFrame.hide()
self.autoCompleteBox.hide()
else:
self.updateOptions()
def itemClicked(self, current_item):
fname = current_item.data(0, 0)
item = QtGui.QListWidgetItem(fname)
self.listWidget.addItem(item)
self.listWidget.show()
self.autoCompleteFrame.hide()
| gpl-2.0 | -3,475,940,596,530,879,000 | 36.075949 | 112 | 0.637078 | false |
pbraunstein/trackercise | app/brain/admin/user_data.py | 1 | 1727 | from flask_login import current_user
from app.brain.utilities import prepare_history_entry
from app.constants import HISTORY_CONSTANTS, TAXONOMY_CONSTANTS
from app.service import RepExercisesHistoryService, RepExercisesTaxonomyService
class UserData(object):
"""
Returns the relevant data for one user
I N T E R F A C E G U A R A N T E E D
---------------------------------------
get_user_data(cls):
-- Returns a dictionary with the nickname, user_id, all rep exercises history and rep exercises taxonomy
specific to that user
"""
@classmethod
def get_user_data(cls):
user_data = {'nickname': cls._get_current_user_nickname(), 'user_id': cls._get_current_user_id(),
HISTORY_CONSTANTS.GROUP_NAME: cls._get_user_rep_history()}
user_data[TAXONOMY_CONSTANTS.GROUP_NAME] = cls._get_taxonomies_for_exercises(
cls._convert_rep_exercises_to_exercise_ids(user_data[HISTORY_CONSTANTS.GROUP_NAME])
)
return user_data
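    # Rough shape of the returned dict (entries under the two GROUP_NAME keys
    # come from the history and taxonomy services):
    # {'nickname': ..., 'user_id': ...,
    #  HISTORY_CONSTANTS.GROUP_NAME: [prepared history entries],
    #  TAXONOMY_CONSTANTS.GROUP_NAME: [taxonomies for those exercise ids]}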
@classmethod
def _get_user_rep_history(cls):
exercises = RepExercisesHistoryService.get_list_of_users_exercises(cls._get_current_user_id())
return [prepare_history_entry(x) for x in exercises]
@staticmethod
def _get_current_user_nickname():
return current_user.nickname
@staticmethod
def _get_current_user_id():
return current_user.id
@staticmethod
def _convert_rep_exercises_to_exercise_ids(rep_exercises):
return [x.exercise_id for x in rep_exercises]
@staticmethod
def _get_taxonomies_for_exercises(exercise_ids):
return RepExercisesTaxonomyService.get_list_of_taxonomies_by_exercise_ids(exercise_ids)
| mit | 1,006,561,498,319,172,900 | 36.543478 | 112 | 0.671106 | false |
bks/veusz | veusz/dialogs/export.py | 1 | 18397 | # Copyright (C) 2014 Jeremy S. Sanders
# Email: Jeremy Sanders <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
##############################################################################
from __future__ import division, print_function
import os
import os.path
from .. import qtall as qt4
from .. import setting
from .. import utils
from .. import document
from ..compat import citems, cstrerror, cstr, cgetcwd
from .veuszdialog import VeuszDialog
def _(text, disambiguation=None, context='ExportDialog'):
"""Translate text."""
return qt4.QCoreApplication.translate(context, text, disambiguation)
# formats which can have multiple pages
multipageformats = set(('ps', 'pdf'))
bitmapformats = set(('png', 'bmp', 'jpg', 'tiff', 'xpm'))
# map formats to names of radio buttons
formatradio = (
('pdf', 'radioFormatPDF'),
('eps', 'radioFormatEPS'),
('ps', 'radioFormatPS' ),
('svg', 'radioFormatSVG'),
('emf', 'radioFormatEMF'),
('png', 'radioFormatPNG'),
('bmp', 'radioFormatBMP'),
('jpg', 'radioFormatJPG'),
('tiff', 'radioFormatTIFF'),
('xpm', 'radioFormatXPM'),
)
class ExportDialog(VeuszDialog):
"""Export dialog."""
def __init__(self, mainwindow, doc, docfilename):
"""Setup dialog."""
VeuszDialog.__init__(self, mainwindow, 'export.ui')
self.document = doc
doc.signalModified.connect(self.updatePagePages)
self.updatePagePages()
# change 'Save' button to 'Export'
self.buttonBox.button(qt4.QDialogButtonBox.Save).setText(_('Export'))
# these are mappings between filetypes and radio buttons
self.fmtradios = dict([(f, getattr(self, r)) for f, r in formatradio])
self.radiofmts = dict([(getattr(self, r), f) for f, r in formatradio])
# get allowed types (some formats are disabled if no helper)
docfmts = set()
for types, descr in document.Export.getFormats():
docfmts.update(types)
# disable type if not allowed
for fmt, radio in citems(self.fmtradios):
if fmt not in docfmts:
radio.setEnabled(False)
# connect format radio buttons
def fmtclicked(f):
return lambda: self.formatClicked(f)
for r, f in citems(self.radiofmts):
r.clicked.connect(fmtclicked(f))
# connect page radio buttons
self.radioPageSingle.clicked.connect(lambda: self.pageClicked('single'))
self.radioPageAll.clicked.connect(lambda: self.pageClicked('all'))
self.radioPagePages.clicked.connect(lambda: self.pageClicked('pages'))
# other controls
self.checkMultiPage.clicked.connect(self.updateSingleMulti)
self.buttonBrowse.clicked.connect(self.browseClicked)
setdb = setting.settingdb
eloc = setdb['dirname_export_location']
# where to export file
if eloc == 'doc':
self.dirname = os.path.dirname(os.path.abspath(docfilename))
elif eloc == 'cwd':
self.dirname = cgetcwd()
else: # 'prev'
self.dirname = setdb.get('dirname_export', qt4.QDir.homePath())
# set default filename
ext = setdb.get('export_format', 'pdf')
if not docfilename:
docfilename = 'export'
self.docname = os.path.splitext(os.path.basename(docfilename))[0]
self.formatselected = ext
self.pageselected = setdb.get('export_page', 'single')
self.checkMultiPage.setChecked(setdb.get('export_multipage', True))
self.updateSingleMulti()
self.checkOverwrite.setChecked(setdb.get('export_overwrite', False))
self.exportSVGTextAsText.setChecked(setdb['export_SVG_text_as_text'])
self.exportAntialias.setChecked(setdb['export_antialias'])
self.exportQuality.setValue(setdb['export_quality'])
# validate and set DPIs
dpis = ('72', '75', '90', '96', '100', '150', '200', '300')
for cntrl in self.exportDPI, self.exportDPISVG, self.exportDPIPDF:
cntrl.addItems(dpis)
cntrl.setValidator(qt4.QIntValidator(10, 10000, self))
self.exportDPI.setEditText(str(setdb['export_DPI']))
self.exportDPISVG.setEditText(str(setdb['export_DPI_SVG']))
self.exportDPIPDF.setEditText(str(setdb['export_DPI_PDF']))
# button to change bitmap background
self.exportBackgroundButton.clicked.connect(
self.slotExportBackgroundClicked)
self.updateExportBackground(setdb['export_background'])
# set correct format
self.fmtradios[ext].click()
# regexp for comma separated ranges
valre = qt4.QRegExp(
r'^[0-9]+(\s*-\s*[0-9]+)?(\s*,\s*[0-9]+(\s*-\s*[0-9]+)?)*$')
valid = qt4.QRegExpValidator(valre, self)
self.editPagePages.setValidator(valid)
# set page mode
{
'range': self.radioPageSingle, # compatibility
'single': self.radioPageSingle,
'all': self.radioPageAll,
'pages': self.radioPagePages,
}[self.pageselected].click()
# label showing success/failure
self.labelStatus.clear()
# fix height as widgets are hidden
width = self.size().width()
self.adjustSize()
self.resize(width, self.size().height())
def formatClicked(self, fmt):
"""If the format is changed."""
setting.settingdb['export_format'] = fmt
self.formatselected = fmt
self.checkMultiPage.setEnabled(fmt in multipageformats)
for c in (self.exportAntialias, self.exportDPI, self.labelDPI,
self.exportBackgroundButton, self.labelBackgroundButton):
c.setVisible(fmt in bitmapformats)
for c in (self.exportDPIPDF, self.labelDPIPDF,
self.exportColor, self.labelColor):
c.setVisible(fmt in ('pdf', 'ps', 'eps'))
for c in (self.exportQuality, self.labelQuality):
c.setVisible(fmt == 'jpg')
for c in (self.exportSVGTextAsText, self.labelSVGTextAsText,
self.exportDPISVG, self.labelDPISVG):
c.setVisible(fmt == 'svg')
self.updateSingleMulti()
filename = os.path.splitext(self.editFileName.text())[0] + '.' + fmt
self.editFileName.setText(filename)
def pageClicked(self, page):
"""If page type is set."""
setting.settingdb['export_page'] = page
self.pageselected = page
self.updateSingleMulti()
self.editPagePages.setEnabled(page=='pages')
def browseClicked(self):
"""Browse for file."""
setdb = setting.settingdb
# File types we can export to in the form ([extensions], Name)
fd = qt4.QFileDialog(self, _('Export page'))
filename = self.editFileName.text()
dirname = os.path.dirname(self.editFileName.text())
fd.setDirectory(dirname if dirname else self.dirname)
fd.setFileMode(qt4.QFileDialog.AnyFile)
fd.setAcceptMode(qt4.QFileDialog.AcceptSave)
fd.setOptions(qt4.QFileDialog.DontConfirmOverwrite)
# Create a mapping between a format string and extensions
filtertoext = {}
# convert extensions to filter
exttofilter = {}
filters = []
# a list of extensions which are allowed
validextns = []
formats = document.Export.getFormats()
for extns, name in formats:
extensions = " ".join(["*." + item for item in extns])
# join eveything together to make a filter string
filterstr = '%s (%s)' % (name, extensions)
filtertoext[filterstr] = extns
for e in extns:
exttofilter[e] = filterstr
filters.append(filterstr)
validextns += extns
fd.setNameFilters(filters)
fd.selectNameFilter(exttofilter[setdb['export_format']])
filename = self.editFileName.text()
dirname = os.path.dirname(os.path.abspath(filename))
if os.path.isdir(dirname):
fd.selectFile(filename)
if fd.exec_() == qt4.QDialog.Accepted:
# convert filter to extension
filterused = str(fd.selectedNameFilter())
chosenext = filtertoext[filterused][0]
filename = fd.selectedFiles()[0]
fileext = os.path.splitext(filename)[1][1:]
if fileext not in validextns or fileext != chosenext:
filename += "." + chosenext
self.editFileName.setText(filename)
self.fmtradios[chosenext].click()
def isMultiFile(self):
"""Is output going to be multiple pages?"""
multifile = self.pageselected != 'single'
if (self.formatselected in multipageformats and
self.checkMultiPage.isChecked()):
multifile = False
return multifile
def updateSingleMulti(self, _oldmulti=[None]):
"""Change filename according to selected single or multi button."""
setting.settingdb['export_multipage'] = self.checkMultiPage.isChecked()
multifile = self.isMultiFile()
if multifile:
templ = setting.settingdb['export_template_multi']
else:
templ = setting.settingdb['export_template_single']
newfilename = os.path.join(
self.dirname,
templ.replace('%DOCNAME%', self.docname) + '.' + self.formatselected)
# only change if multi format status has changed or is
# uninitialised
if multifile is not getattr(self, '_oldsinglemulti', None):
self.editFileName.setText(newfilename)
self._oldsinglemulti = multifile
def updatePagePages(self):
"""Update widgets allowing user to set ranges of pages."""
npages = self.document.getNumberPages()
if npages == 0:
return
text = '%i-%i' % (1, npages)
self.editPagePages.setText(text)
@qt4.pyqtSlot()
def clearLabel(self):
"""Clear label.
Defined as a slot to work around PyQt C++ object deleted bug. """
self.labelStatus.clear()
def showMessage(self, text):
"""Show a message in a label, clearing after a time."""
self.labelStatus.setText(text)
qt4.QTimer.singleShot(3000, self.clearLabel)
def updateExportBackground(self, colorname):
"""Update color on export background."""
pixmap = qt4.QPixmap(16, 16)
col = self.document.evaluate.colors.get(colorname)
pixmap.fill(col)
# update button (storing color in button itself - what fun!)
self.exportBackgroundButton.setIcon(qt4.QIcon(pixmap))
self.exportBackgroundButton.iconcolor = colorname
def slotExportBackgroundClicked(self):
"""Button clicked to change background."""
qcolor = self.document.evaluate.colors.get(
self.exportBackgroundButton.iconcolor)
color = qt4.QColorDialog.getColor(
qcolor,
self,
"Choose color",
qt4.QColorDialog.ShowAlphaChannel )
if color.isValid():
self.updateExportBackground(utils.extendedColorFromQColor(color))
def getPagePages(self):
"""Get list of entered pages."""
txt = self.editPagePages.text()
parts = txt.split(',')
pages = []
for p in parts:
p = p.replace(' ', '')
try:
if p.find('-')>=0:
rng = p.split('-')
pages += list(range(int(rng[0])-1, int(rng[1])))
else:
pages.append(int(p)-1)
except ValueError:
                # conversion error
raise RuntimeError(_('Error: invalid list of pages'))
# check in range
for pg in pages:
if pg<0 or pg>=self.document.getNumberPages():
raise RuntimeError(_('Error: pages out of range'))
return pages
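    # e.g. '1-3,5' yields the zero-based page indices [0, 1, 2, 4].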
def accept(self):
"""Do the export"""
if self.document.getNumberPages() == 0:
self.showMessage(_('Error: no pages in document'))
return
filename = self.editFileName.text()
if (self.isMultiFile() and
'%PAGENAME%' not in filename and
'%PAGE%' not in filename and
'%PAGE00%' not in filename and
'%PAGE000%' not in filename):
self.showMessage(
_('Error: page name or number must be in filename'))
return
if self.pageselected == 'single':
pages = [self.mainwindow.plot.getPageNumber()]
elif self.pageselected == 'all':
pages = list(range(self.document.getNumberPages()))
elif self.pageselected == 'pages':
try:
pages = self.getPagePages()
except RuntimeError as e:
self.showMessage(str(e))
return
setdb = setting.settingdb
# update settings from controls
setdb['export_overwrite'] = self.checkOverwrite.isChecked()
setdb['export_antialias'] = self.exportAntialias.isChecked()
setdb['export_quality'] = self.exportQuality.value()
setdb['export_color'] = self.exportColor.currentIndex() == 0
setdb['export_background'] = self.exportBackgroundButton.iconcolor
setdb['export_SVG_text_as_text'] = self.exportSVGTextAsText.isChecked()
# update dpi if possible
# FIXME: requires some sort of visual notification of validator
for cntrl, setn in (
(self.exportDPI, 'export_DPI'),
(self.exportDPIPDF, 'export_DPI_PDF'),
(self.exportDPISVG, 'export_DPI_SVG')):
try:
text = cntrl.currentText()
valid = cntrl.validator().validate(text, 0)[0]
if valid == qt4.QValidator.Acceptable:
setdb[setn] = int(text)
except ValueError:
pass
export = document.Export(
self.document,
'', # filename
[0], # page numbers
bitmapdpi=setdb['export_DPI'],
pdfdpi=setdb['export_DPI_PDF'],
antialias=setdb['export_antialias'],
color=setdb['export_color'],
quality=setdb['export_quality'],
backcolor=setdb['export_background'],
svgtextastext=setdb['export_SVG_text_as_text'],
svgdpi=setdb['export_DPI_SVG'],
)
def _overwriteQuestion(filename):
"""Ask user whether file can be overwritten."""
retn = qt4.QMessageBox.question(
self,
_("Overwrite file?"),
_("The file %s already exists") % os.path.basename(filename),
qt4.QMessageBox.Save | qt4.QMessageBox.Cancel,
qt4.QMessageBox.Cancel)
return retn == qt4.QMessageBox.Save
# count exported pages (in list so can be modified in function)
pagecount = [0]
def _checkAndExport():
"""Check whether file exists and export if ok."""
if os.path.exists(export.filename):
if not setdb['export_overwrite']:
if not _overwriteQuestion(export.filename):
return
# show busy cursor
qt4.QApplication.setOverrideCursor(qt4.QCursor(qt4.Qt.WaitCursor))
# delete file if already exists
try:
os.unlink(export.filename)
except EnvironmentError:
pass
try:
# actually do the export
export.export()
pagecount[0] += len(export.pagenumbers)
except (RuntimeError, EnvironmentError) as e:
# errors from the export
if isinstance(e, EnvironmentError):
msg = cstrerror(e)
else:
msg = cstr(e)
qt4.QApplication.restoreOverrideCursor()
qt4.QMessageBox.critical(
self, _("Error - Veusz"),
_("Error exporting to file '%s'\n\n%s") %
(export.filename, msg))
else:
qt4.QApplication.restoreOverrideCursor()
if self.isMultiFile() or len(pages)==1:
# write pages to multiple files
for page in pages:
pagename = self.document.getPage(page).name
export.pagenumbers = [page]
pg = page+1
fname = filename.replace('%PAGE%', str(pg))
fname = fname.replace('%PAGE00%', '%02i' % pg)
fname = fname.replace('%PAGE000%', '%03i' % pg)
fname = fname.replace('%PAGENAME%', pagename)
export.filename = fname
_checkAndExport()
else:
# write page/pages to single file
fname = filename.replace('%PAGE%', _('none'))
fname = fname.replace('%PAGE00%', _('none'))
fname = fname.replace('%PAGE000%', _('none'))
fname = fname.replace('%PAGENAME%', _('none'))
export.pagenumbers = pages
export.filename = fname
_checkAndExport()
dirname = os.path.dirname(filename)
if dirname:
setting.settingdb['dirname_export'] = dirname
# format feedback
ext = os.path.splitext(export.filename)[1]
if ext:
utils.feedback.exportcts[ext] += 1
if pagecount[0] > 0:
self.showMessage(_('Exported %i page(s)') % pagecount[0])
| gpl-2.0 | 8,928,892,009,857,449,000 | 36.931959 | 81 | 0.585802 | false |
hakii27/PythonVersionMaster | Results/OneDimDot/MCTDHF/w=05/DensityPlot.py | 1 | 1339 | infile = open("DensityCC_w=05_L=10.txt",'r')
infile2 = open("DensityFCI_w=05_N=2_L=6_t=10.txt",'r')
infile3 = open("DensityCCSD_w=05_N=2_L=6_t=10.txt",'r')
densityCC_HF = []
densityFCI = []
densityCC2 = []
infile.next()
infile.next()
infile2.next()
infile2.next()
infile3.next()
infile3.next()
for line in infile:
tmp = line.split(",")
tmp2 = tmp[0].split("(")
d = float(tmp2[1])
densityCC_HF.append(d)
for line in infile2:
tmp = line.split(",")
tmp2 = tmp[0].split("(")
d = float(tmp2[1])
print d
densityFCI.append(d)
for line in infile3:
tmp = line.split(",")
tmp2 = tmp[0].split("(")
d = float(tmp2[1])
print d
densityCC2.append(d)
from numpy import *
Lx = 10
densityCC_HF = array(densityCC_HF)
densityFCI = array(densityFCI)
densityCC2 = array(densityCC2)
#densityCC_HF = array(densityCC_HF)
x = linspace(-Lx,Lx,len(densityFCI))
dx = x[1]-x[0]
print sum(densityFCI)
print sum(densityCC_HF)
print sum(densityCC2)
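# Sanity check: assuming each file stores p(x)*dx on its grid, the three sums
# above should each come out close to the particle number (N=2 here).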
import matplotlib.pyplot as plt
plt.figure(1)
plt.title("Onebody Density for w=0.5 FCI vs. CCSD")
plt.plot(x,densityCC2/dx,'-ob',x,densityFCI/dx,'-r') #,t_vec,EsinPert,'-r')
plt.legend(["CCSD","FCI"])
plt.xlabel("x",fontsize=16)
plt.ylabel("$p(x,x)$",fontsize=16)
plt.figure(2)
plt.title("Difference")
plt.semilogy(x,abs(densityCC2-densityFCI)/dx,'o')
plt.show() | lgpl-3.0 | 7,361,011,529,086,168,000 | 20.612903 | 75 | 0.655713 | false |
Azure/azure-sdk-for-python | sdk/servicebus/azure-servicebus/tests/servicebus_preparer.py | 1 | 25718 | import functools
import hashlib
import os
import time
from collections import namedtuple
from azure.mgmt.servicebus import ServiceBusManagementClient
from azure.mgmt.servicebus.models import SBQueue, SBSubscription, AccessRights
from azure_devtools.scenario_tests.exceptions import AzureTestError
from devtools_testutils import (
ResourceGroupPreparer, AzureMgmtPreparer, FakeResource, get_region_override
)
from devtools_testutils.resource_testcase import RESOURCE_GROUP_PARAM
SERVICEBUS_DEFAULT_AUTH_RULE_NAME = 'RootManageSharedAccessKey'
SERVICEBUS_NAMESPACE_PARAM = 'servicebus_namespace'
SERVICEBUS_TOPIC_PARAM = 'servicebus_topic'
SERVICEBUS_SUBSCRIPTION_PARAM = 'servicebus_subscription'
SERVICEBUS_QUEUE_PARAM = 'servicebus_queue'
SERVICEBUS_AUTHORIZATION_RULE_PARAM = 'servicebus_authorization_rule'
SERVICEBUS_QUEUE_AUTHORIZATION_RULE_PARAM = 'servicebus_queue_authorization_rule'
# Service Bus Namespace Preparer and its shorthand decorator
class ServiceBusNamespacePreparer(AzureMgmtPreparer):
def __init__(self,
name_prefix='',
use_cache=False,
sku='Standard', location=get_region_override('westus'),
parameter_name=SERVICEBUS_NAMESPACE_PARAM,
resource_group_parameter_name=RESOURCE_GROUP_PARAM,
disable_recording=True, playback_fake_resource=None,
client_kwargs=None, random_name_enabled=True):
super(ServiceBusNamespacePreparer, self).__init__(name_prefix, 24,
random_name_enabled=random_name_enabled,
disable_recording=disable_recording,
playback_fake_resource=playback_fake_resource,
client_kwargs=client_kwargs)
self.location = location
self.sku = sku
self.resource_group_parameter_name = resource_group_parameter_name
self.parameter_name = parameter_name
self.connection_string = ''
if random_name_enabled:
self.resource_moniker = self.name_prefix + "sbname"
self.set_cache(use_cache, sku, location)
def create_resource(self, name, **kwargs):
if self.is_live:
self.client = self.create_mgmt_client(ServiceBusManagementClient)
group = self._get_resource_group(**kwargs)
retries = 4
for i in range(retries):
try:
namespace_async_operation = self.client.namespaces.create_or_update(
group.name,
name,
{
'sku': {'name': self.sku},
'location': self.location,
}
)
self.resource = namespace_async_operation.result()
break
except Exception as ex:
error = "The requested resource {} does not exist".format(group.name)
not_found_error = "Operation returned an invalid status code 'Not Found'"
if (error not in str(ex) and not_found_error not in str(ex)) or i == retries - 1:
raise
time.sleep(3)
key = self.client.namespaces.list_keys(group.name, name, SERVICEBUS_DEFAULT_AUTH_RULE_NAME)
self.connection_string = key.primary_connection_string
self.key_name = key.key_name
self.primary_key = key.primary_key
self.test_class_instance.scrubber.register_name_pair(
name,
self.resource_moniker
)
else:
self.resource = FakeResource(name=name, id=name)
self.connection_string = 'Endpoint=sb://{}.servicebus.windows.net/;SharedAccessKeyName=test;SharedAccessKey=THISISATESTKEYXXXXXXXXXXXXXXXXXXXXXXXXXXXX='.format(name)
self.key_name = SERVICEBUS_DEFAULT_AUTH_RULE_NAME
self.primary_key = 'ZmFrZV9hY29jdW50X2tleQ=='
return {
self.parameter_name: self.resource,
'{}_connection_string'.format(self.parameter_name): self.connection_string,
'{}_key_name'.format(self.parameter_name): self.key_name,
'{}_primary_key'.format(self.parameter_name): self.primary_key,
}
def remove_resource(self, name, **kwargs):
if self.is_live:
group = self._get_resource_group(**kwargs)
self.client.namespaces.delete(group.name, name, polling=False)
def _get_resource_group(self, **kwargs):
try:
return kwargs.get(self.resource_group_parameter_name)
except KeyError:
template = 'To create a service bus a resource group is required. Please add ' \
'decorator @{} in front of this service bus preparer.'
raise AzureTestError(template.format(ResourceGroupPreparer.__name__))
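# Hypothetical usage sketch (names are illustrative): the preparers stack as
# decorators so each resource exists before the test body runs, and results are
# injected as keyword arguments named after parameter_name:
#
#     @ResourceGroupPreparer()
#     @ServiceBusNamespacePreparer(name_prefix='servicebustest')
#     @ServiceBusQueuePreparer(name_prefix='servicebustest')
#     def test_send(self, servicebus_namespace_connection_string,
#                   servicebus_queue, **kwargs):
#         ...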
# Shared base class for service bus sub-resources that require a namespace and RG to exist.
class _ServiceBusChildResourcePreparer(AzureMgmtPreparer):
def __init__(self,
name_prefix='',
resource_group_parameter_name=RESOURCE_GROUP_PARAM,
servicebus_namespace_parameter_name=SERVICEBUS_NAMESPACE_PARAM,
disable_recording=True, playback_fake_resource=None,
client_kwargs=None, random_name_enabled=True):
super(_ServiceBusChildResourcePreparer, self).__init__(name_prefix, 24,
random_name_enabled=random_name_enabled,
disable_recording=disable_recording,
playback_fake_resource=playback_fake_resource,
client_kwargs=client_kwargs)
self.resource_group_parameter_name = resource_group_parameter_name
self.servicebus_namespace_parameter_name = servicebus_namespace_parameter_name
def _get_resource_group(self, **kwargs):
try:
return kwargs.get(self.resource_group_parameter_name)
except KeyError:
template = 'To create this service bus child resource service bus a resource group is required. Please add ' \
'decorator @{} in front of this service bus preparer.'
raise AzureTestError(template.format(ResourceGroupPreparer.__name__))
def _get_namespace(self, **kwargs):
try:
return kwargs.get(self.servicebus_namespace_parameter_name)
except KeyError:
template = 'To create this service bus child resource a service bus namespace is required. Please add ' \
'decorator @{} in front of this service bus preparer.'
raise AzureTestError(template.format(ServiceBusNamespacePreparer.__name__))
class ServiceBusTopicPreparer(_ServiceBusChildResourcePreparer):
def __init__(self,
name_prefix='',
use_cache=False,
parameter_name=SERVICEBUS_TOPIC_PARAM,
resource_group_parameter_name=RESOURCE_GROUP_PARAM,
servicebus_namespace_parameter_name=SERVICEBUS_NAMESPACE_PARAM,
disable_recording=True, playback_fake_resource=None,
client_kwargs=None, random_name_enabled=True):
super(ServiceBusTopicPreparer, self).__init__(name_prefix,
random_name_enabled=random_name_enabled,
resource_group_parameter_name=resource_group_parameter_name,
servicebus_namespace_parameter_name=servicebus_namespace_parameter_name,
disable_recording=disable_recording,
playback_fake_resource=playback_fake_resource,
client_kwargs=client_kwargs)
self.parameter_name = parameter_name
if random_name_enabled:
self.resource_moniker = self.name_prefix + "sbtopic"
self.set_cache(use_cache)
def create_resource(self, name, **kwargs):
if self.is_live:
self.client = self.create_mgmt_client(ServiceBusManagementClient)
group = self._get_resource_group(**kwargs)
namespace = self._get_namespace(**kwargs)
retries = 4
for i in range(retries):
try:
self.resource = self.client.topics.create_or_update(
group.name,
namespace.name,
name,
{}
)
break
except Exception as ex:
error = "The requested resource {} does not exist".format(namespace)
not_found_error = "Operation returned an invalid status code 'Not Found'"
if (error not in str(ex) and not_found_error not in str(ex)) or i == retries - 1:
raise
time.sleep(3)
self.test_class_instance.scrubber.register_name_pair(
name,
self.resource_moniker
)
else:
self.resource = FakeResource(name=name, id=name)
return {
self.parameter_name: self.resource,
}
def remove_resource(self, name, **kwargs):
if self.is_live:
group = self._get_resource_group(**kwargs)
namespace = self._get_namespace(**kwargs)
self.client.topics.delete(group.name, namespace.name, name, polling=False)
class ServiceBusSubscriptionPreparer(_ServiceBusChildResourcePreparer):
def __init__(self,
name_prefix='',
use_cache=False,
parameter_name=SERVICEBUS_SUBSCRIPTION_PARAM,
resource_group_parameter_name=RESOURCE_GROUP_PARAM,
servicebus_namespace_parameter_name=SERVICEBUS_NAMESPACE_PARAM,
servicebus_topic_parameter_name=SERVICEBUS_TOPIC_PARAM,
requires_session=False,
disable_recording=True, playback_fake_resource=None,
client_kwargs=None, random_name_enabled=True):
super(ServiceBusSubscriptionPreparer, self).__init__(name_prefix,
random_name_enabled=random_name_enabled,
resource_group_parameter_name=resource_group_parameter_name,
servicebus_namespace_parameter_name=servicebus_namespace_parameter_name,
disable_recording=disable_recording,
playback_fake_resource=playback_fake_resource,
client_kwargs=client_kwargs)
self.servicebus_topic_parameter_name = servicebus_topic_parameter_name
self.parameter_name = parameter_name
if random_name_enabled:
self.resource_moniker = self.name_prefix + "sbsub"
self.set_cache(use_cache, requires_session)
self.requires_session=requires_session
if random_name_enabled:
self.resource_moniker = self.name_prefix + "sbqueue"
def create_resource(self, name, **kwargs):
if self.is_live:
self.client = self.create_mgmt_client(ServiceBusManagementClient)
group = self._get_resource_group(**kwargs)
namespace = self._get_namespace(**kwargs)
topic = self._get_topic(**kwargs)
retries = 4
for i in range(retries):
try:
self.resource = self.client.subscriptions.create_or_update(
group.name,
namespace.name,
topic.name,
name,
SBSubscription(
requires_session=self.requires_session
)
)
break
except Exception as ex:
error = "The requested resource {} does not exist".format(namespace)
not_found_error = "Operation returned an invalid status code 'Not Found'"
if (error not in str(ex) and not_found_error not in str(ex)) or i == retries - 1:
raise
time.sleep(3)
self.test_class_instance.scrubber.register_name_pair(
name,
self.resource_moniker
)
else:
self.resource = FakeResource(name=name, id=name)
return {
self.parameter_name: self.resource,
}
def remove_resource(self, name, **kwargs):
if self.is_live:
group = self._get_resource_group(**kwargs)
namespace = self._get_namespace(**kwargs)
topic = self._get_topic(**kwargs)
self.client.subscriptions.delete(group.name, namespace.name, topic.name, name, polling=False)
def _get_topic(self, **kwargs):
try:
return kwargs.get(self.servicebus_topic_parameter_name)
except KeyError:
template = 'To create this service bus subscription a service bus topic is required. Please add ' \
'decorator @{} in front of this service bus preparer.'
raise AzureTestError(template.format(ServiceBusTopicPreparer.__name__))
class ServiceBusQueuePreparer(_ServiceBusChildResourcePreparer):
def __init__(self,
name_prefix='',
use_cache=False,
requires_duplicate_detection=False,
dead_lettering_on_message_expiration=False,
requires_session=False,
lock_duration='PT30S',
parameter_name=SERVICEBUS_QUEUE_PARAM,
resource_group_parameter_name=RESOURCE_GROUP_PARAM,
servicebus_namespace_parameter_name=SERVICEBUS_NAMESPACE_PARAM,
disable_recording=True, playback_fake_resource=None,
client_kwargs=None, random_name_enabled=True):
super(ServiceBusQueuePreparer, self).__init__(name_prefix,
random_name_enabled=random_name_enabled,
resource_group_parameter_name=resource_group_parameter_name,
servicebus_namespace_parameter_name=servicebus_namespace_parameter_name,
disable_recording=disable_recording,
playback_fake_resource=playback_fake_resource,
client_kwargs=client_kwargs)
self.parameter_name = parameter_name
self.set_cache(use_cache, requires_duplicate_detection, dead_lettering_on_message_expiration, requires_session, lock_duration)
# Queue parameters
self.requires_duplicate_detection=requires_duplicate_detection
self.dead_lettering_on_message_expiration=dead_lettering_on_message_expiration
self.requires_session=requires_session
self.lock_duration=lock_duration
if random_name_enabled:
self.resource_moniker = self.name_prefix + "sbqueue"
def create_resource(self, name, **kwargs):
if self.is_live:
self.client = self.create_mgmt_client(ServiceBusManagementClient)
group = self._get_resource_group(**kwargs)
namespace = self._get_namespace(**kwargs)
retries = 4
for i in range(retries):
try:
self.resource = self.client.queues.create_or_update(
group.name,
namespace.name,
name,
SBQueue(
lock_duration=self.lock_duration,
requires_duplicate_detection = self.requires_duplicate_detection,
dead_lettering_on_message_expiration = self.dead_lettering_on_message_expiration,
requires_session = self.requires_session)
)
break
except Exception as ex:
error = "The requested resource {} does not exist".format(namespace)
not_found_error = "Operation returned an invalid status code 'Not Found'"
if (error not in str(ex) and not_found_error not in str(ex)) or i == retries - 1:
raise
time.sleep(3)
self.test_class_instance.scrubber.register_name_pair(
name,
self.resource_moniker
)
else:
self.resource = FakeResource(name=name, id=name)
return {
self.parameter_name: self.resource,
}
def remove_resource(self, name, **kwargs):
if self.is_live:
group = self._get_resource_group(**kwargs)
namespace = self._get_namespace(**kwargs)
self.client.queues.delete(group.name, namespace.name, name, polling=False)
class ServiceBusNamespaceAuthorizationRulePreparer(_ServiceBusChildResourcePreparer):
def __init__(self,
name_prefix='',
use_cache=False,
access_rights=[AccessRights.manage, AccessRights.send, AccessRights.listen],
parameter_name=SERVICEBUS_AUTHORIZATION_RULE_PARAM,
resource_group_parameter_name=RESOURCE_GROUP_PARAM,
servicebus_namespace_parameter_name=SERVICEBUS_NAMESPACE_PARAM,
disable_recording=True, playback_fake_resource=None,
client_kwargs=None, random_name_enabled=True):
super(ServiceBusNamespaceAuthorizationRulePreparer, self).__init__(name_prefix,
random_name_enabled=random_name_enabled,
resource_group_parameter_name=resource_group_parameter_name,
servicebus_namespace_parameter_name=servicebus_namespace_parameter_name,
disable_recording=disable_recording,
playback_fake_resource=playback_fake_resource,
client_kwargs=client_kwargs)
self.parameter_name = parameter_name
self.access_rights = access_rights
if random_name_enabled:
self.resource_moniker = self.name_prefix + "sbnameauth"
self.set_cache(use_cache, access_rights)
def create_resource(self, name, **kwargs):
if self.is_live:
self.client = self.create_mgmt_client(ServiceBusManagementClient)
group = self._get_resource_group(**kwargs)
namespace = self._get_namespace(**kwargs)
retries = 4
for i in range(retries):
try:
self.resource = self.client.namespaces.create_or_update_authorization_rule(
group.name,
namespace.name,
name,
self.access_rights
)
break
except Exception as ex:
error = "The requested resource {} does not exist".format(namespace)
not_found_error = "Operation returned an invalid status code 'Not Found'"
if (error not in str(ex) and not_found_error not in str(ex)) or i == retries - 1:
raise
time.sleep(3)
key = self.client.namespaces.list_keys(group.name, namespace.name, name)
connection_string = key.primary_connection_string
self.test_class_instance.scrubber.register_name_pair(
name,
self.resource_moniker
)
else:
self.resource = FakeResource(name=name, id=name)
connection_string = 'https://microsoft.com'
return {
self.parameter_name: self.resource,
'{}_connection_string'.format(self.parameter_name): connection_string,
}
def remove_resource(self, name, **kwargs):
if self.is_live:
group = self._get_resource_group(**kwargs)
namespace = self._get_namespace(**kwargs)
self.client.namespaces.delete_authorization_rule(group.name, namespace.name, name, polling=False)
class ServiceBusQueueAuthorizationRulePreparer(_ServiceBusChildResourcePreparer):
def __init__(self,
name_prefix='',
use_cache=False,
access_rights=[AccessRights.manage, AccessRights.send, AccessRights.listen],
parameter_name=SERVICEBUS_QUEUE_AUTHORIZATION_RULE_PARAM,
resource_group_parameter_name=RESOURCE_GROUP_PARAM,
servicebus_namespace_parameter_name=SERVICEBUS_NAMESPACE_PARAM,
servicebus_queue_parameter_name=SERVICEBUS_QUEUE_PARAM,
disable_recording=True, playback_fake_resource=None,
client_kwargs=None, random_name_enabled=True):
super(ServiceBusQueueAuthorizationRulePreparer, self).__init__(name_prefix,
random_name_enabled=random_name_enabled,
resource_group_parameter_name=resource_group_parameter_name,
servicebus_namespace_parameter_name=servicebus_namespace_parameter_name,
disable_recording=disable_recording,
playback_fake_resource=playback_fake_resource,
client_kwargs=client_kwargs)
self.parameter_name = parameter_name
self.access_rights = access_rights
self.servicebus_queue_parameter_name = servicebus_queue_parameter_name
if random_name_enabled:
self.resource_moniker = self.name_prefix + "sbqueueauth"
self.set_cache(use_cache, access_rights)
def create_resource(self, name, **kwargs):
if self.is_live:
self.client = self.create_mgmt_client(ServiceBusManagementClient)
group = self._get_resource_group(**kwargs)
namespace = self._get_namespace(**kwargs)
queue = self._get_queue(**kwargs)
retries = 4
for i in range(retries):
try:
self.resource = self.client.queues.create_or_update_authorization_rule(
group.name,
namespace.name,
queue.name,
name,
self.access_rights
)
break
except Exception as ex:
error = "The requested resource {} does not exist".format(namespace)
not_found_error = "Operation returned an invalid status code 'Not Found'"
if (error not in str(ex) and not_found_error not in str(ex)) or i == retries - 1:
raise
time.sleep(3)
key = self.client.queues.list_keys(group.name, namespace.name, queue.name, name)
connection_string = key.primary_connection_string
self.test_class_instance.scrubber.register_name_pair(
name,
self.resource_moniker
)
else:
self.resource = FakeResource(name=name, id=name)
connection_string = 'https://microsoft.com'
return {
self.parameter_name: self.resource,
'{}_connection_string'.format(self.parameter_name): connection_string,
}
def remove_resource(self, name, **kwargs):
if self.is_live:
group = self._get_resource_group(**kwargs)
namespace = self._get_namespace(**kwargs)
queue = self._get_queue(**kwargs)
self.client.queues.delete_authorization_rule(group.name, namespace.name, queue.name, name, polling=False)
def _get_queue(self, **kwargs):
try:
return kwargs.get(self.servicebus_queue_parameter_name)
except KeyError:
template = 'To create this service bus queue authorization rule a service bus queue is required. Please add ' \
'decorator @{} in front of this service bus preparer.'
raise AzureTestError(template.format(ServiceBusQueuePreparer.__name__))
CachedServiceBusNamespacePreparer = functools.partial(ServiceBusNamespacePreparer, use_cache=True)
CachedServiceBusQueuePreparer = functools.partial(ServiceBusQueuePreparer, use_cache=True)
CachedServiceBusTopicPreparer = functools.partial(ServiceBusTopicPreparer, use_cache=True)
CachedServiceBusSubscriptionPreparer = functools.partial(ServiceBusSubscriptionPreparer, use_cache=True)
| mit | 2,745,185,431,363,098,600 | 49.826087 | 177 | 0.562291 | false |
vprusso/npa_nonlocal | npa_nonlocal/util.py | 1 | 2230 | # -*- coding: utf-8 -*-
'''
#------------------------------------------------------------------------------
# Name: util.py
# Purpose: Various utility functions for npa_nonlocal.
#
# Author: Vincent Russo ([email protected])
#
# Created: 1/13/2015
# Copyright: (c) Vincent Russo 2015
# Licence: GNU
#------------------------------------------------------------------------------
'''
import os
import shelve
import itertools
def check_equal(iterator):
'''Checks if elements in an iterable object are all equal to each other.'''
return len(set(iterator)) <= 1
def chunks(l, n):
'''Splits a list (l) into (n) separate chunks.'''
if n < 1:
n = 1
return [l[i:i + n] for i in range(0, len(l), n)]
def clear():
    '''Clears the shell of the spyder application. Use either clear() or cls()
    '''
    os.system('cls' if os.name == 'nt' else 'clear')
    return None
# Alias so the cls() shorthand mentioned above (and used by clear_all) exists.
cls = clear
def clear_all():
'''Clears all the variables from the workspace of the spyder application'''
cls()
gl = globals().copy()
for var in gl:
if var[0] == '_': continue
if 'func' in str(globals()[var]): continue
if 'module' in str(globals()[var]): continue
del globals()[var]
def generate_bit_strings(n, basis):
'''Generates all bit strings of length n.'''
return ["".join(seq) for seq in itertools.product(basis, repeat=n)]
def list_2_str(_list):
'''Converts a list of objects into a concatenation of strings.'''
return ' '.join(map(str, _list))
def load_workspace():
    ''' Loads the variables in Python workspace (similar to MATLAB)'''
    filename = 'shelve.out'
    my_shelf = shelve.open(filename)
    for key in my_shelf:
        globals()[key] = my_shelf[key]
    my_shelf.close()
def save_workspace():
''' Saves the variables in Python workspace (similar to MATLAB)'''
    filename = 'shelve.out'
    my_shelf = shelve.open(filename, 'n')  # 'n' for new
    # dir() inside a function only sees local names, so walk globals() to pick
    # up the workspace variables instead.
    for key in list(globals().keys()):
try:
my_shelf[key] = globals()[key]
except TypeError:
#
# __builtins__, my_shelf, and imported modules can not be shelved.
#
print('ERROR shelving: {0}'.format(key))
my_shelf.close()
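# Minimal usage sketch (illustrative, not part of the original module). The two
# helpers above persist and restore globals() via `shelve`; note that they act
# on the globals() of the namespace where they are defined, so this assumes the
# functions have been pasted or exec'd into the interactive session itself:
#
#     x = 42
#     save_workspace()      # writes pickleable globals into 'shelve.out'
#     # ... new session ...
#     load_workspace()      # 'x' is restored into globals()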
| gpl-2.0 | -4,372,464,292,768,497,000 | 25.547619 | 79 | 0.541256 | false |
jorge-marques/wagtail | wagtail/wagtailadmin/views/pages.py | 1 | 33468 | import warnings
from django.http import Http404, HttpResponse
from django.shortcuts import render, redirect, get_object_or_404
from django.core.exceptions import ValidationError, PermissionDenied
from django.contrib.contenttypes.models import ContentType
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.urlresolvers import reverse
from django.utils import timezone
from django.utils.translation import ugettext as _
from django.utils.http import is_safe_url
from django.views.decorators.http import require_GET, require_POST
from django.views.decorators.vary import vary_on_headers
from django.db.models import Count
from wagtail.wagtailadmin.edit_handlers import TabbedInterface, ObjectList
from wagtail.wagtailadmin.forms import SearchForm, CopyForm
from wagtail.wagtailadmin import tasks, signals
from wagtail.wagtailcore import hooks
from wagtail.wagtailcore.models import Page, PageRevision, get_navigation_menu_items
from wagtail.wagtailcore.validators import validate_not_whitespace
from wagtail.wagtailadmin import messages
def explorer_nav(request):
return render(request, 'wagtailadmin/shared/explorer_nav.html', {
'nodes': get_navigation_menu_items(),
})
def index(request, parent_page_id=None):
if parent_page_id:
parent_page = get_object_or_404(Page, id=parent_page_id)
else:
parent_page = Page.get_first_root_node()
pages = parent_page.get_children().prefetch_related('content_type')
# Get page ordering
ordering = request.GET.get('ordering', '-latest_revision_created_at')
if ordering not in ['title', '-title', 'content_type', '-content_type', 'live', '-live', 'latest_revision_created_at', '-latest_revision_created_at', 'ord']:
ordering = '-latest_revision_created_at'
# Pagination
if ordering != 'ord':
ordering_no_minus = ordering
if ordering_no_minus.startswith('-'):
ordering_no_minus = ordering[1:]
pages = pages.order_by(ordering).annotate(null_position=Count(ordering_no_minus)).order_by('-null_position', ordering)
p = request.GET.get('p', 1)
paginator = Paginator(pages, 50)
try:
pages = paginator.page(p)
except PageNotAnInteger:
pages = paginator.page(1)
except EmptyPage:
pages = paginator.page(paginator.num_pages)
return render(request, 'wagtailadmin/pages/index.html', {
'parent_page': parent_page,
'ordering': ordering,
'pages': pages,
})
def add_subpage(request, parent_page_id):
parent_page = get_object_or_404(Page, id=parent_page_id).specific
if not parent_page.permissions_for_user(request.user).can_add_subpage():
raise PermissionDenied
page_types = sorted(parent_page.allowed_subpage_types(),
key=lambda pagetype: pagetype.model_class().get_verbose_name().lower()
)
if len(page_types) == 1:
# Only one page type is available - redirect straight to the create form rather than
# making the user choose
content_type = page_types[0]
return redirect('wagtailadmin_pages_create', content_type.app_label, content_type.model, parent_page.id)
return render(request, 'wagtailadmin/pages/add_subpage.html', {
'parent_page': parent_page,
'page_types': page_types,
})
def content_type_use(request, content_type_app_name, content_type_model_name):
try:
content_type = ContentType.objects.get_by_natural_key(content_type_app_name, content_type_model_name)
except ContentType.DoesNotExist:
raise Http404
p = request.GET.get("p", 1)
page_class = content_type.model_class()
# page_class must be a Page type and not some other random model
if not issubclass(page_class, Page):
raise Http404
pages = page_class.objects.all()
paginator = Paginator(pages, 10)
try:
pages = paginator.page(p)
except PageNotAnInteger:
pages = paginator.page(1)
except EmptyPage:
pages = paginator.page(paginator.num_pages)
return render(request, 'wagtailadmin/pages/content_type_use.html', {
'pages': pages,
'app_name': content_type_app_name,
'content_type': content_type,
'page_class': page_class,
})
def create(request, content_type_app_name, content_type_model_name, parent_page_id):
parent_page = get_object_or_404(Page, id=parent_page_id).specific
parent_page_perms = parent_page.permissions_for_user(request.user)
if not parent_page_perms.can_add_subpage():
raise PermissionDenied
try:
content_type = ContentType.objects.get_by_natural_key(content_type_app_name, content_type_model_name)
except ContentType.DoesNotExist:
raise Http404
# Get class
page_class = content_type.model_class()
# Make sure the class is a descendant of Page
if not issubclass(page_class, Page):
raise Http404
# page must be in the list of allowed subpage types for this parent ID
if content_type not in parent_page.allowed_subpage_types():
raise PermissionDenied
page = page_class(owner=request.user)
edit_handler_class = get_page_edit_handler(page_class)
form_class = edit_handler_class.get_form_class(page_class)
if request.POST:
form = form_class(request.POST, request.FILES, instance=page)
# Stick an extra validator into the form to make sure that the slug is not already in use
def clean_slug(slug):
# Make sure the slug isn't already in use
if parent_page.get_children().filter(slug=slug).count() > 0:
raise ValidationError(_("This slug is already in use"))
return slug
form.fields['slug'].clean = clean_slug
# Validate title and seo_title are not entirely whitespace
def clean_title(title):
validate_not_whitespace(title)
return title
form.fields['title'].clean = clean_title
def clean_seo_title(seo_title):
if not seo_title:
return ''
validate_not_whitespace(seo_title)
return seo_title
form.fields['seo_title'].clean = clean_seo_title
# Stick another validator into the form to check that the scheduled publishing settings are set correctly
def clean():
cleaned_data = form_class.clean(form)
# Go live must be before expire
go_live_at = cleaned_data.get('go_live_at')
expire_at = cleaned_data.get('expire_at')
if go_live_at and expire_at:
if go_live_at > expire_at:
msg = _('Go live date/time must be before expiry date/time')
form._errors['go_live_at'] = form.error_class([msg])
form._errors['expire_at'] = form.error_class([msg])
del cleaned_data['go_live_at']
del cleaned_data['expire_at']
# Expire must be in the future
expire_at = cleaned_data.get('expire_at')
if expire_at and expire_at < timezone.now():
form._errors['expire_at'] = form.error_class([_('Expiry date/time must be in the future')])
del cleaned_data['expire_at']
return cleaned_data
form.clean = clean
if form.is_valid():
page = form.save(commit=False)
is_publishing = bool(request.POST.get('action-publish')) and parent_page_perms.can_publish_subpage()
is_submitting = bool(request.POST.get('action-submit'))
# Set live to False and has_unpublished_changes to True if we are not publishing
if not is_publishing:
page.live = False
page.has_unpublished_changes = True
# Save page
parent_page.add_child(instance=page)
# Save revision
revision = page.save_revision(
user=request.user,
submitted_for_moderation=is_submitting,
)
# Publish
if is_publishing:
revision.publish()
# Notifications
if is_publishing:
messages.success(request, _("Page '{0}' published.").format(page.title))
elif is_submitting:
messages.success(request, _("Page '{0}' submitted for moderation.").format(page.title))
tasks.send_notification.delay(page.get_latest_revision().id, 'submitted', request.user.id)
else:
messages.success(request, _("Page '{0}' created.").format(page.title))
for fn in hooks.get_hooks('after_create_page'):
result = fn(request, page)
if hasattr(result, 'status_code'):
return result
if is_publishing or is_submitting:
# we're done here - redirect back to the explorer
return redirect('wagtailadmin_explore', page.get_parent().id)
else:
# Just saving - remain on edit page for further edits
return redirect('wagtailadmin_pages_edit', page.id)
else:
messages.error(request, _("The page could not be created due to validation errors"))
edit_handler = edit_handler_class(instance=page, form=form)
else:
signals.init_new_page.send(sender=create, page=page, parent=parent_page)
form = form_class(instance=page)
edit_handler = edit_handler_class(instance=page, form=form)
return render(request, 'wagtailadmin/pages/create.html', {
'content_type': content_type,
'page_class': page_class,
'parent_page': parent_page,
'edit_handler': edit_handler,
'preview_modes': page.preview_modes,
'form': form, # Used in unit tests
})
def edit(request, page_id):
latest_revision = get_object_or_404(Page, id=page_id).get_latest_revision()
page = get_object_or_404(Page, id=page_id).get_latest_revision_as_page()
parent = page.get_parent()
content_type = ContentType.objects.get_for_model(page)
page_perms = page.permissions_for_user(request.user)
if not page_perms.can_edit():
raise PermissionDenied
edit_handler_class = get_page_edit_handler(page.__class__)
form_class = edit_handler_class.get_form_class(page.__class__)
errors_debug = None
if request.POST:
form = form_class(request.POST, request.FILES, instance=page)
# Stick an extra validator into the form to make sure that the slug is not already in use
def clean_slug(slug):
# Make sure the slug isn't already in use
if parent.get_children().filter(slug=slug).exclude(id=page_id).count() > 0:
raise ValidationError(_("This slug is already in use"))
return slug
form.fields['slug'].clean = clean_slug
# Validate title and seo_title are not entirely whitespace
def clean_title(title):
validate_not_whitespace(title)
return title
form.fields['title'].clean = clean_title
def clean_seo_title(seo_title):
if not seo_title:
return ''
validate_not_whitespace(seo_title)
return seo_title
form.fields['seo_title'].clean = clean_seo_title
# Stick another validator into the form to check that the scheduled publishing settings are set correctly
def clean():
cleaned_data = form_class.clean(form)
# Go live must be before expire
go_live_at = cleaned_data.get('go_live_at')
expire_at = cleaned_data.get('expire_at')
if go_live_at and expire_at:
if go_live_at > expire_at:
msg = _('Go live date/time must be before expiry date/time')
form._errors['go_live_at'] = form.error_class([msg])
form._errors['expire_at'] = form.error_class([msg])
del cleaned_data['go_live_at']
del cleaned_data['expire_at']
# Expire must be in the future
expire_at = cleaned_data.get('expire_at')
if expire_at and expire_at < timezone.now():
form._errors['expire_at'] = form.error_class([_('Expiry date/time must be in the future')])
del cleaned_data['expire_at']
return cleaned_data
form.clean = clean
if form.is_valid() and not page.locked:
page = form.save(commit=False)
is_publishing = bool(request.POST.get('action-publish')) and page_perms.can_publish()
is_submitting = bool(request.POST.get('action-submit'))
# Save revision
revision = page.save_revision(
user=request.user,
submitted_for_moderation=is_submitting,
)
# Publish
if is_publishing:
revision.publish()
else:
# Set has_unpublished_changes flag
if page.live:
# To avoid overwriting the live version, we only save the page
# to the revisions table
Page.objects.filter(id=page.id).update(has_unpublished_changes=True)
else:
page.has_unpublished_changes = True
page.save()
# Notifications
if is_publishing:
messages.success(request, _("Page '{0}' published.").format(page.title), buttons=[
messages.button(page.url, _('View live')),
messages.button(reverse('wagtailadmin_pages_edit', args=(page_id,)), _('Edit'))
])
elif is_submitting:
messages.success(request, _("Page '{0}' submitted for moderation.").format(page.title), buttons=[
messages.button(reverse('wagtailadmin_pages_view_draft', args=(page_id,)), _('View draft')),
messages.button(reverse('wagtailadmin_pages_edit', args=(page_id,)), _('Edit'))
])
tasks.send_notification.delay(page.get_latest_revision().id, 'submitted', request.user.id)
else:
messages.success(request, _("Page '{0}' updated.").format(page.title))
for fn in hooks.get_hooks('after_edit_page'):
result = fn(request, page)
if hasattr(result, 'status_code'):
return result
if is_publishing or is_submitting:
# we're done here - redirect back to the explorer
return redirect('wagtailadmin_explore', page.get_parent().id)
else:
# Just saving - remain on edit page for further edits
return redirect('wagtailadmin_pages_edit', page.id)
else:
if page.locked:
messages.error(request, _("The page could not be saved as it is locked"))
else:
messages.error(request, _("The page could not be saved due to validation errors"))
edit_handler = edit_handler_class(instance=page, form=form)
errors_debug = (
repr(edit_handler.form.errors)
+ repr([(name, formset.errors) for (name, formset) in edit_handler.form.formsets.items() if formset.errors])
)
else:
form = form_class(instance=page)
edit_handler = edit_handler_class(instance=page, form=form)
# Check for revisions still undergoing moderation and warn
if latest_revision and latest_revision.submitted_for_moderation:
messages.warning(request, _("This page is currently awaiting moderation"))
return render(request, 'wagtailadmin/pages/edit.html', {
'page': page,
'content_type': content_type,
'edit_handler': edit_handler,
'errors_debug': errors_debug,
'preview_modes': page.preview_modes,
'form': form, # Used in unit tests
})
def delete(request, page_id):
page = get_object_or_404(Page, id=page_id)
if not page.permissions_for_user(request.user).can_delete():
raise PermissionDenied
if request.method == 'POST':
parent_id = page.get_parent().id
page.delete()
messages.success(request, _("Page '{0}' deleted.").format(page.title))
for fn in hooks.get_hooks('after_delete_page'):
result = fn(request, page)
if hasattr(result, 'status_code'):
return result
return redirect('wagtailadmin_explore', parent_id)
return render(request, 'wagtailadmin/pages/confirm_delete.html', {
'page': page,
'descendant_count': page.get_descendant_count()
})
def view_draft(request, page_id):
page = get_object_or_404(Page, id=page_id).get_latest_revision_as_page()
return page.serve_preview(page.dummy_request(), page.default_preview_mode)
def preview_on_edit(request, page_id):
# Receive the form submission that would typically be posted to the 'edit' view. If submission is valid,
# return the rendered page; if not, re-render the edit form
page = get_object_or_404(Page, id=page_id).get_latest_revision_as_page()
edit_handler_class = get_page_edit_handler(page.__class__)
form_class = edit_handler_class.get_form_class(page.__class__)
form = form_class(request.POST, request.FILES, instance=page)
if form.is_valid():
form.save(commit=False)
preview_mode = request.GET.get('mode', page.default_preview_mode)
response = page.serve_preview(page.dummy_request(), preview_mode)
response['X-Wagtail-Preview'] = 'ok'
return response
else:
edit_handler = edit_handler_class(instance=page, form=form)
response = render(request, 'wagtailadmin/pages/edit.html', {
'page': page,
'edit_handler': edit_handler,
'preview_modes': page.preview_modes,
})
response['X-Wagtail-Preview'] = 'error'
return response
def preview_on_create(request, content_type_app_name, content_type_model_name, parent_page_id):
# Receive the form submission that would typically be posted to the 'create' view. If submission is valid,
# return the rendered page; if not, re-render the edit form
try:
content_type = ContentType.objects.get_by_natural_key(content_type_app_name, content_type_model_name)
except ContentType.DoesNotExist:
raise Http404
page_class = content_type.model_class()
page = page_class()
edit_handler_class = get_page_edit_handler(page_class)
form_class = edit_handler_class.get_form_class(page_class)
form = form_class(request.POST, request.FILES, instance=page)
if form.is_valid():
form.save(commit=False)
# ensure that our unsaved page instance has a suitable url set
parent_page = get_object_or_404(Page, id=parent_page_id).specific
page.set_url_path(parent_page)
# Set treebeard attributes
page.depth = parent_page.depth + 1
page.path = Page._get_children_path_interval(parent_page.path)[1]
preview_mode = request.GET.get('mode', page.default_preview_mode)
response = page.serve_preview(page.dummy_request(), preview_mode)
response['X-Wagtail-Preview'] = 'ok'
return response
else:
edit_handler = edit_handler_class(instance=page, form=form)
parent_page = get_object_or_404(Page, id=parent_page_id).specific
response = render(request, 'wagtailadmin/pages/create.html', {
'content_type': content_type,
'page_class': page_class,
'parent_page': parent_page,
'edit_handler': edit_handler,
'preview_modes': page.preview_modes,
})
response['X-Wagtail-Preview'] = 'error'
return response
def preview(request):
"""
The HTML of a previewed page is written to the destination browser window using document.write.
This overwrites any previous content in the window, while keeping its URL intact. This in turn
means that any content we insert that happens to trigger an HTTP request, such as an image or
stylesheet tag, will report that original URL as its referrer.
In Webkit browsers, a new window opened with window.open('', 'window_name') will have a location
of 'about:blank', causing it to omit the Referer header on those HTTP requests. This means that
any third-party font services that use the Referer header for access control will refuse to
serve us.
So, instead, we need to open the window on some arbitrary URL on our domain. (Provided that's
also the same domain as our editor JS code, the browser security model will happily allow us to
document.write over the page in question.)
This, my friends, is that arbitrary URL.
Since we're going to this trouble, we'll also take the opportunity to display a spinner on the
placeholder page, providing some much-needed visual feedback.
"""
return render(request, 'wagtailadmin/pages/preview.html')
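# Illustrative client-side counterpart (an assumption for clarity, not the actual
# Wagtail admin script): the docstring above boils down to this browser-side
# sequence, using only standard DOM APIs, where `previewUrl` points at this view:
#
#     var win = window.open(previewUrl, 'wagtail-preview');
#     // ...submit the edit form via XHR and receive the rendered HTML...
#     win.document.open();
#     win.document.write(html);  // location stays at previewUrl, so Referer is sent
#     win.document.close();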
def preview_loading(request):
"""
This page is blank, but must be real HTML so its DOM can be written to once the preview of the page has rendered
"""
return HttpResponse("<html><head><title></title></head><body></body></html>")
def unpublish(request, page_id):
page = get_object_or_404(Page, id=page_id).specific
if not page.permissions_for_user(request.user).can_unpublish():
raise PermissionDenied
if request.method == 'POST':
page.unpublish()
messages.success(request, _("Page '{0}' unpublished.").format(page.title))
return redirect('wagtailadmin_explore', page.get_parent().id)
return render(request, 'wagtailadmin/pages/confirm_unpublish.html', {
'page': page,
})
def move_choose_destination(request, page_to_move_id, viewed_page_id=None):
page_to_move = get_object_or_404(Page, id=page_to_move_id)
page_perms = page_to_move.permissions_for_user(request.user)
if not page_perms.can_move():
raise PermissionDenied
if viewed_page_id:
viewed_page = get_object_or_404(Page, id=viewed_page_id)
else:
viewed_page = Page.get_first_root_node()
viewed_page.can_choose = page_perms.can_move_to(viewed_page)
child_pages = []
for target in viewed_page.get_children():
# can't move the page into itself or its descendants
target.can_choose = page_perms.can_move_to(target)
target.can_descend = not(target == page_to_move or target.is_child_of(page_to_move)) and target.get_children_count()
child_pages.append(target)
return render(request, 'wagtailadmin/pages/move_choose_destination.html', {
'page_to_move': page_to_move,
'viewed_page': viewed_page,
'child_pages': child_pages,
})
def move_confirm(request, page_to_move_id, destination_id):
page_to_move = get_object_or_404(Page, id=page_to_move_id).specific
destination = get_object_or_404(Page, id=destination_id)
if not page_to_move.permissions_for_user(request.user).can_move_to(destination):
raise PermissionDenied
if request.POST:
# any invalid moves *should* be caught by the permission check above,
# so don't bother to catch InvalidMoveToDescendant
page_to_move.move(destination, pos='last-child')
messages.success(request, _("Page '{0}' moved.").format(page_to_move.title))
return redirect('wagtailadmin_explore', destination.id)
return render(request, 'wagtailadmin/pages/confirm_move.html', {
'page_to_move': page_to_move,
'destination': destination,
})
def set_page_position(request, page_to_move_id):
page_to_move = get_object_or_404(Page, id=page_to_move_id)
parent_page = page_to_move.get_parent()
if not parent_page.permissions_for_user(request.user).can_reorder_children():
raise PermissionDenied
if request.POST:
# Get position parameter
position = request.GET.get('position', None)
# Find page thats already in this position
position_page = None
if position is not None:
try:
position_page = parent_page.get_children()[int(position)]
except IndexError:
pass # No page in this position
# Move page
# any invalid moves *should* be caught by the permission check above,
# so don't bother to catch InvalidMoveToDescendant
if position_page:
# If the page has been moved to the right, insert it to the
# right. If left, then left.
old_position = list(parent_page.get_children()).index(page_to_move)
if int(position) < old_position:
page_to_move.move(position_page, pos='left')
elif int(position) > old_position:
page_to_move.move(position_page, pos='right')
else:
# Move page to end
page_to_move.move(parent_page, pos='last-child')
return HttpResponse('')
def copy(request, page_id):
page = Page.objects.get(id=page_id)
# Parent page defaults to parent of source page
parent_page = page.get_parent()
# Check if the user has permission to publish subpages on the parent
can_publish = parent_page.permissions_for_user(request.user).can_publish_subpage()
# Create the form
form = CopyForm(request.POST or None, page=page, can_publish=can_publish)
# Check if user is submitting
if request.method == 'POST':
# Prefill parent_page in case the form is invalid (as prepopulated value for the form field,
# because ModelChoiceField seems to not fall back to the user given value)
parent_page = Page.objects.get(id=request.POST['new_parent_page'])
if form.is_valid():
# Receive the parent page (this should never be empty)
if form.cleaned_data['new_parent_page']:
parent_page = form.cleaned_data['new_parent_page']
# Make sure this user has permission to add subpages on the parent
if not parent_page.permissions_for_user(request.user).can_add_subpage():
raise PermissionDenied
# Re-check if the user has permission to publish subpages on the new parent
can_publish = parent_page.permissions_for_user(request.user).can_publish_subpage()
# Copy the page
new_page = page.copy(
recursive=form.cleaned_data.get('copy_subpages'),
to=parent_page,
update_attrs={
'title': form.cleaned_data['new_title'],
'slug': form.cleaned_data['new_slug'],
},
keep_live=(can_publish and form.cleaned_data.get('publish_copies')),
user=request.user,
)
# Give a success message back to the user
if form.cleaned_data.get('copy_subpages'):
messages.success(request, _("Page '{0}' and {1} subpages copied.").format(page.title, new_page.get_descendants().count()))
else:
messages.success(request, _("Page '{0}' copied.").format(page.title))
# Redirect to explore of parent page
return redirect('wagtailadmin_explore', parent_page.id)
return render(request, 'wagtailadmin/pages/copy.html', {
'page': page,
'form': form,
})
PAGE_EDIT_HANDLERS = {}
def get_page_edit_handler(page_class):
if page_class not in PAGE_EDIT_HANDLERS:
if hasattr(page_class, 'edit_handler'):
# use the edit handler specified on the page class
edit_handler = page_class.edit_handler
else:
# construct a TabbedInterface made up of content_panels, promote_panels
# and settings_panels, skipping any which are empty
tabs = []
if page_class.content_panels:
tabs.append(ObjectList(page_class.content_panels, heading='Content'))
if page_class.promote_panels:
tabs.append(ObjectList(page_class.promote_panels, heading='Promote'))
if page_class.settings_panels:
tabs.append(ObjectList(page_class.settings_panels, heading='Settings', classname="settings"))
edit_handler = TabbedInterface(tabs)
PAGE_EDIT_HANDLERS[page_class] = edit_handler.bind_to_model(page_class)
return PAGE_EDIT_HANDLERS[page_class]
@vary_on_headers('X-Requested-With')
def search(request):
pages = []
q = None
is_searching = False
if 'q' in request.GET:
form = SearchForm(request.GET)
if form.is_valid():
q = form.cleaned_data['q']
# page number
p = request.GET.get("p", 1)
is_searching = True
pages = Page.search(q, show_unpublished=True, search_title_only=True, prefetch_related=['content_type'])
# Pagination
paginator = Paginator(pages, 20)
try:
pages = paginator.page(p)
except PageNotAnInteger:
pages = paginator.page(1)
except EmptyPage:
pages = paginator.page(paginator.num_pages)
else:
form = SearchForm()
if request.is_ajax():
return render(request, "wagtailadmin/pages/search_results.html", {
'pages': pages,
'is_searching': is_searching,
'query_string': q,
})
else:
return render(request, "wagtailadmin/pages/search.html", {
'search_form': form,
'pages': pages,
'is_searching': is_searching,
'query_string': q,
})
def approve_moderation(request, revision_id):
revision = get_object_or_404(PageRevision, id=revision_id)
if not revision.page.permissions_for_user(request.user).can_publish():
raise PermissionDenied
if not revision.submitted_for_moderation:
messages.error(request, _("The page '{0}' is not currently awaiting moderation.").format(revision.page.title))
return redirect('wagtailadmin_home')
if request.method == 'POST':
revision.approve_moderation()
messages.success(request, _("Page '{0}' published.").format(revision.page.title))
tasks.send_notification.delay(revision.id, 'approved', request.user.id)
return redirect('wagtailadmin_home')
def reject_moderation(request, revision_id):
revision = get_object_or_404(PageRevision, id=revision_id)
if not revision.page.permissions_for_user(request.user).can_publish():
raise PermissionDenied
if not revision.submitted_for_moderation:
messages.error(request, _("The page '{0}' is not currently awaiting moderation.").format( revision.page.title))
return redirect('wagtailadmin_home')
if request.method == 'POST':
revision.reject_moderation()
messages.success(request, _("Page '{0}' rejected for publication.").format(revision.page.title))
tasks.send_notification.delay(revision.id, 'rejected', request.user.id)
return redirect('wagtailadmin_home')
@require_GET
def preview_for_moderation(request, revision_id):
revision = get_object_or_404(PageRevision, id=revision_id)
if not revision.page.permissions_for_user(request.user).can_publish():
raise PermissionDenied
if not revision.submitted_for_moderation:
messages.error(request, _("The page '{0}' is not currently awaiting moderation.").format(revision.page.title))
return redirect('wagtailadmin_home')
page = revision.as_page_object()
request.revision_id = revision_id
# pass in the real user request rather than page.dummy_request(), so that request.user
# and request.revision_id will be picked up by the wagtail user bar
return page.serve_preview(request, page.default_preview_mode)
@require_POST
def lock(request, page_id):
# Get the page
page = get_object_or_404(Page, id=page_id).specific
# Check permissions
if not page.permissions_for_user(request.user).can_lock():
raise PermissionDenied
# Lock the page
if not page.locked:
page.locked = True
page.save()
messages.success(request, _("Page '{0}' is now locked.").format(page.title))
# Redirect
redirect_to = request.POST.get('next', None)
if redirect_to and is_safe_url(url=redirect_to, host=request.get_host()):
return redirect(redirect_to)
else:
return redirect('wagtailadmin_explore', page.get_parent().id)
@require_POST
def unlock(request, page_id):
# Get the page
page = get_object_or_404(Page, id=page_id).specific
# Check permissions
if not page.permissions_for_user(request.user).can_lock():
raise PermissionDenied
# Unlock the page
if page.locked:
page.locked = False
page.save()
messages.success(request, _("Page '{0}' is now unlocked.").format(page.title))
# Redirect
redirect_to = request.POST.get('next', None)
if redirect_to and is_safe_url(url=redirect_to, host=request.get_host()):
return redirect(redirect_to)
else:
return redirect('wagtailadmin_explore', page.get_parent().id)
| bsd-3-clause | 4,888,389,496,633,200,000 | 37.424799 | 161 | 0.627853 | false |
JohnLZeller/dd-agent | tests/test_autorestart.py | 1 | 3249 | import time
import unittest
import logging
import subprocess
import shlex
import os
import signal
from nose.plugins.skip import SkipTest
from daemon import AgentSupervisor
class TestAutoRestart(unittest.TestCase):
""" Test the auto-restart and forking of the agent """
def setUp(self):
self.agent_foreground = None
self.agent_daemon = None
def tearDown(self):
if self.agent_foreground:
self.agent_foreground.kill()
if self.agent_daemon:
args = shlex.split('python agent.py stop')
subprocess.Popen(args).communicate()
def _start_foreground(self):
# Run the agent in the foreground with auto-restarting on.
args = shlex.split('python agent.py foreground --autorestart')
self.agent_foreground = subprocess.Popen(args)
time.sleep(5)
def _start_daemon(self):
args = shlex.split('python agent.py start --autorestart')
self.agent_daemon = subprocess.Popen(args)
time.sleep(5)
def _get_child_parent_pids(self, grep_str):
args = shlex.split('pgrep -f "%s"' % grep_str)
pgrep = subprocess.Popen(args, stdout=subprocess.PIPE,
close_fds=True).communicate()[0]
pids = pgrep.strip().split('\n')
assert len(pids) == 2, pgrep
return sorted([int(p) for p in pids], reverse=True)
def test_foreground(self):
self._start_foreground()
grep_str = 'agent.py foreground'
child_pid, parent_pid = self._get_child_parent_pids(grep_str)
# Try killing the parent proc, confirm that the child is killed as well.
os.kill(parent_pid, signal.SIGTERM)
os.waitpid(parent_pid, 0)
time.sleep(6)
self.assertRaises(OSError, os.kill, child_pid, signal.SIGTERM)
# Restart the foreground agent.
self._start_foreground()
child_pid, parent_pid = self._get_child_parent_pids(grep_str)
# Set a SIGUSR1 to the child to force an auto-restart exit.
os.kill(child_pid, signal.SIGUSR1)
time.sleep(6)
# Confirm that the child is still alive
child_pid, parent_pid = self._get_child_parent_pids(grep_str)
# Kill the foreground process.
self.agent_foreground.terminate()
self.agent_foreground = None
def test_daemon(self):
self._start_daemon()
grep_str = 'agent.py start'
child_pid, parent_pid = self._get_child_parent_pids(grep_str)
# Try killing the parent proc, confirm that the child is killed as well.
os.kill(parent_pid, signal.SIGTERM)
time.sleep(6)
self.assertRaises(OSError, os.kill, child_pid, signal.SIGTERM)
# Restart the daemon agent.
self._start_daemon()
child_pid, parent_pid = self._get_child_parent_pids(grep_str)
# Set a SIGUSR1 to the child to force an auto-restart exit.
os.kill(child_pid, signal.SIGUSR1)
time.sleep(6)
# Confirm that the child is still alive
child_pid, parent_pid = self._get_child_parent_pids(grep_str)
# Kill the daemon process.
os.kill(parent_pid, signal.SIGTERM)
self.agent_daemon = None
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | -6,652,089,203,656,290,000 | 31.49 | 80 | 0.632195 | false |
jmchilton/galaxy-central | modules/docutils/parsers/rst/languages/eo.py | 1 | 3616 | # Author: Marcelo Huerta San Martin
# Contact: [email protected]
# Revision: $Revision: 3189 $
# Date: $Date: 2005-04-08 05:05:45 +0200 (Fri, 08 Apr 2005) $
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
Esperanto-language mappings for language-dependent features of
reStructuredText.
"""
__docformat__ = 'reStructuredText'
directives = {
# language-dependent: fixed
u'atentu': 'attention',
u'zorgu': 'caution',
u'dangxero': 'danger',
u'dan\u011dero': 'danger',
u'eraro': 'error',
u'spuro': 'hint',
u'grava': 'important',
u'noto': 'note',
u'helpeto': 'tip',
u'averto': 'warning',
u'admono': 'admonition',
u'flankteksto': 'sidebar',
u'temo': 'topic',
u'linea-bloko': 'line-block',
u'analizota-literalo': 'parsed-literal',
u'rubriko': 'rubric',
u'epigrafo': 'epigraph',
u'elstarajxoj': 'highlights',
u'elstara\u0135oj': 'highlights',
u'ekstera-citajxo': 'pull-quote',
u'ekstera-cita\u0135o': 'pull-quote',
u'kombinajxo': 'compound',
u'kombina\u0135o': 'compound',
#'questions': 'questions',
#'qa': 'questions',
#'faq': 'questions',
u'tabelo': 'table',
u'tabelo-vdk': 'csv-table', # "valoroj disigitaj per komoj"
u'tabelo-csv': 'csv-table',
u'tabelo-lista': 'list-table',
u'meta': 'meta',
#'imagemap': 'imagemap',
u'bildo': 'image',
u'figuro': 'figure',
u'inkludi': 'include',
u'senanaliza': 'raw',
u'anstatauxi': 'replace',
u'anstata\u016di': 'replace',
u'unicode': 'unicode',
u'klaso': 'class',
u'rolo': 'role',
u'enhavo': 'contents',
u'seknum': 'sectnum',
u'sekcia-numerado': 'sectnum',
u'kapsekcio': 'header',
u'piedsekcio': 'footer',
#'footnotes': 'footnotes',
#'citations': 'citations',
u'celaj-notoj': 'target-notes',
u'restructuredtext-test-directive': 'restructuredtext-test-directive'}
"""Esperanto name to registered (in directives/__init__.py) directive name
mapping."""
roles = {
# language-dependent: fixed
u'mallongigo': 'abbreviation',
u'mall': 'abbreviation',
u'komenclitero': 'acronym',
u'kl': 'acronym',
u'indekso': 'index',
u'i': 'index',
u'subskribo': 'subscript',
u'sub': 'subscript',
u'supraskribo': 'superscript',
u'sup': 'superscript',
u'titola-referenco': 'title-reference',
u'titolo': 'title-reference',
u't': 'title-reference',
u'pep-referenco': 'pep-reference',
u'pep': 'pep-reference',
u'rfc-referenco': 'rfc-reference',
u'rfc': 'rfc-reference',
u'emfazo': 'emphasis',
u'forta': 'strong',
u'litera': 'literal',
u'nomita-referenco': 'named-reference',
u'nenomita-referenco': 'anonymous-reference',
u'piednota-referenco': 'footnote-reference',
u'citajxo-referenco': 'citation-reference',
u'cita\u0135o-referenco': 'citation-reference',
u'anstatauxa-referenco': 'substitution-reference',
u'anstata\u016da-referenco': 'substitution-reference',
u'celo': 'target',
u'uri-referenco': 'uri-reference',
u'uri': 'uri-reference',
u'url': 'uri-reference',
u'senanaliza': 'raw',
}
"""Mapping of Esperanto role names to canonical role names for interpreted text.
"""
| mit | -8,322,646,266,598,600,000 | 31.576577 | 80 | 0.615874 | false |
Strassengezwitscher/Strassengezwitscher | crowdgezwitscher/events/views.py | 1 | 1531 | from django.contrib.auth.mixins import PermissionRequiredMixin
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.views.generic.edit import DeleteView
from django.urls import reverse_lazy
from extra_views import CreateWithInlinesView, UpdateWithInlinesView
from events.models import Event
from events.forms import EventForm, AttachmentFormSet
class EventListView(PermissionRequiredMixin, ListView):
permission_required = 'events.view_event'
model = Event
template_name = 'events/list.html'
context_object_name = 'events'
ordering = '-date'
class EventDetail(PermissionRequiredMixin, DetailView):
permission_required = 'events.view_event'
model = Event
template_name = 'events/detail.html'
context_object_name = 'event'
class EventCreate(PermissionRequiredMixin, CreateWithInlinesView):
permission_required = 'events.add_event'
model = Event
inlines = [AttachmentFormSet]
template_name = 'events/form.html'
form_class = EventForm
class EventUpdate(PermissionRequiredMixin, UpdateWithInlinesView):
permission_required = 'events.change_event'
model = Event
inlines = [AttachmentFormSet]
template_name = 'events/form.html'
form_class = EventForm
class EventDelete(PermissionRequiredMixin, DeleteView):
permission_required = 'events.delete_event'
model = Event
template_name = 'events/delete.html'
success_url = reverse_lazy('events:list')
context_object_name = 'event'
| mit | -7,796,877,386,715,528,000 | 30.895833 | 68 | 0.758981 | false |
hryamzik/ansible | lib/ansible/module_utils/network/cnos/cnos_devicerules.py | 1 | 91032 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by
# Ansible still belong to the author of the module, and may assign their
# own license to the complete work.
#
# Copyright (C) 2017 Lenovo, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Contains device rule and methods
# Lenovo Networking
def getRuleString(deviceType, variableId):
retVal = variableId + ":"
if(deviceType == 'g8272_cnos'):
if variableId in g8272_cnos:
retVal = retVal + g8272_cnos[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'g8296_cnos'):
if variableId in g8296_cnos:
retVal = retVal + g8296_cnos[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'g8332_cnos'):
if variableId in g8332_cnos:
retVal = retVal + g8332_cnos[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'NE1072T'):
if variableId in NE1072T:
retVal = retVal + NE1072T[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'NE1032'):
if variableId in NE1032:
retVal = retVal + NE1032[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'NE1032T'):
if variableId in NE1032T:
retVal = retVal + NE1032T[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'NE10032'):
if variableId in NE10032:
retVal = retVal + NE10032[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'NE2572'):
if variableId in NE2572:
retVal = retVal + NE2572[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
else:
if variableId in default_cnos:
retVal = retVal + default_cnos[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
return retVal
# EOM
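# Illustrative use of getRuleString (not part of the original module): callers
# look up the rule for a device type / variable id pair and parse the result,
# e.g.
#
#     rule = getRuleString('unknown_device', 'vlan_id')
#     # -> 'vlan_id:INTEGER_VALUE:1-3999'   (falls back to the default_cnos table)
#     variable_id, check_type, bounds = rule.split(':', 2)
#
# The text after the first colon names the check (INTEGER_VALUE, TEXT_OPTIONS,
# IPV4Address, ...) and, where applicable, its allowed range or option list.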
default_cnos = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-32',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
    'bfd_multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,\
interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,\
trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,\
input,output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,\
vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
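# --- Illustrative helper (not part of the original module) -----------------
# Every rule value in these tables follows a '<TYPE>:<argument>' convention,
# e.g. 'INTEGER_VALUE:1-3999', 'TEXT_OPTIONS:ipv4,ipv6' or 'IPV4Address:'.
# The function below is a minimal, hypothetical sketch of how such a rule
# string could be checked; the name `check_rule` and its behaviour are
# assumptions made purely for illustration.
def check_rule(rule, value):
    """Return True if `value` appears to satisfy a '<TYPE>:<argument>' rule."""
    kind, _, arg = rule.partition(':')
    if kind == 'INTEGER_VALUE':
        low, _, high = arg.partition('-')
        try:
            return int(low) <= int(value) <= int(high)
        except ValueError:
            return False
    if kind == 'TEXT_OPTIONS':
        # Strip each option in case a backslash-continued line carried spaces.
        return str(value) in [opt.strip() for opt in arg.split(',')]
    if kind == 'FLOAT':
        try:
            float(value)
            return True
        except ValueError:
            return False
    # TEXT, NO_VALIDATION, IPV4Address, IPV6Address, *_RANGE and the other
    # types would need dedicated handling; accept them to keep the sketch short.
    return True

# Example (hypothetical): check_rule('INTEGER_VALUE:1-3999', 500) returns True.


# Option validation rules for the Lenovo NE2572 switch (CNOS).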
NE2572 = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-54',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-54',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
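# Option validation rules for the Lenovo NE1032T switch (CNOS).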
NE1032T = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-32',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
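# Option validation rules for the Lenovo NE1032 switch (CNOS).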
NE1032 = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-32',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
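# Option validation rules for the Lenovo NE1072T switch (CNOS).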
NE1072T = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-54',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-54',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
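# Option validation rules for the Lenovo NE10032 switch (CNOS); unlike the
# NE1032/NE1072T tables, its 'interface_speed' list omits the 1000 option.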
NE10032 = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-32',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
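# Option validation rules for the Lenovo RackSwitch G8272 running CNOS.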
g8272_cnos = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-54',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-54',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,40000',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
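# Option validation rules for the Lenovo RackSwitch G8296 running CNOS;
# its Ethernet interface range runs 1-96.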
g8296_cnos = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-128',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-96',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-96',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,\
input,output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,40000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
g8332_cnos = {'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-128',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-32',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,arp,\
dhcp,ospf,port,port-unreachable,redirects,router,unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,\
input,output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
| gpl-3.0 | 5,255,834,514,839,444,000 | 51.650087 | 80 | 0.625945 | false |
codepongo/cook | script/cook.py | 1 | 4343 | #coding:utf-8
import datetime
import sys
import os
sys.path.append(os.path.dirname(__file__))
import shutil
import web
import markdown2
perpage = 5
try:
import conf
path = conf.path
css_path = conf.csspath
web.config.debug=conf.debug
domain = conf.domain
suffix = conf.suffix
except:
icopath = './'
path = './md'
css_path = './css'
web.config.debug=True
domain ='http://127.0.0.1:8080'
suffix = '.md'
class base:
def __init__(self):
self.entities = []
if not os.path.isdir(path):
os.mkdir(path)
for p in os.listdir(path):
if os.path.isdir(p):
continue
ext = os.path.splitext(p)[1]
if ext == suffix:
self.entities.append(os.path.join(path,p))
self.entities.sort(reverse=True)
def entity(self, idx):
return self.generate(idx, idx+1)
def entities(self):
return self.generate(0, len(self.entities))
def generate(self, begin, end):
es = [] #entities in page
if len(self.entities) == 0:
return es
for i in range(begin, end):
e = {}
e['date'] = os.path.splitext(self.entities[i])[0].replace(path+os.sep, '')[:10]
with open(self.entities[i], 'rb') as f:
e['id'] = os.path.splitext(os.path.basename(self.entities[i]))[0]
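                # The next three lines of each entry file are read as the
                # title, the title marker/underline line and the image line;
                # the remainder of the file becomes the entry body.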
title = f.readline()
title_tag = f.readline()
image = f.readline()
e['title'] = title #markdown2.markdown(title)
e['image'] = markdown2.markdown(image).replace('<img src="', '<img width="160" height="120" src="/').replace('<p>', '').replace('</p>', '')
content = title + title_tag + image + f.read()
c = markdown2.markdown(content)#.replace('<img src="', '<img width="480" height="360" src="/')
e['content'] = c
es.append(e)
f.close()
return es
class static:
def GET(self, name):
if name == 'favicon.ico':
with open(os.path.join(icopath, name), 'rb') as f:
content = f.read()
f.close()
web.header('content-type', 'image/x-icon')
return content
if os.path.splitext(name)[1][1:] == 'css':
web.header('content-type', 'text/css')
with open(os.path.join(css_path, name), 'rb') as f:
content = f.read()
f.close()
return content
if name == 'robots.txt':
web.header('content-type', 'text/plain')
elif os.path.splitext(name)[1][1:] == 'jpg':
web.header('content-type', 'image/jpeg')
else:
web.header('content-type', 'image/%s' % os.path.splitext(name)[1][1:].lower())
with open(os.path.join(path,name), 'rb') as f:
content = f.read()
f.close()
return content
class feed(base):
def GET(self):
date = datetime.datetime.today().strftime("%a, %d %b %Y %H:%M:%S +0200")
web.header('Content-Type', 'application/xml')
templates = os.path.join(os.path.dirname(__file__), 'templates')
render = web.template.render(templates)
return render.feed(entities=base.entities(self)[:5], date=date,domain=domain)
class cook(base):
def GET(self, name=''):
count = len(self.entities)
templates = os.path.join(os.path.dirname(__file__), 'templates')
render = web.template.render(templates)
if name == '':
return render.index(base.entities(self))
try:
idx = self.entities.index(os.path.join(path, name + suffix))
p = n = True
if idx <= 0:
p = False
if idx >= count - 1:
n = False
return render.entity(base.entity(self,idx), idx, p, n)
except:
return render.index(base.entities(self))
urls = (
'/(.*.JPEG)', static,
'/(.*.jpeg)', static,
'/(.*.jpg)', static,
'/(.*.css)', static,
'/(favicon.ico)', static,
'/feed', feed,
'/rss', feed,
'/(robots.txt)',static,
'/(.*)',cook,
)
app = web.application(urls, globals())
if __name__ == '__main__':
app.run()
else:
application = app.wsgifunc()
| unlicense | 2,263,865,417,661,544,200 | 32.666667 | 155 | 0.519226 | false |
akretion/odoo | addons/mrp/report/mrp_report_bom_structure.py | 2 | 12989 | # -*- coding: utf-8 -*-
import json
from odoo import api, models, _
from odoo.tools import float_round
class ReportBomStructure(models.AbstractModel):
_name = 'report.mrp.report_bom_structure'
_description = 'BOM Structure Report'
@api.model
def _get_report_values(self, docids, data=None):
docs = []
for bom_id in docids:
bom = self.env['mrp.bom'].browse(bom_id)
candidates = bom.product_id or bom.product_tmpl_id.product_variant_ids
for product_variant_id in candidates.ids:
if data and data.get('childs'):
doc = self._get_pdf_line(bom_id, product_id=product_variant_id, qty=float(data.get('quantity')), child_bom_ids=json.loads(data.get('childs')))
else:
doc = self._get_pdf_line(bom_id, product_id=product_variant_id, unfolded=True)
doc['report_type'] = 'pdf'
doc['report_structure'] = data and data.get('report_type') or 'all'
docs.append(doc)
if not candidates:
if data and data.get('childs'):
doc = self._get_pdf_line(bom_id, qty=float(data.get('quantity')), child_bom_ids=json.loads(data.get('childs')))
else:
doc = self._get_pdf_line(bom_id, unfolded=True)
doc['report_type'] = 'pdf'
doc['report_structure'] = data and data.get('report_type') or 'all'
docs.append(doc)
return {
'doc_ids': docids,
'doc_model': 'mrp.bom',
'docs': docs,
}
@api.model
def get_html(self, bom_id=False, searchQty=1, searchVariant=False):
res = self._get_report_data(bom_id=bom_id, searchQty=searchQty, searchVariant=searchVariant)
res['lines']['report_type'] = 'html'
res['lines']['report_structure'] = 'all'
res['lines']['has_attachments'] = res['lines']['attachments'] or any(component['attachments'] for component in res['lines']['components'])
res['lines'] = self.env.ref('mrp.report_mrp_bom').render({'data': res['lines']})
return res
@api.model
def get_bom(self, bom_id=False, product_id=False, line_qty=False, line_id=False, level=False):
lines = self._get_bom(bom_id=bom_id, product_id=product_id, line_qty=line_qty, line_id=line_id, level=level)
return self.env.ref('mrp.report_mrp_bom_line').render({'data': lines})
@api.model
def get_operations(self, bom_id=False, qty=0, level=0):
bom = self.env['mrp.bom'].browse(bom_id)
lines = self._get_operation_line(bom.routing_id, float_round(qty / bom.product_qty, precision_rounding=1, rounding_method='UP'), level)
values = {
'bom_id': bom_id,
'currency': self.env.user.company_id.currency_id,
'operations': lines,
}
return self.env.ref('mrp.report_mrp_operation_line').render({'data': values})
def _get_bom_reference(self, bom):
return bom.display_name
@api.model
def _get_report_data(self, bom_id, searchQty=0, searchVariant=False):
lines = {}
bom = self.env['mrp.bom'].browse(bom_id)
bom_quantity = searchQty or bom.product_qty or 1
bom_product_variants = {}
bom_uom_name = ''
if bom:
bom_uom_name = bom.product_uom_id.name
# Get variants used for search
if not bom.product_id:
for variant in bom.product_tmpl_id.product_variant_ids:
bom_product_variants[variant.id] = variant.display_name
lines = self._get_bom(bom_id, product_id=searchVariant, line_qty=bom_quantity, level=1)
return {
'lines': lines,
'variants': bom_product_variants,
'bom_uom_name': bom_uom_name,
'bom_qty': bom_quantity,
'is_variant_applied': self.env.user.user_has_groups('product.group_product_variant') and len(bom_product_variants) > 1,
'is_uom_applied': self.env.user.user_has_groups('uom.group_uom')
}
def _get_bom(self, bom_id=False, product_id=False, line_qty=False, line_id=False, level=False):
bom = self.env['mrp.bom'].browse(bom_id)
bom_quantity = line_qty
if line_id:
current_line = self.env['mrp.bom.line'].browse(int(line_id))
bom_quantity = current_line.product_uom_id._compute_quantity(line_qty, bom.product_uom_id)
        # Display bom components for the currently selected product variant
if product_id:
product = self.env['product.product'].browse(int(product_id))
else:
product = bom.product_id or bom.product_tmpl_id.product_variant_id
if product:
attachments = self.env['mrp.document'].search(['|', '&', ('res_model', '=', 'product.product'),
('res_id', '=', product.id), '&', ('res_model', '=', 'product.template'), ('res_id', '=', product.product_tmpl_id.id)])
else:
product = bom.product_tmpl_id
attachments = self.env['mrp.document'].search([('res_model', '=', 'product.template'), ('res_id', '=', product.id)])
operations = []
if bom.product_qty > 0:
operations = self._get_operation_line(bom.routing_id, float_round(bom_quantity / bom.product_qty, precision_rounding=1, rounding_method='UP'), 0)
lines = {
'bom': bom,
'bom_qty': bom_quantity,
'bom_prod_name': product.display_name,
'currency': self.env.user.company_id.currency_id,
'product': product,
'code': bom and self._get_bom_reference(bom) or '',
'price': product.uom_id._compute_price(product.standard_price, bom.product_uom_id) * bom_quantity,
'total': sum([op['total'] for op in operations]),
'level': level or 0,
'operations': operations,
'operations_cost': sum([op['total'] for op in operations]),
'attachments': attachments,
'operations_time': sum([op['duration_expected'] for op in operations])
}
components, total = self._get_bom_lines(bom, bom_quantity, product, line_id, level)
lines['components'] = components
lines['total'] += total
return lines
def _get_bom_lines(self, bom, bom_quantity, product, line_id, level):
components = []
total = 0
for line in bom.bom_line_ids:
line_quantity = (bom_quantity / (bom.product_qty or 1.0)) * line.product_qty
if line._skip_bom_line(product):
continue
price = line.product_id.uom_id._compute_price(line.product_id.standard_price, line.product_uom_id) * line_quantity
if line.child_bom_id:
factor = line.product_uom_id._compute_quantity(line_quantity, line.child_bom_id.product_uom_id) / line.child_bom_id.product_qty
sub_total = self._get_price(line.child_bom_id, factor, line.product_id)
else:
sub_total = price
sub_total = self.env.user.company_id.currency_id.round(sub_total)
components.append({
'prod_id': line.product_id.id,
'prod_name': line.product_id.display_name,
'code': line.child_bom_id and self._get_bom_reference(line.child_bom_id) or '',
'prod_qty': line_quantity,
'prod_uom': line.product_uom_id.name,
'prod_cost': self.env.user.company_id.currency_id.round(price),
'parent_id': bom.id,
'line_id': line.id,
'level': level or 0,
'total': sub_total,
'child_bom': line.child_bom_id.id,
'phantom_bom': line.child_bom_id and line.child_bom_id.type == 'phantom' or False,
'attachments': self.env['mrp.document'].search(['|', '&',
('res_model', '=', 'product.product'), ('res_id', '=', line.product_id.id), '&', ('res_model', '=', 'product.template'), ('res_id', '=', line.product_id.product_tmpl_id.id)]),
})
total += sub_total
return components, total
def _get_operation_line(self, routing, qty, level):
operations = []
total = 0.0
for operation in routing.operation_ids:
operation_cycle = float_round(qty / operation.workcenter_id.capacity, precision_rounding=1, rounding_method='UP')
duration_expected = operation_cycle * operation.time_cycle + operation.workcenter_id.time_stop + operation.workcenter_id.time_start
total = ((duration_expected / 60.0) * operation.workcenter_id.costs_hour)
operations.append({
'level': level or 0,
'operation': operation,
'name': operation.name + ' - ' + operation.workcenter_id.name,
'duration_expected': duration_expected,
'total': self.env.user.company_id.currency_id.round(total),
})
return operations
def _get_price(self, bom, factor, product):
price = 0
if bom.routing_id:
            # Routings are defined on a BoM and don't have a concept of quantity:
            # the operation times are defined for the quantity set on the BoM
            # (the user produces a batch of products). E.g. if the user produces
            # a batch of 10 units with a 5 minute operation, the time will be
            # 5 minutes for a quantity between 1-10, then doubled for 11-20, ...
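            # e.g. float_round(1.5, precision_rounding=1, rounding_method='UP') == 2.0,
            # so producing 15 units against a BoM defined for 10 counts the
            # operation time twice.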
operation_cycle = float_round(factor, precision_rounding=1, rounding_method='UP')
operations = self._get_operation_line(bom.routing_id, operation_cycle, 0)
price += sum([op['total'] for op in operations])
for line in bom.bom_line_ids:
if line._skip_bom_line(product):
continue
if line.child_bom_id:
qty = line.product_uom_id._compute_quantity(line.product_qty * factor, line.child_bom_id.product_uom_id) / line.child_bom_id.product_qty
sub_price = self._get_price(line.child_bom_id, qty, line.product_id)
price += sub_price
else:
prod_qty = line.product_qty * factor
not_rounded_price = line.product_id.uom_id._compute_price(line.product_id.standard_price, line.product_uom_id) * prod_qty
price += self.env.user.company_id.currency_id.round(not_rounded_price)
return price
def _get_pdf_line(self, bom_id, product_id=False, qty=1, child_bom_ids=[], unfolded=False):
data = self._get_bom(bom_id=bom_id, product_id=product_id, line_qty=qty)
def get_sub_lines(bom, product_id, line_qty, line_id, level):
data = self._get_bom(bom_id=bom.id, product_id=product_id, line_qty=line_qty, line_id=line_id, level=level)
bom_lines = data['components']
lines = []
for bom_line in bom_lines:
lines.append({
'name': bom_line['prod_name'],
'type': 'bom',
'quantity': bom_line['prod_qty'],
'uom': bom_line['prod_uom'],
'prod_cost': bom_line['prod_cost'],
'bom_cost': bom_line['total'],
'level': bom_line['level'],
'code': bom_line['code']
})
if bom_line['child_bom'] and (unfolded or bom_line['child_bom'] in child_bom_ids):
line = self.env['mrp.bom.line'].browse(bom_line['line_id'])
lines += (get_sub_lines(line.child_bom_id, line.product_id, bom_line['prod_qty'], line, level + 1))
if data['operations']:
lines.append({
'name': _('Operations'),
'type': 'operation',
'quantity': data['operations_time'],
'uom': _('minutes'),
'bom_cost': data['operations_cost'],
'level': level,
})
for operation in data['operations']:
if unfolded or 'operation-' + str(bom.id) in child_bom_ids:
lines.append({
'name': operation['name'],
'type': 'operation',
'quantity': operation['duration_expected'],
'uom': _('minutes'),
'bom_cost': operation['total'],
'level': level + 1,
})
return lines
bom = self.env['mrp.bom'].browse(bom_id)
product = product_id or bom.product_id or bom.product_tmpl_id.product_variant_id
pdf_lines = get_sub_lines(bom, product, qty, False, 1)
data['components'] = []
data['lines'] = pdf_lines
return data
| agpl-3.0 | 8,412,093,710,462,063,000 | 49.344961 | 195 | 0.553314 | false |
serzans/wagtail | wagtail/wagtailadmin/edit_handlers.py | 1 | 26782 | from __future__ import unicode_literals
import copy
from modelcluster.forms import ClusterForm, ClusterFormMetaclass
from django.db import models
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.utils.six import text_type
from django import forms
from django.forms.models import fields_for_model
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import ugettext_lazy
from taggit.managers import TaggableManager
from wagtail.wagtailadmin import widgets
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.utils import camelcase_to_underscore, resolve_model_string
from wagtail.utils.compat import get_related_model, get_related_parent_model
# Form field properties to override whenever we encounter a model field
# that matches one of these types - including subclasses
FORM_FIELD_OVERRIDES = {
models.DateField: {'widget': widgets.AdminDateInput},
models.TimeField: {'widget': widgets.AdminTimeInput},
models.DateTimeField: {'widget': widgets.AdminDateTimeInput},
TaggableManager: {'widget': widgets.AdminTagWidget},
}
# Form field properties to override whenever we encounter a model field
# that matches one of these types exactly, ignoring subclasses.
# (This allows us to override the widget for models.TextField, but leave
# the RichTextField widget alone)
DIRECT_FORM_FIELD_OVERRIDES = {
models.TextField: {'widget': widgets.AdminAutoHeightTextInput},
}
# Callback to allow us to override the default form fields provided for each model field.
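# For example, a models.DateField will be rendered with widgets.AdminDateInput
# via FORM_FIELD_OVERRIDES above, while a plain models.TextField gets
# widgets.AdminAutoHeightTextInput via DIRECT_FORM_FIELD_OVERRIDES.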
def formfield_for_dbfield(db_field, **kwargs):
# adapted from django/contrib/admin/options.py
overrides = None
# If we've got overrides for the formfield defined, use 'em. **kwargs
# passed to formfield_for_dbfield override the defaults.
if db_field.__class__ in DIRECT_FORM_FIELD_OVERRIDES:
overrides = DIRECT_FORM_FIELD_OVERRIDES[db_field.__class__]
else:
for klass in db_field.__class__.mro():
if klass in FORM_FIELD_OVERRIDES:
overrides = FORM_FIELD_OVERRIDES[klass]
break
if overrides:
kwargs = dict(copy.deepcopy(overrides), **kwargs)
return db_field.formfield(**kwargs)
def widget_with_script(widget, script):
return mark_safe('{0}<script>{1}</script>'.format(widget, script))
class WagtailAdminModelFormMetaclass(ClusterFormMetaclass):
# Override the behaviour of the regular ModelForm metaclass -
# which handles the translation of model fields to form fields -
# to use our own formfield_for_dbfield function to do that translation.
# This is done by sneaking a formfield_callback property into the class
# being defined (unless the class already provides a formfield_callback
# of its own).
# while we're at it, we'll also set extra_form_count to 0, as we're creating
# extra forms in JS
extra_form_count = 0
def __new__(cls, name, bases, attrs):
if 'formfield_callback' not in attrs or attrs['formfield_callback'] is None:
attrs['formfield_callback'] = formfield_for_dbfield
new_class = super(WagtailAdminModelFormMetaclass, cls).__new__(cls, name, bases, attrs)
return new_class
WagtailAdminModelForm = WagtailAdminModelFormMetaclass(str('WagtailAdminModelForm'), (ClusterForm,), {})
# Now, any model forms built off WagtailAdminModelForm instead of ModelForm should pick up
# the nice form fields defined in FORM_FIELD_OVERRIDES.
def get_form_for_model(
model,
fields=None, exclude=None, formsets=None, exclude_formsets=None, widgets=None
):
# django's modelform_factory with a bit of custom behaviour
# (dealing with Treebeard's tree-related fields that really should have
# been editable=False)
attrs = {'model': model}
if fields is not None:
attrs['fields'] = fields
if exclude is not None:
attrs['exclude'] = exclude
if issubclass(model, Page):
attrs['exclude'] = attrs.get('exclude', []) + ['content_type', 'path', 'depth', 'numchild']
if widgets is not None:
attrs['widgets'] = widgets
if formsets is not None:
attrs['formsets'] = formsets
if exclude_formsets is not None:
attrs['exclude_formsets'] = exclude_formsets
# Give this new form class a reasonable name.
class_name = model.__name__ + str('Form')
form_class_attrs = {
'Meta': type(str('Meta'), (object,), attrs)
}
return WagtailAdminModelFormMetaclass(class_name, (WagtailAdminModelForm,), form_class_attrs)
def extract_panel_definitions_from_model_class(model, exclude=None):
if hasattr(model, 'panels'):
return model.panels
panels = []
_exclude = []
if exclude:
_exclude.extend(exclude)
if issubclass(model, Page):
_exclude = ['content_type', 'path', 'depth', 'numchild']
fields = fields_for_model(model, exclude=_exclude, formfield_callback=formfield_for_dbfield)
for field_name, field in fields.items():
try:
panel_class = field.widget.get_panel()
except AttributeError:
panel_class = FieldPanel
panel = panel_class(field_name)
panels.append(panel)
return panels
class EditHandler(object):
"""
Abstract class providing sensible default behaviours for objects implementing
the EditHandler API
"""
# return list of widget overrides that this EditHandler wants to be in place
# on the form it receives
@classmethod
def widget_overrides(cls):
return {}
# return list of fields that this EditHandler expects to find on the form
@classmethod
def required_fields(cls):
return []
# return a dict of formsets that this EditHandler requires to be present
# as children of the ClusterForm; the dict is a mapping from relation name
# to parameters to be passed as part of get_form_for_model's 'formsets' kwarg
@classmethod
def required_formsets(cls):
return {}
# return any HTML that needs to be output on the edit page once per edit handler definition.
# Typically this will be used to define snippets of HTML within <script type="text/x-template"></script> blocks
# for Javascript code to work with.
@classmethod
def html_declarations(cls):
return ''
# the top-level edit handler is responsible for providing a form class that can produce forms
# acceptable to the edit handler
_form_class = None
@classmethod
def get_form_class(cls, model):
if cls._form_class is None:
cls._form_class = get_form_for_model(
model,
fields=cls.required_fields(),
formsets=cls.required_formsets(), widgets=cls.widget_overrides())
return cls._form_class
def __init__(self, instance=None, form=None):
if not instance:
raise ValueError("EditHandler did not receive an instance object")
self.instance = instance
if not form:
raise ValueError("EditHandler did not receive a form object")
self.form = form
# Heading / help text to display to the user
heading = ""
help_text = ""
def classes(self):
"""
Additional CSS classnames to add to whatever kind of object this is at output.
Subclasses of EditHandler should override this, invoking super(B, self).classes() to
append more classes specific to the situation.
"""
classes = []
try:
classes.append(self.classname)
except AttributeError:
pass
return classes
def field_type(self):
"""
        The kind of field it is, e.g. boolean_field. Useful for better semantic markup of field display based on type.
"""
return ""
def id_for_label(self):
"""
The ID to be used as the 'for' attribute of any <label> elements that refer
to this object but are rendered outside of it. Leave blank if this object does not render
as a single input field.
"""
return ""
def render_as_object(self):
"""
Render this object as it should appear within an ObjectList. Should not
include the <h2> heading or help text - ObjectList will supply those
"""
# by default, assume that the subclass provides a catch-all render() method
return self.render()
def render_as_field(self):
"""
Render this object as it should appear within a <ul class="fields"> list item
"""
# by default, assume that the subclass provides a catch-all render() method
return self.render()
def render_missing_fields(self):
"""
Helper function: render all of the fields that are defined on the form but not "claimed" by
any panels via required_fields. These fields are most likely to be hidden fields introduced
by the forms framework itself, such as ORDER / DELETE fields on formset members.
(If they aren't actually hidden fields, then they will appear as ugly unstyled / label-less fields
outside of the panel furniture. But there's not much we can do about that.)
"""
rendered_fields = self.required_fields()
missing_fields_html = [
text_type(self.form[field_name])
for field_name in self.form.fields
if field_name not in rendered_fields
]
return mark_safe(''.join(missing_fields_html))
def render_form_content(self):
"""
Render this as an 'object', ensuring that all fields necessary for a valid form
submission are included
"""
return mark_safe(self.render_as_object() + self.render_missing_fields())
class BaseCompositeEditHandler(EditHandler):
"""
Abstract class for EditHandlers that manage a set of sub-EditHandlers.
Concrete subclasses must attach a 'children' property
"""
_widget_overrides = None
@classmethod
def widget_overrides(cls):
if cls._widget_overrides is None:
# build a collated version of all its children's widget lists
widgets = {}
for handler_class in cls.children:
widgets.update(handler_class.widget_overrides())
cls._widget_overrides = widgets
return cls._widget_overrides
_required_fields = None
@classmethod
def required_fields(cls):
if cls._required_fields is None:
fields = []
for handler_class in cls.children:
fields.extend(handler_class.required_fields())
cls._required_fields = fields
return cls._required_fields
_required_formsets = None
@classmethod
def required_formsets(cls):
if cls._required_formsets is None:
formsets = {}
for handler_class in cls.children:
formsets.update(handler_class.required_formsets())
cls._required_formsets = formsets
return cls._required_formsets
@classmethod
def html_declarations(cls):
return mark_safe(''.join([c.html_declarations() for c in cls.children]))
def __init__(self, instance=None, form=None):
super(BaseCompositeEditHandler, self).__init__(instance=instance, form=form)
self.children = [
handler_class(instance=self.instance, form=self.form)
for handler_class in self.__class__.children
]
def render(self):
return mark_safe(render_to_string(self.template, {
'self': self
}))
class BaseTabbedInterface(BaseCompositeEditHandler):
template = "wagtailadmin/edit_handlers/tabbed_interface.html"
class TabbedInterface(object):
def __init__(self, children):
self.children = children
def bind_to_model(self, model):
return type(str('_TabbedInterface'), (BaseTabbedInterface,), {
'model': model,
'children': [child.bind_to_model(model) for child in self.children],
})
class BaseObjectList(BaseCompositeEditHandler):
template = "wagtailadmin/edit_handlers/object_list.html"
class ObjectList(object):
def __init__(self, children, heading="", classname=""):
self.children = children
self.heading = heading
self.classname = classname
def bind_to_model(self, model):
return type(str('_ObjectList'), (BaseObjectList,), {
'model': model,
'children': [child.bind_to_model(model) for child in self.children],
'heading': self.heading,
'classname': self.classname,
})
class BaseFieldRowPanel(BaseCompositeEditHandler):
template = "wagtailadmin/edit_handlers/field_row_panel.html"
class FieldRowPanel(object):
def __init__(self, children, classname=""):
self.children = children
self.classname = classname
def bind_to_model(self, model):
return type(str('_FieldRowPanel'), (BaseFieldRowPanel,), {
'model': model,
'children': [child.bind_to_model(model) for child in self.children],
'classname': self.classname,
})
class BaseMultiFieldPanel(BaseCompositeEditHandler):
template = "wagtailadmin/edit_handlers/multi_field_panel.html"
def classes(self):
classes = super(BaseMultiFieldPanel, self).classes()
classes.append("multi-field")
return classes
class MultiFieldPanel(object):
def __init__(self, children, heading="", classname=""):
self.children = children
self.heading = heading
self.classname = classname
def bind_to_model(self, model):
return type(str('_MultiFieldPanel'), (BaseMultiFieldPanel,), {
'model': model,
'children': [child.bind_to_model(model) for child in self.children],
'heading': self.heading,
'classname': self.classname,
})
class BaseFieldPanel(EditHandler):
TEMPLATE_VAR = 'field_panel'
@classmethod
def widget_overrides(cls):
"""check if a specific widget has been defined for this field"""
if hasattr(cls, 'widget'):
return {cls.field_name: cls.widget}
else:
return {}
def __init__(self, instance=None, form=None):
super(BaseFieldPanel, self).__init__(instance=instance, form=form)
self.bound_field = self.form[self.field_name]
self.heading = self.bound_field.label
self.help_text = self.bound_field.help_text
def classes(self):
classes = super(BaseFieldPanel, self).classes()
if self.bound_field.field.required:
classes.append("required")
if self.bound_field.errors:
classes.append("error")
classes.append(self.field_type())
return classes
def field_type(self):
return camelcase_to_underscore(self.bound_field.field.__class__.__name__)
def id_for_label(self):
return self.bound_field.id_for_label
object_template = "wagtailadmin/edit_handlers/single_field_panel.html"
def render_as_object(self):
return mark_safe(render_to_string(self.object_template, {
'self': self,
self.TEMPLATE_VAR: self,
'field': self.bound_field,
}))
field_template = "wagtailadmin/edit_handlers/field_panel_field.html"
def render_as_field(self):
context = {
'field': self.bound_field,
'field_type': self.field_type(),
}
return mark_safe(render_to_string(self.field_template, context))
@classmethod
def required_fields(self):
return [self.field_name]
class FieldPanel(object):
def __init__(self, field_name, classname="", widget=None):
self.field_name = field_name
self.classname = classname
self.widget = widget
def bind_to_model(self, model):
base = {
'model': model,
'field_name': self.field_name,
'classname': self.classname,
}
if self.widget:
base['widget'] = self.widget
return type(str('_FieldPanel'), (BaseFieldPanel,), base)
class BaseRichTextFieldPanel(BaseFieldPanel):
pass
class RichTextFieldPanel(object):
def __init__(self, field_name):
self.field_name = field_name
def bind_to_model(self, model):
return type(str('_RichTextFieldPanel'), (BaseRichTextFieldPanel,), {
'model': model,
'field_name': self.field_name,
})
class BaseChooserPanel(BaseFieldPanel):
"""
Abstract superclass for panels that provide a modal interface for choosing (or creating)
a database object such as an image, resulting in an ID that is used to populate
a hidden foreign key input.
Subclasses provide:
* field_template (only required if the default template of field_panel_field.html is not usable)
* object_type_name - something like 'image' which will be used as the var name
for the object instance in the field_template
"""
def get_chosen_item(self):
field = self.instance._meta.get_field(self.field_name)
related_model = get_related_parent_model(field.related)
try:
return getattr(self.instance, self.field_name)
except related_model.DoesNotExist:
# if the ForeignKey is null=False, Django decides to raise
# a DoesNotExist exception here, rather than returning None
# like every other unpopulated field type. Yay consistency!
return None
def render_as_field(self):
instance_obj = self.get_chosen_item()
context = {
'field': self.bound_field,
self.object_type_name: instance_obj,
'is_chosen': bool(instance_obj), # DEPRECATED - passed to templates for backwards compatibility only
}
return mark_safe(render_to_string(self.field_template, context))
class BasePageChooserPanel(BaseChooserPanel):
object_type_name = "page"
_target_content_type = None
@classmethod
def widget_overrides(cls):
return {cls.field_name: widgets.AdminPageChooser(
content_type=cls.target_content_type(), can_choose_root=cls.can_choose_root)}
@classmethod
def target_content_type(cls):
if cls._target_content_type is None:
if cls.page_type:
target_models = []
for page_type in cls.page_type:
try:
target_models.append(resolve_model_string(page_type))
except LookupError:
raise ImproperlyConfigured(
"{0}.page_type must be of the form 'app_label.model_name', given {1!r}".format(
cls.__name__, page_type
)
)
except ValueError:
raise ImproperlyConfigured(
"{0}.page_type refers to model {1!r} that has not been installed".format(
cls.__name__, page_type
)
)
cls._target_content_type = list(ContentType.objects.get_for_models(*target_models).values())
else:
target_model = cls.model._meta.get_field(cls.field_name).rel.to
cls._target_content_type = [ContentType.objects.get_for_model(target_model)]
return cls._target_content_type
class PageChooserPanel(object):
def __init__(self, field_name, page_type=None, can_choose_root=False):
self.field_name = field_name
if page_type:
# Convert single string/model into list
if not isinstance(page_type, (list, tuple)):
page_type = [page_type]
else:
page_type = []
self.page_type = page_type
self.can_choose_root = can_choose_root
def bind_to_model(self, model):
return type(str('_PageChooserPanel'), (BasePageChooserPanel,), {
'model': model,
'field_name': self.field_name,
'page_type': self.page_type,
'can_choose_root': self.can_choose_root,
})
class BaseInlinePanel(EditHandler):
@classmethod
def get_panel_definitions(cls):
# Look for a panels definition in the InlinePanel declaration
if cls.panels is not None:
return cls.panels
# Failing that, get it from the model
else:
return extract_panel_definitions_from_model_class(
get_related_model(cls.related),
exclude=[cls.related.field.name]
)
_child_edit_handler_class = None
@classmethod
def get_child_edit_handler_class(cls):
if cls._child_edit_handler_class is None:
panels = cls.get_panel_definitions()
cls._child_edit_handler_class = MultiFieldPanel(
panels,
heading=cls.heading
).bind_to_model(get_related_model(cls.related))
return cls._child_edit_handler_class
@classmethod
def required_formsets(cls):
child_edit_handler_class = cls.get_child_edit_handler_class()
return {
cls.relation_name: {
'fields': child_edit_handler_class.required_fields(),
'widgets': child_edit_handler_class.widget_overrides(),
'min_num': cls.min_num,
'validate_min': cls.min_num is not None,
'max_num': cls.max_num,
'validate_max': cls.max_num is not None
}
}
def __init__(self, instance=None, form=None):
super(BaseInlinePanel, self).__init__(instance=instance, form=form)
self.formset = form.formsets[self.__class__.relation_name]
child_edit_handler_class = self.__class__.get_child_edit_handler_class()
self.children = []
for subform in self.formset.forms:
# override the DELETE field to have a hidden input
subform.fields['DELETE'].widget = forms.HiddenInput()
# ditto for the ORDER field, if present
if self.formset.can_order:
subform.fields['ORDER'].widget = forms.HiddenInput()
self.children.append(
child_edit_handler_class(instance=subform.instance, form=subform)
)
# if this formset is valid, it may have been re-ordered; respect that
# in case the parent form errored and we need to re-render
if self.formset.can_order and self.formset.is_valid():
self.children = sorted(self.children, key=lambda x: x.form.cleaned_data['ORDER'])
empty_form = self.formset.empty_form
empty_form.fields['DELETE'].widget = forms.HiddenInput()
if self.formset.can_order:
empty_form.fields['ORDER'].widget = forms.HiddenInput()
self.empty_child = child_edit_handler_class(instance=empty_form.instance, form=empty_form)
template = "wagtailadmin/edit_handlers/inline_panel.html"
def render(self):
formset = render_to_string(self.template, {
'self': self,
'can_order': self.formset.can_order,
})
js = self.render_js_init()
return widget_with_script(formset, js)
js_template = "wagtailadmin/edit_handlers/inline_panel.js"
def render_js_init(self):
return mark_safe(render_to_string(self.js_template, {
'self': self,
'can_order': self.formset.can_order,
}))
class InlinePanel(object):
def __init__(self, relation_name, panels=None, label='', help_text='', min_num=None, max_num=None):
self.relation_name = relation_name
self.panels = panels
self.label = label
self.help_text = help_text
self.min_num = min_num
self.max_num = max_num
def bind_to_model(self, model):
return type(str('_InlinePanel'), (BaseInlinePanel,), {
'model': model,
'relation_name': self.relation_name,
'related': getattr(model, self.relation_name).related,
'panels': self.panels,
'heading': self.label,
'help_text': self.help_text,
# TODO: can we pick this out of the foreign key definition as an alternative?
# (with a bit of help from the inlineformset object, as we do for label/heading)
'min_num': self.min_num,
'max_num': self.max_num
})
# This allows users to include the publishing panel in their own per-model override
# without having to write these fields out by hand, potentially losing 'classname'
# and therefore the associated styling of the publishing panel
def PublishingPanel():
return MultiFieldPanel([
FieldRowPanel([
FieldPanel('go_live_at'),
FieldPanel('expire_at'),
], classname="label-above"),
], ugettext_lazy('Scheduled publishing'), classname="publishing")
# Now that we've defined EditHandlers, we can set up wagtailcore.Page to have some.
Page.content_panels = [
FieldPanel('title', classname="full title"),
]
Page.promote_panels = [
MultiFieldPanel([
FieldPanel('slug'),
FieldPanel('seo_title'),
FieldPanel('show_in_menus'),
FieldPanel('search_description'),
], ugettext_lazy('Common page configuration')),
]
Page.settings_panels = [
PublishingPanel()
]
class BaseStreamFieldPanel(BaseFieldPanel):
def classes(self):
classes = super(BaseStreamFieldPanel, self).classes()
classes.append("stream-field")
# In case of a validation error, BlockWidget will take care of outputting the error on the
# relevant sub-block, so we don't want the stream block as a whole to be wrapped in an 'error' class.
if 'error' in classes:
classes.remove("error")
return classes
@classmethod
def html_declarations(cls):
return cls.block_def.all_html_declarations()
def id_for_label(self):
# a StreamField may consist of many input fields, so it's not meaningful to
# attach the label to any specific one
return ""
class StreamFieldPanel(object):
def __init__(self, field_name):
self.field_name = field_name
def bind_to_model(self, model):
return type(str('_StreamFieldPanel'), (BaseStreamFieldPanel,), {
'model': model,
'field_name': self.field_name,
'block_def': model._meta.get_field(self.field_name).stream_block
})
| bsd-3-clause | -5,252,673,871,227,266,000 | 33.073791 | 115 | 0.631581 | false |
birknilson/oyster | src/oyster.py | 1 | 27361 | # -*- coding: utf-8 -*-
"""
Oyster
~~~~~~
**A Python parser of shell commands.**
This module strives to support commands executed within the sh, bash
and zsh shells alike. An important limitation to mention is that Oyster
does not support parsing of scripted commands, i.e.:
for i in $(seq 10); do echo $i; done
This might change in a future version of Oyster - at least in order to
support one-liners like the one above.
*Features to be included in upcoming releases:*
- Extended :class:`Chain` API to ease extending the chain with
additional commands and various control operators.
- Parse command substitutions
:copyright: (c) 2014 by Birk Nilson.
:license: MIT, see LICENSE for more details.
"""
import shlex
from subprocess import list2cmdline
__author__ = 'Birk Nilson <[email protected]>'
__copyright__ = 'Copyright 2014, Birk Nilson'
__license__ = 'MIT'
__version__ = '0.1.0'
__all__ = [
# Constants
'RESERVED_WORDS', 'CONTROL_OPERATORS', 'STDIN',
'STDOUT', 'STDERR', 'STDFD_MAPPING', 'DEBUG',
# Classes
'Redirect', 'Chain', 'Command',
# Functions
'split_token_by_operators', 'tokenize', 'is_comment',
'is_script', 'is_quoted', 'is_command', 'parse',
]
#: How verbose Oyster debugging should be::
#: * 0 turns off debugging
#: * 1 adds basic parse debugging
#: * 2 adds tokenize debugging
DEBUG = 0
#: Set of words which are reserved in the shell.
#: See: http://bit.ly/1baSfhM#tag_02_04
RESERVED_WORDS = frozenset([
'!', ';', '{', '}', 'case',
'do', 'done', 'elif', 'else',
'esac', 'fi', 'for', 'if',
'in', 'then', 'until', 'while',
])
#: Control operators which chain multiple commands
CONTROL_OPERATORS = frozenset([';', '|', '&&', '||'])
#: Lookup dictionary of control operators
CONTROL_OPERATOR_LOOKUP = dict(zip(CONTROL_OPERATORS, CONTROL_OPERATORS))
#: The file descriptor of the standard input file
STDIN = 0
#: The file descriptor of the standard output file
STDOUT = 1
#: The file descriptor of the standard error file
STDERR = 2
#: Mapping of the standard file descriptors and their common names
STDFD_MAPPING = {
STDIN: 'stdin',
STDOUT: 'stdout',
STDERR: 'stderr',
}
class Redirect(object):
"""A :class:`Redirect` instance represents the various output redirections
performed by the command it is attached to.
    Each redirect has a :attr:`source` and a :attr:`destination`, where the
    source is the standard file descriptor to be redirected to the given
    :attr:`destination` - which can be either a file descriptor or a filename.
    The method in which the redirect is performed is determined by the
    :attr:`mode` which can be either ``w`` or ``a``. The ``w`` mode will
    write to the :attr:`destination` while ``a`` will append to
    it, i.e. '>' vs. '>>'.
When a shell command is parsed all redirects will automatically be
initiated and assigned to their respective command as shown below:
>>> import oyster
>>> cmd = 'cp -v -r myfiles/* >> copied.log 2>> errors.log'
>>> command = oyster.parse(cmd)[0]
>>> str(command.redirects[0])
'>> copied.log'
>>> str(command.redirects[1])
'2>> errors.log'
>>> command.redirects[0].is_source_stdout()
True
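    A :class:`Redirect` can also be constructed directly; its string
    representation follows the same rules (illustrative):
    >>> from oyster import Redirect, STDOUT, STDERR
    >>> str(Redirect(STDERR, 'errors.log', mode='a'))
    '2>> errors.log'
    >>> str(Redirect(STDOUT, STDERR))
    '>&2'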
:param source: An integer representing the standard file descriptor to
be redirected.
:param destination: Either an integer representing the standard file
descriptor which output should be redirected to or
a string representing the filename.
:param mode: Either ``w`` or ``a`` depending on whether the redirect should
write or append its output to the :attr:`destination`.
"""
def __init__(self, source, destination, mode='w'):
#: Which standard file descriptor to be redirected
self.source = source
#: The destination of the redirect which can either be a standard
        #: file descriptor (integer) or a filename (string).
self.destination = destination
if self.is_destination_stdfd():
mode = 'w'
#: The mode in which the redirect should be performed.
#: ``w`` represents writes (>) & ``a`` represents appends (>>).
self.mode = mode
def is_source_stdin(self):
"""Check if the source is the standard input file descriptor."""
return self.source == STDIN
def is_source_stdout(self):
"""Check if the source is the standard output file descriptor."""
return self.source == STDOUT
def is_source_stderr(self):
"""Check if the source is the standard error file descriptor."""
return self.source == STDERR
def is_destination_stdfd(self):
"""Check if the destination is a standard file descriptor."""
return self.destination in STDFD_MAPPING
def is_destination_stdin(self):
"""Check if the destination is the standard input file descriptor."""
return self.destination == STDIN
def is_destination_stdout(self):
"""Check if the destination is the standard output file descriptor."""
return self.destination == STDOUT
def is_destination_stderr(self):
"""Check if the destination is the standard error file descriptor."""
return self.destination == STDERR
def __str__(self):
source = str(self.source) if not self.is_source_stdout() else ''
if not self.is_destination_stdfd():
separator = ' '
operator = '>' if self.mode == 'w' else '>>'
else:
separator = ''
operator = '>&'
destination = str(self.destination)
as_string = '{source}{operator}{separator}{destination}'
return as_string.format(source=source, operator=operator,
separator=separator, destination=destination)
class Chain(object):
"""A list-like object containing all the individual commands which have
been chained together using control operators in the shell.
Unlike a regular Python list the :class:`Chain` instance does not implement the
``.extend``, ``.sort`` and ``.count`` methods. Also it introduces the
    ``chained_by`` parameter to the ``.append`` and ``.insert`` methods.
Oyster treats all shell commands as a chain even in the case of a single
program being executed. This is a design choice to simplify usage of the
module since it is easier if :func:`parse` consistently returns the
same type. As shown here:
>>> import oyster
>>> commands = oyster.parse('ps aux | grep python')
>>> len(commands)
2
>>> ps, grep = commands
>>> ps.arguments
('aux',)
>>> ps = oyster.parse('ps aux')[0]
>>> ps.program
'ps'
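    Commands can also be appended to an existing chain, optionally with an
    explicit control operator (illustrative):
    >>> chain = oyster.parse('ps aux')
    >>> chain.append('grep python', chained_by='|')
    >>> str(chain)
    'ps aux | grep python'
    >>> chain.append('echo done')
    >>> str(chain)
    'ps aux | grep python; echo done'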
"""
def __init__(self):
#: A list containing all the individual :class:`Command` instances
self.commands = []
self._strings = []
self._operators = []
def append(self, command, chained_by=None):
"""C.append(command[, chained_by=';'])
Append given ``command`` to the chain with the
``chained_by`` as the separating control operator.
:param command: A string representing the command or an
instance of :class:`Command`
:param chained_by: One of the control operators defined in the
:attr:`CONTROL_OPERATORS` constant. The default
is ``;``.
"""
command = self._normalize_command(command)
chained_by = self._normalize_chained_by(chained_by)
self.commands.append(command)
self._strings.append(str(command))
self._operators.append(chained_by)
def insert(self, index, command, chained_by=None):
"""C.insert(index, command[, chained_by=';'])
Insert given ``command`` to the chain at ``index`` with the
``chained_by`` as the separating control operator.
:param index: At which index of the chain to insert the command
:param command: A string representing the command or an
instance of :class:`Command`
:param chained_by: One of the control operators defined in the
:attr:`CONTROL_OPERATORS` constant. The default
is ``;``.
"""
command = self._normalize_command(command)
chained_by = self._normalize_chained_by(chained_by)
self.commands.insert(index, command)
self._strings.insert(index, str(command))
self._operators.insert(index, chained_by)
def index(self, command, *args):
"""C.index(command, [start, [stop]]) -> first index of command.
Raises ValueError if the command is not present.
:param command: A string representing the command or an
instance of :class:`Command`
:param start: At which index to start the search
:param stop: At which index to stop the search
"""
if hasattr(command, 'get_options'):
return self.commands.index(command, *args)
return self._strings.index(command, *args)
def pop(self, *args):
"""C.pop([index]) -> command -- remove and return item at index (default last).
Raises IndexError if list is empty or index is out of range.
:param index: Which command to pop by index
"""
ret = self.commands.pop(*args)
self._strings.pop(*args)
self._operators.pop(*args)
return ret
def remove(self, command):
"""C.remove(command) -- remove first occurrence of command.
Raises ValueError if the value is not present.
:param command: A string representing the command or an
instance of :class:`Command`
"""
index = self.index(command)
del self.commands[index]
del self._strings[index]
del self._operators[index]
def __add__(self, chain):
if hasattr(chain, 'isalpha'):
chain = parse(chain)
c = Chain()
c.commands = self.commands + chain.commands
c._strings = self._strings + chain._strings
c._operators = self._operators + chain._operators
return c
def __iadd__(self, chain):
if hasattr(chain, 'isalpha'):
chain = parse(chain)
self.commands += chain.commands
self._strings += chain._strings
self._operators += chain._operators
return self
def __contains__(self, command):
if not hasattr(command, 'isalpha'):
return command in self.commands
return command in self._strings
def __delitem__(self, *args):
self.commands.__delitem__(*args)
self._strings.__delitem__(*args)
self._operators.__delitem__(*args)
def __delslice__(self, *args):
self.commands.__delslice__(*args)
self._strings.__delslice__(*args)
self._operators.__delslice__(*args)
def __eq__(self, chain):
return str(self) == str(chain)
def __ne__(self, chain):
return not self.__eq__(chain)
def __getitem__(self, index):
return self.commands.__getitem__(index)
def __getslice__(self, *args):
c = Chain()
c.commands = self.commands.__getslice__(*args)
c._strings = self._strings.__getslice__(*args)
c._operators = self._operators.__getslice__(*args)
return c
def __len__(self):
return self.commands.__len__()
def __str__(self):
operators = self._operators[:]
operators[0] = None
commands = [str(command) for command in self.commands]
components = []
for index, operator in enumerate(operators):
if operator:
whitespace = ' '
if operator == ';':
whitespace = ''
components.append('{0}{1} '.format(whitespace, operator))
components.append(commands[index])
return ''.join(components)
def _normalize_command(self, command):
if hasattr(command, 'get_options'):
return command
chain = parse(command)
if not chain:
raise ValueError('invalid command')
return chain.pop()
def _normalize_chained_by(self, chained_by):
if not chained_by:
return ';'
if chained_by in CONTROL_OPERATORS:
return chained_by
raise ValueError('invalid control operator given')
class Command(object):
"""A representation of a single - unchained - command.
Contains the name of the program being executed along with all the
arguments passed to it. Furthermore, it processes the given arguments
to convert them into ``options``: A dictionary mapping options to their
given values.
An argument is considered an option in case it is prefixed with ``-``.
In other words ``-v``, ``--install`` and ``-c`` are all considered
to be options.
**Caveat #1:**
How their values are retrieved is an interesting topic. The easiest case
is the scenario of an argument being --foo=bar. Then the option name is
``--foo`` and its corresponding value ``bar``. Single-hyphenated arguments
are a trickier matter though. Consider the following:
pip install -v -r requirements.txt
In the case above Oyster will treat the ``-v`` argument as a boolean
option, i.e. give it the value ``True``. If all single-hyphenated
arguments were considered boolean options then, as everyone who knows
pip will realize, the value stored for ``-r`` would be useless & incorrect.
Therefore, when a single-hyphenated argument is followed by a
non-hyphenated argument the latter is considered the former's value.
Naturally, this is not bulletproof either, but it is better to be
greedy in this scenario since the arguments are also kept, untouched,
in the :attr:`arguments` attribute. After all: determining how the
arguments should be handled is ultimately up to the targeted
program in the command.
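For example (an illustrative sketch of the rules above, assuming the
module is importable as ``oyster``):
>>> import oyster
>>> cmd = oyster.parse('pip install -v -r requirements.txt')[0]
>>> cmd.get_option_values('-v')
[True]
>>> cmd.get_option_values('-r')
['requirements.txt']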
**Caveat #2:**
The :attr:`as_string` and thus str(self) value is retrieved using
the ``subprocess.list2cmdline`` function. In case the command is created
via :func:`parse` this opens up the possibility of minor differences
in how command arguments are quoted. Therefore, a direct comparison
of the input command and the string representation of its instance
is not guaranteed to be successful.
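For example (illustrative):
>>> import oyster
>>> str(oyster.parse("grep 'foo bar' file.txt"))
'grep "foo bar" file.txt'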
:param tokens: A list of all the tokens the command consists of
"""
def __init__(self, tokens):
#: Name of the program which the command is executing
self.program = tokens[0]
#: A tuple of all the arguments passed to the program
self.arguments = tuple(tokens[1:])
#: A tuple containing all tokens which the command consists of.
#: In other words: tuple([self.program] + list(self.arguments))
self.tokens = tuple(tokens)
#: The string representation of the command. Used in str(self)
self.as_string = list2cmdline(self.tokens)
#: A tuple containing all the instances of :class:`Redirect`
#: found during processing of the command.
self.redirects = tuple([])
self._process_arguments(self.arguments)
def get_options(self):
"""Retrieve a copy of the command options.
A copy is returned to prevent tampering with the instance options.
The :class:`Command` class is not designed to support mutations.
"""
# Changes to the options dict will not propagate to the
# tokens, arguments or string representation of the command.
# Therefore, the options are intended to be read-only which this
# API hopefully makes clear by making the attribute "private" and
# the accessor return a copy of the dict.
return self._options.copy()
def has_option(self, name):
"""Check whether the command includes the given option ``name``.
:param name: Name of the option including hyphens.
"""
return name in self._options
def get_option_values(self, name, *args):
"""D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.
:param name: Name of the option including hyphens.
"""
return self._options.get(name, *args)
def get_option_count(self, name):
"""Return the amount of values stored for the given options.
:param name: Name of the option including hyphens.
"""
values = self.get_option_values(name)
if values:
return len(values)
return 0
def __str__(self):
return self.as_string
def _register_redirect(self, token, output_file=None):
if is_quoted(token):
return
index = token.find('>')
if index == -1:
return
source = 1
if index:
try:
source = int(token[index - 1])
except ValueError:
pass
mode = 'w'
destination = None
try:
next_index = index + 1
if token[next_index] == '&':
destination = int(token[next_index:])
elif token[next_index] == '>':
mode = 'a'
destination = output_file
except (IndexError, ValueError):
pass
if not destination:
return
if hasattr(destination, 'lstrip'):
destination = destination.lstrip()
r = Redirect(source, destination, mode=mode)
redirects = list(self.redirects)
redirects.append(r)
self.redirects = tuple(redirects)
def _process_arguments(self, arguments):
def sanitize_value(value):
if not hasattr(value, 'isalpha'):
return value
if is_quoted(value):
value = value[1:-1]
return value
def get_value(next_token):
if (hasattr(next_token, 'startswith') and
not next_token.startswith('-')):
return sanitize_value(next_token)
return True
options = {}
for index, token in enumerate(arguments):
try:
next_token = arguments[index + 1]
except IndexError:
next_token = None
if not token.startswith('-'):
self._register_redirect(token, output_file=next_token)
continue
if token.startswith('--'):
key, _, value = token.partition('=')
if value:
value = sanitize_value(value)
else:
value = get_value(next_token)
options.setdefault(key, []).append(value)
else:
keys = list(token[1:])
for key in keys:
value = get_value(next_token)
options.setdefault('-' + key, []).append(value)
self._options = options
def debug(message, level=1, exit=False):
if DEBUG >= level:
print message
def debug_section(key, value, level=1):
debug("""
%(key)s:
%(value)s
""" % dict(key=key.upper(), value=value))
def split_token_by_operators(token):
"""Split the given ``token`` by all containing :attr:`CONTROL_OPERATORS`.
Each unquoted token longer than a single character is required to do this
during tokenization of a command. Otherwise, commands which are not
properly spaced will be treated incorrectly. As illustrated below:
>>> import shlex
>>> import oyster
>>> cmd = 'cd /some/path;ls'
>>> tokens = shlex.split(cmd, posix=True)
>>> tokens
['cd', '/some/path;ls']
>>> processed = oyster.split_token_by_operators(tokens[1])
>>> processed
['/some/path', ';', 'ls']
>>> tokens = [tokens[0]]
>>> tokens.extend(processed)
>>> tokens
['cd', '/some/path', ';', 'ls']
:param token: The token to check for control operators
"""
if len(token) <= 1 or is_quoted(token):
return [token]
tokens = []
characters = []
consume_next = False
previous_character = None
for index, character in enumerate(token):
if consume_next:
consume_next = False
previous_character = character
continue
try:
next_character = token[index + 1]
except IndexError:
next_character = ''
is_escaped = (character == '\\' and
previous_character != '\\' and
next_character != '\\')
if is_escaped:
characters.append(character)
characters.append(next_character)
consume_next = True
continue
found = False
for operator in CONTROL_OPERATORS:
if operator == character:
found = True
break
if operator == character + next_character:
found = True
consume_next = True
break
previous_character = character
if found:
tokens.append(''.join(characters))
tokens.append(operator)
characters = []
else:
characters.append(character)
if characters:
tokens.append(''.join(characters))
return tokens
def tokenize(string):
"""Tokenize given ``string`` and return a list containing all the tokens.
The workhorse behind this function is the ``shlex`` module. However, tokens
found via ``shlex`` are processed to ensure we handle command substitutions
along with chained commands properly.
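For example (a short illustrative sketch):
>>> import oyster
>>> oyster.tokenize('cd /some/path;ls -la')
['cd', '/some/path', ';', 'ls', '-la']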
:param string: The command - as a string - to tokenize
"""
processed = []
lex = shlex.shlex(string, posix=True)
lex.whitespace_split = True
lex.commenters = ''
in_substitution = False
substitution_closer = None
substitution_tokens = []
while True:
token = lex.get_token()
title = '[TOKEN | IN SUBSTITUTION]' if in_substitution else '[TOKEN]'
debug_section(title, token, level=2)
if token is None:
debug('- Abort. Empty token', level=2)
break
if in_substitution:
substitution_tokens.append(token)
if token.endswith(substitution_closer):
debug('- Command substitution closed.')
processed.append(''.join(substitution_tokens))
substitution_tokens = []
in_substitution = False
continue
if token.startswith('$('):
debug('- Command substitution detected using $(', level=2)
in_substitution = True
substitution_closer = ')'
substitution_tokens.append(token)
continue
if token.startswith('`'):
debug('- Command substitution detected using `', level=2)
in_substitution = True
substitution_closer = '`'
substitution_tokens.append(token)
continue
# Handle the case of: cd /some/path&&ls
processed.extend(split_token_by_operators(token))
if substitution_tokens:
processed.append(''.join(substitution_tokens))
return processed
def is_comment(string):
"""Check whether given string is considered to be a comment.
:param string: The string, i.e command, to check
"""
return string.lstrip()[0] == '#'
def is_script(string):
"""Check whether given string is considered to be a script.
This function oversimplifies what a shell script is, but covers
the necessary basics for this module.
:param string: The string, i.e command, to check
"""
is_script = False
string = string.lstrip()
for reserved in RESERVED_WORDS:
if string.startswith(reserved):
is_script = True
break
return is_script
def is_quoted(string):
"""Check whether given string is quoted.
:param string: The string, i.e command, to check
"""
string = string.lstrip()
return ((string.startswith('"') and string.endswith('"')) or
(string.startswith("'") and string.endswith("'")))
def is_command(string, tokens=None):
"""Check whether given string is considered to be a command.
:param string: The string, i.e command, to check
"""
if not string:
return False
if is_comment(string):
return False
if is_quoted(string):
return False
if is_script(string):
return False
return True
def parse(string):
"""Parse given ``string`` into a :class:`Chain` of :class:`Command` s.
>>> import oyster
>>> cmd = 'pip search -vvv --timeout=5 flask | grep session | less'
>>> chain = oyster.parse(cmd)
>>> len(chain)
3
>>> pip, grep, less = chain
>>> pip.has_option('--timeout')
True
>>> pip.get_option_values('--timeout')
['5']
>>> pip.get_option_count('-v')
3
>>> pip.arguments
('search', '--timeout=5', 'flask')
>>> str(grep)
'grep session'
>>> str(less)
'less'
>>> chain.remove('less')
>>> str(chain)
'pip search -vvv --timeout=5 flask | grep session'
>>> chain += 'date -u'
>>> str(chain)
'pip search -vvv --timeout=5 flask | grep session; date -u'
>>> utc_date = chain[chain.index('date -u')]
>>> str(utc_date)
'date -u'
>>> utc_date.get_option_values('-u')
[True]
:param string: The string, i.e command, to parse
"""
try:
chain = Chain()
string = string.strip()
if DEBUG:
print '**********************************************************'
debug_section('String to parse', string)
if not (string or hasattr(string, 'isalpha')):
debug_section('Abort', 'Given command is not a string')
return chain
tokens = tokenize(string)
debug_section('Tokens', tokens)
if not is_command(string, tokens):
debug_section('Abort', 'Given string was not a command')
return chain
chained_by = None
command_tokens = []
to_parse = tokens + [';']
for index, token in enumerate(to_parse):
if token not in CONTROL_OPERATOR_LOOKUP:
command_tokens.append(token)
continue
if is_script(command_tokens[0]):
# Abort entire chain if script is detected
chain = Chain()
debug_section('Abort', 'Script detected')
break
command = Command(command_tokens)
chain.append(command, chained_by=chained_by)
debug_section('Command chained (%s)' % chained_by, command)
chained_by = token
command_tokens = []
except Exception as e:
debug_section('Exception thrown', e)
raise
return chain
| mit | -4,775,107,700,802,405,000 | 32.164848 | 87 | 0.588173 | false |
altugkarakurt/morty | morty/classifiers/knnclassifier.py | 1 | 17794 | # -*- coding: utf-8 -*-
import numpy as np
import pickle
import json
import copy
from .inputparser import InputParser
from .knn import KNN
from ..converter import Converter
from ..pitchdistribution import PitchDistribution
class KNNClassifier(InputParser):
def __init__(self, step_size=7.5, kernel_width=15.0, feature_type='pcd',
model=None):
"""--------------------------------------------------------------------
These attributes are wrapped in an object since they are used in both the
training and estimation stages and must be consistent in both processes
-----------------------------------------------------------------------
step_size : Step size of the distribution bins
kernel_width : Standard deviation of the Gaussian kernel used to
smooth the distributions. For further details,
see generate_pd() of ModeFunctions.
feature_type : The feature type to be used in training and testing
("pd" for pitch distribution, "pcd" for pitch
class distribution)
model : Pre-trained model
--------------------------------------------------------------------"""
super(KNNClassifier, self).__init__(
step_size=step_size, kernel_width=kernel_width,
feature_type=feature_type, model=model)
def train(self, pitches, tonics, modes, sources=None, model_type='multi'):
if model_type == 'single':
return self._train_single_distrib_per_mode(
pitches, tonics, modes, sources=sources)
elif model_type == 'multi':
return self._train_multi_distrib_per_mode(
pitches, tonics, modes, sources=sources)
else:
raise ValueError("Unknown training model")
def _train_single_distrib_per_mode(self, pitches, tonics, modes,
sources=None):
"""--------------------------------------------------------------------
For the mode trainings, the requirements are a set of recordings with
annotated tonics for each mode under consideration. This function only
expects the recordings' pitch tracks and corresponding tonics as lists.
The two lists should be indexed in parallel, so the tonic of ith pitch
track in the pitch track list should be the ith element of tonic list.
Once training is completed for a mode, the model would be generated
as a PitchDistribution object and saved in a JSON file. For loading
these objects and other relevant information about the data structure,
see the PitchDistribution class.
-----------------------------------------------------------------------
pitches : List of pitch tracks or the list of files with
stored pitch tracks (i.e. single-column
lists/numpy arrays/files with frequencies)
tonics : List of annotated tonic frequencies of recordings
modes : Name of the modes of each training sample.
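-----------------------------------------------------------------------
A minimal usage sketch (file names and mode names below are made up):
classifier = KNNClassifier(feature_type='pcd')
classifier.train(['rec1.pitch', 'rec2.pitch'], [440.0, 432.0],
['hicaz', 'rast'], sources=['rec1', 'rec2'],
model_type='single')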
--------------------------------------------------------------------"""
assert len(pitches) == len(modes) == len(tonics), \
'The inputs should have the same length!'
# get the pitch tracks for each mode and convert them to cent unit
tmp_model = {m: {'sources': [], 'cent_pitch': []} for m in set(modes)}
for p, t, m, s in zip(pitches, tonics, modes, sources):
# parse the pitch track from txt file, list or numpy array and
# normalize with respect to annotated tonic
pitch_cent = self._parse_pitch_input(p, t)
# convert to cent track and append to the mode data
tmp_model[m]['cent_pitch'].extend(pitch_cent)
tmp_model[m]['sources'].append(s)
# compute the feature for each model from the normalized pitch tracks
for data_point in tmp_model.values():
data_point['feature'] = PitchDistribution.from_cent_pitch(
data_point.pop('cent_pitch', None),
kernel_width=self.kernel_width, step_size=self.step_size)
# convert to pitch-class distribution if requested
if self.feature_type == 'pcd':
data_point['feature'].to_pcd()
# make the model a list of dictionaries by collapsing the mode keys
# inside the values
model = []
for mode_name, data_point in tmp_model.items():
data_point['mode'] = mode_name
model.append(data_point)
self.model = model
def _train_multi_distrib_per_mode(self, pitches, tonics, modes,
sources=None):
"""--------------------------------------------------------------------
For the mode trainings, the requirements are a set of recordings with
annotated tonics for each mode under consideration. This function only
expects the recordings' pitch tracks and corresponding tonics as lists.
The two lists should be indexed in parallel, so the tonic of ith pitch
track in the pitch track list should be the ith element of tonic list.
Each pitch track yields its own pitch distribution. Each of these
distributions is appended to a list, so the mode is represented by
as many sample points as there are training recordings. The result is
a list of PitchDistribution objects, i.e. a list of structured
dictionaries, and this is what is saved.
-----------------------------------------------------------------------
pitches : List of pitch tracks or the list of files with
stored pitch tracks (i.e. single-column
lists/numpy arrays/files with frequencies)
tonics : List of annotated tonic frequencies of recordings
modes : Name of the modes of each training sample
sources : List of identifiers (e.g. recording names)
for each training sample
--------------------------------------------------------------------"""
assert len(pitches) == len(modes) == len(tonics), \
'The inputs should have the same length!'
# get the pitch tracks for each mode and convert them to cent unit
model = []
for p, t, m, s in zip(pitches, tonics, modes, sources):
# parse the pitch track from txt file, list or numpy array and
# normalize with respect to annotated tonic
pitch_cent = self._parse_pitch_input(p, t)
feature = PitchDistribution.from_cent_pitch(
pitch_cent, kernel_width=self.kernel_width,
step_size=self.step_size)
# convert to pitch-class distribution if requested
if self.feature_type == 'pcd':
feature.to_pcd()
data_point = {'source': s, 'tonic': t, 'mode': m,
'feature': feature}
# convert to cent track and append to the mode data
model.append(data_point)
self.model = model
def identify_tonic(self, test_input, mode, min_peak_ratio=0.1,
distance_method='bhat', k_neighbor=15, rank=1):
"""--------------------------------------------------------------------
Tonic Identification: The mode of the recording is known and the
tonic is to be estimated.
:param test_input: - precomputed feature (PD or PCD in Hz)
- pitch track in Hz (list or numpy array)
:param mode: input mode label
:param min_peak_ratio: The minimum ratio between the max peak value and
the value of a detected peak
:param distance_method: distance used in KNN
:param k_neighbor: number of neighbors to select in KNN classification
:param rank: number of estimations to return
:return: ranked tonic estimations
--------------------------------------------------------------------"""
test_feature = self._parse_tonic_and_joint_estimate_input(test_input)
# Tonic Estimation
estimations = self._estimate(
test_feature, est_tonic=True, mode=mode,
min_peak_ratio=min_peak_ratio, distance_method=distance_method,
k_neighbor=k_neighbor, rank=rank)
# remove the dummy tonic estimation
tonics_ranked = [(e[0][0], e[1]) for e in estimations]
return tonics_ranked
def estimate_tonic(self, test_input, mode, min_peak_ratio=0.1,
distance_method='bhat', k_neighbor=1, rank=1):
"""
Alias of "identify_tonic" method. See the documentation of
"identify_tonic" for more information.
"""
return self.identify_tonic(
test_input, mode, min_peak_ratio=min_peak_ratio,
distance_method=distance_method, k_neighbor=k_neighbor, rank=rank)
def recognize_mode(self, feature_in, tonic=None, distance_method='bhat',
k_neighbor=15, rank=1):
"""--------------------------------------------------------------------
Mode recognition: The tonic of the recording is known and the mode is
to be estimated.
:param feature_in: - precomputed feature (PitchDistribution object)
- pitch track (list or numpy array)
:param tonic: tonic frequency (float). It is needed if the feature_in
has not been normalized with respect to the tonic earlier
:param distance_method: distance used in KNN
:param k_neighbor: number of neighbors to select in KNN classification
:param rank: number of estimations to return
:return: ranked mode estimations
--------------------------------------------------------------------"""
test_feature = self._parse_mode_estimate_input(feature_in, tonic)
# Mode Estimation
estimations = self._estimate(
test_feature, est_tonic=False, mode=None,
distance_method=distance_method, k_neighbor=k_neighbor, rank=rank)
# remove the dummy tonic estimation
modes_ranked = [(e[0][1], e[1]) for e in estimations]
return modes_ranked
def estimate_mode(self, feature_in, tonic=None, distance_method='bhat',
k_neighbor=15, rank=1):
return self.recognize_mode(
feature_in, tonic=tonic, distance_method=distance_method,
k_neighbor=k_neighbor, rank=rank)
def estimate_joint(self, test_input, min_peak_ratio=0.1,
distance_method='bhat', k_neighbor=15, rank=1):
"""--------------------------------------------------------------------
Joint estimation: Estimate both the tonic and mode together
:param test_input: - precomputed feature (PD or PCD in Hz)
- pitch track in Hz (list or numpy array)
:param min_peak_ratio: The minimum ratio between the max peak value and
the value of a detected peak
:param distance_method: distance used in KNN
:param k_neighbor: number of neighbors to select in KNN classification
:param rank: number of estimations to return
:return: ranked mode and tonic estimations
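A minimal usage sketch (``pitch_track_hz`` is an illustrative variable):
estimations = classifier.estimate_joint(pitch_track_hz, rank=3)
# each entry pairs (tonic_hz, mode_name) with its match distance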
--------------------------------------------------------------------"""
test_feature = self._parse_tonic_and_joint_estimate_input(test_input)
# Mode Estimation
joint_estimations = self._estimate(
test_feature, est_tonic=True, mode=None,
min_peak_ratio=min_peak_ratio, distance_method=distance_method,
k_neighbor=k_neighbor, rank=rank)
return joint_estimations
def _estimate(self, test_feature, mode=None, est_tonic=True,
min_peak_ratio=0.1, distance_method='bhat', k_neighbor=15,
rank=1):
assert est_tonic or mode is None, 'Nothing to estimate.'
if est_tonic is True:
# find the tonic candidates of the input feature
test_feature, tonic_cands, peak_idx = self._get_tonic_candidates(
test_feature, min_peak_ratio=min_peak_ratio)
else:
# dummy assign the first index
tonic_cands = np.array([test_feature.ref_freq])
peak_idx = np.array([0])
training_features, training_modes = self._get_training_model(mode)
dist_mat = KNN.generate_distance_matrix(
test_feature, peak_idx, training_features,
distance_method=distance_method)
# sort results
sorted_idx = np.argsort(dist_mat, axis=None)
sorted_dists = np.sort(dist_mat, axis=None)
sorted_tonic_cand_idx, sorted_mode_idx = np.unravel_index(
sorted_idx, dist_mat.shape)
# convert from sorted index to sorted tonic frequency and mode
sorted_tonics = tonic_cands[sorted_tonic_cand_idx]
sorted_modes = training_modes[sorted_mode_idx]
sorted_pairs = [((t, m), d) for t, m, d in
zip(sorted_tonics, sorted_modes, sorted_dists)]
# there might not be enough options to get estimations up to the
# requested rank. Max is the number of unique sorted pairs
max_rank = len(set(sp[0] for sp in sorted_pairs))
# compute ranked estimations
ranked_pairs = []
for r in range(min(rank, max_rank)):
cand_pairs = KNN.get_nearest_neighbors(sorted_pairs, k_neighbor)
estimation, sorted_pairs = KNN.classify(cand_pairs, sorted_pairs)
ranked_pairs.append(estimation)
return ranked_pairs
@staticmethod
def _get_tonic_candidates(test_feature, min_peak_ratio=0.1):
# find the global minimum and shift the distribution there so
# peak detection does not fail to locate a peak at the boundary of
# octave-wrapped features. For features that are not
# octave-wrapped this step is harmless.
shift_feature = copy.deepcopy(test_feature)
global_minima_idx = np.argmin(shift_feature.vals)
shift_feature.shift(global_minima_idx)
# get the peaks of the feature as the tonic candidate indices and
# compute the stable frequencies from the peak indices
peak_idx = shift_feature.detect_peaks(min_peak_ratio=min_peak_ratio)[0]
peaks_cent = shift_feature.bins[peak_idx]
freqs = Converter.cent_to_hz(peaks_cent, shift_feature.ref_freq)
# return the shifted feature, stable frequencies and their
# corresponding index in the shifted feature
return shift_feature, freqs, peak_idx
def _get_training_model(self, mode):
if mode is None:
training_features = [m['feature'] for m in self.model]
feature_modes = np.array([m['mode'] for m in self.model])
else:
training_features = [m['feature'] for m in self.model
if m['mode'] == mode]
# create dummy array with annotated mode
feature_modes = np.array(
[mode for _ in range(len(training_features))])
return training_features, feature_modes
def model_from_pickle(self, input_str):
try: # file given
self.model = pickle.load(open(input_str, 'rb'))
except IOError: # string given
self.model = pickle.loads(input_str)
@staticmethod
def model_to_pickle(model, file_name=None):
if file_name is None:
return pickle.dumps(model)
else:
pickle.dump(model, open(file_name, 'wb'))
def model_from_json(self, file_name):
"""--------------------------------------------------------------------
Loads a the training model from JSON file.
-----------------------------------------------------------------------
file_name : The filename of the JSON file
--------------------------------------------------------------------
"""
try:
temp_model = json.load(open(file_name, 'r'))
except IOError: # json string
temp_model = json.loads(file_name)
for tm in temp_model:
tm['feature'] = tm['feature'] if isinstance(tm['feature'], dict) \
else tm['feature'][0]
tm['feature'] = PitchDistribution.from_dict(tm['feature'])
self.model = temp_model
@staticmethod
def model_to_json(model, file_name=None):
"""--------------------------------------------------------------------
Saves the training model to a JSON file.
-----------------------------------------------------------------------
model : Training model
file_name : The file path of the JSON file to be created. None to
return a json string
--------------------------------------------------------------------"""
temp_model = copy.deepcopy(model)
for tm in temp_model:
try:
tm['feature'] = tm['feature'].to_dict()
except AttributeError: # already a dict
assert isinstance(tm['feature'], dict), \
'The feature should have been a dict'
if file_name is None:
return json.dumps(temp_model, indent=4)
else:
json.dump(temp_model, open(file_name, 'w'), indent=4)
| agpl-3.0 | -8,946,108,269,907,752,000 | 47.485014 | 79 | 0.553951 | false |
kinshuk4/MoocX | misc/deep_learning_notes/Proj_Centroid_Loss_LeNet/convnet_2_deep/MNIST_train.py | 1 | 3905 | import os, sys, numpy as np, tensorflow as tf
from pathlib import Path
from termcolor import colored as c, cprint
sys.path.append(str(Path(__file__).resolve().parents[1]))
import convnet_2_deep
__package__ = 'convnet_2_deep'
from . import network
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
BATCH_SIZE = 64
FILENAME = os.path.basename(__file__)
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
SUMMARIES_DIR = SCRIPT_DIR
SAVE_PATH = SCRIPT_DIR + "/network.ckpt"
### configure devices for this eval script.
USE_DEVICE = '/gpu:2'
session_config = tf.ConfigProto(log_device_placement=True)
session_config.gpu_options.allow_growth = True
# this is required if want to use GPU as device.
# see: https://github.com/tensorflow/tensorflow/issues/2292
session_config.allow_soft_placement = True
if __name__ == "__main__":
with tf.Graph().as_default() as g, tf.device(USE_DEVICE):
# inference()
input, deep_feature = network.inference()
labels, logits, loss_op = network.loss(deep_feature)
train, global_step = network.training(loss_op, 1)
eval = network.evaluation(logits, labels)
init = tf.initialize_all_variables()
with tf.Session(config=session_config) as sess:
# Merge all the summaries and write them out to /tmp/mnist_logs (by default)
# to see the tensor graph, fire up the tensorboard with --logdir="./train"
all_summary = tf.merge_all_summaries()
train_writer = tf.train.SummaryWriter(SUMMARIES_DIR + '/summaries/train', sess.graph)
test_writer = tf.train.SummaryWriter(SUMMARIES_DIR + '/summaries/test')
saver = tf.train.Saver()
# try:
# saver.restore(sess, SAVE_PATH)
# except ValueError:
# print('checkpoint file not found. Moving on to initializing automatically.')
# sess.run(init)
sess.run(init)
for i in range(500000):
batch_xs, batch_labels = mnist.train.next_batch(BATCH_SIZE)
if i % 100 == 0:
summaries, step, logits_output, loss_value, accuracy = \
sess.run(
[all_summary, global_step, logits, loss_op, eval],
feed_dict={
input: mnist.test.images,
labels: mnist.test.labels
})
test_writer.add_summary(summaries, global_step=step)
cprint(
c("#" + str(i), 'grey') +
c(" training accuracy", 'green') + " is " +
c(accuracy, 'red') + ", " +
c("loss", 'green') + " is " +
c(loss_value, 'red')
)
print('logits => ', logits_output[0])
if i % 500 == 0:
saver.save(sess, SAVE_PATH)
print('=> saved network in checkfile.')
summaries, step, _ = sess.run([all_summary, global_step, train], feed_dict={
input: batch_xs,
labels: batch_labels
})
train_writer.add_summary(summaries, global_step=step)
# now let's test!
TEST_BATCH_SIZE = np.shape(mnist.test.labels)[0]
summaries, step, logits_output, loss_value, accuracy = \
sess.run(
[all_summary, global_step, logits, loss_op, eval], feed_dict={
input: mnist.test.images,
labels: mnist.test.labels
})
test_writer.add_summary(summaries, global_step=step)
print("MNIST Test accuracy is ", accuracy)
| mit | -591,876,572,340,954,200 | 40.105263 | 97 | 0.540845 | false |
pyGBot/pyGBot | pyGBot/Plugins/system/CommandSpec/Restart.py | 1 | 1082 | ##
## pyGBot - Versatile IRC Bot
## Copyright (C) 2008 Morgan Lokhorst-Blight, Alex Soborov
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
from pyGBot import log
from pyGBot.Plugins.system.Commands import BaseCommand
from pyGBot.Plugins.system.Auth import AuthLevels as AL
class Restart(BaseCommand):
level = AL.Admin
def __init__(self, bot, channel, user, args):
log.logger.info("Restart requested")
bot.restart()
| gpl-3.0 | -3,236,118,717,411,400,000 | 39.111111 | 75 | 0.711645 | false |
Ronnasayd/Ifuzzy2py | test_gauss.py | 1 | 2012 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Copyright 2017 Ronnasayd Machado <[email protected]>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from IMfuzzy2 import IMfuzzy2
from Mfunction import Mfunction
from Antecedent import Antecedent
from Consequent import Consequent
from Rule import Rule
from Rules import Rules
from Input import Input
from Output import Output
from Inputs import Inputs
from Outputs import Outputs
from Ifuzzy2 import Ifuzzy2
TRAP = 1
TRIA = 2
GAUS = 3
rules = Rules()
upperBI = Mfunction(GAUS, 2, 0.8)
lowerBI = Mfunction(GAUS, 2, 0.2)
BaixaI = IMfuzzy2(lowerBI, upperBI)
upperAI = Mfunction(GAUS, 3, 0.8)
lowerAI = Mfunction(GAUS, 3, 0.2)
AltaI = IMfuzzy2(lowerAI, upperAI)
upperBO = Mfunction(GAUS, 2, 0.8)
lowerBO = Mfunction(GAUS, 2, 0.2)
BaixaO = IMfuzzy2(lowerBO, upperBO)
upperAO = Mfunction(GAUS, 3, 0.8)
lowerAO = Mfunction(GAUS, 3, 0.2)
AltaO = IMfuzzy2(lowerAO, upperAO)
I = Input(0, 5)
I.addMf(BaixaI)
I.addMf(AltaI)
O = Output(0, 5)
O.addMf(BaixaO)
O.addMf(AltaO)
inputs = Inputs()
inputs.addInput(I)
outputs = Outputs()
outputs.addOutput(O)
ant = Antecedent()
ant.addMf(BaixaI)
cont = Consequent()
cont.addMf(AltaO)
rule = Rule(ant, cont)
rules.addRule(rule)
ant = Antecedent()
ant.addMf(AltaI)
cont = Consequent()
cont.addMf(BaixaO)
rule = Rule(ant, cont)
rules.addRule(rule)
fuzzy = Ifuzzy2(inputs, outputs, rules,99)
for x in range(0, 6):
fuzzy.fuzzyfy([x])
y = fuzzy.defuzzyfy(1)
[yl, yr] = fuzzy.getReducedFuzzy(1)
print (yl,":", yr,":", y)
| apache-2.0 | -5,231,015,777,263,714,000 | 21.606742 | 72 | 0.73161 | false |
edwardbadboy/vdsm-ubuntu | vdsm/API.py | 1 | 56320 | #
# Copyright (C) 2012 Adam Litke, IBM Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
# pylint: disable=R0904
import os
import signal
import copy
import subprocess
import pickle
import time
import threading
import logging
from vdsm import utils
from clientIF import clientIF
import configNetwork
from netmodels import Bond
from netmodels import Vlan
from vdsm import netinfo
from vdsm import constants
import storage.misc
import storage.clusterlock
import storage.volume
import storage.sd
import storage.image
import vm
from vdsm.define import doneCode, errCode, Kbytes, Mbytes
import caps
from vdsm.config import config
import ksm
import supervdsm
# default message for system shutdown, will be displayed in guest
USER_SHUTDOWN_MESSAGE = 'System going down'
PAGE_SIZE_BYTES = os.sysconf('SC_PAGESIZE')
class APIBase(object):
ctorArgs = []
def __init__(self):
self._cif = clientIF.getInstance()
self._irs = self._cif.irs
self.log = self._cif.log
class ConnectionRefs(APIBase):
ctorArgs = []
def __init__(self):
APIBase.__init__(self)
def acquire(self, conRefArgs):
return self._irs.storageServer_ConnectionRefs_acquire(conRefArgs)
def release(self, refIDs):
return self._irs.storageServer_ConnectionRefs_release(refIDs)
def statuses(self):
return self._irs.storageServer_ConnectionRefs_statuses()
class Task(APIBase):
ctorArgs = ['taskID']
def __init__(self, UUID):
APIBase.__init__(self)
self._UUID = UUID
def clear(self):
return self._irs.clearTask(self._UUID)
def getInfo(self):
return self._irs.getTaskInfo(self._UUID)
def getStatus(self):
return self._irs.getTaskStatus(self._UUID)
def revert(self):
return self._irs.revertTask(self._UUID)
def stop(self):
return self._irs.stopTask(self._UUID)
class VM(APIBase):
BLANK_UUID = '00000000-0000-0000-0000-000000000000'
ctorArgs = ['vmID']
def __init__(self, UUID):
APIBase.__init__(self)
self._UUID = UUID
def changeCD(self, driveSpec):
"""
Change the CD in the specified VM.
:param vmId: uuid of specific VM.
:type vmId: UUID
:param driveSpec: specification of the new CD image. Either an
image path or a `storage`-centric quartet.
"""
v = self._cif.vmContainer.get(self._UUID)
if not v:
return errCode['noVM']
return v.changeCD(driveSpec)
def changeFloppy(self, driveSpec):
"""
Change the floppy disk in the specified VM.
:param vmId: uuid of specific VM.
:type vmId: UUID
:param driveSpec: specification of the new floppy image. Either an
image path or a `storage`-centric quartet.
"""
v = self._cif.vmContainer.get(self._UUID)
if not v:
return errCode['noVM']
return v.changeFloppy(driveSpec)
def cont(self):
v = self._cif.vmContainer.get(self._UUID)
if not v:
return errCode['noVM']
return v.cont()
def create(self, vmParams):
"""
Start up a virtual machine.
:param vmParams: required and optional VM parameters.
:type vmParams: dict
"""
vmParams['vmId'] = self._UUID
try:
if vmParams.get('vmId') in self._cif.vmContainer:
self.log.warning('vm %s already exists' % vmParams['vmId'])
return errCode['exist']
if 'hiberVolHandle' in vmParams:
vmParams['restoreState'], paramFilespec = \
self._getHibernationPaths(vmParams.pop('hiberVolHandle'))
try: # restore saved vm parameters
# NOTE: pickled params override command-line params. This
# might cause problems if an upgrade took place since the
# params were stored.
fname = self._cif.prepareVolumePath(paramFilespec)
try:
with file(fname) as f:
pickledMachineParams = pickle.load(f)
if type(pickledMachineParams) == dict:
self.log.debug('loaded pickledMachineParams ' +
str(pickledMachineParams))
self.log.debug('former conf ' + str(vmParams))
vmParams.update(pickledMachineParams)
finally:
self._cif.teardownVolumePath(paramFilespec)
except:
self.log.error("Error restoring VM parameters",
exc_info=True)
requiredParams = ['vmId', 'memSize', 'display']
for param in requiredParams:
if param not in vmParams:
self.log.error('Missing required parameter %s' % (param))
return {'status': {'code': errCode['MissParam']
['status']['code'],
'message': 'Missing required '
'parameter %s' % (param)}}
try:
storage.misc.validateUUID(vmParams['vmId'])
except:
return {'status': {'code': errCode['MissParam']
['status']['code'],
'message': 'vmId must be a valid UUID'}}
if vmParams['memSize'] == 0:
return {'status': {'code': errCode['MissParam']
['status']['code'],
'message': 'Must specify nonzero memSize'}}
if vmParams.get('boot') == 'c' and not 'hda' in vmParams \
and not vmParams.get('drives'):
return {'status': {'code': errCode['MissParam']
['status']['code'],
'message': 'missing boot disk'}}
if 'vmType' not in vmParams:
vmParams['vmType'] = 'kvm'
elif vmParams['vmType'] == 'kvm':
if 'kvmEnable' not in vmParams:
vmParams['kvmEnable'] = 'true'
if 'sysprepInf' in vmParams:
if not vmParams.get('floppy'):
vmParams['floppy'] = '%s%s.vfd' % (constants.P_VDSM_RUN,
vmParams['vmId'])
vmParams['volatileFloppy'] = True
if caps.osversion()['name'] == caps.OSName.UNKNOWN:
return {'status': {'code': errCode['createErr']
['status']['code'],
'message': 'Unknown host operating system'}}
if 'sysprepInf' in vmParams:
if not self._createSysprepFloppyFromInf(vmParams['sysprepInf'],
vmParams['floppy']):
return {'status': {'code': errCode['createErr']
['status']['code'],
'message': 'Failed to create '
'sysprep floppy image. '
'No space on /tmp?'}}
return errCode['createErr']
if vmParams.get('display') not in ('vnc', 'qxl', 'qxlnc'):
return {'status': {'code': errCode['createErr']
['status']['code'],
'message': 'Unknown display type %s' %
vmParams.get('display')}}
if 'nicModel' not in vmParams:
vmParams['nicModel'] = config.get('vars', 'nic_model')
vmParams['displayIp'] = self._getNetworkIp(vmParams.get(
'displayNetwork'))
vmParams['displayPort'] = '-1' # selected by libvirt
vmParams['displaySecurePort'] = '-1'
return self._cif.createVm(vmParams)
except OSError as e:
self.log.debug("OS Error creating VM", exc_info=True)
return {'status': {'code': errCode['createErr']['status']['code'],
'message': 'Failed to create VM. '
'No space on /tmp? ' + e.message}}
except:
self.log.debug("Error creating VM", exc_info=True)
return errCode['unexpected']
def desktopLock(self):
"""
Lock user session in guest operating system using guest agent.
"""
try:
v = self._cif.vmContainer[self._UUID]
except KeyError:
return errCode['noVM']
v.guestAgent.desktopLock()
if v.guestAgent.isResponsive():
return {'status': doneCode}
else:
return errCode['nonresp']
def desktopLogin(self, domain, username, password):
"""
Log into guest operating system using guest agent.
"""
try:
v = self._cif.vmContainer[self._UUID]
except KeyError:
return errCode['noVM']
v.guestAgent.desktopLogin(domain, username, password)
if v.guestAgent.isResponsive():
return {'status': doneCode}
else:
return errCode['nonresp']
def desktopLogoff(self, force):
"""
Log out of guest operating system using guest agent.
"""
try:
v = self._cif.vmContainer[self._UUID]
except KeyError:
return errCode['noVM']
v.guestAgent.desktopLogoff(force)
if v.guestAgent.isResponsive():
return {'status': doneCode}
else:
return errCode['nonresp']
def desktopSendHcCommand(self, message):
"""
Send a command to the guest agent (deprecated).
"""
try:
v = self._cif.vmContainer[self._UUID]
except KeyError:
return errCode['noVM']
v.guestAgent.sendHcCmdToDesktop(message)
if v.guestAgent.isResponsive():
return {'status': doneCode}
else:
return errCode['nonresp']
def destroy(self):
"""
Destroy the specified VM.
"""
self._cif.vmContainerLock.acquire()
self.log.info("vmContainerLock acquired by vm %s", self._UUID)
try:
v = self._cif.vmContainer.get(self._UUID)
if not v:
return errCode['noVM']
res = v.destroy()
status = copy.deepcopy(res)
if status['status']['code'] == 0:
status['status']['message'] = "Machine destroyed"
return status
finally:
self._cif.vmContainerLock.release()
def getMigrationStatus(self):
"""
Report status of a currently outgoing migration.
"""
try:
v = self._cif.vmContainer[self._UUID]
except KeyError:
return errCode['noVM']
return v.migrateStatus()
def getStats(self):
"""
Obtain statistics of the specified VM
"""
v = self._cif.vmContainer.get(self._UUID)
if not v:
return errCode['noVM']
stats = v.getStats().copy()
stats['vmId'] = self._UUID
return {'status': doneCode, 'statsList': [stats]}
def hibernate(self, hibernationVolHandle):
"""
Hibernate a VM.
:param hibernationVolHandle: opaque string, indicating the location of
hibernation images.
"""
params = {'vmId': self._UUID, 'mode': 'file',
'hiberVolHandle': hibernationVolHandle}
response = self.migrate(params)
if not response['status']['code']:
response['status']['message'] = 'Hibernation process starting'
return response
def vmUpdateDevice(self, params):
if 'deviceType' not in params:
self.log.error('Missing a required parameters: deviceType')
return {'status': {'code': errCode['MissParam']['status']['code'],
'message': 'Missing one of required '
'parameters: deviceType'}}
try:
v = self._cif.vmContainer[self._UUID]
except KeyError:
self.log.warning("vm %s doesn't exist", self._UUID)
return errCode['noVM']
if params['deviceType'] == vm.NIC_DEVICES:
if 'alias' not in params:
self.log.error('Missing the required alias parameters.')
return {'status':
{'code': errCode['MissParam']['status']['code'],
'message': 'Missing the required alias parameter'}}
return v.updateDevice(params)
def hotplugNic(self, params):
try:
utils.validateMinimalKeySet(params, ('vmId', 'nic'))
except ValueError:
self.log.error('Missing one of required parameters: vmId, nic')
return {'status': {'code': errCode['MissParam']['status']['code'],
'message': 'Missing one of required '
'parameters: vmId, nic'}}
try:
curVm = self._cif.vmContainer[self._UUID]
except KeyError:
self.log.warning("vm %s doesn't exist", self._UUID)
return errCode['noVM']
return curVm.hotplugNic(params)
def hotunplugNic(self, params):
try:
utils.validateMinimalKeySet(params, ('vmId', 'nic'))
except ValueError:
self.log.error('Missing one of required parameters: vmId, nic')
return {'status': {'code': errCode['MissParam']['status']['code'],
'message': 'Missing one of required '
'parameters: vmId, nic'}}
try:
curVm = self._cif.vmContainer[self._UUID]
except KeyError:
self.log.warning("vm %s doesn't exist", self._UUID)
return errCode['noVM']
return curVm.hotunplugNic(params)
def hotplugDisk(self, params):
try:
utils.validateMinimalKeySet(params, ('vmId', 'drive'))
except ValueError:
self.log.error('Missing one of required parameters: vmId, drive')
return {'status': {'code': errCode['MissParam']['status']['code'],
'message': 'Missing one of required '
'parameters: vmId, drive'}}
try:
curVm = self._cif.vmContainer[self._UUID]
except KeyError:
self.log.warning("vm %s doesn't exist", self._UUID)
return errCode['noVM']
return curVm.hotplugDisk(params)
def hotunplugDisk(self, params):
try:
utils.validateMinimalKeySet(params, ('vmId', 'drive'))
except ValueError:
self.log.error('Missing one of required parameters: vmId, drive')
return {'status': {'code': errCode['MissParam']['status']['code'],
'message': 'Missing one of required '
'parameters: vmId, drive'}}
try:
curVm = self._cif.vmContainer[self._UUID]
except KeyError:
self.log.warning("vm %s doesn't exist", self._UUID)
return errCode['noVM']
return curVm.hotunplugDisk(params)
def migrate(self, params):
"""
Migrate a VM to a remote host.
:param params: a dictionary containing:
*dst* - remote host or hibernation image filename
*dstparams* - hibernation image filename for vdsm parameters
*mode* - ``remote``/``file``
*method* - ``online``
*downtime* - allowed down time during online migration
*dstqemu* - remote host address dedicated for migration
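An illustrative params dict (host names and values are made up):
{'dst': 'dst-host.example.com', 'mode': 'remote',
'method': 'online', 'downtime': 500}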
"""
params['vmId'] = self._UUID
self.log.debug(params)
try:
v = self._cif.vmContainer[self._UUID]
except KeyError:
return errCode['noVM']
vmParams = v.status()
if vmParams['status'] in ('WaitForLaunch', 'Down'):
return errCode['noVM']
if params.get('mode') == 'file':
if 'dst' not in params:
params['dst'], params['dstparams'] = \
self._getHibernationPaths(params['hiberVolHandle'])
else:
params['mode'] = 'remote'
return v.migrate(params)
def migrateCancel(self):
"""
Cancel a currently outgoing migration process.
"""
try:
v = self._cif.vmContainer[self._UUID]
except KeyError:
return errCode['noVM']
return v.migrateCancel()
def migrationCreate(self, params):
"""
Start a migration-destination VM.
:param params: parameters of new VM, to be passed to
*:meth:* - `~clientIF.create`.
:type params: dict
"""
self.log.debug('Migration create')
params['vmId'] = self._UUID
response = self.create(params)
if response['status']['code']:
self.log.debug('Migration create - Failed')
return response
v = self._cif.vmContainer.get(self._UUID)
if not v.waitForMigrationDestinationPrepare():
return errCode['createErr']
self.log.debug('Destination VM creation succeeded')
return {'status': doneCode, 'migrationPort': 0,
'params': response['vmList']}
def monitorCommand(self, command):
"""
Send a monitor command to the specified VM and wait for the answer.
:param vmId: uuid of the specified VM
:type vmId: UUID
:param command: a single monitor command (without terminating newline)
:type command: string
"""
return errCode['noimpl']
def diskReplicateStart(self, srcDisk, dstDisk):
v = self._cif.vmContainer.get(self._UUID)
if not v:
return errCode['noVM']
return v.diskReplicateStart(srcDisk, dstDisk)
def diskReplicateFinish(self, srcDisk, dstDisk):
v = self._cif.vmContainer.get(self._UUID)
if not v:
return errCode['noVM']
return v.diskReplicateFinish(srcDisk, dstDisk)
def diskSizeExtend(self, driveSpecs, newSize):
v = self._cif.vmContainer.get(self._UUID)
if not v:
return errCode['noVM']
return v.diskSizeExtend(driveSpecs, newSize)
def pause(self):
v = self._cif.vmContainer.get(self._UUID)
if not v:
return errCode['noVM']
return v.pause()
def reset(self):
"""
Press the virtual reset button for the specified VM.
"""
return errCode['noimpl']
def sendKeys(self, keySequence):
"""
Send a string of keys to a guest's keyboard (OBSOLETE)
Used only by QA and might be discontinued in next version.
"""
return errCode['noimpl']
def setTicket(self, password, ttl, existingConnAction, params):
"""
Set the ticket (password) to be used to connect to a VM display
:param vmId: specify the VM whose ticket is to be changed.
:param password: new password
:type password: string
:param ttl: ticket lifetime (seconds)
:param existingConnAction: what to do with a currently-connected
client (SPICE only):
``disconnect`` - disconnect old client when a new client
connects.
``keep`` - allow existing client to remain
connected.
``fail`` - abort command without disconnecting
the current client.
:param additional parameters in dict format
"""
try:
v = self._cif.vmContainer[self._UUID]
except KeyError:
return errCode['noVM']
return v.setTicket(password, ttl, existingConnAction, params)
def shutdown(self, delay=None, message=None):
"""
Shut a VM down politely.
:param message: message to be shown to guest user before shutting down
his machine.
:param delay: grace period (seconds) to let guest user close his
applications.
"""
try:
v = self._cif.vmContainer[self._UUID]
except KeyError:
return errCode['noVM']
if not delay:
delay = config.get('vars', 'user_shutdown_timeout')
if not message:
message = USER_SHUTDOWN_MESSAGE
return v.shutdown(delay, message)
def _createSysprepFloppyFromInf(self, infFileBinary, floppyImage):
try:
rc, out, err = utils.execCmd([constants.EXT_MK_SYSPREP_FLOPPY,
floppyImage],
sudo=True, data=infFileBinary.data)
if rc:
return False
else:
return True
except:
self.log.error("Error creating sysprep floppy", exc_info=True)
return False
def _getHibernationPaths(self, hiberVolHandle):
"""
Break *hiberVolHandle* into the "quartets" of hibernation images.
"""
domainID, poolID, stateImageID, stateVolumeID, \
paramImageID, paramVolumeID = hiberVolHandle.split(',')
return dict(domainID=domainID, poolID=poolID, imageID=stateImageID,
volumeID=stateVolumeID, device='disk'), \
dict(domainID=domainID, poolID=poolID,
imageID=paramImageID, volumeID=paramVolumeID,
device='disk')
def _getNetworkIp(self, network):
try:
nets = netinfo.networks()
device = nets[network].get('iface', network)
ip = netinfo.getaddr(device)
except:
ip = config.get('addresses', 'guests_gateway_ip')
if ip == '':
ip = '0'
self.log.info('network %s: using %s', network, ip)
return ip
def snapshot(self, snapDrives, snapMemVolHandle):
v = self._cif.vmContainer.get(self._UUID)
if not v:
return errCode['noVM']
memoryParams = {}
if snapMemVolHandle:
memoryParams['dst'], memoryParams['dstparams'] = \
self._getHibernationPaths(snapMemVolHandle)
return v.snapshot(snapDrives, memoryParams)
def merge(self, mergeDrives):
v = self._cif.vmContainer.get(self._UUID)
if not v:
return errCode['noVM']
return v.merge(mergeDrives)
def mergeStatus(self):
v = self._cif.vmContainer.get(self._UUID)
if not v:
return errCode['noVM']
return v.mergeStatus()
def setBalloonTarget(self, target):
v = self._cif.vmContainer.get(self._UUID)
if not v:
return errCode['noVM']
return v.setBalloonTarget(target)
def getDiskAlignment(self, drive):
if self._UUID != VM.BLANK_UUID:
return errCode['noimpl']
return self._cif.getDiskAlignment(drive)
class Volume(APIBase):
ctorArgs = ['volumeID', 'storagepoolID', 'storagedomainID', 'imageID']
class Types:
UNKNOWN = storage.volume.UNKNOWN_VOL
PREALLOCATED = storage.volume.PREALLOCATED_VOL
SPARSE = storage.volume.SPARSE_VOL
class Formats:
UNKNOWN = storage.volume.UNKNOWN_FORMAT
COW = storage.volume.COW_FORMAT
RAW = storage.volume.RAW_FORMAT
class Roles:
SHARED = storage.volume.SHARED_VOL
LEAF = storage.volume.LEAF_VOL
BLANK_UUID = storage.volume.BLANK_UUID
def __init__(self, UUID, spUUID, sdUUID, imgUUID):
APIBase.__init__(self)
self._UUID = UUID
self._spUUID = spUUID
self._sdUUID = sdUUID
self._imgUUID = imgUUID
def copy(self, dstSdUUID, dstImgUUID, dstVolUUID, desc, volType,
volFormat, preallocate, postZero, force):
vmUUID = '' # vmUUID is never used
return self._irs.copyImage(self._sdUUID, self._spUUID, vmUUID,
self._imgUUID, self._UUID, dstImgUUID,
dstVolUUID, desc, dstSdUUID, volType,
volFormat, preallocate, postZero, force)
def create(self, size, volFormat, preallocate, diskType, desc,
srcImgUUID, srcVolUUID):
return self._irs.createVolume(self._sdUUID, self._spUUID,
self._imgUUID, size, volFormat,
preallocate, diskType, self._UUID, desc,
srcImgUUID, srcVolUUID)
def delete(self, postZero, force):
return self._irs.deleteVolume(self._sdUUID, self._spUUID,
self._imgUUID, [self._UUID], postZero,
force)
def extendSize(self, newSize):
return self._irs.extendVolumeSize(
self._spUUID, self._sdUUID, self._imgUUID, self._UUID, newSize)
def updateSize(self, newSize):
return self._irs.updateVolumeSize(
self._spUUID, self._sdUUID, self._imgUUID, self._UUID, newSize)
def getInfo(self):
return self._irs.getVolumeInfo(self._sdUUID, self._spUUID,
self._imgUUID, self._UUID)
def getPath(self):
return self._irs.getVolumePath(self._sdUUID, self._spUUID,
self._imgUUID, self._UUID)
def getSize(self):
return self._irs.getVolumeSize(self._sdUUID, self._spUUID,
self._imgUUID, self._UUID)
def setSize(self, newSize):
return self._irs.setVolumeSize(self._sdUUID, self._spUUID,
self._imgUUID, self._UUID, newSize)
def prepare(self, rw):
return self._irs.prepareVolume(self._sdUUID, self._spUUID,
self._imgUUID, self._UUID, rw)
def refresh(self):
return self._irs.refreshVolume(self._sdUUID, self._spUUID,
self._imgUUID, self._UUID)
def setDescription(self, description):
return self._irs.setVolumeDescription(self._sdUUID, self._spUUID,
self._imgUUID, self._UUID,
description)
def setLegality(self, legality):
return self._irs.setVolumeLegality(self._sdUUID, self._spUUID,
self._imgUUID, self._UUID, legality)
def tearDown(self):
return self._irs.teardownVolume(self._sdUUID, self._spUUID,
self._imgUUID, self._UUID)
class Image(APIBase):
ctorArgs = ['imageID', 'storagepoolID', 'storagedomainID']
BLANK_UUID = storage.volume.BLANK_UUID
class DiskTypes:
UNKNOWN = storage.image.UNKNOWN_DISK_TYPE
SYSTEM = storage.image.SYSTEM_DISK_TYPE
DATA = storage.image.DATA_DISK_TYPE
SHARED = storage.image.SHARED_DISK_TYPE
SWAP = storage.image.SWAP_DISK_TYPE
TEMP = storage.image.TEMP_DISK_TYPE
def __init__(self, UUID, spUUID, sdUUID):
APIBase.__init__(self)
self._UUID = UUID
self._spUUID = spUUID
self._sdUUID = sdUUID
def delete(self, postZero, force):
return self._irs.deleteImage(self._sdUUID, self._spUUID, self._UUID,
postZero, force)
def deleteVolumes(self, volumeList, postZero=False, force=False):
return self._irs.deleteVolume(self._sdUUID, self._spUUID, self._UUID,
volumeList, postZero, force)
def getVolumes(self):
return self._irs.getVolumesList(self._sdUUID, self._spUUID, self._UUID)
def mergeSnapshots(self, ancestor, successor, postZero):
vmUUID = '' # Not used
# XXX: On success, self._sdUUID needs to be updated
return self._irs.mergeSnapshots(self._sdUUID, self._spUUID, vmUUID,
self._UUID, ancestor, successor,
postZero)
def move(self, dstSdUUID, operation, postZero, force):
vmUUID = '' # Not used
# XXX: On success, self._sdUUID needs to be updated
return self._irs.moveImage(self._spUUID, self._sdUUID, dstSdUUID,
self._UUID, vmUUID, operation, postZero,
force)
def cloneStructure(self, dstSdUUID):
return self._irs.cloneImageStructure(self._spUUID, self._sdUUID,
self._UUID, dstSdUUID)
def syncData(self, dstSdUUID, syncType):
return self._irs.syncImageData(self._spUUID, self._sdUUID, self._UUID,
dstSdUUID, syncType)
def upload(self, methodArgs, volUUID=None):
return self._irs.uploadImage(
methodArgs, self._spUUID, self._sdUUID, self._UUID, volUUID)
def download(self, methodArgs, volUUID=None):
return self._irs.downloadImage(
methodArgs, self._spUUID, self._sdUUID, self._UUID, volUUID)
class LVMVolumeGroup(APIBase):
ctorArgs = ['lvmvolumegroupID']
def __init__(self, UUID=None):
APIBase.__init__(self)
self._UUID = UUID
def create(self, name, devlist, force=False):
return self._irs.createVG(name, devlist, force)
def getInfo(self):
if self._UUID is not None:
return self._irs.getVGInfo(self._UUID)
else:
# FIXME: Add proper error return
return None
def remove(self):
if self._UUID is not None:
return self._irs.removeVG(self._UUID)
else:
# FIXME: Add proper error return
return None
class ISCSIConnection(APIBase):
ctorArgs = ['host', 'port', 'user', 'password']
def __init__(self, host, port, user="", password=""):
APIBase.__init__(self)
self._host = host
self._port = port
self._user = user
self._pass = password
def discoverSendTargets(self):
params = {'connection': self._host, 'port': self._port,
'user': self._user, 'password': self._pass}
return self._irs.discoverSendTargets(params)
class StorageDomain(APIBase):
ctorArgs = ['storagedomainID']
class Types:
UNKNOWN = storage.sd.UNKNOWN_DOMAIN
NFS = storage.sd.NFS_DOMAIN
FCP = storage.sd.FCP_DOMAIN
ISCSI = storage.sd.ISCSI_DOMAIN
LOCALFS = storage.sd.LOCALFS_DOMAIN
CIFS = storage.sd.CIFS_DOMAIN
POSIXFS = storage.sd.POSIXFS_DOMAIN
class Classes:
DATA = storage.sd.DATA_DOMAIN
ISO = storage.sd.ISO_DOMAIN
BACKUP = storage.sd.BACKUP_DOMAIN
BLANK_UUID = storage.sd.BLANK_UUID
def __init__(self, UUID):
APIBase.__init__(self)
self._UUID = UUID
def activate(self, spUUID):
return self._irs.activateStorageDomain(self._UUID, spUUID)
def attach(self, spUUID):
return self._irs.attachStorageDomain(self._UUID, spUUID)
def create(self, type, typeArgs, name, domainClass, version=None):
if version is None:
version = constants.SUPPORTED_DOMAIN_VERSIONS[0]
return self._irs.createStorageDomain(type, self._UUID, name, typeArgs,
domainClass, version)
def deactivate(self, spUUID, masterSdUUID, masterVersion):
return self._irs.deactivateStorageDomain(self._UUID, spUUID,
masterSdUUID, masterVersion)
def detach(self, spUUID, masterSdUUID, masterVersion, force):
if force:
return self._irs.forcedDetachStorageDomain(self._UUID, spUUID)
else:
return self._irs.detachStorageDomain(self._UUID, spUUID,
masterSdUUID, masterVersion)
def extend(self, spUUID, devlist, force=False):
return self._irs.extendStorageDomain(self._UUID, spUUID, devlist,
force)
def format(self, autoDetach):
return self._irs.formatStorageDomain(self._UUID, autoDetach)
def getFileList(self, pattern):
return self._irs.getFileList(self._UUID, pattern)
def getImages(self):
return self._irs.getImagesList(self._UUID)
def getInfo(self):
return self._irs.getStorageDomainInfo(self._UUID)
def getStats(self):
return self._irs.getStorageDomainStats(self._UUID)
def getVolumes(self, spUUID, imgUUID=Image.BLANK_UUID):
return self._irs.getVolumesList(self._UUID, spUUID, imgUUID)
def setDescription(self, description):
return self._irs.setStorageDomainDescription(self._UUID, description)
def uploadVolume(self, spUUID, imgUUID, volUUID, srcPath, size, method):
return self._irs.uploadVolume(self._UUID, spUUID, imgUUID, volUUID,
srcPath, size, method)
def validate(self):
return self._irs.validateStorageDomain(self._UUID)
class StoragePool(APIBase):
ctorArgs = ['storagepoolID']
def __init__(self, UUID):
APIBase.__init__(self)
self._UUID = UUID
def connect(self, hostID, scsiKey, masterSdUUID, masterVersion):
return self._irs.connectStoragePool(self._UUID, hostID, scsiKey,
masterSdUUID, masterVersion)
def connectStorageServer(self, domainType, connectionParams):
return self._irs.connectStorageServer(domainType, self._UUID,
connectionParams)
def create(self, name, masterSdUUID, masterVersion, domainList,
lockRenewalIntervalSec, leaseTimeSec, ioOpTimeoutSec,
leaseRetries):
poolType = None # Not used
lockPolicy = None # Not used
return self._irs.createStoragePool(
poolType, self._UUID, name, masterSdUUID, domainList,
masterVersion, lockPolicy, lockRenewalIntervalSec, leaseTimeSec,
ioOpTimeoutSec, leaseRetries)
def destroy(self, hostID, scsiKey):
return self._irs.destroyStoragePool(self._UUID, hostID, scsiKey)
def disconnect(self, hostID, scsiKey, remove):
return self._irs.disconnectStoragePool(self._UUID, hostID, scsiKey,
remove)
def disconnectStorageServer(self, domainType, connectionParams):
return self._irs.disconnectStorageServer(domainType, self._UUID,
connectionParams)
def fence(self):
lastOwner = None # Unused
lastLver = None # Unused
return self._irs.fenceSpmStorage(self._UUID, lastOwner, lastLver)
def getBackedUpVmsInfo(self, sdUUID, vmList):
return self._irs.getVmsInfo(self._UUID, sdUUID, vmList)
def getBackedUpVmsList(self, sdUUID):
return self._irs.getVmsList(self._UUID, sdUUID)
def getFloppyList(self):
return self._irs.getFloppyList(self._UUID)
def getDomainsContainingImage(self, imgUUID):
return self._irs.getImageDomainsList(self._UUID, imgUUID)
def getIsoList(self, filenameExtension='iso'):
return self._irs.getIsoList(self._UUID, filenameExtension)
def getSpmStatus(self):
return self._irs.getSpmStatus(self._UUID)
def getInfo(self):
return self._irs.getStoragePoolInfo(self._UUID)
def moveMultipleImages(self, srcSdUUID, dstSdUUID, imgDict,
force=False):
vmUUID = None # Unused parameter
return self._irs.moveMultipleImages(self._UUID, srcSdUUID, dstSdUUID,
imgDict, vmUUID, force)
def reconstructMaster(self, hostId, name, masterSdUUID, masterVersion,
domainDict, lockRenewalIntervalSec, leaseTimeSec,
ioOpTimeoutSec, leaseRetries):
lockPolicy = None # Not used
return self._irs.reconstructMaster(
self._UUID, name, masterSdUUID, domainDict, masterVersion,
lockPolicy, lockRenewalIntervalSec, leaseTimeSec, ioOpTimeoutSec,
leaseRetries, hostId)
def refresh(self, masterSdUUID, masterVersion):
return self._irs.refreshStoragePool(self._UUID, masterSdUUID,
masterVersion)
def setDescription(self, description):
return self._irs.setStoragePoolDescription(self._UUID, description)
def spmStart(self, prevID, prevLver, enableScsiFencing,
maxHostID=None, domVersion=None):
if maxHostID is None:
maxHostID = storage.clusterlock.MAX_HOST_ID
recoveryMode = None # unused
return self._irs.spmStart(self._UUID, prevID, prevLver, recoveryMode,
enableScsiFencing, maxHostID, domVersion)
def spmStop(self):
return self._irs.spmStop(self._UUID)
def upgrade(self, targetDomVersion):
return self._irs.upgradeStoragePool(self._UUID, targetDomVersion)
def validateStorageServerConnection(self, domainType,
connectionParams):
return self._irs.validateStorageServerConnection(
domainType, self._UUID, connectionParams)
def updateVMs(self, vmList, sdUUID):
return self._irs.updateVM(self._UUID, vmList, sdUUID)
def removeVM(self, vmUUID, sdUUID):
return self._irs.removeVM(self._UUID, vmUUID, sdUUID)
class Global(APIBase):
ctorArgs = []
def __init__(self):
APIBase.__init__(self)
# General Host functions
def fenceNode(self, addr, port, agent, username, password, action,
secure=False, options=''):
"""Send a fencing command to a remote node.
agent is one of (rsa, ilo, drac5, ipmilan, etc)
action can be one of (status, on, off, reboot)."""
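        # Illustrative example (values are hypothetical): a call such as
        #   fenceNode('10.0.0.5', '', 'ipmilan', 'root', 's3cret', 'status')
        # runs the external fence script for that agent
        # (constants.EXT_FENCE_PREFIX + agent) and feeds it key=value lines
        # (agent=..., ipaddr=..., login=..., option=..., passwd=...) on stdin,
        # as built further below.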
def waitForPid(p, inp):
""" Wait until p.pid exits. Kill it if vdsm exists before. """
try:
p.stdin.write(inp)
p.stdin.close()
while p.poll() is None:
if not self._cif._enabled:
self.log.debug('killing fence script pid %s', p.pid)
os.kill(p.pid, signal.SIGTERM)
time.sleep(1)
try:
# improbable race: p.pid may now belong to another
# process
os.kill(p.pid, signal.SIGKILL)
except:
pass
return
time.sleep(1)
self.log.debug('rc %s inp %s out %s err %s', p.returncode,
hidePasswd(inp),
p.stdout.read(), p.stderr.read())
except:
self.log.error("Error killing fence script", exc_info=True)
def hidePasswd(text):
cleantext = ''
for line in text.splitlines(True):
if line.startswith('passwd='):
line = 'passwd=XXXX\n'
cleantext += line
return cleantext
self.log.debug('fenceNode(addr=%s,port=%s,agent=%s,user=%s,passwd=%s,'
'action=%s,secure=%s,options=%s)', addr, port, agent,
username, 'XXXX', action, secure, options)
if action not in ('status', 'on', 'off', 'reboot'):
raise ValueError('illegal action ' + action)
script = constants.EXT_FENCE_PREFIX + agent
try:
p = subprocess.Popen([script], stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE, close_fds=True)
except OSError as e:
if e.errno == os.errno.ENOENT:
return errCode['fenceAgent']
raise
inp = ('agent=fence_%s\nipaddr=%s\nlogin=%s\noption=%s\n'
'passwd=%s\n') % (agent, addr, username, action, password)
if port != '':
inp += 'port=%s\n' % (port,)
if utils.tobool(secure):
inp += 'secure=yes\n'
inp += options
if action == 'status':
out, err = p.communicate(inp)
self.log.debug('rc %s in %s out %s err %s', p.returncode,
hidePasswd(inp), out, err)
if not 0 <= p.returncode <= 2:
return {'status': {'code': 1,
'message': out + err}}
message = doneCode['message']
if p.returncode == 0:
power = 'on'
elif p.returncode == 2:
power = 'off'
else:
power = 'unknown'
message = out + err
return {'status': {'code': 0, 'message': message},
'power': power}
threading.Thread(target=waitForPid, args=(p, inp)).start()
return {'status': doneCode}
def ping(self):
"Ping the server. Useful for tests"
return {'status': doneCode}
def getCapabilities(self):
"""
Report host capabilities.
"""
c = caps.get()
c['netConfigDirty'] = str(self._cif._netConfigDirty)
return {'status': doneCode, 'info': c}
def getHardwareInfo(self):
"""
Report host hardware information
"""
try:
hw = supervdsm.getProxy().getHardwareInfo()
return {'status': doneCode, 'info': hw}
except:
self.log.error("failed to retrieve hardware info", exc_info=True)
return errCode['hwInfoErr']
def getStats(self):
"""
Report host statistics.
"""
def _readSwapTotalFree():
meminfo = utils.readMemInfo()
return meminfo['SwapTotal'] / 1024, meminfo['SwapFree'] / 1024
stats = {}
decStats = self._cif._hostStats.get()
for var in decStats:
stats[var] = utils.convertToStr(decStats[var])
stats['memAvailable'] = self._memAvailable() / Mbytes
stats['memCommitted'] = self._memCommitted() / Mbytes
stats['memFree'] = self._memFree() / Mbytes
stats['swapTotal'], stats['swapFree'] = _readSwapTotalFree()
stats['vmCount'], stats['vmActive'], stats['vmMigrating'] = \
self._countVms()
(tm_year, tm_mon, tm_day, tm_hour, tm_min, tm_sec,
dummy, dummy, dummy) = time.gmtime(time.time())
stats['dateTime'] = '%02d-%02d-%02dT%02d:%02d:%02d GMT' % (
tm_year, tm_mon, tm_day, tm_hour, tm_min, tm_sec)
if self._cif.mom:
stats['momStatus'] = self._cif.mom.getStatus()
stats.update(self._cif.mom.getKsmStats())
else:
stats['momStatus'] = 'disabled'
stats['ksmState'] = ksm.running()
stats['ksmPages'] = ksm.npages()
stats['ksmCpu'] = self._cif.ksmMonitor.cpuUsage
stats['memShared'] = self._memShared() / Mbytes
stats['netConfigDirty'] = str(self._cif._netConfigDirty)
stats['generationID'] = self._cif._generationID
return {'status': doneCode, 'info': stats}
def setLogLevel(self, level):
"""
Set verbosity level of vdsm's log.
params
level: requested logging level. `logging.DEBUG` `logging.ERROR`
Doesn't survive a restart
"""
logging.getLogger('clientIF.setLogLevel').info('Setting loglevel '
'to %s' % level)
handlers = logging.getLogger().handlers
[fileHandler] = [h for h in handlers if
isinstance(h, logging.FileHandler)]
fileHandler.setLevel(int(level))
return dict(status=doneCode)
# VM-related functions
def getVMList(self, fullStatus=False, vmList=[]):
""" return a list of known VMs with full (or partial) config each """
def reportedStatus(v, full):
d = v.status()
if full:
return d
else:
return {'vmId': d['vmId'], 'status': d['status']}
        # For faster membership checks, convert 'vmList' to a set
vmSet = set(vmList)
return {'status': doneCode,
'vmList': [reportedStatus(v, fullStatus)
for v in self._cif.vmContainer.values()
if not vmSet or v.id in vmSet]}
# Networking-related functions
def setupNetworks(self, networks, bondings, options):
"""Add a new network to this vds, replacing an old one."""
if not self._cif._networkSemaphore.acquire(blocking=False):
self.log.warn('concurrent network verb already executing')
return errCode['unavail']
try:
self._cif._netConfigDirty = True
try:
supervdsm.getProxy().setupNetworks(networks, bondings, options)
except configNetwork.ConfigNetworkError as e:
self.log.error(e.message, exc_info=True)
return {'status': {'code': e.errCode, 'message': e.message}}
return {'status': doneCode}
finally:
self._cif._networkSemaphore.release()
def addNetwork(self, bridge, vlan=None, bond=None, nics=None,
options=None):
"""Add a new network to this vds.
Network topology is bridge--[vlan--][bond--]nics.
vlan(number) and bond are optional - pass the empty string to discard
them. """
if options is None:
options = {}
self.translateNetOptionsToNew(options)
if not self._cif._networkSemaphore.acquire(blocking=False):
self.log.warn('concurrent network verb already executing')
return errCode['unavail']
try:
self._cif._netConfigDirty = True
if vlan:
options['vlan'] = vlan
if bond:
options['bonding'] = bond
if nics:
options['nics'] = list(nics)
try:
supervdsm.getProxy().addNetwork(bridge, options)
except configNetwork.ConfigNetworkError as e:
self.log.error(e.message, exc_info=True)
return {'status': {'code': e.errCode, 'message': e.message}}
return {'status': doneCode}
finally:
self._cif._networkSemaphore.release()
def delNetwork(self, bridge, vlan=None, bond=None, nics=None,
options=None):
"""Delete a network from this vds."""
if options is None:
options = {}
self.translateNetOptionsToNew(options)
try:
if not self._cif._networkSemaphore.acquire(blocking=False):
self.log.warn('concurrent network verb already executing')
return errCode['unavail']
if vlan or bond or nics:
# Backwards compatibility
self.log.warn('Specifying vlan, '
'bond or nics to delNetwork is deprecated')
_netinfo = netinfo.NetInfo()
try:
if bond:
Bond.validateName(bond)
if vlan:
Vlan.validateTag(vlan)
if nics and bond and set(nics) != \
set(_netinfo.bondings[bond]["slaves"]):
self.log.error('delNetwork: not all nics specified '
'are enslaved (%s != %s)' %
(nics,
_netinfo.bondings[bond]["slaves"]))
raise configNetwork.ConfigNetworkError(
configNetwork.ne.ERR_BAD_NIC,
"not all nics are enslaved")
except configNetwork.ConfigNetworkError as e:
self.log.error(e.message, exc_info=True)
return {'status': {'code': e.errCode,
'message': e.message}}
self._cif._netConfigDirty = True
try:
supervdsm.getProxy().delNetwork(bridge, options)
except configNetwork.ConfigNetworkError as e:
self.log.error(e.message, exc_info=True)
return {'status': {'code': e.errCode, 'message': e.message}}
return {'status': doneCode}
finally:
self._cif._networkSemaphore.release()
def editNetwork(self, oldBridge, newBridge, vlan=None, bond=None,
nics=None, options=None):
"""Add a new network to this vds, replacing an old one."""
if options is None:
options = {}
self.translateNetOptionsToNew(options)
if not self._cif._networkSemaphore.acquire(blocking=False):
self.log.warn('concurrent network verb already executing')
return errCode['unavail']
try:
if vlan:
options['vlan'] = vlan
if bond:
options['bonding'] = bond
if nics:
options['nics'] = list(nics)
self._cif._netConfigDirty = True
try:
supervdsm.getProxy().editNetwork(oldBridge, newBridge, options)
except configNetwork.ConfigNetworkError as e:
self.log.error(e.message, exc_info=True)
return {'status': {'code': e.errCode, 'message': e.message}}
return {'status': doneCode}
finally:
self._cif._networkSemaphore.release()
def setSafeNetworkConfig(self):
"""Declare current network configuration as 'safe'"""
if not self._cif._networkSemaphore.acquire(blocking=False):
self.log.warn('concurrent network verb already executing')
return errCode['unavail']
try:
self._cif._netConfigDirty = False
supervdsm.getProxy().setSafeNetworkConfig()
return {'status': doneCode}
finally:
self._cif._networkSemaphore.release()
# Top-level storage functions
def getStorageDomains(self, spUUID=None, domainClass=None,
storageType=None, remotePath=None):
return self._irs.getStorageDomainsList(spUUID, domainClass,
storageType, remotePath)
def getConnectedStoragePools(self):
return self._irs.getConnectedStoragePoolsList()
def getStorageRepoStats(self):
return self._irs.repoStats()
def getLVMVolumeGroups(self, storageType=None):
return self._irs.getVGList(storageType)
def getDeviceList(self, storageType=None):
return self._irs.getDeviceList(storageType)
def getDevicesVisibility(self, guidList):
return self._irs.getDevicesVisibility(guidList)
def getAllTasksInfo(self):
return self._irs.getAllTasksInfo()
def getAllTasksStatuses(self):
return self._irs.getAllTasksStatuses()
def getAllTasks(self):
return self._irs.getAllTasks()
def setMOMPolicy(self, policy):
try:
self._cif.mom.setPolicy(policy)
return dict(status=doneCode)
except:
return errCode['momErr']
def setMOMPolicyParameters(self, key_value_store):
try:
self._cif.mom.setPolicyParameters(key_value_store)
return dict(status=doneCode)
except:
return errCode['momErr']
    # Take a rough estimate of how much free memory is available for new VMs:
    # memTotal = memFree + memCached + mem_used_by_non_qemu + resident.
# simply returning (memFree + memCached) is not good enough, as the
# resident set size of qemu processes may grow - up to memCommitted.
# Thus, we deduct the growth potential of qemu processes, which is
# (memCommitted - resident)
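    # Worked example (numbers are purely illustrative): with
    # MemFree+Cached+Buffers = 8000 MB, qemu resident = 3000 MB,
    # memCommitted = 5000 MB and host_mem_reserve = 256 MB, the estimate is
    # 8000 + 3000 - 5000 - 256 = 5744 MB available for new VMs.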
def _memAvailable(self):
"""
Return an approximation of available memory for new VMs.
"""
memCommitted = self._memCommitted()
resident = 0
for v in self._cif.vmContainer.values():
if v.conf['pid'] == '0':
continue
try:
statmfile = file('/proc/' + v.conf['pid'] + '/statm')
resident += int(statmfile.read().split()[1])
except:
pass
resident *= PAGE_SIZE_BYTES
meminfo = utils.readMemInfo()
freeOrCached = (meminfo['MemFree'] +
meminfo['Cached'] + meminfo['Buffers']) * Kbytes
return freeOrCached + resident - memCommitted - \
config.getint('vars', 'host_mem_reserve') * Mbytes
def _memFree(self):
"""
Return the actual free mem on host.
"""
meminfo = utils.readMemInfo()
return (meminfo['MemFree'] +
meminfo['Cached'] + meminfo['Buffers']) * Kbytes
def _memShared(self):
"""
Return an approximation of memory shared by VMs thanks to KSM.
"""
return (self._cif.ksmMonitor.memsharing() * PAGE_SIZE_BYTES)
def _memCommitted(self):
"""
Return the amount of memory (Mb) committed for VMs
"""
committed = 0
for v in self._cif.vmContainer.values():
committed += v.memCommitted
return committed
def _countVms(self):
count = active = migrating = 0
for vmId, v in self._cif.vmContainer.items():
try:
count += 1
status = v.lastStatus
if status == 'Up':
active += 1
elif 'Migration' in status:
migrating += 1
except:
self.log.error(vmId + ': Lost connection to VM')
return count, active, migrating
@staticmethod
def translateNetOptionsToNew(options):
_translationMap = {
'IPADDR': 'ipaddr',
'NETMASK': 'netmask',
'PREFIX': 'prefix',
'GATEWAY': 'gateway',
'BOOTPROTO': 'bootproto',
'DELAY': 'delay',
'ONBOOT': 'onboot',
'BONDING_OPTS': 'bondingOptions',
}
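        # Example: {'IPADDR': '192.0.2.1', 'BOOTPROTO': 'none'} becomes
        # {'ipaddr': '192.0.2.1', 'bootproto': 'none'} (values are
        # illustrative); a deprecation warning is logged for each old key.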
for k, v in options.items():
if k in _translationMap:
logging.warn("options %s is deprecated. Use %s instead" %
(k, _translationMap[k]))
options[_translationMap[k]] = options.pop(k)
| gpl-2.0 | -4,730,397,504,926,118,000 | 36.248677 | 79 | 0.548171 | false |
Wyn10/Cnchi | cnchi/misc/keyboard_names.py | 1 | 9010 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
#
# keyboard_names.py
#
# Copyright © 2013-2016 Antergos
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
""" Parse base.xml """
import logging
import os
from gi.repository import GObject
from collections import OrderedDict
try:
import xml.etree.cElementTree as eTree
except ImportError as err:
import xml.etree.ElementTree as eTree
class Model(GObject.GObject):
""" Represents a keyboard model """
def __init__(self, name, description, vendor):
GObject.GObject.__init__(self)
self.name = name
self.description = description
self.vendor = vendor
def __repr__(self):
""" Return model description """
return self.description
class Variant(GObject.GObject):
""" Keymap variant layout """
def __init__(self, name, short_description, description, language_list):
GObject.GObject.__init__(self)
self.name = name
self.short_description = short_description
self.description = description
self.language_list = language_list
def __repr__(self):
""" Return variant description """
return self.description
class Layout(GObject.GObject):
""" Keymap layout """
def __init__(self, name, short_description, description, language_list):
GObject.GObject.__init__(self)
self.name = name
self.short_description = short_description
self.description = description
self.language_list = language_list
self.variants = {}
def __repr__(self):
""" Return layout description """
return self.description
def add_variant(self, variant):
""" Add new layout variant """
self.variants[variant.name] = variant
def sort_variants(self):
""" Sort variants """
self.variants = OrderedDict(
sorted(self.variants.items(), key=lambda t: str(t[1])))
class KeyboardNames(object):
""" Read all keyboard info (models, layouts and variants) """
def __init__(self, filename):
self.models = None
self.layouts = None
self._filename = filename
self._load_file()
def _clear(self):
""" Clear all data """
self.models = {}
self.layouts = {}
def _load_file(self):
""" Load info from xml file """
if not os.path.exists(self._filename):
logging.error("Can't find %s file!", self._filename)
return
self._clear()
xml_tree = eTree.parse(self._filename)
xml_root = xml_tree.getroot()
for model in xml_root.iter('model'):
for config_item in model.iter('configItem'):
model_name = ""
model_description = ""
model_vendor = ""
for item in config_item:
if item.tag == "name":
model_name = item.text
elif item.tag == "description":
model_description = item.text
elif item.tag == "vendor":
model_vendor = item.text
# Store model
self.models[model_name] = Model(
model_name,
model_description,
model_vendor)
for layout in xml_root.iter('layout'):
for layout_item in layout:
layout_language_list = []
if layout_item.tag == "configItem":
for item in layout_item:
if item.tag == "name":
layout_name = item.text
elif item.tag == "shortDescription":
layout_short_description = item.text
elif item.tag == "description":
layout_description = item.text
elif item.tag == "languageList":
for lang in item:
layout_language_list.append(lang.text)
self.layouts[layout_name] = Layout(
layout_name,
layout_short_description,
layout_description,
layout_language_list)
if layout_item.tag == "variantList":
for variant in layout_item:
variant_language_list = []
for config_item in variant:
for item in config_item:
if item.tag == "name":
variant_name = item.text
elif item.tag == "shortDescription":
variant_short_description = item.text
elif item.tag == "description":
variant_description = item.text
elif item.tag == "languageList":
for lang in item:
variant_language_list.append(lang.text)
self.layouts[layout_name].add_variant(
Variant(
variant_name,
variant_short_description,
variant_description,
variant_language_list))
self.sort_layouts()
def sort_layouts(self):
""" Sort stored layouts """
self.layouts = OrderedDict(
sorted(self.layouts.items(), key=lambda t: str(t[1])))
for name in self.layouts:
self.layouts[name].sort_variants()
def get_layout(self, name):
""" Get layout by its name """
if name in self.layouts:
return self.layouts[name]
else:
return None
def get_layouts(self):
""" Return all layouts """
return self.layouts
def get_layout_description(self, name):
""" Get layout description by its name """
if name in self.layouts:
return str(self.layouts[name])
else:
return None
def get_layout_by_description(self, description):
""" Get layout by its description """
for name in self.layouts:
if description == str(self.layouts[name]):
return self.layouts[name]
return None
def get_layout_name_by_description(self, description):
""" Get layout name by its description """
for name in self.layouts:
if description == str(self.layouts[name]):
return name
return None
def has_variants(self, name):
""" Check if layout has variants """
return bool(self.layouts[name].variants)
def get_variants(self, name):
""" Get layout variants """
return self.layouts[name].variants
def get_variant_description(self, name, variant_name):
""" Get variant description by its name (and layout name)"""
try:
return str(self.layouts[name].variants[variant_name])
        except KeyError:
return None
def get_variant_descriptions(self, name):
""" Get all variant descriptions for layout 'name' """
descriptions = []
for variant_name in self.layouts[name].variants:
description = str(self.layouts[name].variants[variant_name])
descriptions.append(description)
return descriptions
def get_variant_name_by_description(self, description):
""" Get variant name by its description """
for layout_name in self.layouts:
for variant_name in self.layouts[layout_name].variants:
if description == str(self.layouts[layout_name].variants[variant_name]):
return variant_name
return None
def test():
""" Test module """
base_xml_path = "/usr/share/cnchi/data/base.xml"
kbd_names = KeyboardNames(base_xml_path)
layouts = kbd_names.get_layouts()
for name in layouts:
print(name, layouts[name])
for variant_name in layouts[name].variants:
print(layouts[name], "-", layouts[name].variants[variant_name])
if __name__ == '__main__':
test()
| gpl-3.0 | 5,801,357,571,530,852,000 | 34.329412 | 88 | 0.545233 | false |
hwjworld/xiaodun-platform | cms/djangoapps/contentstore/views/checklist.py | 1 | 5604 | import json
import copy
from util.json_request import JsonResponse
from django.http import HttpResponseBadRequest
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_http_methods
from django_future.csrf import ensure_csrf_cookie
from edxmako.shortcuts import render_to_response
from django.http import HttpResponseNotFound
from django.core.exceptions import PermissionDenied
from xmodule.modulestore.django import loc_mapper
from ..utils import get_modulestore
from .access import has_course_access
from xmodule.course_module import CourseDescriptor
from xmodule.modulestore.locator import BlockUsageLocator
__all__ = ['checklists_handler']
# pylint: disable=unused-argument
@require_http_methods(("GET", "POST", "PUT"))
@login_required
@ensure_csrf_cookie
def checklists_handler(request, tag=None, package_id=None, branch=None, version_guid=None, block=None, checklist_index=None):
"""
The restful handler for checklists.
GET
html: return html page for all checklists
json: return json representing all checklists. checklist_index is not supported for GET at this time.
POST or PUT
json: updates the checked state for items within a particular checklist. checklist_index is required.
"""
location = BlockUsageLocator(package_id=package_id, branch=branch, version_guid=version_guid, block_id=block)
if not has_course_access(request.user, location):
raise PermissionDenied()
old_location = loc_mapper().translate_locator_to_location(location)
modulestore = get_modulestore(old_location)
course_module = modulestore.get_item(old_location)
json_request = 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json')
if request.method == 'GET':
# If course was created before checklists were introduced, copy them over
# from the template.
if not course_module.checklists:
course_module.checklists = CourseDescriptor.checklists.default
modulestore.update_item(course_module, request.user.id)
expanded_checklists = expand_all_action_urls(course_module)
if json_request:
return JsonResponse(expanded_checklists)
else:
handler_url = location.url_reverse('checklists/', '')
return render_to_response('checklists.html',
{
'handler_url': handler_url,
# context_course is used by analytics
'context_course': course_module,
'checklists': expanded_checklists
})
elif json_request:
# Can now assume POST or PUT because GET handled above.
if checklist_index is not None and 0 <= int(checklist_index) < len(course_module.checklists):
index = int(checklist_index)
persisted_checklist = course_module.checklists[index]
modified_checklist = json.loads(request.body)
# Only thing the user can modify is the "checked" state.
# We don't want to persist what comes back from the client because it will
# include the expanded action URLs (which are non-portable).
for item_index, item in enumerate(modified_checklist.get('items')):
persisted_checklist['items'][item_index]['is_checked'] = item['is_checked']
# seeming noop which triggers kvs to record that the metadata is
# not default
course_module.checklists = course_module.checklists
course_module.save()
modulestore.update_item(course_module, request.user.id)
expanded_checklist = expand_checklist_action_url(course_module, persisted_checklist)
return JsonResponse(expanded_checklist)
else:
return HttpResponseBadRequest(
("Could not save checklist state because the checklist index "
"was out of range or unspecified."),
content_type="text/plain"
)
else:
return HttpResponseNotFound()
def expand_all_action_urls(course_module):
"""
Gets the checklists out of the course module and expands their action urls.
Returns a copy of the checklists with modified urls, without modifying the persisted version
of the checklists.
"""
expanded_checklists = []
for checklist in course_module.checklists:
expanded_checklists.append(expand_checklist_action_url(course_module, checklist))
return expanded_checklists
def expand_checklist_action_url(course_module, checklist):
"""
Expands the action URLs for a given checklist and returns the modified version.
The method does a copy of the input checklist and does not modify the input argument.
"""
expanded_checklist = copy.deepcopy(checklist)
urlconf_map = {
"ManageUsers": "course_team",
"CourseOutline": "course",
"SettingsDetails": "settings/details",
"SettingsGrading": "settings/grading",
}
for item in expanded_checklist.get('items'):
action_url = item.get('action_url')
if action_url in urlconf_map:
url_prefix = urlconf_map[action_url]
ctx_loc = course_module.location
location = loc_mapper().translate_location(ctx_loc.course_id, ctx_loc, False, True)
item['action_url'] = location.url_reverse(url_prefix, '')
return expanded_checklist
| agpl-3.0 | -7,421,149,964,564,552,000 | 42.107692 | 125 | 0.662384 | false |
rhwhite/eventTracking | analysis/CalculateLand_vs_Sea_Averages.py | 1 | 10844 | """
Code to sum characteristics of events split by either time or speed and write
out the results to a table.
Author: Rachel H White [email protected]
Created: Oct 2016
Example use:
python CalculateLand_vs_Sea_Averages.py --Data TRMM --Version Standard \
--anstartyr 1998 --anendyr 2014 \
--tbound1 0 1 2 5 --tbound2 1 2 5 100 --splittype day \
--unit day --minlat -40 --maxlat 40
"""
import os, errno
import numpy as np
import netCDF4
from netCDF4 import Dataset
import datetime as dt
import re
import sys
import Ngl
import xray
import math
import resource
import argparse
from rhwhitepackages.readwrite import getunitsdesc
from rhwhitepackages.readwrite import xrayOpen
from rhwhitepackages.readwrite import getdirectory
parser = argparse.ArgumentParser(description="Calculate land/sea averages")
parser.add_argument('--minlat',type=int,nargs='?',default=-45,help='min lat')
parser.add_argument('--maxlat',type=int,nargs='?',default=45,help='max lat')
parser.add_argument('--minlon',type=int,nargs='?',default=0,help='min lon')
parser.add_argument('--maxlon',type=int,nargs='?',default=360,help='max lon')
parser.add_argument('--splittype',metavar='splittype',type=str,nargs=1,
help='the type of split you want, day, speed, or maxspeed')
parser.add_argument('--speedtspan',metavar='speedtspan',type=int,nargs='?',default=4,
help='how many time spans does the speed average cover?')
parser.add_argument('--tbound1',metavar='tbound1',type=float,nargs='+',
help='lower bounds')
parser.add_argument('--tbound2',metavar='tbound2',type=float,nargs="+",
help='upper bounds')
parser.add_argument('--unit',type=str,nargs=1,help='units of split type')
parser.add_argument('--Data',type=str,nargs=1,
help='type of Data, TRMM, ERAI, or CESM')
parser.add_argument('--Version',type=str,nargs=1,
help='Version of Data, Standard, low, 6th_from6 etc')
parser.add_argument('--anstartyr',type=int,nargs=1,
help='start year for analysis')
parser.add_argument('--anendyr',type=int,nargs=1,help='end year for analysis')
parser.add_argument('--test',type=int,nargs='?',default=0,help='1 for test')
args = parser.parse_args()
print args
# put inputs into the type and variable names we want
splittype = args.splittype[0]
speedtspan = args.speedtspan
# multiply the tbound inputs by 24 to convert days to hours
tbound1 = np.multiply(args.tbound1,24.0)
tbound2 = np.multiply(args.tbound2,24.0)
unit = args.unit[0]
Data = args.Data[0]
Version = args.Version[0]
startyr = args.anstartyr[0]
endyr = args.anendyr[0]
minlon = args.minlon
maxlon = args.maxlon
minlat = args.minlat
maxlat = args.maxlat
test = args.test
diradd = getdirectory(splittype)
nbounds = len(tbound1)
print(tbound1)
R = 6371000 # radius of Earth in m
nyears = endyr - startyr + 1
minevent = 100000
DirI = ('/home/disk/eos4/rachel/EventTracking/FiT_RW_ERA/' + Data + '_output/' +
Version + str(startyr) + '/proc/')
if Data == "TRMM":
if Version == '6th_from6' or Version == '5th_from48':
DirI = ('/home/disk/eos4/rachel/EventTracking/FiT_RW/TRMM_output/' +
Version + '/proc/')
FileInLats = ('/home/disk/eos4/rachel/Obs/TRMM/'
'SeasAnn_TRMM_1998-2014_3B42_3hrly_nonan.nc')
elif Data == "TRMMERAIgd":
FileInLats = ('/home/disk/eos4/rachel/Obs/TRMM/'
'regrid2ERAI_TRMM_3B42_1998-2014.nc')
elif Data == "ERAI":
FileInLats = ('/home/disk/eos4/rachel/Obs/ERAI/3hrly/Precip_3hrly/'
'SeasAnn_ERAI_Totalprecip_' +
str(startyr) + '-' + str(endyr) + '_preprocess.nc')
elif Data == "ERA20C":
FileInLats = '/home/disk/eos4/rachel/Obs/ERA_20C/ERA_20C_LatLon.nc'
elif Data == "CESM":
DirI = ('/home/disk/eos4/rachel/EventTracking/FiT_RW_ERA/CESM_output/' +
Version + str(startyr) + '/proc/')
FileInLats = ('/home/disk/eos4/rachel/EventTracking/Inputs/CESM/'
'f.e13.FAMPIC5.ne120_ne120.1979_2012.001/'
'f.e13.FAMIPC5.ne120_ne120_TotalPrecip_1979-2012.nc')
else:
print("unexpected data type")
exit()
DirO = DirI + diradd + '/'
# In[4]:
#Get lons and lats
FileIn = xrayOpen(FileInLats)
if Data == "CESM":
lats = FileIn['lat'].values
lons = FileIn['lon'].values
elif Data in ["ERA20C","TRMMERAIgd"]:
lats = FileIn['latitude'].values
lons = FileIn['longitude'].values
else:
lats = FileIn['Latitude'].values
lons = FileIn['Longitude'].values
nlats = len(lats)
nlons = len(lons)
# initialize data
averageydist = np.zeros([nbounds,2],float)
averagexdist = np.zeros([nbounds,2],float)
averageprecipperhr = np.zeros([nbounds,2],float)
averageprecipperareahr = np.zeros([nbounds,2],float)
averagetime = np.zeros([nbounds,2],float)
averagegridboxes = np.zeros([nbounds,2],float)
precipvolume = np.zeros([nbounds,2],float)
count = np.zeros([nbounds,2],int)
# In[5]:
# open main dataset and read in data
FileI1 = ('All_Precip_' + str(startyr) + '-' + str(endyr) + '_' + Data + '_' +
Version + '.nc')
datain = xrayOpen(DirI + FileI1,decodetimes=False)
nevents = len(datain.events)
timespan = datain.timespan[0:nevents].values
ycenterstart = datain.ycenterstart[0:nevents].values
ycenterend = datain.ycenterend[0:nevents].values
ycentermean = datain.ycentermean[0:nevents].values
# xcenterstart/xcenterend are needed by the region check below (assumed to be
# stored in the events file alongside ycenterstart/ycenterend)
xcenterstart = datain.xcenterstart[0:nevents].values
xcenterend = datain.xcenterend[0:nevents].values
xcentermean = datain.xcentermean[0:nevents].values
totalprecip = datain.totalprecip[0:nevents].values
totalprecipSA = datain.totalprecipSA[0:nevents].values
gridboxspan = datain.gridboxspan[0:nevents].values
startlats = lats[datain.ycenterstart[0:nevents].astype(int)]
endlats = lats[datain.ycenterend[0:nevents].astype(int)]
# In[6]:
# Set fileminlat and filemaxlat if we ran FiT on a subsection of the data
fileminlat = -90
filemaxlat = 90
# Need to get a land-sea mask at correct resolution
LandSeaMask = '/home/disk/eos4/rachel/Obs/TRMM/TMPA_land_sea_mask.nc'
LandSeaFile = xray.open_dataset(LandSeaMask)
LandSea = LandSeaFile['landseamask'].sel(lat=slice(fileminlat,filemaxlat))
# In[ ]:
def isinregion(ilat, ilon):
    # Return True if the point lies inside the analysis region
    if ilat < minlat or ilat > maxlat:
        return False
    if checklons and (ilon < minlon or ilon > maxlon):
        return False
    return True
# In[ ]:
checklons = True
if minlon == 0 and maxlon == 360:
checklons = False
elif minlon ==-180 and maxlon == 180:
checklons = False
if test == 1:
nevents = 10000
filenameadd = "test_"
else:
nevents = len(datain.events)
filenameadd = ""
for ievent in range(0,nevents):
if (ievent % 100000 == 0):
print "ievent: " + str(ievent)
# check if in region
if isinregion(lats[ycenterstart[ievent]],
lons[xcenterstart[ievent]]):
if isinregion(lats[ycenterend[ievent]],
lons[xcenterend[ievent]]):
if LandSea[ycentermean[ievent].astype(int),xcentermean[ievent].astype(int)] > 50:
LSindex = 0
else:
LSindex = 1
for ibound in range(0,nbounds):
if timespan[ievent] < tbound2[ibound]:
averageydist[ibound,LSindex] += (lats[ycenterend[ievent]] -
lats[ycenterstart[ievent]])
averagexdist[ibound,LSindex] += (lons[xcenterend[ievent]] -
lons[xcenterstart[ievent]])
# if negative then that's fine for NH, positive is fine
# for southern hemisphere, so get the average event
# distance travelled
averagetime[ibound,LSindex] += timespan[ievent]
averageprecipperhr[ibound,LSindex] += (
totalprecip[ievent]/timespan[ievent])
# Include factor of 3 to convert to hours, not timesteps
averageprecipperareahr[ibound,LSindex] += (
totalprecip[ievent]/(3.0 *
gridboxspan[ievent]))
averagegridboxes[ibound,LSindex] += gridboxspan[ievent]
precipvolume[ibound,LSindex] += totalprecipSA[ievent]
count[ibound,LSindex] += 1
break
# In[ ]:
averageydist = averageydist / count
averagexdist = averagexdist / count
averagetime = averagetime / count
averageprecipperhr = averageprecipperhr / count
averageprecipperareahr = averageprecipperareahr / count
averagegridboxes = averagegridboxes/count
# In[ ]:
# Write out to a text file
for LSindex in range(0,2):
if LSindex == 0:
filename = (filenameadd + 'Averages_Seas_' + '{:d}'.format(minlat) + 'N-' +
'{:d}'.format(maxlat) + 'N.txt')
elif LSindex == 1:
filename = (filenameadd + 'Averages_Land_' + '{:d}'.format(minlat) + 'N-' +
'{:d}'.format(maxlat) + 'N.txt')
with open(DirI + filename, 'w') as text_file:
text_file.write('Domain averages for ' + '{:d}'.format(minlat) + 'N-' +
'{:d}'.format(maxlat) + 'N and ' + '{:d}'.format(minlon) + 'E-'
+ '{:d}'.format(maxlon) + 'E \n')
text_file.write('timespan (hours), \t count (events/yr), \t average '
'latitude distance (degs), \t average longitude distance '
'(degrees) \t averagepreciphr (mm/hr), \taveragepreciphr '
'(mm/gridbox/hr) \t total precip (m3 /yr) \n')
for ibound in range(0,nbounds):
text_file.write('{:.1f}'.format(tbound1[ibound]) + '-' +
'{:.1f}'.format(tbound2[ibound]) + 'hours, ' +
'{:.2e}'.format(count[ibound,LSindex]/nyears) +
'; ' +
'{:.2f}'.format(averageydist[ibound,LSindex]) +
'; ' +
'{:.2f}'.format(averagexdist[ibound,LSindex]) +
'; ' +
'{:.2f}'.format(averagetime[ibound,LSindex]) +
'; ' +
'{:.2e}'.format(averagegridboxes[ibound,LSindex]) +
'; ' +
'{:.2e}'.format(averageprecipperhr[ibound,LSindex]) +
'; ' +
'{:.2e}'.format(averageprecipperareahr[ibound,LSindex])
                            + '; \t ' +
'{:.2e}'.format(precipvolume[ibound,LSindex]/nyears)
+ ' \n')
# In[ ]:
datain.close()
| mit | -5,112,532,222,688,676,000 | 33.645367 | 93 | 0.595629 | false |
wevoice/wesub | utils/factories.py | 1 | 17469 | # Amara, universalsubtitles.org
#
# Copyright (C) 2013 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
"""utils.factories.py -- Factoryboy factories for testing
"""
from __future__ import absolute_import
import datetime
import hashlib
from django.contrib.auth.hashers import make_password
from django.template.defaultfilters import slugify
import factory
from factory import Factory
from factory.django import DjangoModelFactory
import auth.models
import babelsubs.storage
import comments.models
import externalsites.models
import subtitles.models
import teams.models
import videos.models
from externalsites import google
from subtitles import pipeline
from utils import dates
from utils import translation
class VideoURLFactory(DjangoModelFactory):
FACTORY_FOR = videos.models.VideoUrl
url = factory.Sequence(
lambda n: 'http://example.com/videos/video-{0}'.format(n))
type = videos.models.VIDEO_TYPE_HTML5
class VideoFactory(DjangoModelFactory):
FACTORY_FOR = videos.models.Video
title = factory.Sequence(lambda n: 'Test Video {0}'.format(n))
duration = 100
allow_community_edits = False
video_url = factory.RelatedFactory(VideoURLFactory, 'video', primary=True)
@factory.post_generation
def set_follower(video, create, extracted, **kwargs):
if video.user:
video.followers.add(video.user)
@factory.post_generation
def make_version(video, create, extracted, **attrs):
if extracted:
if extracted is True:
languages = ['en']
elif isinstance(extracted, (list, tuple)):
languages = extracted
else:
languages = [extracted]
for language in languages:
make_version(video, language)
@factory.post_generation
def team(video, create, extracted, **attrs):
if extracted:
TeamVideoFactory(team=extracted, video=video,
added_by=video.user)
@factory.post_generation
def with_many_visibility_combinations(video, create, extracted, **kwargs):
"""Make languages with many different combinations of
visibility/visibility_override_choices
This method creates languages with all possible combinations with 0,
1, and 2 versions and some combinations using more versions.
This method will also set the tips/public_tips attributes on the
video. They will map language ids to the version number of the
correct tip for each language.
"""
if not extracted:
return
now = datetime.datetime.now()
language_codes = iter(translation.ALL_LANGUAGE_CODES)
visibility_choices = ['public', 'private']
visibility_override_choices = ['', 'public', 'private', 'deleted']
combo_choices = [
(v, vo)
for v in visibility_choices
for vo in visibility_override_choices
]
all_versions = []
video.public_tips = {}
video.tips = {}
def make_language(visibility_list):
language = SubtitleLanguageFactory(
video=video, language_code=language_codes.next(),
)
public_tip = tip = None
for i, visibilities in enumerate(visibility_list):
version = subtitles.models.SubtitleVersion(
video=video, subtitle_language=language,
language_code=language.language_code,
created=now, visibility=visibilities[0],
visibility_override=visibilities[1],
version_number=i+1)
if version.is_public():
public_tip = version.version_number
if not version.is_deleted():
tip = version.version_number
all_versions.append(version)
video.tips[language.id] = tip
video.public_tips[language.id] = public_tip
# make all combinations of 0, 1, and 2 versions
make_language([])
for choice in combo_choices:
make_language([choice])
for choice2 in combo_choices:
make_language([choice, choice2])
# make a copule languages with many public/private versions
make_language([('public', '') for i in range(4)])
make_language([('private', '') for i in range(4)])
# language that had private verisons, then the last was published
make_language([
('private', ''), ('private', ''), ('private', 'public'),
])
# language that had got published, then had a post-edit
make_language([
('private', ''), ('private', ''), ('private', ''),
('private', 'public'), ('public', ''),
])
# language that had public verisons, but they were deleted
make_language([
('public', 'deleted'), ('public', 'deleted'),
('public', 'deleted'), ('public', 'deleted'),
])
subtitles.models.SubtitleVersion.objects.bulk_create(all_versions)
class KalturaVideoFactory(VideoFactory):
FACTORY_HIDDEN_ARGS = ('name',)
video_url__type = 'K'
name = 'video'
@factory.lazy_attribute
def video_url__url(self):
# generate a video with a kaltura-style URL
entry_id = '1_' + hashlib.md5(self.name).hexdigest()[:8]
return ('http://cdnbakmi.kaltura.com'
'/p/1492321/sp/149232100/serveFlavor/entryId/'
'%s/flavorId/1_dqgopb2z/name/%s.mp4') % (entry_id,
self.name)
class BrightcoveVideoFactory(VideoFactory):
# generate a video with a brightcove-style URL
FACTORY_HIDDEN_ARGS = ('brightcove_id', 'player_id')
brightcove_id = 'abc'
player_id = '1234'
video_url__type = 'C'
@factory.lazy_attribute
def video_url__url(self):
return 'http://bcove.me/services/link/bcpid%s/bctid%s' % (
self.player_id, self.brightcove_id)
class YouTubeVideoFactory(VideoFactory):
video_url__type = 'Y'
video_url__videoid = factory.Sequence(lambda n: 'video{0}'.format(n))
@factory.lazy_attribute
def video_url__url(self):
return ('https://www.youtube.com/watch?v=%s' %
self.video_url__videoid)
@classmethod
def _generate(cls, create, attrs):
"""Override the default _generate() to handle the channel_id
parameteter.
"""
if 'channel_id' in attrs:
attrs['video_url__owner_username'] = attrs.pop('channel_id')
return super(YouTubeVideoFactory, cls)._generate(create, attrs)
class VideoFeedFactory(DjangoModelFactory):
FACTORY_FOR = videos.models.VideoFeed
class UserFactory(DjangoModelFactory):
FACTORY_FOR = auth.models.CustomUser
username = factory.Sequence(lambda n: 'test_user_{0}'.format(n))
email = factory.LazyAttribute(lambda u: '%[email protected]' % u.username)
first_name = 'TestUser'
last_name = factory.Sequence(lambda n: 'Number {0}'.format(n))
notify_by_email = True
valid_email = True
password = 'password'
show_tutorial = False
@classmethod
def _generate(cls, create, attrs):
"""Override the default _generate() to disable the post-save signal."""
if 'password' in attrs:
attrs['password'] = make_password(attrs['password'])
return super(UserFactory, cls)._generate(create, attrs)
@factory.post_generation
def languages(self, create, extracted, **kwargs):
if extracted:
assert create
for language_code in extracted:
auth.models.UserLanguage.objects.create(
user=self, language=language_code)
@factory.post_generation
def team(self, create, extracted, **kwargs):
if extracted:
role = kwargs.get('role', teams.models.TeamMember.ROLE_ADMIN)
TeamMemberFactory(user=self, team=extracted, role=role)
class TeamFactory(DjangoModelFactory):
FACTORY_FOR = teams.models.Team
name = factory.Sequence(lambda n: 'Team %s' % n)
slug = factory.LazyAttribute(lambda t: slugify(t.name))
membership_policy = teams.models.Team.OPEN
workflow_type = 'O'
@classmethod
def _generate(cls, create, attrs):
team = super(TeamFactory, cls)._generate(create, attrs)
if create:
# this forces the default project to be created
team.default_project
return team
@factory.post_generation
def owner(self, create, extracted, **kwargs):
if extracted:
assert create
TeamMemberFactory.create(
user=extracted, team=self,
role=teams.models.TeamMember.ROLE_OWNER,
)
@factory.post_generation
def admin(self, create, extracted, **kwargs):
if extracted:
assert create
TeamMemberFactory.create(
user=extracted, team=self,
role=teams.models.TeamMember.ROLE_ADMIN,
)
@factory.post_generation
def manager(self, create, extracted, **kwargs):
if extracted:
assert create
TeamMemberFactory.create(
user=extracted, team=self,
role=teams.models.TeamMember.ROLE_MANAGER,
)
@factory.post_generation
def member(self, create, extracted, **kwargs):
if extracted:
assert create
TeamMemberFactory.create(
user=extracted, team=self,
role=teams.models.TeamMember.ROLE_CONTRIBUTOR,
)
class WorkflowFactory(DjangoModelFactory):
FACTORY_FOR = teams.models.Workflow
review_allowed = 30 # admin must review
approve_allowed = 20 # admin must approve
class TeamMemberFactory(DjangoModelFactory):
FACTORY_FOR = teams.models.TeamMember
role = teams.models.TeamMember.ROLE_OWNER
user = factory.SubFactory(UserFactory)
team = factory.SubFactory(TeamFactory)
class TeamContributorMemberFactory(DjangoModelFactory):
FACTORY_FOR = teams.models.TeamMember
role = teams.models.TeamMember.ROLE_CONTRIBUTOR
user = factory.SubFactory(UserFactory)
team = factory.SubFactory(TeamFactory)
class TeamVideoFactory(DjangoModelFactory):
FACTORY_FOR = teams.models.TeamVideo
team = factory.SubFactory(TeamFactory)
video = factory.SubFactory(VideoFactory)
@classmethod
def _generate(cls, create, attrs):
tv = super(TeamVideoFactory, cls)._generate(create, attrs)
tv.video.user = tv.added_by
tv.video.clear_team_video_cache()
return tv
@factory.lazy_attribute
def added_by(tv):
member = TeamMemberFactory.create(team=tv.team)
return member.user
class ProjectFactory(DjangoModelFactory):
FACTORY_FOR = teams.models.Project
team = factory.SubFactory(TeamFactory)
name = factory.Sequence(lambda n: 'Project %s' % n)
class TaskFactory(DjangoModelFactory):
FACTORY_FOR = teams.models.Task
type = teams.models.Task.TYPE_IDS['Subtitle']
@classmethod
def create_review(cls, team_video, language_code, subtitler, **kwargs):
"""Create a task, then move it to the review stage
assumptions:
- there are no Tasks or SubtitleVersions for this video+language
- review is enabled for the team
"""
try:
sub_data = kwargs.pop('sub_data')
except KeyError:
sub_data = SubtitleSetFactory()
if 'type' in kwargs and isinstance(kwargs['type'], basestring):
kwargs['type'] = teams.models.Task.TYPE_IDS[kwargs['type']]
team = team_video.team
task = cls.create(team=team, team_video=team_video,
assignee=subtitler, language=language_code, **kwargs)
pipeline.add_subtitles(team_video.video, language_code, sub_data,
author=subtitler, action='save-draft')
return task.complete()
@classmethod
def create_approve(cls, team_video, language_code, reviewer,
subtitler=None, **kwargs):
"""Create a task, then move it to the approval stage
assumptions:
- there are no Tasks or SubtitleVersions for this video+language
- approve is enabled for the team
"""
if subtitler is None:
subtitler = reviewer
task = cls.create_review(team_video, language_code, subtitler, **kwargs)
if task.type == teams.models.Task.TYPE_IDS['Approve']:
# review isn't enabled, but approve is. Just return the task
# early
return task
task.assignee = reviewer
task.approved = teams.models.Task.APPROVED_IDS['Approved']
return task.complete()
class SubtitleLanguageFactory(DjangoModelFactory):
FACTORY_FOR = subtitles.models.SubtitleLanguage
class OldSubtitleLanguageFactory(DjangoModelFactory):
FACTORY_FOR = videos.models.SubtitleLanguage
is_original = True
language = 'en'
created = datetime.datetime(2000, 1, 1)
class OldSubtitleVersionFactory(DjangoModelFactory):
FACTORY_FOR = videos.models.SubtitleVersion
title = 'Title'
description = 'Description'
datetime_started = datetime.datetime(2000, 1, 1)
class BrightcoveAccountFactory(DjangoModelFactory):
FACTORY_FOR = externalsites.models.BrightcoveAccount
publisher_id = 'publisher'
write_token = 'write-token'
class KalturaAccountFactory(DjangoModelFactory):
FACTORY_FOR = externalsites.models.KalturaAccount
partner_id = 'test-partner-id'
secret = 'test-secret'
class YouTubeAccountFactory(DjangoModelFactory):
FACTORY_FOR = externalsites.models.YouTubeAccount
username = factory.Sequence(lambda n: 'youtube-user-%s' % n)
channel_id = factory.Sequence(lambda n: 'channel-id-%s' % n)
oauth_refresh_token = 'refresh-token'
class CommentFactory(DjangoModelFactory):
FACTORY_FOR = comments.models.Comment
user = factory.SubFactory(UserFactory)
content = "test-content"
submit_date = datetime.datetime(2000, 1, 1)
class YouTubeVideoInfoFactory(Factory):
FACTORY_FOR = google.VideoInfo
channel_id = 'test-channel-id'
title = 'test title'
description = 'test description'
duration = 100
thumbnail_url = 'http://example.com/thumbnail.png'
class SubtitleSetFactory(Factory):
FACTORY_FOR = babelsubs.storage.SubtitleSet
language_code = 'en'
@factory.post_generation
def num_subs(self, create, extracted, **kwargs):
if extracted is None:
extracted = 10
for i in xrange(extracted):
self.append_subtitle(i*1000, i*1000 + 999, "Sub %s" % i)
def bulk_subs(sub_data):
"""Create a bunch of videos/languages/versions
sub_data is a dict of dicts containing the data to create the objects
with:
* sub_data maps video titles to language data
* language data map language codes to a list of version data
* version data is a dict containing kwargs to pass to
pipeline.create_subtitles().
returns a tuple of dicts:
* a dict that maps video titles to videos
* a dict that maps (title, language_code) to languages
* a dict that maps (title, language_code, version_number) to versions
"""
videos = {}
langs = {}
versions = {}
for video_title, language_data in sub_data.items():
video = VideoFactory(title=video_title)
videos[video_title] = video
for language_code, version_data in language_data.items():
lang = SubtitleLanguageFactory(video=video,
language_code=language_code)
langs[video_title, language_code] = lang
for kwargs in version_data:
v = pipeline.add_subtitles(video, language_code, None,
**kwargs)
versions[video_title, language_code, v.version_number] = v
return videos, langs, versions
def make_version(video, language_code, subtitle_set=None,
subtitles_complete=True,
**kwargs):
"""Make a version without going through the workflow logic."""
language, _ = subtitles.models.SubtitleLanguage.objects.get_or_create(
video=video, language_code=language_code)
if subtitles_complete != language.subtitles_complete:
language.subtitles_complete = subtitles_complete
language.save()
if subtitle_set is None:
subtitle_set = SubtitleSetFactory()
return language.add_version(subtitles=subtitle_set, **kwargs)
__all__ = ['bulk_subs', 'make_version']
__all__.extend(name for name in globals() if 'Factory' in name)
| agpl-3.0 | 6,238,545,263,500,363,000 | 34.723926 | 80 | 0.640277 | false |
Stanford-Online/edx-analytics-dashboard | analytics_dashboard/courses/urls.py | 1 | 7466 | # pylint: disable=no-value-for-parameter
from django.conf import settings
from django.conf.urls import url, include
from courses import views
from courses.views import (
course_summaries,
csv,
enrollment,
engagement,
performance,
learners,
)
CONTENT_ID_PATTERN = r'(?P<content_id>(?:i4x://?[^/]+/[^/]+/[^/]+/[^@]+(?:@[^/]+)?)|(?:[^/]+))'
PROBLEM_PART_ID_PATTERN = CONTENT_ID_PATTERN.replace('content_id', 'problem_part_id')
ASSIGNMENT_ID_PATTERN = CONTENT_ID_PATTERN.replace('content_id', 'assignment_id')
PROBLEM_ID_PATTERN = CONTENT_ID_PATTERN.replace('content_id', 'problem_id')
SECTION_ID_PATTERN = CONTENT_ID_PATTERN.replace('content_id', 'section_id')
SUBSECTION_ID_PATTERN = CONTENT_ID_PATTERN.replace('content_id', 'subsection_id')
VIDEO_ID_PATTERN = CONTENT_ID_PATTERN.replace('content_id', 'video_id')
PIPELINE_VIDEO_ID = r'(?P<pipeline_video_id>([^/+]+[/+][^/+]+[/+][^/]+)+[|]((?:i4x://?[^/]+/[^/]+/[^/]+' \
r'/[^@]+(?:@[^/]+)?)|(?:[^/]+)+))'
TAG_VALUE_ID_PATTERN = r'(?P<tag_value>[\w-]+)'
answer_distribution_regex = \
r'^graded_content/assignments/{assignment_id}/problems/{problem_id}/parts/{part_id}/answer_distribution/$'.format(
assignment_id=ASSIGNMENT_ID_PATTERN, problem_id=PROBLEM_ID_PATTERN, part_id=PROBLEM_PART_ID_PATTERN)
ungraded_answer_distribution_regex = \
r'^ungraded_content/sections/{}/subsections/{}/problems/{}/parts/{}/answer_distribution/$'.format(
SECTION_ID_PATTERN, SUBSECTION_ID_PATTERN, PROBLEM_ID_PATTERN, PROBLEM_PART_ID_PATTERN)
video_timeline_regex = \
r'^videos/sections/{}/subsections/{}/modules/{}/timeline/$'.format(
SECTION_ID_PATTERN, SUBSECTION_ID_PATTERN, VIDEO_ID_PATTERN)
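# Illustrative content IDs these patterns accept (values are hypothetical):
#   old-style: i4x://edX/DemoX/problem/Sample_Problem@draft
#   new-style: any single path segment without a slash, e.g. "sample-problem"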
ENROLLMENT_URLS = ([
url(r'^activity/$', enrollment.EnrollmentActivityView.as_view(), name='activity'),
url(r'^geography/$', enrollment.EnrollmentGeographyView.as_view(), name='geography'),
url(r'^demographics/age/$', enrollment.EnrollmentDemographicsAgeView.as_view(), name='demographics_age'),
url(r'^demographics/education/$', enrollment.EnrollmentDemographicsEducationView.as_view(),
name='demographics_education'),
url(r'^demographics/gender/$', enrollment.EnrollmentDemographicsGenderView.as_view(), name='demographics_gender'),
], 'enrollment')
ENGAGEMENT_URLS = ([
url(r'^content/$', engagement.EngagementContentView.as_view(), name='content'),
url(r'^videos/$', engagement.EngagementVideoCourse.as_view(), name='videos'),
    # ordering of the URLs is important for routing to the section, subsection, etc. correctly
url(video_timeline_regex, engagement.EngagementVideoTimeline.as_view(), name='video_timeline'),
url(r'^videos/sections/{}/subsections/{}/$'.format(SECTION_ID_PATTERN, SUBSECTION_ID_PATTERN),
engagement.EngagementVideoSubsection.as_view(),
name='video_subsection'),
url(r'^videos/sections/{}/$'.format(SECTION_ID_PATTERN),
engagement.EngagementVideoSection.as_view(),
name='video_section'),
], 'engagement')
PERFORMANCE_URLS = ([
url(r'^ungraded_content/$', performance.PerformanceUngradedContent.as_view(), name='ungraded_content'),
url(ungraded_answer_distribution_regex, performance.PerformanceUngradedAnswerDistribution.as_view(),
name='ungraded_answer_distribution'),
url(r'^ungraded_content/sections/{}/subsections/{}/$'.format(SECTION_ID_PATTERN, SUBSECTION_ID_PATTERN),
performance.PerformanceUngradedSubsection.as_view(),
name='ungraded_subsection'),
url(r'^ungraded_content/sections/{}/$'.format(SECTION_ID_PATTERN),
performance.PerformanceUngradedSection.as_view(),
name='ungraded_section'),
url(r'^graded_content/$', performance.PerformanceGradedContent.as_view(), name='graded_content'),
url(r'^graded_content/(?P<assignment_type>[\w-]+)/$',
performance.PerformanceGradedContentByType.as_view(),
name='graded_content_by_type'),
url(answer_distribution_regex, performance.PerformanceAnswerDistributionView.as_view(), name='answer_distribution'),
# This MUST come AFTER the answer distribution pattern; otherwise, the answer distribution pattern
# will be interpreted as an assignment pattern.
url(r'^graded_content/assignments/{}/$'.format(ASSIGNMENT_ID_PATTERN),
performance.PerformanceAssignment.as_view(),
name='assignment'),
url(r'^learning_outcomes/$',
performance.PerformanceLearningOutcomesContent.as_view(),
name='learning_outcomes'),
url(r'^learning_outcomes/{}/$'.format(TAG_VALUE_ID_PATTERN),
performance.PerformanceLearningOutcomesSection.as_view(),
name='learning_outcomes_section'),
url(r'^learning_outcomes/{}/problems/{}/$'.format(TAG_VALUE_ID_PATTERN, PROBLEM_ID_PATTERN),
performance.PerformanceLearningOutcomesAnswersDistribution.as_view(),
name='learning_outcomes_answers_distribution'),
url(r'^learning_outcomes/{}/problems/{}/{}/$'.format(TAG_VALUE_ID_PATTERN, PROBLEM_ID_PATTERN,
PROBLEM_PART_ID_PATTERN),
performance.PerformanceLearningOutcomesAnswersDistribution.as_view(),
name='learning_outcomes_answers_distribution_with_part'),
], 'performance')
CSV_URLS = ([
url(r'^enrollment/$', csv.CourseEnrollmentCSV.as_view(), name='enrollment'),
url(r'^enrollment/geography/$', csv.CourseEnrollmentByCountryCSV.as_view(), name='enrollment_geography'),
url(r'^enrollment/demographics/age/$',
csv.CourseEnrollmentDemographicsAgeCSV.as_view(),
name='enrollment_demographics_age'),
url(r'^enrollment/demographics/education/$',
csv.CourseEnrollmentDemographicsEducationCSV.as_view(),
name='enrollment_demographics_education'),
url(r'^enrollment/demographics/gender/$',
csv.CourseEnrollmentDemographicsGenderCSV.as_view(),
name='enrollment_demographics_gender'),
url(r'^engagement/activity_trend/$',
csv.CourseEngagementActivityTrendCSV.as_view(),
name='engagement_activity_trend'),
url(r'^engagement/videos/{}/$'.format(PIPELINE_VIDEO_ID),
csv.CourseEngagementVideoTimelineCSV.as_view(),
name='engagement_video_timeline'),
url(r'^performance/graded_content/problems/{}/answer_distribution/{}/$'.format(CONTENT_ID_PATTERN,
PROBLEM_PART_ID_PATTERN),
csv.PerformanceAnswerDistributionCSV.as_view(),
name='performance_answer_distribution'),
url(r'problem_responses/', csv.PerformanceProblemResponseCSV.as_view(), name='performance_problem_responses')
], 'csv')
LEARNER_URLS = ([
url(r'^$', learners.LearnersView.as_view(), name='learners'),
], 'learners')
COURSE_URLS = [
# Course homepage. This should be the entry point for other applications linking to the course.
url(r'^$', views.CourseHome.as_view(), name='home'),
url(r'^enrollment/', include(ENROLLMENT_URLS)),
url(r'^engagement/', include(ENGAGEMENT_URLS)),
url(r'^performance/', include(PERFORMANCE_URLS)),
url(r'^csv/', include(CSV_URLS)),
url(r'^learners/', include(LEARNER_URLS)),
]
app_name = 'courses'
urlpatterns = [
url('^$', course_summaries.CourseIndex.as_view(), name='index'),
url(r'^{}/'.format(settings.COURSE_ID_PATTERN), include(COURSE_URLS)),
url(r'csv/course_list/$', course_summaries.CourseIndexCSV.as_view(), name='index_csv')
]
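# Illustrative only: assuming settings.COURSE_ID_PATTERN defines a course_id named
# group and this module is included under the 'courses' app namespace, the enrollment
# activity page can be reversed as
#   reverse('courses:enrollment:activity', kwargs={'course_id': course_id})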
| agpl-3.0 | 7,305,463,706,066,446,000 | 52.328571 | 120 | 0.682561 | false |
nojhan/ereshkigal | tunnelmon.py | 1 | 26267 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Ereshkigal is an AutoSSH tunnel monitor
# It gives a curses user interface to monitor existing SSH tunnels that are managed with autossh.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author : nojhan <[email protected]>
#
#################################################################################################
# CORE
#################################################################################################
import os
import subprocess
import logging
import psutil
import socket
import re
import collections
class Tunnel:
def __init__(self, ssh_pid = None, in_port = None, via_host = None, target_host = None, out_port = None):
# assert(ssh_pid != None)
self.ssh_pid = ssh_pid
assert(in_port!=None)
self.in_port = in_port
assert(via_host!=None)
self.via_host = via_host
assert(target_host!=None)
self.target_host = target_host
assert(out_port!=None)
self.out_port = out_port
self.connections = []
def repr_tunnel(self):
return "%i\t%i\t%s\t%s\t%i" % (
self.ssh_pid,
self.in_port,
self.via_host,
self.target_host,
self.out_port)
def repr_connections(self):
# list of tunnels linked to this process
rep = ""
for c in self.connections:
rep += "\n\t↳ %s" % c
return rep
def __repr__(self):
return self.repr_tunnel() + self.repr_connections()
class AutoTunnel(Tunnel):
def __init__(self, autossh_pid = None, *args, **kwargs):
super().__init__(*args, **kwargs)
assert(autossh_pid!=None)
self.autossh_pid = autossh_pid
def repr_tunnel(self):
rep = super().repr_tunnel()
return "auto\t" + rep
class RawTunnel(Tunnel):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def repr_tunnel(self):
rep = super().repr_tunnel()
return "ssh\t" + rep
class Connection:
"""A dictionary that stores an SSH connection related to a tunnel"""
def __init__(self, local_address = None, in_port = None, foreign_address = None, out_port = None,
status = None, family = None ):
# informations available with netstat
assert(local_address!=None)
self.local_address = local_address
assert(in_port!=None)
self.in_port = in_port
self.foreign_address = foreign_address
self.out_port = out_port
assert(status!=None)
self.status = status
assert(family!=None)
self.family = family
self.family_rep = {socket.AddressFamily.AF_INET:"INET", socket.AddressFamily.AF_INET6:"INET6", socket.AddressFamily.AF_UNIX:"UNIX"}
# FIXME would be nice to have an estimation of the connections latency
#self.latency = 0
def __repr__(self):
# do not logging.debug all the informations by default
if self.foreign_address and self.out_port:
return "%s\t%s\t%s:%i → %s:%i" % (
self.family_rep[self.family],
self.status,
self.local_address,
self.in_port,
self.foreign_address,
self.out_port,
)
else:
return "%s\t%s\t%s:%i" % (
self.family_rep[self.family],
self.status,
self.local_address,
self.in_port,
)
class TunnelsParser:
def __init__(self):
"""Warning: the initialization does not gather tunnels informations, use update() to do so"""
# { ssh_pid : Tunnel }
self.tunnels = collections.OrderedDict()
# do not perform update by default
# this is necessary because one may want
# only a list of connections OR autossh processes
#self.update()
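        # The pattern below matches a forwarding spec written as "-L<in_port>:<target_host>:<out_port>"
        # with no space after -L, e.g. "-L8022:localhost:22" -> ("8022", "localhost", "22").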
self.re_forwarding = re.compile(r"-L(\d+):(.+):(\d+)")
self.header = 'TYPE\tSSH_PID\tIN_PORT\tVIA_HOST\tTARGET_HOST\tOUT_PORT'
def get_tunnel(self, pos):
pid = list(self.tunnels.keys())[pos]
return self.tunnels[pid]
def parse(self, cmd):
cmdline = " ".join(cmd)
        logging.debug('autossh cmd line: %s', cmdline)
        logging.debug('forwarding regexp: %s', self.re_forwarding.pattern)
match = self.re_forwarding.findall(cmdline)
logging.debug(match)
if match:
assert(len(match)==1)
in_port, target_host, out_port = match[0]
logging.debug("matches: ", match)
# Find the hostname on wich the tunnel is built.
via_host = "unknown"
# Search backward and take the first parameter argument.
# FIXME this is an ugly hack
for i in range( len(cmd)-1,0,-1 ):
if cmd[i][0] != '-':
via_host = cmd[i]
break
return (int(in_port), via_host, target_host, int(out_port))
def update(self):
"""Gather and parse informations from the operating system"""
self.tunnels.clear()
# Browse the SSH processes handling a tunnel.
for proc in psutil.process_iter():
try:
process = proc.as_dict(attrs=['pid','ppid','name','cmdline','connections'])
cmd = process['cmdline']
except psutil.NoSuchProcess:
pass
else:
if process['name'] == 'ssh':
logging.debug(process)
                    parsed = self.parse(cmd)
                    if parsed is None:
                        continue  # a plain ssh process without a -L forwarding is not a tunnel
                    in_port, via_host, target_host, out_port = parsed
                    logging.debug("parsed: %s %s %s %s", in_port, via_host, target_host, out_port)
# Check if this ssh tunnel is managed by autossh.
parent = psutil.Process(process['ppid'])
if parent.name() == 'autossh':
# Add an autossh tunnel.
pid = parent.pid # autossh pid
self.tunnels[pid] = AutoTunnel(pid, process['pid'], in_port, via_host, target_host, out_port )
else:
# Add a raw tunnel.
pid = process['pid']
self.tunnels[pid] = RawTunnel(pid, in_port, via_host, target_host, out_port )
for c in process['connections']:
logging.debug(c)
laddr,lport = c.laddr
if c.raddr:
raddr,rport = c.raddr
else:
raddr,rport = (None,None)
connection = Connection(laddr,lport,raddr,rport,c.status,c.family)
logging.debug(connection)
self.tunnels[pid].connections.append(connection)
logging.debug(self.tunnels)
def __repr__(self):
reps = [self.header]
for t in self.tunnels:
reps.append(str(self.tunnels[t]))
return "\n".join(reps)
#################################################################################################
# INTERFACES
#################################################################################################
import curses
import time
import signal
class CursesMonitor:
"""Textual user interface to display up-to-date informations about current tunnels"""
def __init__(self, scr):
# curses screen
self.scr = scr
# tunnels monitor
self.tp = TunnelsParser()
# selected line
self.cur_line = -1
# selected pid
self.cur_pid = -1
# switch to show only autoss processes (False) or ssh connections also (True)
self.show_connections = False
# FIXME pass as parameters+options
self.update_delay = 1 # seconds of delay between two data updates
self.ui_delay = 0.05 # seconds between two screen update
# colors
# FIXME different colors for different types of tunnels (auto or raw)
self.colors_tunnel = {'kind_auto':4, 'kind_raw':5, 'ssh_pid':0, 'in_port':3, 'via_host':2, 'target_host':2, 'out_port':3, 'tunnels_nb':4, 'tunnels_nb_none':1}
self.colors_highlight = {'kind_auto':9, 'kind_raw':9, 'ssh_pid':9, 'in_port':9, 'via_host':9, 'target_host':9, 'out_port':9, 'tunnels_nb':9, 'tunnels_nb_none':9}
self.colors_connection = {'ssh_pid':0, 'autossh_pid':0, 'status':4, 'status_out':1, 'local_address':2, 'in_port':3, 'foreign_address':2, 'out_port':3}
self.header = ("TYPE","SSHPID","INPORT","VIA","TARGET","OUTPORT")
def do_Q(self):
"""Quit"""
logging.debug("Waited: %s" % self.log_ticks)
self.log_ticks = ""
logging.debug("Key pushed: Q")
return False
def do_R(self):
"""Reload autossh tunnel"""
logging.debug("Waited: %s" % self.log_ticks)
self.log_ticks = ""
logging.debug("Key pushed: R")
# if a pid is selected
if self.cur_pid != -1:
# send the SIGUSR1 signal
if type(self.tp.get_tunnel(self.cur_line)) == AutoTunnel:
# autossh performs a reload of existing tunnels that it manages
logging.debug("SIGUSR1 on PID: %i" % self.cur_pid)
os.kill( self.cur_pid, signal.SIGUSR1 )
else:
logging.debug("Cannot reload a RAW tunnel")
return True
def do_C(self):
"""Close tunnel"""
logging.debug("Waited: %s" % self.log_ticks)
self.log_ticks = ""
logging.debug("Key pushed: C")
if self.cur_pid != -1:
# send a SIGKILL
# the related process is stopped
# FIXME SIGTERM or SIGKILL ?
tunnel = self.tp.get_tunnel(self.cur_line)
if type(tunnel) == AutoTunnel:
logging.debug("SIGKILL on autossh PID: %i" % self.cur_pid)
try:
os.kill( self.cur_pid, signal.SIGKILL )
except OSError:
logging.error("No such process: %i" % self.cur_pid)
logging.debug("SIGKILL on ssh PID: %i" % tunnel.ssh_pid)
try:
os.kill( tunnel.ssh_pid, signal.SIGKILL )
except OSError:
logging.error("No such process: %i" % tunnel.ssh_pid)
self.cur_line = -1
self.cur_pid = -1
# FIXME update cur_pid or get rid of it everywhere
return True
def do_N(self):
"""Show connections"""
logging.debug("Waited: %s" % self.log_ticks)
self.log_ticks = ""
logging.debug("Key pushed: N")
self.show_connections = not self.show_connections
return True
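    # Arrow keys are not ASCII characters: curses reports them as keycodes
    # (KEY_DOWN is 258, KEY_UP is 259), and __call__ dispatches them to the
    # matching "do_<keycode>" handlers below.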
def do_258(self):
"""Move down"""
logging.debug("Waited: %s" % self.log_ticks)
self.log_ticks = ""
logging.debug("Key pushed: down")
# if not the end of the list
if self.cur_line < len(self.tp.tunnels)-1:
self.cur_line += 1
# get the pid
if type(self.tp.get_tunnel(self.cur_line)) == AutoTunnel:
self.cur_pid = self.tp.get_tunnel(self.cur_line).autossh_pid
else:
self.cur_pid = self.tp.get_tunnel(self.cur_line).ssh_pid
return True
def do_259(self):
"""Move up"""
logging.debug("Waited: %s" % self.log_ticks)
self.log_ticks = ""
logging.debug("Key pushed: up")
if self.cur_line > -1:
self.cur_line -= 1
        if self.cur_line >= 0:
            # mirror do_258: the pid attribute depends on the tunnel type
            if type(self.tp.get_tunnel(self.cur_line)) == AutoTunnel:
                self.cur_pid = self.tp.get_tunnel(self.cur_line).autossh_pid
            else:
                self.cur_pid = self.tp.get_tunnel(self.cur_line).ssh_pid
        else:
            self.cur_pid = -1
return True
def __call__(self):
"""Start the interface"""
self.scr.clear() # clear all
        self.scr.nodelay(1) # non-blocking getch
# first display
self.display()
# first update counter
        self.last_update = time.time()
self.last_state = None
self.log_ticks = ""
# infinite loop
notquit = True
while(notquit):
# wait some time
# necessary to not overload the system with unnecessary calls
time.sleep( self.ui_delay )
# if its time to update
if time.time() > self.last_update + self.update_delay:
self.tp.update()
# reset the counter
self.last_update = time.time()
state = "%s" % self.tp
if state != self.last_state:
logging.debug("Waited: %s" % self.log_ticks)
self.log_ticks = ""
logging.debug("----- Time of screen update: %s -----" % time.time())
logging.debug("State of tunnels:\n%s" % self.tp)
self.last_state = state
else:
self.log_ticks += "."
kc = self.scr.getch() # keycode
if kc != -1: # if keypress
pass
ch = chr(0)
if 0 < kc < 256: # if ascii key
# ascii character from the keycode
ch = chr(kc)
# Call the do_* handler.
fch = "do_%s" % ch.capitalize()
fkc = "do_%i" % kc
logging.debug("key func: %s / %s" % (fch,fkc))
if fch in dir(self):
notquit = eval("self."+fch+"()")
elif fkc in dir(self):
notquit = eval("self."+fkc+"()")
logging.debug("notquit = %s" % notquit)
# update the display
self.display()
# force a screen refresh
self.scr.refresh()
# end of the loop
def format(self):
reps = [self.tp.tunnels[t].repr_tunnel() for t in self.tp.tunnels]
tuns = [t.split() for t in reps]
tuns.append(self.header)
logging.debug(tuns)
cols = zip(*tuns)
widths = [max(len(s) for s in col) for col in cols]
logging.debug(widths)
fmt = ['{{: <{}}}'.format(w) for w in widths]
logging.debug(fmt)
return fmt
def display(self):
"""Generate the interface screen"""
# Automagically format help line with available do_* handlers.
h = []
for f in dir(self):
if "do_" in f:
key = f.replace("do_","")
if key.isalpha(): # We do not want arrows.
msg = "[%s] %s" % (key,eval("self.%s.__doc__" % f))
h.append(msg)
help_msg = ", ".join(h)
help_msg += "\n"
self.scr.addstr(0,0, help_msg, curses.color_pair(4) )
self.scr.clrtoeol()
# Second line
self.scr.addstr( "Active tunnels: ", curses.color_pair(6) )
self.scr.addstr( str( len(self.tp.tunnels) ), curses.color_pair(1) )
self.scr.addstr( " / Active connections: ", curses.color_pair(6) )
self.scr.addstr( str( sum([len(self.tp.tunnels[t].connections) for t in self.tp.tunnels]) ), curses.color_pair(1) )
self.scr.addstr( '\n', curses.color_pair(1) )
self.scr.clrtoeol()
# if no line is selected
color = 0
if self.cur_line==-1:
# selected color for the header
color = 9
self.cur_pid = -1
# header line
# header_msg = "TYPE\tINPORT\tVIA \tTARGET \tOUTPORT"
# if os.geteuid() == 0:
header_msg = " ".join(self.format()).format(*self.header)
header_msg += " CONNECTIONS"
self.scr.addstr( header_msg, curses.color_pair(color) )
self.scr.clrtoeol()
# for each tunnel processes available in the monitor
for l in range(len(self.tp.tunnels)):
# add a line for the l-th autossh process
self.add_tunnel( l )
# if one want to show connections
if self.show_connections:# and os.getuid() == 0:
self.add_connection( l )
self.scr.clrtobot()
def add_connection(self, line ):
"""Add lines for each connections related to the l-th autossh process"""
colors = self.colors_connection
# for each connections related to te line-th autossh process
for t in sorted(self.tp.get_tunnel(line).connections, key=lambda c:c.status):
# FIXME fail if the screen's height is too small.
self.scr.addstr( '\n\t+ ' )
color = self.colors_connection['status']
            # use the error color if the connection is neither established nor listening
# TODO avoid hard-coded constants
if t.status != 'ESTABLISHED' and t.status != 'LISTEN':
color = self.colors_connection['status_out']
self.scr.addstr( t.status, curses.color_pair( color ) )
self.scr.addstr( '\t' )
# self.scr.addstr( str( t['ssh_pid'] ), curses.color_pair(colors['ssh_pid'] ) )
# self.scr.addstr( '\t' )
self.scr.addstr( str( t.local_address ) , curses.color_pair(colors['local_address'] ))
self.scr.addstr( ':' )
self.scr.addstr( str( t.in_port ) , curses.color_pair(colors['in_port'] ))
if t.foreign_address and t.out_port:
self.scr.addstr( ' -> ' )
self.scr.addstr( str( t.foreign_address ) , curses.color_pair(colors['foreign_address'] ))
self.scr.addstr( ':' )
self.scr.addstr( str( t.out_port ) , curses.color_pair(colors['out_port'] ))
self.scr.clrtoeol()
def add_tunnel(self, line):
"""Add line corresponding to the line-th autossh process"""
self.scr.addstr( '\n' )
colors = self.colors_tunnel
if self.cur_line == line:
colors = self.colors_highlight
if type(self.tp.get_tunnel(line)) == AutoTunnel:
self.scr.addstr( self.format()[0].format('auto'), curses.color_pair(colors['kind_auto']) )
self.scr.addstr( ' ', curses.color_pair(colors['kind_auto']) )
else:
self.scr.addstr( self.format()[0].format('ssh'), curses.color_pair(colors['kind_raw']) )
self.scr.addstr( ' ', curses.color_pair(colors['kind_raw']) )
# self.add_tunnel_info('ssh_pid', line)
self.add_tunnel_info('ssh_pid', line, 1)
self.add_tunnel_info('in_port', line, 2)
self.add_tunnel_info('via_host', line, 3)
self.add_tunnel_info('target_host', line, 4)
self.add_tunnel_info('out_port', line, 5)
nb = len(self.tp.get_tunnel(line).connections )
if nb > 0:
# for each connection related to this process
for i in self.tp.get_tunnel(line).connections:
# add a vertical bar |
# the color change according to the status of the connection
if i.status == 'ESTABLISHED' or i.status == 'LISTEN':
self.scr.addstr( '|', curses.color_pair(self.colors_connection['status']) )
else:
self.scr.addstr( '|', curses.color_pair(self.colors_connection['status_out']) )
else:
# if os.geteuid() == 0:
# if there is no connection, display a "None"
self.scr.addstr( 'None', curses.color_pair(self.colors_tunnel['tunnels_nb_none']) )
self.scr.clrtoeol()
def add_tunnel_info( self, key, line, col ):
"""Add an information of an autossh process, in the configured color"""
colors = self.colors_tunnel
# if the line is selected
if self.cur_line == line:
# set the color to the highlight one
colors = self.colors_highlight
txt = eval("str(self.tp.get_tunnel(line).%s)" % key)
if key == 'target_host' or key == 'via_host':
txt = eval("str(self.tp.get_tunnel(line).%s)" % key)
self.scr.addstr(self.format()[col].format(txt), curses.color_pair(colors[key]) )
self.scr.addstr( ' ', curses.color_pair(colors[key]) )
if __name__ == "__main__":
import sys
from optparse import OptionParser
import configparser
usage = """%prog [options]
    A user interface to monitor existing SSH tunnels that are managed with autossh.
    Called without options, ereshkigal displays a list of tunnels on the standard output.
    Note: Users other than root will not see tunnel connections.
Version 0.3"""
parser = OptionParser(usage=usage)
parser.add_option("-c", "--curses",
action="store_true", default=False,
help="Start the user interface in text mode.")
parser.add_option("-n", "--connections",
action="store_true", default=False,
help="Display only SSH connections related to a tunnel.")
parser.add_option("-u", "--tunnels",
action="store_true", default=False,
help="Display only the list of tunnels processes.")
LOG_LEVELS = {'error' : logging.ERROR,
'warning' : logging.WARNING,
'debug' : logging.DEBUG}
parser.add_option('-l', '--log-level', choices=list(LOG_LEVELS), default='error', metavar='LEVEL',
help='Log level (%s), default: %s.' % (", ".join(LOG_LEVELS), 'error') )
parser.add_option('-g', '--log-file', default=None, metavar='FILE',
help="Log to this file, default to standard output. \
If you use the curses interface, you may want to set this to actually see logs.")
parser.add_option('-f', '--config-file', default=None, metavar='FILE',
help="Use this configuration file (default: '~/.ereshkigal.conf')")
(asked_for, args) = parser.parse_args()
logmsg = "----- Started Ereshkigal -----"
if asked_for.log_file:
logfile = asked_for.log_file
logging.basicConfig(filename=logfile, level=LOG_LEVELS[asked_for.log_level])
logging.debug(logmsg)
logging.debug("Log in %s" % logfile)
else:
if asked_for.curses:
logging.warning("It's a bad idea to log to stdout while in the curses interface.")
logging.basicConfig(level=LOG_LEVELS[asked_for.log_level])
logging.debug(logmsg)
logging.debug("Log to stdout")
logging.debug("Asked for: %s" % asked_for)
# unfortunately, asked_for class has no __len__ method in python 2.4.3 (bug?)
#if len(asked_for) > 1:
# parser.error("asked_for are mutually exclusive")
config = configparser.ConfigParser()
if asked_for.config_file:
try:
config.read(asked_for.config_file)
except configparser.MissingSectionHeaderError:
logging.error("'%s' contains no known configuration" % asked_for.config_file)
else:
try:
            config.read(os.path.expanduser('~/.ereshkigal.conf'))
        except configparser.MissingSectionHeaderError:
            logging.error("'~/.ereshkigal.conf' contains no known configuration")
# Load autossh instances by sections: [expected]
# if config['expected']:
if asked_for.curses:
logging.debug("Entering curses mode")
import curses
import traceback
try:
scr = curses.initscr()
curses.start_color()
# 0:black, 1:red, 2:green, 3:yellow, 4:blue, 5:magenta, 6:cyan, 7:white
curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)
curses.init_pair(2, curses.COLOR_GREEN, curses.COLOR_BLACK)
curses.init_pair(3, curses.COLOR_YELLOW, curses.COLOR_BLACK)
curses.init_pair(4, curses.COLOR_BLUE, curses.COLOR_BLACK)
curses.init_pair(5, curses.COLOR_MAGENTA, curses.COLOR_BLACK)
curses.init_pair(6, curses.COLOR_CYAN, curses.COLOR_BLACK)
curses.init_pair(7, curses.COLOR_WHITE, curses.COLOR_BLACK)
curses.init_pair(8, curses.COLOR_WHITE, curses.COLOR_GREEN)
curses.init_pair(9, curses.COLOR_WHITE, curses.COLOR_BLUE)
curses.noecho()
curses.cbreak()
scr.keypad(1)
# create the monitor
mc = CursesMonitor( scr )
# call the monitor
mc()
scr.keypad(0)
curses.echo()
curses.nocbreak()
curses.endwin()
except:
# end cleanly
scr.keypad(0)
curses.echo()
curses.nocbreak()
curses.endwin()
# print the traceback
traceback.print_exc()
elif asked_for.connections:
logging.debug("Entering connections mode")
tp = TunnelsParser()
tp.update()
        # update, then print only the connections of each tunnel
logging.debug("UID: %i." % os.geteuid())
# if os.geteuid() == 0:
for t in tp.tunnels:
for c in tp.tunnels[t].connections:
print(tp.tunnels[t].ssh_pid, c)
# else:
# logging.error("Only root can see SSH tunnels connections.")
elif asked_for.tunnels:
logging.debug("Entering tunnel mode")
tp = TunnelsParser()
tp.update()
        # update, then print only the tunnel processes (without their connections)
print(tp.header)
for t in tp.tunnels:
print(tp.tunnels[t].repr_tunnel())
else:
logging.debug("Entering default mode")
tp = TunnelsParser()
# call update
tp.update()
# call the default __repr__
print(tp)
#
# In Mesopotamian mythology, Ereshkigal (lit. "great lady under earth")
# was the goddess of Irkalla, the land of the dead or underworld.
#
# Thus, she knows a lot about tunnels...
#
# http://en.wikipedia.org/wiki/Ereshkigal
#
| gpl-3.0 | -8,461,380,079,873,353,000 | 33.924202 | 169 | 0.545025 | false |
greatguy45/webcrawler | selenium_dropdown_control.py | 1 | 1594 | from selenium import webdriver
import requests
from bs4 import BeautifulSoup
page = requests.get("https://www.se.gob.ar/datosupstream/graf_prod_x_pozo.php?gas=1&ejecutar=1&vienede=&idpozo=")
soup = BeautifulSoup(page.content, 'html.parser')
#get all the year options
year=soup.find("select",{"name":"anio"})
year_options = year.find_all("option")
options1=[y.text for y in year_options]
year_values = [o.get("value") for o in year_options]
print ("list of all year available\n")
for x in range(1,len(options1)):
print (options1[x], year_values[x])
#get all the province options
province=soup.find("select",{"name":"provincia"})
province_options = province.find_all("option")
options2=[y.text for y in province_options]
province_values = [o.get("value") for o in province_options]
print ("list of all province available\n")
for x in range(1,len(options2)):
print (options2[x], province_values[x])
#get all the yacimiento options
yacimiento=soup.find("select",{"name":"yacimiento"})
yacimiento_options = yacimiento.find_all("option")
options3=[y.text for y in yacimiento_options]
yacimiento_values = [o.get("value") for o in yacimiento_options]
print ("list of all formation available\n")
for x in range(1,len(options3)):
print (options3[x], yacimiento_values[x])
#get all the pozo options
pozo=soup.find("select",{"name":"pozo"})
pozo_options = pozo.find_all("option")
options4=[y.text for y in pozo_options]
pozo_values = [o.get("value") for o in pozo_options]
print ("list of all pozo available\n")
for x in range(1,len(options4)):
print (options4[x], pozo_values[x])
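# The four blocks above repeat the same "find a <select> by name and list its
# options" steps. A minimal helper sketch (not part of the original script; the
# name list_options is illustrative) factoring that pattern out:
def list_options(parsed_soup, select_name):
    """Return (label, value) pairs for every <option> of the named <select>."""
    select = parsed_soup.find("select", {"name": select_name})
    return [(o.text, o.get("value")) for o in select.find_all("option")]
# Example: list_options(soup, "anio") yields the same (year, value) pairs printed
# in the first loop above.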
| gpl-3.0 | -3,977,733,957,514,390,500 | 33.652174 | 113 | 0.723965 | false |
chemelnucfin/tensorflow | tensorflow/python/framework/ops_test.py | 1 | 129521 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.framework.ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import gc
import numpy as np
import os
import threading
import weakref
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.autograph.core import ag_ctx
from tensorflow.python.client import session
from tensorflow.python.compat import compat as forward_compat
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function as eager_function
from tensorflow.python.eager import wrap_function
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import composite_tensor
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import device as pydev
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import function
from tensorflow.python.framework import indexed_slices
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import test_ops
from tensorflow.python.framework import test_util
from tensorflow.python.framework import type_spec
from tensorflow.python.framework import versions
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import resources
from tensorflow.python.ops import special_math_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
import tensorflow.python.ops.gradients # pylint: disable=unused-import
from tensorflow.python.platform import googletest
from tensorflow.python.util import compat
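# Ops constructed in these tests use the C++ shape-inference functions registered below.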
ops._set_call_cpp_shape_fn(common_shapes.call_cpp_shape_fn)
class ResourceTest(test_util.TensorFlowTestCase):
@test_util.run_deprecated_v1
def testBuildGraph(self):
with self.cached_session():
pt = test_ops.stub_resource_handle_op(container="a", shared_name="b")
test_ops.resource_create_op(pt).run()
@test_util.run_deprecated_v1
def testInitialize(self):
with self.cached_session():
handle = test_ops.stub_resource_handle_op(container="a", shared_name="b")
resources.register_resource(
handle=handle,
create_op=test_ops.resource_create_op(handle),
is_initialized_op=test_ops.resource_initialized_op(handle))
self.assertEquals(
len(
resources.report_uninitialized_resources(
resources.shared_resources()).eval()), 1)
resources.initialize_resources(resources.shared_resources()).run()
self.assertEquals(
len(
resources.report_uninitialized_resources(
resources.shared_resources()).eval()), 0)
class TensorAndShapeTest(test_util.TensorFlowTestCase):
def testShape(self):
op = ops.Operation(
ops._NodeDef("FloatOutput", "myop"), ops.Graph(), [], [dtypes.float32])
t = op.outputs[0]
self.assertEqual(tensor_shape.unknown_shape(), t.get_shape())
t.set_shape([1, 2, 3])
self.assertEqual([1, 2, 3], t.get_shape())
def testIterable(self):
if not context.executing_eagerly():
self.skipTest("Eager-mode test")
op = ops.Operation(
ops._NodeDef("FloatOutput", "myop"), ops.Graph(), [], [dtypes.float32])
t = op.outputs[0]
with self.assertRaisesRegexp(TypeError, "Cannot iterate"):
next(iter(t))
def testIterableGraph(self):
if context.executing_eagerly():
self.skipTest("Graph-mode test")
op = ops.Operation(
ops._NodeDef("FloatOutput", "myop"), ops.Graph(), [], [dtypes.float32])
t = op.outputs[0]
with self.assertRaisesRegexp(TypeError, "iterating.*not allowed in Graph"):
next(iter(t))
with self.assertRaisesRegexp(
TypeError, "iterating.*AutoGraph did not convert"):
with ag_ctx.ControlStatusCtx(ag_ctx.Status.ENABLED):
next(iter(t))
with self.assertRaisesRegexp(
TypeError, "iterating.*AutoGraph is disabled"):
with ag_ctx.ControlStatusCtx(ag_ctx.Status.DISABLED):
next(iter(t))
def testImplicitBool(self):
op = ops.Operation(
ops._NodeDef("FloatOutput", "myop"), ops.Graph(), [], [dtypes.bool])
t = op.outputs[0]
with self.assertRaisesRegexp(
TypeError, "using.*as a.*bool.*not allowed in Graph"):
bool(t)
with self.assertRaisesRegexp(
TypeError, "using.*as a.*bool.*AutoGraph did not convert"):
with ag_ctx.ControlStatusCtx(ag_ctx.Status.ENABLED):
bool(t)
with self.assertRaisesRegexp(
TypeError, "using.*as a.*bool.*AutoGraph is disabled"):
with ag_ctx.ControlStatusCtx(ag_ctx.Status.DISABLED):
bool(t)
def testAddShape(self):
with self.cached_session():
a = array_ops.zeros([2, 3])
b = array_ops.ones([1, 3])
c = a + b
self.assertEqual([2, 3], c.shape)
@test_util.run_deprecated_v1
def testUnknownDim(self):
with self.cached_session():
a = array_ops.placeholder(dtype=dtypes.float32, shape=[2, None, 3])
b = array_ops.placeholder(dtype=dtypes.float32, shape=[2, None, 3])
c = a + b
self.assertEqual([2, None, 3], c.shape.as_list())
@test_util.run_deprecated_v1
def testUnknownShape(self):
with self.cached_session():
a = array_ops.placeholder(dtype=dtypes.float32, shape=None)
b = array_ops.ones([1, 3])
c = a + b
self.assertEqual(tensor_shape.unknown_shape(), c.shape)
@test_util.run_deprecated_v1
def testScalarShape(self):
with self.cached_session():
a = array_ops.placeholder(dtype=dtypes.float32, shape=[])
b = array_ops.ones([])
c = a + b
self.assertEqual(tensor_shape.TensorShape([]), c.shape)
@test_util.run_deprecated_v1
def testShapeFunctionError(self):
with self.cached_session():
a = array_ops.ones([1, 2, 3])
b = array_ops.ones([4, 5, 6])
with self.assertRaisesRegexp(
ValueError, r"Dimensions must be equal, but are 2 and 5 for 'add' "
r"\(op: 'Add(V2)?'\) with input shapes: \[1,2,3\], \[4,5,6\]."):
_ = a + b
def testNumpyArray(self):
with ops.Graph().as_default():
x = array_ops.ones((3, 4), name="test_ones")
with self.assertRaisesRegexp(NotImplementedError,
r"Cannot convert a symbolic.+test_ones"):
np.array(x)
with self.assertRaisesRegexp(TypeError, "not well defined.+test_ones"):
len(x)
# EagerTensors should still behave as numpy arrays.
with context.eager_mode():
x = array_ops.ones((3, 4))
self.assertAllEqual(x, np.ones((3, 4)))
self.assertAllEqual(np.array(x), np.ones((3, 4)))
self.assertEqual(len(x), 3)
def testRef(self):
x1 = constant_op.constant(3)
x2 = x1
y = constant_op.constant(3)
z = constant_op.constant([6, 10])
w = variables.Variable(5)
self.assertEqual(x1.experimental_ref(), x1.experimental_ref())
self.assertEqual(x2.experimental_ref(), x2.experimental_ref())
self.assertEqual(x1.experimental_ref(), x2.experimental_ref())
self.assertEqual(y.experimental_ref(), y.experimental_ref())
self.assertEqual(z.experimental_ref(), z.experimental_ref())
self.assertEqual(w.experimental_ref(), w.experimental_ref())
self.assertNotEqual(x1.experimental_ref(), y.experimental_ref())
self.assertNotEqual(x1.experimental_ref(), z.experimental_ref())
self.assertNotEqual(x1.experimental_ref(), w.experimental_ref())
self.assertNotEqual(y.experimental_ref(), z.experimental_ref())
self.assertNotEqual(y.experimental_ref(), w.experimental_ref())
self.assertNotEqual(z.experimental_ref(), w.experimental_ref())
def testRefDeref(self):
x1 = constant_op.constant(3)
x2 = x1
y = constant_op.constant(3)
z = constant_op.constant([6, 10])
w = variables.Variable(5)
self.assertIs(x1, x1.experimental_ref().deref())
self.assertIs(x2, x2.experimental_ref().deref())
self.assertIs(x1, x2.experimental_ref().deref())
self.assertIs(x2, x1.experimental_ref().deref())
self.assertIs(y, y.experimental_ref().deref())
self.assertIs(z, z.experimental_ref().deref())
self.assertIsNot(x1, y.experimental_ref().deref())
self.assertIsNot(x1, z.experimental_ref().deref())
self.assertIsNot(x1, w.experimental_ref().deref())
self.assertIsNot(y, z.experimental_ref().deref())
self.assertIsNot(y, w.experimental_ref().deref())
self.assertIsNot(z, w.experimental_ref().deref())
def testRefInSet(self):
x1 = constant_op.constant(3)
x2 = x1
y = constant_op.constant(3)
z = constant_op.constant([6, 10])
w = variables.Variable(5)
self.assertEqual(x1.experimental_ref(), x2.experimental_ref())
tensor_set = {
x1.experimental_ref(),
x2.experimental_ref(),
y.experimental_ref(),
z.experimental_ref(),
w.experimental_ref(),
}
self.assertEqual(len(tensor_set), 4)
self.assertIn(x1.experimental_ref(), tensor_set)
self.assertIn(x2.experimental_ref(), tensor_set)
self.assertIn(y.experimental_ref(), tensor_set)
self.assertIn(z.experimental_ref(), tensor_set)
self.assertIn(w.experimental_ref(), tensor_set)
def testRefInDict(self):
x1 = constant_op.constant(3)
x2 = x1
y = constant_op.constant(3)
z = constant_op.constant([6, 10])
w = variables.Variable(5)
self.assertEqual(x1.experimental_ref(), x2.experimental_ref())
tensor_dict = {
x1.experimental_ref(): "x1",
y.experimental_ref(): "y",
z.experimental_ref(): "z",
w.experimental_ref(): "w",
}
self.assertEqual(len(tensor_dict), 4)
# Overwriting x1
tensor_dict[x2.experimental_ref()] = "x2"
self.assertEqual(len(tensor_dict), 4)
self.assertEqual(tensor_dict[x1.experimental_ref()], "x2")
self.assertEqual(tensor_dict[x2.experimental_ref()], "x2")
self.assertEqual(tensor_dict[y.experimental_ref()], "y")
self.assertEqual(tensor_dict[z.experimental_ref()], "z")
self.assertEqual(tensor_dict[w.experimental_ref()], "w")
def testTensorRefStrong(self):
x = constant_op.constant(1.)
x_ref = x.experimental_ref()
del x
self.assertIsNotNone(x_ref.deref())
def testVariableRefStrong(self):
x = variables.Variable(1.)
x_ref = x.experimental_ref()
del x
self.assertIsNotNone(x_ref.deref())
class IndexedSlicesTest(test_util.TensorFlowTestCase):
@test_util.run_in_graph_and_eager_modes
def testToTensor(self):
values = constant_op.constant([2, 3, 5, 7], shape=[2, 2])
indices = constant_op.constant([0, 2])
dense_shape = constant_op.constant([3, 2])
x = ops.IndexedSlices(values, indices, dense_shape)
tensor = ops.convert_to_tensor(x, name="tensor")
self.assertAllEqual(self.evaluate(tensor), [[2, 3], [0, 0], [5, 7]])
@test_util.run_gpu_only
def testEagerCopy(self):
with context.eager_mode():
var = variables.Variable([[0.0], [0.0], [0.0], [0.0]], name="tensor")
with backprop.GradientTape() as tape:
a = array_ops.gather(array_ops.gather(var, [0, 1]), [0, 1])
b = array_ops.gather(array_ops.gather(var, [2, 3]), [0, 1])
r = special_math_ops.einsum("ij,ij->i", a, b)
g = tape.gradient(r, [var])[0]
values = g.values if isinstance(g, ops.IndexedSlices) else g
self.assertAllEqual(values.get_shape(), [4, 1])
@test_util.run_deprecated_v1
def testNegation(self):
with self.cached_session():
values = constant_op.constant([2, 3, 5, 7], shape=[2, 2])
indices = constant_op.constant([0, 2])
x = -ops.IndexedSlices(values, indices)
self.assertAllEqual(x.values.eval(), [[-2, -3], [-5, -7]])
self.assertAllEqual(x.indices.eval(), [0, 2])
@test_util.run_deprecated_v1
def testScalarMul(self):
with self.cached_session():
values = constant_op.constant([2, 3, 5, 7], shape=[2, 2])
indices = constant_op.constant([0, 2])
x = math_ops.scalar_mul(-2, ops.IndexedSlices(values, indices))
self.assertAllEqual(x.values.eval(), [[-4, -6], [-10, -14]])
self.assertAllEqual(x.indices.eval(), [0, 2])
@test_util.run_all_in_graph_and_eager_modes
class IndexedSlicesSpecTest(test_util.TensorFlowTestCase,
parameterized.TestCase):
def assertAllTensorsEqual(self, list1, list2):
self.assertLen(list1, len(list2))
for (t1, t2) in zip(list1, list2):
self.assertAllEqual(t1, t2)
def testConstruction(self):
spec1 = indexed_slices.IndexedSlicesSpec()
self.assertEqual(spec1._shape.rank, None)
self.assertEqual(spec1._values_dtype, dtypes.float32)
self.assertEqual(spec1._indices_dtype, dtypes.int64)
self.assertEqual(spec1._dense_shape_dtype, None)
self.assertEqual(spec1._indices_shape.as_list(), [None])
spec2 = indexed_slices.IndexedSlicesSpec([None, None], dtypes.string,
dtypes.int32, dtypes.int64, [10])
self.assertEqual(spec2._shape.as_list(), [None, None])
self.assertEqual(spec2._values_dtype, dtypes.string)
self.assertEqual(spec2._indices_dtype, dtypes.int32)
self.assertEqual(spec2._dense_shape_dtype, dtypes.int64)
self.assertEqual(spec2._indices_shape.as_list(), [10])
def testValueType(self):
spec1 = indexed_slices.IndexedSlicesSpec()
self.assertEqual(spec1.value_type, ops.IndexedSlices)
@parameterized.parameters([
(indexed_slices.IndexedSlicesSpec(),
(tensor_shape.TensorShape(None), dtypes.float32, dtypes.int64, None,
tensor_shape.TensorShape([None]))),
(indexed_slices.IndexedSlicesSpec(shape=[5, None, None]),
(tensor_shape.TensorShape([5, None, None]), dtypes.float32,
dtypes.int64, None, tensor_shape.TensorShape([None]))),
(indexed_slices.IndexedSlicesSpec(
dtype=dtypes.int32, dense_shape_dtype=dtypes.int64),
(tensor_shape.TensorShape(None), dtypes.int32, dtypes.int64,
dtypes.int64, tensor_shape.TensorShape([None]))),
(indexed_slices.IndexedSlicesSpec(indices_shape=[100]),
(tensor_shape.TensorShape(None), dtypes.float32, dtypes.int64, None,
tensor_shape.TensorShape([100]))),
]) # pyformat: disable
def testSerialize(self, spec, expected):
serialization = spec._serialize()
# TensorShape has an unconventional definition of equality, so we can't use
# assertEqual directly here. But repr() is deterministic and lossless for
# the expected values, so we can use that instead.
self.assertEqual(repr(serialization), repr(expected))
@parameterized.parameters([
(indexed_slices.IndexedSlicesSpec(dtype=dtypes.string), (
tensor_spec.TensorSpec(None, dtypes.string),
tensor_spec.TensorSpec([None], dtypes.int64),
)),
(indexed_slices.IndexedSlicesSpec(
dtype=dtypes.string, dense_shape_dtype=dtypes.int32), (
tensor_spec.TensorSpec(None, dtypes.string),
tensor_spec.TensorSpec([None], dtypes.int64),
tensor_spec.TensorSpec([None], dtypes.int32),
)),
(indexed_slices.IndexedSlicesSpec(
shape=[5, 10, 15], dense_shape_dtype=dtypes.int32), (
tensor_spec.TensorSpec([None, 10, 15], dtypes.float32),
tensor_spec.TensorSpec([None], dtypes.int64),
tensor_spec.TensorSpec([3], dtypes.int32),
)),
(indexed_slices.IndexedSlicesSpec(
shape=[5, 10, 15], dense_shape_dtype=dtypes.int32,
indices_shape=[20]), (
tensor_spec.TensorSpec([20, 10, 15], dtypes.float32),
tensor_spec.TensorSpec([20], dtypes.int64),
tensor_spec.TensorSpec([3], dtypes.int32),
)),
])
def testComponentSpecs(self, spec, expected):
self.assertEqual(spec._component_specs, expected)
@parameterized.parameters([
{
"spec": indexed_slices.IndexedSlicesSpec(),
"values": [3.0, 5.0],
"indices": [5, 10]
},
{
"spec":
indexed_slices.IndexedSlicesSpec(dense_shape_dtype=dtypes.int32),
"values": [3.0, 5.0],
"indices": [5, 10],
"dense_shape": [100]
},
])
def testToFromComponents(self, spec, indices, values, dense_shape=None):
x = ops.IndexedSlices(indices, values, dense_shape)
actual_components = spec._to_components(x)
if dense_shape is None:
self.assertAllTensorsEqual(actual_components, [indices, values])
else:
self.assertAllTensorsEqual(actual_components,
[indices, values, dense_shape])
st_reconstructed = spec._from_components(actual_components)
self.assertAllEqual(x.indices, st_reconstructed.indices)
self.assertAllEqual(x.values, st_reconstructed.values)
if dense_shape is None:
self.assertIs(st_reconstructed.dense_shape, None)
else:
self.assertAllEqual(x.dense_shape, st_reconstructed.dense_shape)
@test_util.run_v1_only("IndexedSlicesValue is deprecated in v2")
def testFromNumpyComponents(self):
indices = np.array([3, 8])
values = np.array([1.0, 9.0])
dense_shape = np.array([100])
spec1 = indexed_slices.IndexedSlicesSpec(dense_shape_dtype=dtypes.int32)
st1 = spec1._from_components((values, indices, dense_shape))
self.assertIsInstance(st1, indexed_slices.IndexedSlicesValue)
self.assertAllEqual(st1.indices, indices)
self.assertAllEqual(st1.values, values)
self.assertAllEqual(st1.dense_shape, dense_shape)
spec2 = indexed_slices.IndexedSlicesSpec()
st2 = spec2._from_components((values, indices))
self.assertIsInstance(st2, indexed_slices.IndexedSlicesValue)
self.assertAllEqual(st2.indices, indices)
self.assertAllEqual(st2.values, values)
self.assertIs(st2.dense_shape, None)
class NodeDefConstructorTest(test_util.TensorFlowTestCase):
def testNoArgs(self):
nodedef = ops._NodeDef("None", "bar")
self.assertProtoEquals("op: 'None' name: 'bar'", nodedef)
def _apply_op(g, *args, **kwargs):
op = g.create_op(*args, **kwargs)
if len(op.outputs) == 1:
return op.outputs[0]
else:
return op.outputs
class OperationTest(test_util.TensorFlowTestCase):
@test_util.run_deprecated_v1
def testNoInputs(self):
op = test_ops.float_output_string_output(name="myop").a.op
self.assertEqual(2, len(op.values()))
self.assertEqual(0, len(op.inputs))
self.assertEqual("myop", op.name)
float_t, label_str_t = op.values()
self.assertEqual(dtypes.float32, float_t.dtype)
self.assertEqual(op, float_t.op)
self.assertEqual(0, float_t._value_index)
self.assertEqual(0, len(float_t.consumers()))
self.assertEqual("myop", float_t._as_node_def_input())
self.assertEqual(dtypes.string, label_str_t.dtype)
self.assertEqual(op, label_str_t.op)
self.assertEqual(1, label_str_t._value_index)
self.assertEqual(0, len(label_str_t.consumers()))
self.assertEqual("myop:1", label_str_t._as_node_def_input())
self.assertProtoEquals("op:'FloatOutputStringOutput' name:'myop'",
op.node_def)
@test_util.run_deprecated_v1
def testNoOutputs(self):
op1 = test_ops.float_output(name="myop1").op
float_t, = op1.values()
op2 = test_ops.float_input(float_t, name="myop2")
self.assertEqual(0, len(op2.values()))
self.assertEqual(1, len(op2.inputs))
self.assertIs(float_t, op2.inputs[0])
self.assertEqual(1, len(float_t.consumers()))
self.assertEqual(op2, float_t.consumers()[0])
self.assertProtoEquals("op:'FloatOutput' name:'myop1'", op1.node_def)
self.assertProtoEquals("op:'FloatInput' name:'myop2' input:'myop1'",
op2.node_def)
@test_util.run_deprecated_v1
def testInputsAndOutputs(self):
op1 = test_ops.float_output(name="myop1").op
self.assertEqual(1, len(op1.values()))
float1_t, = op1.values()
op2 = test_ops.float_output_string_output(name="myop2").a.op
self.assertEqual(2, len(op2.values()))
float2_t, label2_str_t = op2.values()
# Note that we consume label2_str_t twice here.
op3 = test_ops.foo2(float1_t, label2_str_t, label2_str_t, name="myop3").d.op
self.assertEqual(2, len(op3.values()))
self.assertEqual(1, len(float1_t.consumers()))
self.assertEqual(op3, float1_t.consumers()[0])
self.assertEqual(0, len(float2_t.consumers()))
self.assertEqual(2, len(label2_str_t.consumers()))
self.assertEqual(op3, label2_str_t.consumers()[0])
self.assertEqual(op3, label2_str_t.consumers()[1])
self.assertProtoEquals("""
op:'Foo2' name:'myop3'
input:'myop1' input:'myop2:1' input:'myop2:1'
""", op3.node_def)
def testDeviceObject(self):
op = ops.Operation(ops._NodeDef("None", "myop"), ops.Graph(), [], [])
op._set_device("/job:goo/device:GPU:0")
self.assertProtoEquals(
"op:'None' name:'myop' device:'/job:goo/device:GPU:0' ", op.node_def)
op = ops.Operation(ops._NodeDef("None", "op2"), ops.Graph(), [], [])
op._set_device(
pydev.DeviceSpec(
job="muu", device_type="CPU", device_index=0))
self.assertProtoEquals(
"op:'None' name:'op2' device:'/job:muu/device:CPU:0'", op.node_def)
def testReferenceInput(self):
g = ops.Graph()
op1 = ops.Operation(
ops._NodeDef("RefOutputFloatOutput", "op1"), g, [],
[dtypes.float32_ref, dtypes.float32])
self.assertProtoEquals("op:'RefOutputFloatOutput' name:'op1'", op1.node_def)
self.assertEquals([], list(op1.inputs))
ref_t, nonref_t = op1.values()
# NOTE(mrry): Must specify input_types to preserve ref-typed input.
op2 = ops.Operation(
ops._NodeDef("RefInputFloatInput", "op2"),
g, [ref_t, nonref_t], [],
input_types=[dtypes.float32_ref, dtypes.float32])
self.assertProtoEquals(
"op:'RefInputFloatInput' name:'op2' input:'op1' input:'op1:1'",
op2.node_def)
self.assertEquals([ref_t, nonref_t], list(op2.inputs))
op3 = ops.Operation(
ops._NodeDef("TwoFloatInputs", "op3"), g, [ref_t, nonref_t], [])
self.assertProtoEquals(
"op:'TwoFloatInputs' name:'op3' input:'op1' input:'op1:1'",
op3.node_def)
def testInvalidNames(self):
g = ops.Graph()
with self.assertRaises(ValueError):
ops.Operation(ops._NodeDef("op", ""), g)
with self.assertRaises(ValueError):
ops.Operation(ops._NodeDef("op", "_invalid"), g)
with self.assertRaises(ValueError):
ops.Operation(ops._NodeDef("op", "-invalid"), g)
with self.assertRaises(ValueError):
ops.Operation(ops._NodeDef("op", "/invalid"), g)
with self.assertRaises(ValueError):
ops.Operation(ops._NodeDef("op", "invalid:0"), g)
@test_util.run_deprecated_v1
def testNoShapeFunction(self):
op = test_ops.a()
self.assertEqual(tensor_shape.unknown_shape(), op.get_shape())
@test_util.run_in_graph_and_eager_modes
def testConvertToTensorNestedArray(self):
values = [[2], [3], [5], [7]]
tensor = ops.convert_to_tensor(values)
self.assertAllEqual((4, 1), tensor.get_shape().as_list())
self.assertAllEqual(values, self.evaluate(tensor))
def testShapeTuple(self):
with self.cached_session():
c = constant_op.constant(1)
self.assertEqual(c._shape_tuple(), ()) # pylint: disable=protected-access
def testConvertToTensorEager(self):
with context.eager_mode():
t = constant_op.constant(1)
self.assertTrue(isinstance(t, ops.EagerTensor))
converted = ops.convert_to_tensor(t)
self.assertTrue(isinstance(converted, ops.EagerTensor))
converted = ops.convert_to_tensor(1)
self.assertTrue(isinstance(converted, ops.EagerTensor))
@test_util.run_in_graph_and_eager_modes
def testConvertToTensorNestedTuple(self):
values = ((2,), (3,), (5,), (7,))
tensor = ops.convert_to_tensor(values)
self.assertAllEqual((4, 1), tensor.get_shape().as_list())
self.assertAllEqual(values, self.evaluate(ops.convert_to_tensor(values)))
@test_util.run_in_graph_and_eager_modes
def testConvertToTensorNestedTensors(self):
values = ((2,), (3,), (5,), (7,))
tensor = ops.convert_to_tensor(
[constant_op.constant(row) for row in values])
self.assertAllEqual((4, 1), tensor.get_shape().as_list())
self.assertAllEqual(values, self.evaluate(tensor))
tensor = ops.convert_to_tensor(
[[constant_op.constant(v) for v in row] for row in values])
self.assertAllEqual((4, 1), tensor.get_shape().as_list())
self.assertAllEqual(values, self.evaluate(tensor))
@test_util.run_in_graph_and_eager_modes
def testConvertToTensorNestedMix(self):
values = ([2], (3,), [constant_op.constant(5)], constant_op.constant([7]))
tensor = ops.convert_to_tensor(values)
self.assertAllEqual((4, 1), tensor.get_shape().as_list())
self.assertAllEqual(((2,), (3,), (5,), (7,)), self.evaluate(tensor))
@test_util.run_in_graph_and_eager_modes
def testConvertToTensorPreferred(self):
values = [2, 3, 5, 7]
tensor = ops.convert_to_tensor(values, preferred_dtype=dtypes.float32)
self.assertEqual(dtypes.float32, tensor.dtype)
# Convert empty tensor to anything.
values = []
tensor = ops.convert_to_tensor(values, preferred_dtype=dtypes.int64)
self.assertEqual(dtypes.int64, tensor.dtype)
# The preferred dtype is a type error and will convert to
# float32 instead.
values = [1.23]
tensor = ops.convert_to_tensor(values, preferred_dtype=dtypes.int64)
self.assertEqual(dtypes.float32, tensor.dtype)
@test_util.run_in_graph_and_eager_modes
def testConvertToInvalidTensorType(self):
with self.assertRaises(TypeError):
# Forcing an invalid dtype should fail with a type error.
values = [1.23]
ops.convert_to_tensor(values, dtype=dtypes.int64)
@test_util.run_in_graph_and_eager_modes
def testConvertToLongLongTensorType(self):
tensor = ops.convert_to_tensor(
# Get a numpy array of dtype NPY_LONGLONG
np.prod(constant_op.constant([1])._shape_tuple()))
self.assertEqual(dtypes.int64, tensor.dtype)
@test_util.run_in_graph_and_eager_modes
def testConvertToTensorFromInvalidTensor(self):
tensor = constant_op.constant(42.0, dtype=dtypes.float32)
with self.assertRaises(ValueError):
ops.convert_to_tensor(tensor, dtype=dtypes.int32)
@test_util.run_deprecated_v1
def testNoConvert(self):
# Operation cannot be converted to Tensor.
op = control_flow_ops.no_op()
with self.assertRaisesRegexp(TypeError,
r"Can't convert Operation '.*' to Tensor"):
ops.convert_to_tensor(op)
def testStr(self):
node_def = ops._NodeDef("None", "op1")
op = ops.Operation(node_def, ops.Graph(), [], [dtypes.float32])
self.assertEqual(str(node_def), str(op))
def testRepr(self):
op = ops.Operation(
ops._NodeDef("None", "op1"), ops.Graph(), [], [dtypes.float32])
self.assertEqual("<tf.Operation 'op1' type=None>", repr(op))
@test_util.run_deprecated_v1
def testGetAttr(self):
op = test_ops.default_attrs()
self.assertEqual(op.get_attr("string_val"), b"abc")
self.assertEqual(op.get_attr("string_list_val"), [b"abc", b""])
self.assertEqual(op.get_attr("int_val"), 123)
self.assertEqual(op.get_attr("int_list_val"), [1, 2, 3])
self.assertEqual(op.get_attr("float_val"), 10.0)
self.assertEqual(op.get_attr("float_list_val"), [10.0])
self.assertEqual(op.get_attr("bool_val"), True)
self.assertEqual(op.get_attr("bool_list_val"), [True, False])
self.assertEqual(op.get_attr("shape_val"),
tensor_shape.as_shape([2, 1]).as_proto())
self.assertEqual(op.get_attr("shape_list_val"),
[tensor_shape.as_shape([]).as_proto(),
tensor_shape.as_shape([1]).as_proto()])
self.assertEqual(op.get_attr("tensor_val"),
tensor_util.make_tensor_proto(1, dtypes.int32))
self.assertEqual(op.get_attr("tensor_list_val"),
[tensor_util.make_tensor_proto(1, dtypes.int32)])
type_val = op.get_attr("type_val")
# First check that type_val is a DType, because the assertEquals will work
# no matter what since DType overrides __eq__
self.assertIsInstance(type_val, dtypes.DType)
self.assertEqual(type_val, dtypes.int32)
type_list_val = op.get_attr("type_list_val")
self.assertTrue(all(isinstance(x, dtypes.DType) for x in type_list_val))
self.assertEqual(type_list_val, [dtypes.int32, dtypes.float32])
@function.Defun(dtypes.float32, func_name="MyFunc")
def func(x):
return x
op = test_ops.func_attr(func)
self.assertEqual(op.get_attr("f"),
attr_value_pb2.NameAttrList(name="MyFunc"))
# Try fetching missing attr
with self.assertRaisesRegexp(
ValueError, "Operation 'FuncAttr' has no attr named 'FakeAttr'."):
op.get_attr("FakeAttr")
# TODO(b/65162920): remove this test when users who are directly mutating the
# node_def have been updated to proper usage.
@test_util.run_deprecated_v1
def testSetAttr(self):
op = test_ops.int_attr().op
op._set_attr("foo", attr_value_pb2.AttrValue(i=2))
# TODO(skyewm): add node_def check
self.assertEqual(op.get_attr("foo"), 2)
# TODO(nolivia): test all error cases
def testAddControlInput(self):
with ops.Graph().as_default():
x = constant_op.constant(1).op
y = constant_op.constant(2).op
z = constant_op.constant(3).op
z._add_control_input(x) # pylint: disable=protected-access
self.assertEqual(z.control_inputs, [x])
z._add_control_input(x) # pylint: disable=protected-access
self.assertEqual(z.control_inputs, [x])
z._add_control_inputs([x, y, y]) # pylint: disable=protected-access
self.assertEqual(z.control_inputs, [x, y])
self.assertEqual(x._control_outputs, [z])
@test_util.run_deprecated_v1
def testRemoveAllControlInputs(self):
a = constant_op.constant(1)
with ops.control_dependencies([a]):
b = constant_op.constant(2)
c = constant_op.constant(3)
d = constant_op.constant(4)
e = constant_op.constant(5)
with ops.control_dependencies([a, c]):
f = d + e
self.assertEqual(a.op.control_inputs, [])
self.assertEqual(b.op.control_inputs, [a.op])
self.assertEqual(f.op.control_inputs, [a.op, c.op])
a.op._remove_all_control_inputs() # pylint: disable=protected-access
self.assertEqual(a.op.control_inputs, [])
b.op._remove_all_control_inputs() # pylint: disable=protected-access
self.assertEqual(b.op.control_inputs, [])
f.op._remove_all_control_inputs() # pylint: disable=protected-access
self.assertEqual(f.op.control_inputs, [])
self.assertEqual(list(f.op.inputs), [d, e])
@test_util.run_deprecated_v1
def testControlInputCycle(self):
graph = ops.Graph()
with graph.as_default():
z = constant_op.constant(0)
x = constant_op.constant(1)
y = constant_op.constant(2)
y.op._add_control_input(z.op) # pylint: disable=protected-access
y.op._add_control_input(x.op) # pylint: disable=protected-access
x.op._add_control_input(y.op) # pylint: disable=protected-access
with self.session(graph=graph) as sess:
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
"Graph is invalid, contains a cycle with 2 nodes"):
self.evaluate(x)
def testUpdateInput(self):
g = ops.Graph()
with g.as_default():
x = constant_op.constant(1)
y = constant_op.constant(2)
z = x + y
z.op._update_input(0, y) # pylint: disable=protected-access
self.assertEquals(list(z.op.inputs), [y, y])
self.assertEquals(x.consumers(), [])
self.assertEquals(y.consumers(), [z.op, z.op])
with session.Session(graph=g) as sess:
self.assertEquals(self.evaluate(z), 4)
z.op._update_input(0, x) # pylint: disable=protected-access
self.assertEquals(list(z.op.inputs), [x, y])
self.assertEquals(x.consumers(), [z.op])
self.assertEquals(y.consumers(), [z.op])
with session.Session(graph=g) as sess:
self.assertEquals(self.evaluate(z), 3)
z.op._update_input(1, y) # pylint: disable=protected-access
self.assertEquals(list(z.op.inputs), [x, y])
self.assertEquals(x.consumers(), [z.op])
self.assertEquals(y.consumers(), [z.op])
with session.Session(graph=g) as sess:
self.assertEquals(self.evaluate(z), 3)
def testUpdateInputGraphError(self):
g_0 = ops.Graph()
g_1 = ops.Graph()
with g_0.as_default():
x = constant_op.constant(1)
with g_1.as_default():
y = constant_op.constant(2)
z = y * 2
with self.assertRaisesRegexp(ValueError, "must be from the same graph"):
z.op._update_input(0, x) # pylint: disable=protected-access
def testUpdateInputTypeError(self):
g = ops.Graph()
with g.as_default():
w = constant_op.constant(0)
x = constant_op.constant("")
y = constant_op.constant(1)
z = y + w
z.op._update_input(0, x) # pylint: disable=protected-access
with session.Session(graph=g) as sess:
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
"Input 0 of node add was passed string from Const_1:0 incompatible "
"with expected int32"):
self.evaluate(z)
def testUpdateInputShapeError(self):
g = ops.Graph()
with g.as_default():
w = constant_op.constant(2, shape=[3, 1])
x = constant_op.constant(0, shape=[3, 1])
y = constant_op.constant(1, shape=[2, 2])
z = w + x
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
r"Cannot update edge, incompatible shapes: \[2,2\] and \[3,1\]"):
z.op._update_input(0, y) # pylint: disable=protected-access
def testUpdateInputOutOfRange(self):
g = ops.Graph()
with g.as_default():
x = constant_op.constant(1)
with self.assertRaisesRegexp(
errors.OutOfRangeError,
r"Cannot update edge. Input index \[1\] is greater than the number of "
r"total inputs \[0\]."
):
x.op._update_input(1, x) # pylint: disable=protected-access
@test_util.enable_control_flow_v2
@test_util.run_v1_only("b/120545219")
def testAddWhileInput(self):
if forward_compat.forward_compatible(2019, 8, 23):
@eager_function.defun
def test():
output = control_flow_ops.while_loop(lambda x: x < 3, lambda x: x + 1,
[1])
while_op = output.op
self.assertEqual(while_op.type, "StatelessWhile")
orig_num_inputs = len(while_op.inputs)
# Make sure we can handle the while op having a control input.
while_op._add_control_input(constant_op.constant(0).op)
new_input1 = constant_op.constant(1.0)
new_input2 = constant_op.constant(True)
# Clear output shapes to bypass shape checking.
while_op._set_shape_list_attr("output_shapes", [])
while_op._set_type_list_attr("T",
[t.dtype for t in while_op.inputs] +
[new_input1.dtype, new_input2.dtype])
while_op._add_while_inputs([new_input1, new_input2])
# Can't add an edge beyond what's specified by "T"
with self.assertRaises(errors.OutOfRangeError):
while_op._add_while_inputs([new_input2])
self.assertEqual(len(while_op.inputs), orig_num_inputs + 2) # pylint: disable=g-deprecated-assert
test()
@test_util.run_deprecated_v1
def testOpDef(self):
x = constant_op.constant(0)
y = constant_op.constant(1)
z = x + y
self.assertEqual(x.op.op_def.name, "Const")
self.assertEqual(len(x.op.op_def.input_arg), 0)
self.assertEqual(len(x.op.op_def.output_arg), 1)
self.assertRegexpMatches(z.op.op_def.name, "Add(V2)?")
self.assertEqual(len(z.op.op_def.input_arg), 2)
self.assertEqual(len(z.op.op_def.output_arg), 1)
def testInputFromDifferentGraphError(self):
g_0 = ops.Graph()
g_1 = ops.Graph()
with g_0.as_default():
x = constant_op.constant(1)
with g_1.as_default():
y = constant_op.constant(2)
with self.assertRaisesRegexp(ValueError, "must be from the same graph"):
y * x # pylint: disable=pointless-statement
def testInputsAreImmutable(self):
g = ops.Graph()
with g.as_default():
x = test_ops.int_output()
op = test_ops.int_input_int_output(x, name="myop").op
with self.assertRaisesRegexp(
AttributeError, "'tuple' object has no attribute 'append'"):
op.inputs.append(None)
class CreateOpTest(test_util.TensorFlowTestCase):
def testNodeDefArgs(self):
g = ops.Graph()
op1 = g.create_op("FloatOutput", [], [dtypes.float32], None, name="myop1")
with g.device("/device:GPU:0"):
op2 = g.create_op(
"FloatOutputStringOutput", [], [dtypes.float32, dtypes.string], None,
name="myop2")
op3 = g.create_op(
"Foo3",
[list(op1.values())[0], list(op2.values())[1], list(op2.values())[0]],
[dtypes.float32, dtypes.int32],
None,
name="myop3")
self.assertDeviceEqual(None, op1.device)
self.assertDeviceEqual("/device:GPU:0", op2.device)
self.assertDeviceEqual(None, op3.device)
self.assertProtoEquals("name:'myop1' op:'FloatOutput'", op1.node_def)
self.assertProtoEquals(
"name:'myop2' op:'FloatOutputStringOutput' device:'/device:GPU:0'",
op2.node_def)
self.assertProtoEquals(
"name:'myop3' input:'myop1' input:'myop2:1' input:'myop2' op:'Foo3'",
op3.node_def)
def testReferenceInput(self):
g = ops.Graph()
op1 = g.create_op(
"RefOutputFloatOutput", [], [dtypes.float32_ref, dtypes.float32],
name="op1")
self.assertProtoEquals("op:'RefOutputFloatOutput' name:'op1'", op1.node_def)
ref_t, nonref_t = op1.values()
# NOTE(mrry): Must specify input_types to preserve ref-typed input.
op2 = g.create_op(
"RefInputFloatInput", [ref_t, nonref_t], [],
input_types=[dtypes.float32_ref, dtypes.float32],
name="op2")
self.assertProtoEquals(
"op:'RefInputFloatInput' name:'op2' input:'op1' input:'op1:1'",
op2.node_def)
op3 = g.create_op("TwoFloatInputs", [ref_t, nonref_t], [], name="op3")
self.assertProtoEquals(
"op:'TwoFloatInputs' name:'op3' input:'op1' input:'op1:1'",
op3.node_def)
def testFinalized(self):
g = ops.Graph()
g.finalize()
with self.assertRaises(RuntimeError):
g.create_op("FloatOutput", [], [dtypes.float32], None, name="myop1")
# Test unfinalize.
g._unsafe_unfinalize()
g.create_op("FloatOutput", [], [dtypes.float32], None, name="myop1")
# NOTE(skyewm): these cases test the private Graph._create_op_from_tf_operation
# method. Arguably we should only test the public APIs that depend on this
# method. However, this logic is complex and tricky, and it can be difficult to
# ascertain if we have adequate coverage (e.g. a graph may run successfully if
# the control flow context isn't set properly, but a more complicated use case
# that might not be obvious to test will fail). Thus we instead explicitly test
# the low-level behavior.
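# For orientation, the flow these tests exercise is roughly: build a C-level op
# with ops._create_c_op(graph, ops._NodeDef(op_type, name), inputs, []) and
# then wrap it into a Python Operation with
# graph._create_op_from_tf_operation(c_op), as in testBasic below. Both calls
# are private APIs and appear here only because the tests target this internal
# behavior directly.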
class CreateOpFromTFOperationTest(test_util.TensorFlowTestCase):
@test_util.run_deprecated_v1
def testBasic(self):
g = ops.Graph()
with g.as_default():
x = test_ops.int_output()
c_op = ops._create_c_op(
g, ops._NodeDef("IntInputIntOutput", "myop"), [x], [])
op = g._create_op_from_tf_operation(c_op)
self.assertEqual(op.name, "myop")
self.assertEqual(op.type, "IntInputIntOutput")
self.assertEqual(len(op.outputs), 1)
self.assertEqual(op.outputs[0].shape, tensor_shape.unknown_shape())
self.assertEqual(list(op.inputs), [x])
self.assertEqual(op.control_inputs, [])
self.assertEqual(op.graph, g)
self.assertEqual(x.consumers(), [op])
self.assertIsNotNone(op.traceback)
self.assertEqual(g.get_operation_by_name("myop"), op)
self.assertEqual(g.get_tensor_by_name("myop:0"), op.outputs[0])
def testShape(self):
g = ops.Graph()
with g.as_default():
x = constant_op.constant([[1, 2, 3], [4, 5, 6]])
c_op = ops._create_c_op(g, ops._NodeDef("Identity", "myop"), [x], [])
op = g._create_op_from_tf_operation(c_op)
self.assertEqual(op.name, "myop")
self.assertEqual(op.type, "Identity")
self.assertEqual(len(op.outputs), 1)
self.assertEqual(op.outputs[0].shape, tensor_shape.TensorShape([2, 3]))
def testUniqueName(self):
g = ops.Graph()
with g.as_default():
c_op = ops._create_c_op(g, ops._NodeDef("IntOutput", "myop"), [], [])
c_op2 = ops._create_c_op(g, ops._NodeDef("IntOutput", "myop_1"), [], [])
op = g._create_op_from_tf_operation(c_op)
op2 = g._create_op_from_tf_operation(c_op2)
    # Create ops with the same names as op and op2 above. We expect the new
    # names to be uniquified.
op3 = test_ops.int_output(name="myop").op
op4 = test_ops.int_output(name="myop_1").op
self.assertEqual(op.name, "myop")
self.assertEqual(op2.name, "myop_1")
self.assertEqual(op3.name, "myop_2")
self.assertEqual(op4.name, "myop_1_1")
@test_util.run_v1_only("b/120545219")
def testCond(self):
g = ops.Graph()
with g.as_default():
x = test_ops.int_output()
def true_fn():
ops._create_c_op(ops.get_default_graph(),
ops._NodeDef("IntInput", "cond/myop"), [x], [])
new_ops = g._add_new_tf_operations()
self.assertEqual(len(new_ops), 1)
return x
control_flow_ops.cond(x < 10, true_fn, lambda: x)
op = g.get_operation_by_name("cond/myop")
self.assertIsNotNone(op)
self.assertEqual(op.name, "cond/myop")
self.assertEqual(op.type, "IntInput")
self.assertEqual(op.outputs, [])
op_input = op.inputs[0].op
self.assertEqual(op_input.type, "Switch")
self.assertEqual(op_input.inputs[0], x)
self.assertEqual(op.graph, g)
# pylint: disable=protected-access
self.assertIsNotNone(op._get_control_flow_context())
self.assertEqual(op._get_control_flow_context().name,
"cond/cond_text")
# pylint: enable=protected-access
@test_util.run_v1_only("b/120545219")
def testWhileLoop(self):
g = ops.Graph()
with g.as_default():
x = test_ops.int_output()
def body(i):
ops._create_c_op(ops.get_default_graph(),
ops._NodeDef("IntInput", "myloop/myop"), [x], [])
new_ops = g._add_new_tf_operations()
self.assertEqual(len(new_ops), 1)
return i
control_flow_ops.while_loop(lambda i: i < 10, body, [0], name="myloop")
op = g.get_operation_by_name("myloop/myop")
self.assertIsNotNone(op)
self.assertEqual(op.name, "myloop/myop")
self.assertEqual(op.type, "IntInput")
self.assertEqual(op.outputs, [])
op_input = op.inputs[0].op
self.assertEqual(op_input.type, "Enter")
self.assertEqual(list(op_input.inputs), [x])
self.assertEqual(op.graph, g)
# pylint: disable=protected-access
self.assertIsNotNone(op._get_control_flow_context())
self.assertEqual(op._get_control_flow_context().name,
"myloop/while_context")
# pylint: enable=protected-access
@test_util.run_v1_only("b/120545219")
def testWhileLoopWithInternalControlDep(self):
g = ops.Graph()
with g.as_default():
x = test_ops.int_output()
def body(i):
c = constant_op.constant(1.0, name="c")
ops._create_c_op(ops.get_default_graph(),
ops._NodeDef("IntInput", "myloop/myop"), [x], [])
with ops.control_dependencies([c]):
new_ops = g._add_new_tf_operations()
self.assertEqual(len(new_ops), 1)
return i
control_flow_ops.while_loop(lambda i: i < 10, body, [0], name="myloop")
op = g.get_operation_by_name("myloop/myop")
self.assertIsNotNone(op)
c = g.get_operation_by_name("myloop/c")
self.assertIsNotNone(c)
# Internal control dep is preserved
self.assertEqual(op.control_inputs, [c])
@test_util.run_v1_only("b/120545219")
def testWhileLoopWithExternalControlDep(self):
g = ops.Graph()
with g.as_default():
x = test_ops.int_output()
c = constant_op.constant(1.0)
def body(i):
ops._create_c_op(ops.get_default_graph(),
ops._NodeDef("IntInput", "myloop/myop"), [x], [])
with ops.control_dependencies([c]):
new_ops = g._add_new_tf_operations()
self.assertEqual(len(new_ops), 1)
return i
control_flow_ops.while_loop(lambda i: i < 10, body, [0], name="myloop")
op = g.get_operation_by_name("myloop/myop")
self.assertIsNotNone(op)
# External control dep is removed and replaced with internal control dep
self.assertNotEqual(op.control_inputs[0], c.op)
self.assertIsNotNone(op.control_inputs[0]._get_control_flow_context())
class ApplyOpTest(test_util.TensorFlowTestCase):
def testNodeDefArgs(self):
g = ops.Graph()
t1 = _apply_op(g, "FloatOutput", [], [dtypes.float32], name="myop1")
with g.device("/device:GPU:0"):
t2 = _apply_op(
g, "TwoIntOutputs", [], [dtypes.int32, dtypes.int32], name="myop2")
t3 = _apply_op(
g,
"Foo1", [t1, t2[1], t2[0]], [dtypes.float32, dtypes.int32],
name="myop3")
self.assertTrue(isinstance(t1, ops.Tensor))
self.assertTrue(isinstance(t2, list))
self.assertTrue(isinstance(t3, list))
self.assertTrue(isinstance(t3[0], ops.Tensor))
self.assertEqual("myop1", t1._as_node_def_input())
self.assertEqual("myop2", t2[0]._as_node_def_input())
self.assertEqual("myop2:1", t2[1]._as_node_def_input())
self.assertEqual("myop3", t3[0]._as_node_def_input())
# Validate that we got the right ops as well
self.assertProtoEquals("name:'myop1' op:'FloatOutput'", t1.op.node_def)
self.assertProtoEquals(
"name:'myop2' op:'TwoIntOutputs' device:'/device:GPU:0'",
t2[0].op.node_def)
self.assertProtoEquals(
"name:'myop3' input:'myop1' input:'myop2:1' input:'myop2' op:'Foo1'",
t3[0].op.node_def)
def testReferenceInput(self):
g = ops.Graph()
ref_t, nonref_t = _apply_op(
g, "RefOutputFloatOutput", [], [dtypes.float32_ref, dtypes.float32],
name="op1")
self.assertProtoEquals("op:'RefOutputFloatOutput' name:'op1'",
ref_t.op.node_def)
# NOTE(mrry): Must specify input_types to preserve ref-typed input.
out_2 = _apply_op(
g,
"RefInputFloatInputIntOutput", [ref_t, nonref_t], [dtypes.int32],
input_types=[dtypes.float32_ref, dtypes.float32],
name="op2")
self.assertProtoEquals(
"op:'RefInputFloatInputIntOutput' name:'op2' input:'op1' input:'op1:1'",
out_2.op.node_def)
out_3 = _apply_op(
g, "TwoFloatInputsIntOutput", [ref_t, nonref_t], [dtypes.int32],
name="op3")
self.assertProtoEquals(
"op:'TwoFloatInputsIntOutput' name:'op3' input:'op1' input:'op1:1'",
out_3.op.node_def)
class NameStackTest(test_util.TensorFlowTestCase):
def testBasics(self):
g = ops.Graph()
self.assertEqual("foo", g.unique_name("foo", mark_as_used=False))
self.assertEqual("foo", g.unique_name("foo", mark_as_used=False))
self.assertEqual("foo", g.unique_name("foo"))
self.assertEqual("foo_1", g.unique_name("foo", mark_as_used=False))
self.assertEqual("foo_1", g.unique_name("foo"))
self.assertEqual("foo_2", g.unique_name("foo", mark_as_used=False))
self.assertEqual("foo_2", g.unique_name("foo"))
self.assertEqual("foo_1_1", g.unique_name("foo_1", mark_as_used=False))
self.assertEqual("foo_1_1", g.unique_name("foo_1"))
self.assertEqual("foo_1_2", g.unique_name("foo_1", mark_as_used=False))
self.assertEqual("foo_1_2", g.unique_name("foo_1"))
self.assertEqual("foo_1_2_1", g.unique_name("foo_1_2", mark_as_used=False))
self.assertEqual("foo_1_2_1", g.unique_name("foo_1_2"))
with g.name_scope("bar"):
self.assertEqual("bar/foo", g.unique_name("foo", mark_as_used=False))
self.assertEqual("bar/foo", g.unique_name("foo"))
self.assertEqual("bar/foo_1", g.unique_name("foo", mark_as_used=False))
self.assertEqual("bar/foo_1", g.unique_name("foo"))
with g.name_scope(None):
self.assertEqual("foo_3", g.unique_name("foo", mark_as_used=False))
self.assertEqual("foo_3", g.unique_name("foo"))
with g.name_scope("baz"):
self.assertEqual(
"bar/baz/foo", g.unique_name(
"foo", mark_as_used=False))
self.assertEqual("bar/baz/foo", g.unique_name("foo"))
self.assertEqual(
"bar/baz/foo_1", g.unique_name(
"foo", mark_as_used=False))
self.assertEqual("bar/baz/foo_1", g.unique_name("foo"))
with g.name_scope("baz"):
self.assertEqual(
"bar/baz_1/foo", g.unique_name(
"foo", mark_as_used=False))
self.assertEqual("bar/baz_1/foo", g.unique_name("foo"))
self.assertEqual(
"bar/baz_1/foo_1", g.unique_name(
"foo", mark_as_used=False))
self.assertEqual("bar/baz_1/foo_1", g.unique_name("foo"))
with g.name_scope("quux"):
self.assertEqual("quux/foo", g.unique_name("foo", mark_as_used=False))
self.assertEqual("quux/foo", g.unique_name("foo"))
with g.name_scope("bar"):
with g.name_scope("baz"):
self.assertEqual(
"bar_1/baz/foo", g.unique_name(
"foo", mark_as_used=False))
self.assertEqual("bar_1/baz/foo", g.unique_name("foo"))
self.assertEqual("foo_4", g.unique_name("foo", mark_as_used=False))
self.assertEqual("foo_4", g.unique_name("foo"))
self.assertEqual("bar_2", g.unique_name("bar", mark_as_used=False))
self.assertEqual("bar_2", g.unique_name("bar"))
@test_util.run_deprecated_v1
def testNameAndVariableScope(self):
with self.cached_session() as sess:
with sess.graph.name_scope("l0"):
with variable_scope.variable_scope("l1"):
with sess.graph.name_scope("l1") as scope:
self.assertEqual("l0/l1/l1/", scope)
self.assertEqual(
"l0/l1/l1/foo",
sess.graph.unique_name(
"foo", mark_as_used=False))
self.assertEqual("l0/l1/l1/foo", sess.graph.unique_name("foo"))
with sess.graph.name_scope("l2") as scope:
self.assertEqual("l0/l1/l2/", scope)
self.assertEqual(
"l0/l1/l2/foo",
sess.graph.unique_name(
"foo", mark_as_used=False))
self.assertEqual("l0/l1/l2/foo", sess.graph.unique_name("foo"))
def testOutOfOrderUniqueName(self):
g = ops.Graph()
self.assertEqual("foo_2", g.unique_name("foo_2"))
self.assertEqual("foo", g.unique_name("foo"))
self.assertEqual("foo_1", g.unique_name("foo"))
self.assertEqual("foo_3", g.unique_name("foo"))
def testUniqueNameCaseInsensitivity(self):
g = ops.Graph()
self.assertEqual("foo", g.unique_name("foo"))
self.assertEqual("Foo_1", g.unique_name("Foo"))
with g.name_scope("bar"):
self.assertEqual("bar/foo", g.unique_name("foo"))
with g.name_scope("Bar"):
self.assertEqual("Bar_1/foo", g.unique_name("foo"))
def testInvalidNameRaisesError(self):
g = ops.Graph()
with g.name_scope(""): # Should not raise
pass
with g.name_scope("foo/"): # Should not raise
with g.name_scope("_bar"): # Should not raise
pass
with self.assertRaises(ValueError):
with g.name_scope("foo:0"):
pass
with self.assertRaises(ValueError):
with g.name_scope("_bar"):
pass
class NameTest(test_util.TensorFlowTestCase):
def testGenerateName(self):
g = ops.Graph()
op0 = g.create_op("TwoFloatOutputs", [], [dtypes.float32, dtypes.float32])
self.assertEqual("TwoFloatOutputs", op0.name)
self.assertEqual("TwoFloatOutputs:0", op0.outputs[0].name)
self.assertEqual("TwoFloatOutputs:1", op0.outputs[1].name)
op1 = g.create_op("FloatOutput", [], [dtypes.float32])
self.assertEqual("FloatOutput", op1.name)
self.assertEqual("FloatOutput:0", op1.outputs[0].name)
op2 = g.create_op("FloatOutput", [], [dtypes.float32])
self.assertEqual("FloatOutput_1", op2.name)
self.assertEqual("FloatOutput_1:0", op2.outputs[0].name)
op3 = g.create_op("FloatOutput", [], [dtypes.float32], name="my_op")
self.assertEqual("my_op", op3.name)
self.assertEqual("my_op:0", op3.outputs[0].name)
def testNameScope(self):
g = ops.Graph()
with g.name_scope("foo") as foo:
self.assertEqual("foo/", foo)
with g.name_scope("foo2") as foo2:
self.assertEqual("foo/foo2/", foo2)
with g.name_scope(None) as empty1:
self.assertEqual("", empty1)
with g.name_scope("foo3") as foo3:
self.assertEqual("foo3/", foo3)
with g.name_scope("") as empty2:
self.assertEqual("", empty2)
self.assertEqual("FloatOutput",
g.create_op("FloatOutput", [], [dtypes.float32]).name)
with g.name_scope("bar") as scope:
self.assertEqual("bar/FloatOutput",
g.create_op("FloatOutput", [], [dtypes.float32]).name)
self.assertEqual("bar/FloatOutput_1",
g.create_op("FloatOutput", [], [dtypes.float32]).name)
      # If you use the value from "with .. as", that value is used as-is.
self.assertEqual(
"bar", g.create_op(
"FloatOutput", [], [dtypes.float32], name=scope).name)
with g.name_scope("baz") as scope:
with g.name_scope("quux"):
self.assertEqual("baz/quux/FloatOutput",
g.create_op("FloatOutput", [], [dtypes.float32]).name)
# If you use the value from the enclosing "with .. as", nothing is pushed.
with g.name_scope(scope):
self.assertEqual("baz/FloatOutput",
g.create_op("FloatOutput", [], [dtypes.float32]).name)
self.assertEqual(
"baz", g.create_op(
"FloatOutput", [], [dtypes.float32], name=scope).name)
self.assertEqual(
"trailing",
g.create_op(
"FloatOutput", [], [dtypes.float32], name="trailing/").name)
with g.name_scope("bar"):
self.assertEqual("bar_1/FloatOutput",
g.create_op("FloatOutput", [], [dtypes.float32]).name)
with g.name_scope("bar/"):
self.assertEqual("bar/FloatOutput_2",
g.create_op("FloatOutput", [], [dtypes.float32]).name)
class DeviceTest(test_util.TensorFlowTestCase):
def testNoDevice(self):
g = ops.Graph()
op = g.create_op("FloatOutput", [], [dtypes.float32])
self.assertDeviceEqual(None, op.device)
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "FloatOutput" op: "FloatOutput" }
""", gd)
def testEagerBackingDevice(self):
with context.eager_mode():
with ops.device("/device:CPU:0"):
t = constant_op.constant(1.0)
self.assertRegexpMatches(t.device, "/device:CPU:0")
self.assertRegexpMatches(t.backing_device, "/device:CPU:0")
def testDevicePartialString(self):
g = ops.Graph()
with g.device("/job:worker/replica:2"):
g.create_op("FloatOutput", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "FloatOutput" op: "FloatOutput"
device: "/job:worker/replica:2" }
""", gd)
def testDeviceFull(self):
g = ops.Graph()
with g.device(
pydev.DeviceSpec(
job="worker", replica=2, task=0, device_type="CPU",
device_index=3)):
g.create_op("FloatOutput", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "FloatOutput" op: "FloatOutput"
device: "/job:worker/replica:2/task:0/device:CPU:3" }
""", gd)
def testNesting(self):
g = ops.Graph()
with g.device("/job:worker/replica:2"):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device("/job:worker/replica:3/task:0"):
g.create_op("FloatOutput", [], [dtypes.float32])
g.create_op("FloatOutput", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "FloatOutput" op: "FloatOutput"
device: "/job:worker/replica:2" }
node { name: "FloatOutput_1" op: "FloatOutput"
device: "/job:worker/replica:3/task:0" }
node { name: "FloatOutput_2" op: "FloatOutput"
device: "/job:worker/replica:2" }
""", gd)
def testNestingString(self):
g = ops.Graph()
with g.device("/job:worker/replica:2"):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device("/job:worker/replica:3/task:0"):
g.create_op("FloatOutput", [], [dtypes.float32])
g.create_op("FloatOutput", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "FloatOutput" op: "FloatOutput"
device: "/job:worker/replica:2" }
node { name: "FloatOutput_1" op: "FloatOutput"
device: "/job:worker/replica:3/task:0" }
node { name: "FloatOutput_2" op: "FloatOutput"
device: "/job:worker/replica:2" }
""", gd)
def testNestingOverrideGpuCpu(self):
g = ops.Graph()
with g.device("/job:worker/replica:2/device:CPU:1"):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device("/job:worker/replica:2/device:GPU:2"):
g.create_op("FloatOutput", [], [dtypes.float32])
g.create_op("FloatOutput", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "FloatOutput" op: "FloatOutput"
device: "/job:worker/replica:2/device:CPU:1" }
node { name: "FloatOutput_1" op: "FloatOutput"
device: "/job:worker/replica:2/device:GPU:2" }
node { name: "FloatOutput_2" op: "FloatOutput"
device: "/job:worker/replica:2/device:CPU:1" }
""", gd)
def testNestingWithMergeDeviceFunction(self):
g = ops.Graph()
with g.device(pydev.merge_device("/device:GPU:0")):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device(pydev.merge_device("/job:worker")):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device(pydev.merge_device("/device:CPU:0")):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device(pydev.merge_device("/job:ps")):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device(pydev.merge_device(None)):
g.create_op("FloatOutput", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "FloatOutput" op: "FloatOutput"
device: "/device:GPU:0" }
node { name: "FloatOutput_1" op: "FloatOutput"
device: "/job:worker/device:GPU:0" }
node { name: "FloatOutput_2" op: "FloatOutput"
device: "/job:worker/device:CPU:0" }
node { name: "FloatOutput_3" op: "FloatOutput"
device: "/job:ps/device:CPU:0" }
node { name: "FloatOutput_4" op: "FloatOutput"
device: "/job:ps/device:CPU:0" }
""", gd)
def testNestingWithDeviceStrings(self):
g = ops.Graph()
with g.device("/device:GPU:0"):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device("/job:worker"):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device("/device:CPU:0"):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device("/job:ps"):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device(""):
g.create_op("FloatOutput", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "FloatOutput" op: "FloatOutput"
device: "/device:GPU:0" }
node { name: "FloatOutput_1" op: "FloatOutput"
device: "/job:worker/device:GPU:0" }
node { name: "FloatOutput_2" op: "FloatOutput"
device: "/job:worker/device:CPU:0" }
node { name: "FloatOutput_3" op: "FloatOutput"
device: "/job:ps/device:CPU:0" }
node { name: "FloatOutput_4" op: "FloatOutput"
device: "/job:ps/device:CPU:0" }
""", gd)
def testNestingWithDeviceStringWildcard(self):
g = ops.Graph()
with g.device("/device:GPU:7"):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device("/device:GPU:*"):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device("/device:CPU:*"):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device("/device:CPU:5"):
g.create_op("FloatOutput", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "FloatOutput" op: "FloatOutput"
device: "/device:GPU:7" }
node { name: "FloatOutput_1" op: "FloatOutput"
device: "/device:GPU:7" }
node { name: "FloatOutput_2" op: "FloatOutput"
device: "/device:CPU:*" }
node { name: "FloatOutput_3" op: "FloatOutput"
device: "/device:CPU:5" }
""", gd)
def testNestingErrorGraph(self):
g = ops.Graph()
scope = g.device("/device:GPU:8")
scope.__enter__()
with g.device("/device:GPU:9"):
with self.assertRaises(RuntimeError):
scope.__exit__(None, None, None)
def testNestingErrorEager(self):
with context.eager_mode():
scope = ops.device("/device:CPU:0")
scope.__enter__()
with ops.device(None):
with self.assertRaises(RuntimeError):
scope.__exit__(None, None, None)
def testNoneClearsDefault(self):
g = ops.Graph()
with g.device("/job:worker/replica:2/device:CPU:1"):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device(None):
g.create_op("FloatOutput", [], [dtypes.float32])
g.create_op("FloatOutput", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "FloatOutput" op: "FloatOutput"
device: "/job:worker/replica:2/device:CPU:1" }
node { name: "FloatOutput_1" op: "FloatOutput" }
node { name: "FloatOutput_2" op: "FloatOutput"
device: "/job:worker/replica:2/device:CPU:1" }
""", gd)
def testNoneIgnoresOuterDeviceFunction(self):
g = ops.Graph()
with g.device(lambda op: "/job:worker/replica:2/device:CPU:1"):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device(None):
g.create_op("FloatOutput", [], [dtypes.float32])
g.create_op("FloatOutput", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "FloatOutput" op: "FloatOutput"
device: "/job:worker/replica:2/device:CPU:1" }
node { name: "FloatOutput_1" op: "FloatOutput" }
node { name: "FloatOutput_2" op: "FloatOutput"
device: "/job:worker/replica:2/device:CPU:1" }
""", gd)
def _overwritingDeviceFunction(self, unused_op):
# This device function unconditionally overwrites the device of ops.
#
# NOTE(mrry): Writing device functions like this is not
# recommended. Instead, in most cases you should use
# `pydev.merge_device("/job:ps")` or simply `"/job:ps"` as the
# argument to `tf.device()` and the device component will be merged in.
return "/job:overwrite"
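  # A minimal sketch of the recommended alternative from the note above,
  # assuming a graph `g` as in the tests below: the "/job:ps" component is
  # merged into any enclosing device spec instead of replacing it.
  #
  #   with g.device(pydev.merge_device("/job:ps")):
  #     g.create_op("FloatOutput", [], [dtypes.float32])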
def testOverwritingBehavior(self):
g = ops.Graph()
with g.device(self._overwritingDeviceFunction):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device("/job:ps"): # Will be overwritten.
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device(pydev.merge_device("/job:ps")): # Will be overwritten.
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device(None): # Disables overwriting device function
with g.device("/job:ps"):
g.create_op("FloatOutput", [], [dtypes.float32])
with g.device(None): # Disables overwriting device function
with g.device(pydev.merge_device("/job:ps")):
g.create_op("FloatOutput", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "FloatOutput" op: "FloatOutput"
device: "/job:overwrite" }
node { name: "FloatOutput_1" op: "FloatOutput"
device: "/job:overwrite" }
node { name: "FloatOutput_2" op: "FloatOutput"
device: "/job:overwrite" }
node { name: "FloatOutput_3" op: "FloatOutput"
device: "/job:ps" }
node { name: "FloatOutput_4" op: "FloatOutput"
device: "/job:ps" }
""", gd)
class MultithreadedGraphStateTest(test_util.TensorFlowTestCase):
class TestThread(threading.Thread):
def __init__(self, graph, replica_id):
super(MultithreadedGraphStateTest.TestThread, self).__init__()
self._graph = graph
self._replica_id = replica_id
# This thread sets this event when it mutated the graph. The caller can
# wait for that.
self.has_mutated_graph = threading.Event()
# This thread waits for when it should continue. The caller can set this
# event.
self.should_continue = threading.Event()
def run(self):
# Mutate a graph's stack, then set `has_mutated_graph`, then wait for
# `should_continue`, then add an op to the graph affected by the graph's
# stack.
raise NotImplementedError("must be implemented in descendants")
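      # The driving test follows the matching handshake (see the test methods
      # below): start each thread, wait on has_mutated_graph, then set
      # should_continue and join, so every thread mutates its thread-local
      # graph state before any of them creates an op.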
def testDeviceFunctionStack(self):
class DeviceSettingThread(self.TestThread):
def run(self):
with g.device("/job:worker/replica:{}".format(self._replica_id)):
self.has_mutated_graph.set()
self.should_continue.wait()
self.should_continue.clear()
g.create_op(
"FloatOutput", [], [dtypes.float32],
name="FloatOutput_{}".format(self._replica_id))
g = ops.Graph()
    # If `switch_to_thread_local` isn't called, then device placement of the
    # ops below is not deterministic.
g.switch_to_thread_local()
threads = [DeviceSettingThread(g, i) for i in range(3)]
for t in threads:
t.start()
t.has_mutated_graph.wait()
t.has_mutated_graph.clear()
for t in threads:
t.should_continue.set()
t.join()
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "FloatOutput_0" op: "FloatOutput"
device: "/job:worker/replica:0" }
node { name: "FloatOutput_1" op: "FloatOutput"
device: "/job:worker/replica:1" }
node { name: "FloatOutput_2" op: "FloatOutput"
device: "/job:worker/replica:2" }
""", gd)
def testColocateWith(self):
class ColocatingThread(self.TestThread):
def __init__(self, graph, replica_id, op_to_colocate_with):
super(ColocatingThread, self).__init__(graph, replica_id)
self._op_to_colocate_with = op_to_colocate_with
def run(self):
with g.colocate_with(self._op_to_colocate_with):
self.has_mutated_graph.set()
self.should_continue.wait()
self.should_continue.clear()
g.create_op(
"FloatOutput", [], [dtypes.float32],
name="FloatOutput_{}".format(self._replica_id))
g = ops.Graph()
ops_to_colocate_with = []
for i in range(3):
with g.device("/job:worker/replica:{}".format(i)):
ops_to_colocate_with.append(
g.create_op(
"FloatOutput", [], [dtypes.float32],
name="ColocateWithMe_{}".format(i)))
    # If `switch_to_thread_local` isn't called, then `device` and `attr` values
    # for the ops below are not deterministic.
g.switch_to_thread_local()
threads = [
ColocatingThread(g, i, ops_to_colocate_with[i]) for i in range(3)
]
for t in threads:
t.start()
t.has_mutated_graph.wait()
t.has_mutated_graph.clear()
for t in threads:
t.should_continue.set()
t.join()
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "ColocateWithMe_0" op: "FloatOutput"
device: "/job:worker/replica:0" }
node { name: "ColocateWithMe_1" op: "FloatOutput"
device: "/job:worker/replica:1" }
node { name: "ColocateWithMe_2" op: "FloatOutput"
device: "/job:worker/replica:2" }
node { name: "FloatOutput_0" op: "FloatOutput"
device: "/job:worker/replica:0"
attr { key: "_class"
value { list {
s: "loc:@ColocateWithMe_0"}}}}
node { name: "FloatOutput_1" op: "FloatOutput"
device: "/job:worker/replica:1"
attr { key: "_class"
value { list {
s: "loc:@ColocateWithMe_1"}}}}
node { name: "FloatOutput_2" op: "FloatOutput"
device: "/job:worker/replica:2"
attr { key: "_class"
value { list {
s: "loc:@ColocateWithMe_2"}}}}
""", gd)
def testControlDependencies(self):
class DependingThread(self.TestThread):
def __init__(self, graph, replica_id, dependency_op):
super(DependingThread, self).__init__(graph, replica_id)
self._dependency_op = dependency_op
def run(self):
with g.control_dependencies([self._dependency_op]):
self.has_mutated_graph.set()
self.should_continue.wait()
self.should_continue.clear()
g.create_op(
"FloatOutput", [], [dtypes.float32],
name="FloatOutput_{}".format(self._replica_id))
g = ops.Graph()
dependency_ops = []
for i in range(3):
dependency_ops.append(
g.create_op(
"FloatOutput", [], [dtypes.float32],
name="ColocateWithMe_{}".format(i)))
    # If `switch_to_thread_local` isn't called, then `input` values for the
    # ops below are not deterministic.
g.switch_to_thread_local()
threads = [DependingThread(g, i, dependency_ops[i]) for i in range(3)]
for t in threads:
t.start()
t.has_mutated_graph.wait()
t.has_mutated_graph.clear()
for t in threads:
t.should_continue.set()
t.join()
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "ColocateWithMe_0" op: "FloatOutput" }
node { name: "ColocateWithMe_1" op: "FloatOutput" }
node { name: "ColocateWithMe_2" op: "FloatOutput" }
node { name: "FloatOutput_0" op: "FloatOutput"
input: "^ColocateWithMe_0" }
node { name: "FloatOutput_1" op: "FloatOutput"
input: "^ColocateWithMe_1" }
node { name: "FloatOutput_2" op: "FloatOutput"
input: "^ColocateWithMe_2" }
""", gd)
def testNameStack(self):
class NameSettingThread(self.TestThread):
def run(self):
with g.name_scope("foo"):
op1 = g.create_op("FloatOutput", [], [dtypes.float32])
self.has_mutated_graph.set()
self.should_continue.wait()
self.should_continue.clear()
op2 = g.create_op("FloatOutput", [], [dtypes.float32])
self.result = (op1, op2)
g = ops.Graph()
threads = [NameSettingThread(g, i) for i in range(3)]
for t in threads:
t.start()
t.has_mutated_graph.wait()
t.has_mutated_graph.clear()
for t in threads:
t.should_continue.set()
t.join()
suffixes = ["", "_1", "_2"]
for t, s in zip(threads, suffixes):
      self.assertEqual("foo" + s + "/FloatOutput", t.result[0].name)
      self.assertEqual("foo" + s + "/FloatOutput_1", t.result[1].name)
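# Helper whose `name` property lets the CollectionTest cases below exercise
# get_collection's scope-based name filtering on non-Tensor collection entries.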
class ObjectWithName(object):
def __init__(self, name):
self._name = name
@property
def name(self):
return self._name
class CollectionTest(test_util.TensorFlowTestCase):
def test_get_collections(self):
g = ops.Graph()
self.assertSequenceEqual(g.collections, [])
g.add_to_collection("key", 12)
g.add_to_collection("key", 15)
self.assertSequenceEqual(g.collections, ["key"])
g.add_to_collection("other", "foo")
self.assertSequenceEqual(sorted(g.collections), ["key", "other"])
self.assertSequenceEqual(
sorted(g.get_all_collection_keys()), ["key", "other"])
def test_add_to_collection(self):
g = ops.Graph()
g.add_to_collection("key", 12)
g.add_to_collection("other", "foo")
g.add_to_collection("key", 34)
    # Only blank1 matches the "prefix" scope filter passed to get_collection
    # below.
g.add_to_collection("blah", 27)
blank1 = ObjectWithName("prefix/foo")
g.add_to_collection("blah", blank1)
blank2 = ObjectWithName("junk/foo")
g.add_to_collection("blah", blank2)
self.assertEqual([12, 34], g.get_collection("key"))
self.assertEqual([], g.get_collection("nothing"))
self.assertEqual([27, blank1, blank2], g.get_collection("blah"))
self.assertEqual([blank1], g.get_collection("blah", "prefix"))
self.assertEqual([blank1], g.get_collection("blah", ".*x"))
# Make sure that get_collection() returns a first-level
# copy of the collection, while get_collection_ref() returns
# the original list.
other_collection_snapshot = g.get_collection("other")
other_collection_ref = g.get_collection_ref("other")
self.assertEqual(["foo"], other_collection_snapshot)
self.assertEqual(["foo"], other_collection_ref)
g.add_to_collection("other", "bar")
self.assertEqual(["foo"], other_collection_snapshot)
self.assertEqual(["foo", "bar"], other_collection_ref)
self.assertEqual(["foo", "bar"], g.get_collection("other"))
self.assertTrue(other_collection_ref is g.get_collection_ref("other"))
# Verify that getting an empty collection ref returns a modifiable list.
empty_coll_ref = g.get_collection_ref("empty")
self.assertEqual([], empty_coll_ref)
empty_coll = g.get_collection("empty")
self.assertEqual([], empty_coll)
self.assertFalse(empty_coll is empty_coll_ref)
empty_coll_ref2 = g.get_collection_ref("empty")
self.assertTrue(empty_coll_ref2 is empty_coll_ref)
# Add to the collection.
empty_coll_ref.append("something")
self.assertEqual(["something"], empty_coll_ref)
self.assertEqual(["something"], empty_coll_ref2)
self.assertEqual([], empty_coll)
self.assertEqual(["something"], g.get_collection("empty"))
empty_coll_ref3 = g.get_collection_ref("empty")
self.assertTrue(empty_coll_ref3 is empty_coll_ref)
def test_add_to_collections_uniquify(self):
g = ops.Graph()
g.add_to_collections([1, 2, 1], "key")
# Make sure "key" is not added twice
self.assertEqual(["key"], g.get_collection(1))
def test_add_to_collections_from_list(self):
g = ops.Graph()
g.add_to_collections(["abc", "123"], "key")
self.assertEqual(["key"], g.get_collection("abc"))
self.assertEqual(["key"], g.get_collection("123"))
def test_add_to_collections_from_tuple(self):
g = ops.Graph()
g.add_to_collections(("abc", "123"), "key")
self.assertEqual(["key"], g.get_collection("abc"))
self.assertEqual(["key"], g.get_collection("123"))
def test_add_to_collections_from_generator(self):
g = ops.Graph()
def generator():
yield "abc"
yield "123"
g.add_to_collections(generator(), "key")
self.assertEqual(["key"], g.get_collection("abc"))
self.assertEqual(["key"], g.get_collection("123"))
def test_add_to_collections_from_set(self):
g = ops.Graph()
g.add_to_collections(set(["abc", "123"]), "key")
self.assertEqual(["key"], g.get_collection("abc"))
self.assertEqual(["key"], g.get_collection("123"))
def test_add_to_collections_from_string(self):
g = ops.Graph()
g.add_to_collections("abc", "key")
self.assertEqual(["key"], g.get_collection("abc"))
def test_default_graph(self):
with ops.Graph().as_default():
ops.add_to_collection("key", 90)
ops.add_to_collection("key", 100)
# Collections are ordered.
self.assertEqual([90, 100], ops.get_collection("key"))
def test_defun(self):
with context.eager_mode():
@eager_function.defun
def defun():
ops.add_to_collection("int", 1)
ops.add_to_collection("tensor", constant_op.constant(2))
@eager_function.defun
def inner_defun():
self.assertEqual(ops.get_collection("int"), [1])
three = ops.get_collection("tensor")[0] + ops.get_collection("int")[0]
ops.add_to_collection("int", 2)
self.assertEqual(ops.get_collection("int"), [1, 2])
ops.add_to_collection("foo", "bar")
self.assertEqual(ops.get_collection("foo"), ["bar"])
return three
self.assertEqual(ops.get_collection("int"), [1])
three = inner_defun()
self.assertEqual(ops.get_collection("int"), [1])
self.assertEqual(ops.get_collection("foo"), [])
return three
three = defun()
self.assertEqual(three.numpy(), 3)
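# Module-level gradient registrations used by RegistrationTest below:
# "FloatOutput" is marked not differentiable, "CopyOp" is given _CopyGrad as
# its gradient function, and "copy_override" is selected via
# gradient_override_map.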
ops.NotDifferentiable("FloatOutput")
@ops.RegisterGradient("CopyOp")
def _CopyGrad(op, x_grad): # pylint: disable=invalid-name
_ = op
return x_grad
@ops.RegisterGradient("copy_override")
def _CopyOverrideGrad(op, x_grad): # pylint: disable=invalid-name
_ = op
return x_grad
class RegistrationTest(test_util.TensorFlowTestCase):
@test_util.run_deprecated_v1
def testRegisterGradients(self):
x = test_ops.float_output()
y = test_ops.copy_op(x)
fn = ops.get_gradient_function(y.op)
self.assertEqual(_CopyGrad, fn)
def testOverrideGradients(self):
g = ops.Graph()
with g.as_default():
x = test_ops.float_output()
with g.gradient_override_map({"CopyOp": "copy_override"}):
y = test_ops.copy_op(x)
fn = ops.get_gradient_function(y.op)
self.assertEqual(_CopyOverrideGrad, fn)
def testNonExistentOverride(self):
g = ops.Graph()
with g.as_default():
x = test_ops.float_output()
with g.gradient_override_map({"CopyOp": "unknown_override"}):
y = test_ops.copy_op(x)
with self.assertRaisesRegexp(LookupError, "unknown_override"):
ops.get_gradient_function(y.op)
class ComparisonTest(test_util.TensorFlowTestCase):
def testMembershipAllowed(self):
g = ops.Graph()
t1 = _apply_op(g, "FloatOutput", [], [dtypes.float32], name="myop1")
t2 = _apply_op(g, "FloatOutput", [], [dtypes.float32], name="myop2")
self.assertTrue(isinstance(t1, ops.Tensor))
self.assertTrue(isinstance(t2, ops.Tensor))
self.assertTrue(t1 in [t1])
self.assertTrue(t1 not in [t2])
class ControlDependenciesTest(test_util.TensorFlowTestCase):
@test_util.run_deprecated_v1
def testBasic(self):
g = ops.Graph()
with g.as_default():
# Creating unregistered ops with _apply_op() doesn't work with the C API
# TODO(skyewm): address this more consistently. Possible solutions are
# to use registered ops in all tests, create a way to register ops in
# Python tests, or conditionally disable the op registration check in
# the C API.
a = constant_op.constant(1.0)
b = constant_op.constant(1.0)
with g.control_dependencies([a]):
c = constant_op.constant(1.0)
d = array_ops.identity(b)
e = array_ops.identity(c)
self.assertEqual(c.op.control_inputs, [a.op])
self.assertEqual(d.op.control_inputs, [a.op])
      # e has a data dependency on c, which already carries the control
      # dependency on a, so no separate control edge is added to e.
self.assertEqual(e.op.control_inputs, [])
@test_util.run_in_graph_and_eager_modes
def testEager(self):
def future():
future.calls += 1
return constant_op.constant(2.0)
future.calls = 0
if context.executing_eagerly():
a = constant_op.constant(1.0)
b = future
with ops.control_dependencies([a, b]):
c = constant_op.constant(3.0)
self.assertEqual(future.calls, 1)
else:
g = ops.Graph()
with g.as_default():
a = constant_op.constant(1.0)
b = future()
with g.control_dependencies([a, b]):
c = constant_op.constant(3.0)
self.assertEqual(c.op.control_inputs, [a.op, b.op])
self.assertEqual(future.calls, 1)
def testBasicWithConversion(self):
g = ops.Graph()
a = _apply_op(g, "FloatOutput", [], [dtypes.float32])
class ConvertibleObj(object):
def _as_graph_element(self):
return a
with g.control_dependencies([ConvertibleObj()]):
c = _apply_op(g, "FloatOutput", [], [dtypes.float32])
self.assertEqual(c.op.control_inputs, [a.op])
def testNested(self):
g = ops.Graph()
a_1 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
a_2 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
a_3 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
a_4 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
with g.control_dependencies([a_1, a_2, a_3, a_4]):
b_1 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
with g.control_dependencies([a_1]):
with g.control_dependencies([a_2]):
with g.control_dependencies([a_3]):
with g.control_dependencies([a_4]):
b_2 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
self.assertItemsEqual([a_1.op, a_2.op, a_3.op, a_4.op],
b_1.op.control_inputs)
self.assertItemsEqual(b_1.op.control_inputs, b_2.op.control_inputs)
def testClear(self):
g = ops.Graph()
a_1 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
a_2 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
a_3 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
a_4 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
with g.control_dependencies([a_1]):
with g.control_dependencies([a_2]):
with g.control_dependencies(None):
with g.control_dependencies([a_3]):
with g.control_dependencies([a_4]):
# deps [a_3, a_4]
b_3_4 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
# deps = [a_3]
b_3 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
# deps back to None
b_none = _apply_op(g, "FloatOutput", [], [dtypes.float32])
# deps back to [a_1, a_2]
b_1_2 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
# deps back to [a_1]
b_1 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
with g.control_dependencies(None):
# deps are None again
b_none2 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
self.assertItemsEqual([a_3.op, a_4.op], b_3_4.op.control_inputs)
self.assertItemsEqual([a_3.op], b_3.op.control_inputs)
self.assertItemsEqual([], b_none.op.control_inputs)
self.assertItemsEqual([a_1.op, a_2.op], b_1_2.op.control_inputs)
self.assertItemsEqual([a_1.op], b_1.op.control_inputs)
self.assertItemsEqual([], b_none2.op.control_inputs)
def testComplex(self):
g = ops.Graph()
# Usage pattern:
# * Nodes a_i are constants defined at the outermost scope, and are used
# as control inputs for the ith nested scope.
# * Nodes b_i are defined as Mul(a_3, a_4) at each scope.
# * Nodes c_i are defined as Mul(a_1, b_1) at each scope.
# * Nodes d_i are defined as Mul(b_i, c_i) at each scope.
# * Nodes e_i are defined as Mul(e_i-1, e_i-1) at each scope i > 1.
a_1 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
a_2 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
a_3 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
a_4 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
with g.control_dependencies([a_1]):
b_1 = _apply_op(g, "TwoFloatInputsFloatOutput", [a_3, a_4],
[dtypes.float32])
c_1 = _apply_op(g, "TwoFloatInputsFloatOutput", [a_1, b_1],
[dtypes.float32])
d_1 = _apply_op(g, "TwoFloatInputsFloatOutput", [b_1, c_1],
[dtypes.float32])
e_1 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
with g.control_dependencies([a_2]):
b_2 = _apply_op(g, "TwoFloatInputsFloatOutput", [a_3, a_4],
[dtypes.float32])
c_2 = _apply_op(g, "TwoFloatInputsFloatOutput", [a_1, b_1],
[dtypes.float32])
d_2 = _apply_op(g, "TwoFloatInputsFloatOutput", [b_2, c_2],
[dtypes.float32])
e_2 = _apply_op(g, "TwoFloatInputsFloatOutput", [e_1, e_1],
[dtypes.float32])
with g.control_dependencies([a_3]):
b_3 = _apply_op(g, "TwoFloatInputsFloatOutput", [a_3, a_4],
[dtypes.float32])
c_3 = _apply_op(g, "TwoFloatInputsFloatOutput", [a_1, b_1],
[dtypes.float32])
d_3 = _apply_op(g, "TwoFloatInputsFloatOutput", [b_3, c_3],
[dtypes.float32])
e_3 = _apply_op(g, "TwoFloatInputsFloatOutput", [e_2, e_2],
[dtypes.float32])
with g.control_dependencies([a_4]):
b_4 = _apply_op(g, "TwoFloatInputsFloatOutput", [a_3, a_4],
[dtypes.float32])
c_4 = _apply_op(g, "TwoFloatInputsFloatOutput", [a_1, b_1],
[dtypes.float32])
d_4 = _apply_op(g, "TwoFloatInputsFloatOutput", [b_4, c_4],
[dtypes.float32])
e_4 = _apply_op(g, "TwoFloatInputsFloatOutput", [e_3, e_3],
[dtypes.float32])
self.assertItemsEqual([a_1.op], b_1.op.control_inputs)
self.assertItemsEqual([a_1.op, a_2.op], b_2.op.control_inputs)
self.assertItemsEqual([a_1.op, a_2.op], b_3.op.control_inputs)
self.assertItemsEqual([a_1.op, a_2.op], b_4.op.control_inputs)
self.assertItemsEqual([], c_1.op.control_inputs)
self.assertItemsEqual([a_2.op], c_2.op.control_inputs)
self.assertItemsEqual([a_2.op, a_3.op], c_3.op.control_inputs)
self.assertItemsEqual([a_2.op, a_3.op, a_4.op], c_4.op.control_inputs)
self.assertItemsEqual([], d_1.op.control_inputs)
self.assertItemsEqual([], d_2.op.control_inputs)
self.assertItemsEqual([], d_3.op.control_inputs)
self.assertItemsEqual([], d_4.op.control_inputs)
self.assertItemsEqual([a_1.op], e_1.op.control_inputs)
self.assertItemsEqual([a_2.op], e_2.op.control_inputs)
self.assertItemsEqual([a_3.op], e_3.op.control_inputs)
self.assertItemsEqual([a_4.op], e_4.op.control_inputs)
def testRepeatedDependency(self):
g = ops.Graph()
a = g.create_op("TwoFloatOutputs", [], [dtypes.float32, dtypes.float32])
a_0, a_1 = a.outputs
with g.control_dependencies([a_0]):
b = _apply_op(g, "FloatOutput", [], [dtypes.float32])
with g.control_dependencies([a_1]):
c = _apply_op(g, "FloatOutput", [], [dtypes.float32])
self.assertEqual(b.op.control_inputs, [a])
self.assertEqual(c.op.control_inputs, [a])
def testNoControlDependencyWithDataDependency(self):
g = ops.Graph()
a = _apply_op(g, "FloatOutput", [], [dtypes.float32])
with g.control_dependencies([a]):
b = _apply_op(g, "Identity", [a], [dtypes.float32])
self.assertEqual(b.op.control_inputs, [])
class OpScopeTest(test_util.TensorFlowTestCase):
@test_util.run_in_graph_and_eager_modes
def testNames(self):
with ops.name_scope("foo") as foo:
self.assertEqual("foo/", foo)
with ops.name_scope("foo2") as foo2:
self.assertEqual("foo/foo2/", foo2)
with ops.name_scope(None) as empty1:
self.assertEqual("", empty1)
with ops.name_scope("foo3") as foo3:
self.assertEqual("foo3/", foo3)
with ops.name_scope("") as empty2:
self.assertEqual("", empty2)
with ops.name_scope("foo/") as outer_foo:
self.assertEqual("foo/", outer_foo)
with ops.name_scope("") as empty3:
self.assertEqual("", empty3)
with ops.name_scope("foo4") as foo4:
self.assertEqual("foo/foo4/", foo4)
with ops.name_scope("foo5//") as foo5:
self.assertEqual("foo5//", foo5)
with ops.name_scope("foo6") as foo6:
self.assertEqual("foo5//foo6/", foo6)
with ops.name_scope("/") as foo7:
self.assertEqual("/", foo7)
with ops.name_scope("//") as foo8:
self.assertEqual("//", foo8)
with ops.name_scope("a//b/c") as foo9:
self.assertEqual("foo/a//b/c/", foo9)
with ops.name_scope("a//b/c") as foo10:
self.assertEqual("a//b/c/", foo10)
@test_util.run_in_graph_and_eager_modes
def testEagerDefaultScopeName(self):
with ops.name_scope(None, "default") as scope:
self.assertEqual(scope, "default/")
with ops.name_scope(None, "default2") as scope2:
self.assertEqual(scope2, "default/default2/")
@test_util.run_in_graph_and_eager_modes
def testNameScopeV2IsReEntrant(self):
foo = ops.name_scope_v2("foo")
bar = ops.name_scope_v2("bar")
with foo as scope_name:
self.assertEqual("foo/", scope_name)
with foo as scope_name:
self.assertEqual("foo/foo/", scope_name)
with bar as scope_name:
self.assertEqual("foo/bar/", scope_name)
with foo as scope_name:
self.assertEqual("foo/bar/foo/", scope_name)
with bar as scope_name:
self.assertEqual("bar/", scope_name)
@test_util.run_deprecated_v1
def testNoScopeName(self):
g0 = ops.Graph()
values = [
g0.create_op("A", [], [dtypes.float32]),
g0.create_op("B", [], [dtypes.float32])
]
with self.assertRaises(ValueError):
with ops.name_scope(None, values=values):
pass
with self.assertRaises(ValueError):
with ops.name_scope(None, None, values):
pass
@test_util.run_deprecated_v1
def testEmptyScopeName(self):
g0 = ops.Graph()
a = g0.create_op("A", [], [dtypes.float32])
b = g0.create_op("B", [], [dtypes.float32])
with ops.name_scope("", values=[a, b]) as scope:
self.assertEqual("", scope)
self.assertEqual(g0, ops.get_default_graph())
with ops.name_scope("", "my_default_scope", [a, b]) as scope:
self.assertEqual("", scope)
self.assertEqual(g0, ops.get_default_graph())
@test_util.run_deprecated_v1
def testDefaultScopeName(self):
g0 = ops.Graph()
a = g0.create_op("A", [], [dtypes.float32])
b = g0.create_op("B", [], [dtypes.float32])
scope_name = "my_scope"
default_scope_name = "my_default_scope"
with ops.name_scope(scope_name, default_scope_name, [a, b]) as scope:
self.assertEqual("%s/" % scope_name, scope)
self.assertEqual(g0, ops.get_default_graph())
with ops.name_scope(None, default_scope_name, [a, b]) as scope:
self.assertEqual("%s/" % default_scope_name, scope)
self.assertEqual(g0, ops.get_default_graph())
with self.assertRaises(TypeError):
with ops.name_scope(scope_name, [a, b]):
pass
def _testGraphElements(self, graph_elements):
scope_name = "my_scope"
with ops.name_scope(scope_name, values=graph_elements) as scope:
self.assertEqual("%s/" % scope_name, scope)
self.assertEqual(graph_elements[0].graph, ops.get_default_graph())
g1 = ops.Graph()
a = g1.create_op("A", [], [dtypes.float32])
with self.assertRaises(ValueError):
with ops.name_scope(scope_name, values=graph_elements + [a]):
pass
@test_util.run_deprecated_v1
def testTensor(self):
g0 = ops.Graph()
a = g0.create_op("A", [], [dtypes.float32])
b = g0.create_op("B", [], [dtypes.float32])
self._testGraphElements([a, b])
@test_util.run_deprecated_v1
def testSparseTensor(self):
g0 = ops.Graph()
a = g0.create_op("A", [], [dtypes.float32])
b = g0.create_op("B", [], [dtypes.float32])
sparse = sparse_tensor.SparseTensor(
_apply_op(g0, "Int64Output", [], [dtypes.int64]),
_apply_op(g0, "FloatOutput", [], [dtypes.float32]),
_apply_op(g0, "Int64Output", [], [dtypes.int64]))
self._testGraphElements([a, sparse, b])
@test_util.run_deprecated_v1
def testVariable(self):
g0 = ops.Graph()
with g0.as_default():
variable = variables.Variable([1.0])
a = g0.create_op("A", [], [dtypes.float32])
b = g0.create_op("B", [], [dtypes.float32])
self._testGraphElements([a, variable, b])
class InitScopeTest(test_util.TensorFlowTestCase):
def testClearsControlDependencies(self):
g = ops.Graph()
a_1 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
a_2 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
a_3 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
a_4 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
with g.as_default():
with g.control_dependencies([a_1]):
with g.control_dependencies([a_2]):
with ops.init_scope():
with g.control_dependencies([a_3]):
with g.control_dependencies([a_4]):
# deps [a_3, a_4]
b_3_4 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
# deps = [a_3]
b_3 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
# deps back to None
b_none = _apply_op(g, "FloatOutput", [], [dtypes.float32])
# deps back to [a_1, a_2]
b_1_2 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
# deps back to [a_1]
b_1 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
with ops.init_scope():
# deps are None again
b_none2 = _apply_op(g, "FloatOutput", [], [dtypes.float32])
self.assertItemsEqual([a_3.op, a_4.op], b_3_4.op.control_inputs)
self.assertItemsEqual([a_3.op], b_3.op.control_inputs)
self.assertItemsEqual([], b_none.op.control_inputs)
self.assertItemsEqual([a_1.op, a_2.op], b_1_2.op.control_inputs)
self.assertItemsEqual([a_1.op], b_1.op.control_inputs)
self.assertItemsEqual([], b_none2.op.control_inputs)
def testLiftsOpsFromFunctions(self):
g0 = ops.Graph()
g1 = ops.Graph()
g1._building_function = True # pylint: disable=protected-access
g2 = ops.Graph()
g2._building_function = True # pylint: disable=protected-access
with g0.as_default():
with g1.as_default():
with g2.as_default():
with ops.init_scope():
_ = constant_op.constant(1.0)
self.assertEqual(len(g2.get_operations()), 0)
self.assertEqual(len(g1.get_operations()), 0)
self.assertEqual(len(g0.get_operations()), 1)
def testPreservesDevices(self):
g0 = ops.Graph()
with g0.as_default(), ops.device("CPU:0"):
g1 = ops.Graph()
g1._building_function = True # pylint: disable=protected-access
with g1.as_default():
with ops.device("GPU:0"):
with ops.init_scope():
# init_scope should preserve device set under `g1`.
on_gpu = constant_op.constant(1.0)
self.assertEqual(on_gpu.device, "/device:GPU:0")
still_on_gpu = constant_op.constant(1.0)
self.assertEqual(still_on_gpu.device, "/device:GPU:0")
blank = constant_op.constant(1.0)
self.assertEqual(blank.device, "")
with ops.init_scope():
now_on_cpu = constant_op.constant(1.0)
self.assertEqual(now_on_cpu.device, "/device:CPU:0")
on_cpu = constant_op.constant(1.0)
self.assertEqual(on_cpu.device, "/device:CPU:0")
def testComposes(self):
g0 = ops.Graph()
g1 = ops.Graph()
g1._building_function = True # pylint: disable=protected-access
g2 = ops.Graph()
g2._building_function = True # pylint: disable=protected-access
g3 = ops.Graph()
g3._building_function = False # pylint: disable=protected-access
with g0.as_default():
with g1.as_default():
with ops.init_scope():
# This op should be lifted into g0.
_ = constant_op.constant(1.0)
self.assertIs(g0, ops.get_default_graph())
self.assertEqual(len(g2.get_operations()), 0)
self.assertEqual(len(g1.get_operations()), 0)
self.assertEqual(len(g0.get_operations()), 1)
with g2.as_default():
with ops.init_scope():
# This op should be lifted into g0.
_ = constant_op.constant(1.0)
self.assertIs(g0, ops.get_default_graph())
with g3.as_default():
with ops.init_scope():
# This op should be lifted into g3, because g3 is not building a
# function.
_ = constant_op.constant(1.0)
self.assertIs(g3, ops.get_default_graph())
self.assertEqual(len(g3.get_operations()), 1)
self.assertEqual(len(g2.get_operations()), 0)
self.assertEqual(len(g1.get_operations()), 0)
self.assertEqual(len(g0.get_operations()), 2)
def testEscapesToEagerContext(self):
g = ops.Graph()
g._building_function = True # pylint: disable=protected-access
with context.eager_mode():
with context.graph_mode():
with g.as_default():
with ops.init_scope():
# Because g is building a function, init_scope should
# escape out to the eager context.
self.assertTrue(context.executing_eagerly())
# g should be reinstated as the default graph, and the
# graph context should be re-entered.
self.assertIs(g, ops.get_default_graph())
self.assertFalse(context.executing_eagerly())
def testStaysInEagerWhenOnlyEagerContextActive(self):
with context.eager_mode():
with ops.init_scope():
        self.assertTrue(context.executing_eagerly())
      self.assertTrue(context.executing_eagerly())
def testEscapesDefunWhenInEagerMode(self):
def function_with_variables():
with ops.init_scope():
self.v = resource_variable_ops.ResourceVariable(3)
return self.v.assign_add(1)
with context.eager_mode():
# Each invocation of function_with_variables recreates a variable.
self.assertEqual(4, int(function_with_variables()))
self.assertEqual(4, int(function_with_variables()))
compiled = eager_function.defun(function_with_variables)
# The init_scope in function_with_variables lifts the variable out
# of the graph function constructed by defun; hence,
# compiled now appears to be stateful.
self.assertEqual(4, int(compiled()))
self.assertEqual(5, int(compiled()))
def testEscapesDefunWhenInGraphMode(self):
def function_with_variables(name):
with ops.init_scope():
_ = variable_scope.get_variable(name, shape=(1,))
g = ops.Graph()
with g.as_default():
with self.cached_session():
# First ensure that graphs that are not building functions are
# not escaped.
function_with_variables("foo")
with self.assertRaisesRegexp(ValueError,
r"Variable foo already exists.*"):
# This will fail because reuse is not set to True.
function_with_variables("foo")
compiled = eager_function.defun(function_with_variables)
compiled("bar")
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)), 2)
# The second call to `compiled` should not create variables: the
# init_scope has lifted the variable creation code out of the defun.
compiled("bar")
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)), 2)
def testEscapesNestedDefun(self):
def inner_function():
with ops.init_scope():
self.v = resource_variable_ops.ResourceVariable(1)
return self.v.assign_add(2)
def outer_function(inner=None):
with ops.init_scope():
self.v0 = resource_variable_ops.ResourceVariable(0)
return self.v0.assign_add(1) + inner()
with context.eager_mode():
# Each invocation of outer_function recreates variables.
self.assertEqual(4, int(outer_function(inner=inner_function)))
self.assertEqual(4, int(outer_function(inner=inner_function)))
compiled_inner = eager_function.defun(inner_function)
compiled_outer = eager_function.defun(outer_function)
# The init_scope lifts variables out of the graph functions
# constructed by defun; hence, compiled_outer should now appear to be
# stateful.
self.assertEqual(4, int(compiled_outer(inner=compiled_inner)))
self.assertEqual(7, int(compiled_outer(inner=compiled_inner)))
@test_util.run_v1_only("b/120545219")
def testFallsBackToGlobalGraphWhenAllGraphsAreBuildingFunctions(self):
with context.graph_mode():
ops.reset_default_graph()
# This doesn't push anything onto the graph stack, but it does
# set the stack's global graph.
global_graph = ops.get_default_graph()
fn_graph = ops.Graph()
# pylint: disable=protected-access
fn_graph._building_function = True
self.assertEqual(len(ops._default_graph_stack.stack), 0)
with fn_graph.as_default():
self.assertEqual(len(ops._default_graph_stack.stack), 1)
with ops.init_scope():
self.assertGreater(len(ops._default_graph_stack.stack), 1)
dummy = constant_op.constant(1.0)
self.assertEqual(len(ops._default_graph_stack.stack), 1)
# Note that the global graph is _not_ on the graph stack.
self.assertEqual(len(ops._default_graph_stack.stack), 0)
# Ensure that `dummy` was added to the global graph.
self.assertEqual(global_graph, dummy.graph)
# pylint: enable=protected-access
def testInstallsDefaultGraphWhenGraphStackIsEmptyInGraphMode(self):
with context.graph_mode():
# pylint: disable=protected-access
self.assertEqual(len(ops._default_graph_stack.stack), 0)
with ops.init_scope():
self.assertGreater(len(ops._default_graph_stack.stack), 0)
self.assertEqual(len(ops._default_graph_stack.stack), 0)
# pylint: enable=protected-access
def testPreservesNameScopeInGraphConstruction(self):
with ops.Graph().as_default():
function_graph = ops.Graph()
with function_graph.as_default():
with ops.name_scope("inner"), ops.init_scope():
self.assertEqual(ops.get_name_scope(), "inner")
self.assertEqual(ops.get_name_scope(), "")
def testEnteringGraphFromEagerIsSticky(self):
with context.eager_mode():
g = ops.Graph()
with g.as_default():
with ops.init_scope():
self.assertFalse(context.executing_eagerly())
self.assertEqual(g, ops.get_default_graph())
def testMixGraphEager(self):
with context.eager_mode():
c = constant_op.constant(1.0)
with ops.Graph().as_default():
with self.assertRaisesRegexp(
RuntimeError, "Attempting to capture an EagerTensor"):
math_ops.add(c, c)
c2 = constant_op.constant(2.0)
with self.assertRaisesRegexp(
TypeError, "Graph tensors"):
math_ops.add(c2, c2)
def testPreservesNameScopeInEagerExecution(self):
with context.eager_mode():
def foo():
with ops.name_scope("inner"), ops.init_scope():
if context.executing_eagerly():
# A trailing slash is always appended when eager execution is
# enabled.
self.assertEqual(context.context().scope_name, "inner/")
else:
self.assertEqual(ops.get_name_scope(), "inner")
foo()
self.assertEqual(ops.get_name_scope(), "")
foo_compiled = eager_function.defun(foo)
foo_compiled()
self.assertEqual(ops.get_name_scope(), "")
def testExecutingEagerlyOutsideFunctions(self):
@def_function.function
def f():
return ops.executing_eagerly_outside_functions()
with context.graph_mode():
self.assertFalse(ops.executing_eagerly_outside_functions())
with session.Session():
# Need self.evaluate for these as the return type of functions is
# tensors.
self.assertFalse(self.evaluate(f()))
with context.eager_mode():
self.assertTrue(ops.executing_eagerly_outside_functions())
self.assertTrue(f())
with ops.Graph().as_default():
self.assertFalse(ops.executing_eagerly_outside_functions())
with session.Session():
self.assertFalse(self.evaluate(f()))
class GraphTest(test_util.TensorFlowTestCase):
def setUp(self):
ops.reset_default_graph()
def _AssertDefault(self, expected):
self.assertIs(expected, ops.get_default_graph())
def testResetDefaultGraphNesting(self):
g0 = ops.Graph()
with self.assertRaises(AssertionError):
with g0.as_default():
ops.reset_default_graph()
def testGraphContextManagerCancelsEager(self):
with context.eager_mode():
with ops.Graph().as_default():
self.assertFalse(context.executing_eagerly())
def testGraphContextManager(self):
g0 = ops.Graph()
with g0.as_default() as g1:
self.assertIs(g0, g1)
def testDefaultGraph(self):
orig = ops.get_default_graph()
self.assertFalse(ops.has_default_graph())
self._AssertDefault(orig)
g0 = ops.Graph()
self.assertFalse(ops.has_default_graph())
self._AssertDefault(orig)
context_manager_0 = g0.as_default()
self.assertFalse(ops.has_default_graph())
self._AssertDefault(orig)
with context_manager_0 as g0:
self._AssertDefault(g0)
with ops.Graph().as_default() as g1:
self.assertTrue(ops.has_default_graph())
self._AssertDefault(g1)
self._AssertDefault(g0)
self._AssertDefault(orig)
self.assertFalse(ops.has_default_graph())
def testPreventFeeding(self):
g = ops.Graph()
a = constant_op.constant(2.0)
self.assertTrue(g.is_feedable(a))
g.prevent_feeding(a)
self.assertFalse(g.is_feedable(a))
@test_util.run_deprecated_v1
def testPreventFetching(self):
g = ops.Graph()
a = constant_op.constant(2.0)
self.assertTrue(g.is_fetchable(a))
g.prevent_fetching(a.op)
self.assertFalse(g.is_fetchable(a))
def testAsGraphElementConversions(self):
class ConvertibleObj(object):
def _as_graph_element(self):
return "FloatOutput:0"
class NonConvertibleObj(object):
pass
g = ops.Graph()
a = _apply_op(g, "FloatOutput", [], [dtypes.float32])
self.assertEqual(a, g.as_graph_element(ConvertibleObj()))
with self.assertRaises(TypeError):
g.as_graph_element(NonConvertibleObj())
# Regression test against creating custom __del__ functions in classes
# involved in cyclic references, e.g. Graph and Operation. (Python won't gc
# cycles that require calling a __del__ method, because the __del__ method can
# theoretically increase the object's refcount to "save" it from gc, and any
# already-deleted objects in the cycle would have to be restored.)
def testGarbageCollected(self):
# Create a graph we can delete and a weak reference to monitor if it's gc'd
g = ops.Graph()
g_ref = weakref.ref(g)
# Create some ops
with g.as_default():
a = constant_op.constant(2.0)
b = constant_op.constant(3.0)
c = math_ops.add(a, b)
# Create a session we can delete
with session.Session(graph=g) as sess:
self.evaluate(c)
# Delete all references and trigger gc
del g
del a
del b
del c
del sess
gc.collect()
self.assertIsNone(g_ref())
def testRunnableAfterInvalidShape(self):
with ops.Graph().as_default():
with self.assertRaises(ValueError):
math_ops.add([1, 2], [1, 2, 3])
a = constant_op.constant(1)
with session.Session() as sess:
self.evaluate(a)
def testRunnableAfterInvalidShapeWithKernelLabelMap(self):
g = ops.Graph()
with g.as_default():
with g._kernel_label_map({"KernelLabelRequired": "overload_1"}):
with self.assertRaises(ValueError):
test_ops.kernel_label_required(1)
a = constant_op.constant(1)
with session.Session() as sess:
self.evaluate(a)
class AttrScopeTest(test_util.TensorFlowTestCase):
def _get_test_attrs(self):
x = control_flow_ops.no_op()
try:
a = compat.as_text(x.get_attr("_A"))
except ValueError:
a = None
try:
b = compat.as_text(x.get_attr("_B"))
except ValueError:
b = None
return (a, b)
@test_util.run_deprecated_v1
def testNoLabel(self):
with self.cached_session():
self.assertAllEqual((None, None), self._get_test_attrs())
@test_util.run_deprecated_v1
def testLabelMap(self):
with self.cached_session() as sess:
a1 = self._get_test_attrs()
with sess.graph._attr_scope({
"_A": attr_value_pb2.AttrValue(s=compat.as_bytes("foo"))
}):
a2 = self._get_test_attrs()
with sess.graph._attr_scope({
"_A": None,
"_B": attr_value_pb2.AttrValue(s=compat.as_bytes("bar"))
}):
a3 = self._get_test_attrs()
with sess.graph._attr_scope({
"_A": attr_value_pb2.AttrValue(s=compat.as_bytes("baz"))
}):
a4 = self._get_test_attrs()
a5 = self._get_test_attrs()
a6 = self._get_test_attrs()
a7 = self._get_test_attrs()
self.assertAllEqual((None, None), a1)
self.assertAllEqual(("foo", None), a2)
self.assertAllEqual((None, "bar"), a3)
self.assertAllEqual(("baz", "bar"), a4)
self.assertAllEqual((None, "bar"), a5)
self.assertAllEqual(("foo", None), a6)
self.assertAllEqual((None, None), a7)
ops.RegisterShape("KernelLabel")(common_shapes.scalar_shape)
class KernelLabelTest(test_util.TensorFlowTestCase):
@test_util.run_deprecated_v1
def testNoLabel(self):
with self.cached_session():
self.assertAllEqual(b"My label is: default",
test_ops.kernel_label().eval())
@test_util.run_deprecated_v1
def testLabelMap(self):
with self.cached_session() as sess:
default_1 = test_ops.kernel_label()
# pylint: disable=protected-access
with sess.graph._kernel_label_map({"KernelLabel": "overload_1"}):
overload_1_1 = test_ops.kernel_label()
with sess.graph._kernel_label_map({"KernelLabel": "overload_2"}):
overload_2 = test_ops.kernel_label()
with sess.graph._kernel_label_map({"KernelLabel": ""}):
default_2 = test_ops.kernel_label()
overload_1_2 = test_ops.kernel_label()
# pylint: enable=protected-access
default_3 = test_ops.kernel_label()
self.assertAllEqual(b"My label is: default", self.evaluate(default_1))
self.assertAllEqual(b"My label is: default", self.evaluate(default_2))
self.assertAllEqual(b"My label is: default", self.evaluate(default_3))
self.assertAllEqual(b"My label is: overload_1",
self.evaluate(overload_1_1))
self.assertAllEqual(b"My label is: overload_1",
self.evaluate(overload_1_2))
self.assertAllEqual(b"My label is: overload_2", self.evaluate(overload_2))
class AsGraphDefTest(test_util.TensorFlowTestCase):
def testGraphDefVersion(self):
"""Test that the graphdef version is plumbed through to kernels."""
with ops.Graph().as_default() as g:
version = g.graph_def_versions.producer
with self.session(graph=g):
v = test_ops.graph_def_version().eval()
self.assertEqual(version, v)
def testAddShapes(self):
with ops.Graph().as_default() as g:
t1, t2, t3, t4, t5 = _apply_op(g, "FiveFloatOutputs", [],
[dtypes.float32] * 5)
t1.set_shape(None)
t2.set_shape([])
t3.set_shape([None])
t4.set_shape([43, 37])
t5.set_shape([43, None])
b = constant_op.constant(1.0) # pylint: disable=unused-variable
gd = g.as_graph_def(add_shapes=True)
self.assertProtoEqualsVersion("""
node { name: "FiveFloatOutputs" op: "FiveFloatOutputs"
attr {
key: "_output_shapes"
value {
list {
shape { unknown_rank: true }
shape { }
shape { dim { size: -1 } }
shape { dim { size: 43 } dim { size: 37 } }
shape { dim { size: 43 } dim { size: -1 } }
}
}
}
}
node { name: "Const" op: "Const"
attr {
key: "_output_shapes"
value {
list {
shape { }
}
}
}
attr {
key: "dtype"
value { type: DT_FLOAT }
}
attr {
key: "value"
value {
tensor {
dtype: DT_FLOAT
tensor_shape { }
float_val: 1.0 } } } }
""", gd)
@ops.RegisterStatistics("a", "flops")
def _calc_a_forward_flops(unused_graph, unused_node):
return ops.OpStats("flops", 20)
class StatisticsTest(test_util.TensorFlowTestCase):
def testRegisteredNode(self):
graph = ops.Graph()
node = ops._NodeDef("a", "an_a")
flops = ops.get_stats_for_node_def(graph, node, "flops")
self.assertEqual(20, flops.value)
missing_stat = ops.get_stats_for_node_def(graph, node, "missing_stat")
self.assertEqual(None, missing_stat.value)
def testUnregisteredNode(self):
graph = ops.Graph()
node = ops._NodeDef("b", "a_b")
weight_params = ops.get_stats_for_node_def(graph, node, "weight_params")
self.assertEqual(None, weight_params.value)
def testAccumulateStatistics(self):
flops_total = ops.OpStats("flops")
self.assertEqual(None, flops_total.value)
second_flops = ops.OpStats("flops", 3)
flops_total += second_flops
self.assertEqual(3, flops_total.value)
class DeviceStackTest(test_util.TensorFlowTestCase):
@test_util.run_deprecated_v1
def testBasicDeviceAssignmentMetadata(self):
def device_func(unused_op):
return "/cpu:*"
const_zero = constant_op.constant([0.0], name="zero")
with ops.device("/cpu"):
const_one = constant_op.constant([1.0], name="one")
with ops.device("/cpu:0"):
const_two = constant_op.constant([2.0], name="two")
with ops.device(device_func):
const_three = constant_op.constant(3.0, name="three")
self.assertEqual(0, len(const_zero.op._device_assignments))
one_list = const_one.op._device_assignments
self.assertEqual(1, len(one_list))
self.assertEqual("/cpu", one_list[0].obj)
self.assertEqual("ops_test.py", os.path.basename(one_list[0].filename))
two_list = const_two.op._device_assignments
self.assertEqual(2, len(two_list))
devices = [t.obj for t in two_list]
self.assertEqual(set(["/cpu", "/cpu:0"]), set(devices))
three_list = const_three.op._device_assignments
self.assertEqual(1, len(three_list))
func_description = three_list[0].obj
expected_regex = r"device_func<.*ops_test.py, [0-9]+"
self.assertRegexpMatches(func_description, expected_regex)
@test_util.run_deprecated_v1
def testDeviceAssignmentMetadataForGraphDeviceAndTfDeviceFunctions(self):
with ops.device("/cpu"):
const_one = constant_op.constant([1.0], name="one")
with ops.get_default_graph().device("/cpu"):
const_two = constant_op.constant([2.0], name="two")
one_metadata = const_one.op._device_assignments[0]
two_metadata = const_two.op._device_assignments[0]
# Verify both types of device assignment return the right stack info.
self.assertRegexpMatches("ops_test.py",
os.path.basename(one_metadata.filename))
self.assertEqual(one_metadata.filename, two_metadata.filename)
self.assertEqual(one_metadata.lineno + 2, two_metadata.lineno)
class ColocationGroupTest(test_util.TensorFlowTestCase):
@test_util.run_deprecated_v1
def testBasic(self):
a = constant_op.constant([2.0], name="a")
with ops.colocate_with(a.op):
b = constant_op.constant(3.0)
c = constant_op.constant(4.0)
self.assertEqual([b"loc:@a"], a.op.colocation_groups())
self.assertEqual([b"loc:@a"], b.op.colocation_groups())
with self.assertRaises(ValueError):
c.op.get_attr("_class")
@test_util.run_deprecated_v1
def testBasicColocationMetadata(self):
const_two = constant_op.constant([2.0], name="two")
with ops.colocate_with(const_two.op):
const_three = constant_op.constant(3.0, name="three")
locations_dict = const_three.op._colocation_dict
self.assertIn("two", locations_dict)
metadata = locations_dict["two"]
self.assertIsNone(metadata.obj)
# Check that this test's filename is recorded as the file containing the
# colocation statement.
self.assertEqual("ops_test.py", os.path.basename(metadata.filename))
@test_util.run_deprecated_v1
def testColocationDeviceInteraction(self):
with ops.device("/cpu:0"):
with ops.device("/device:GPU:0"):
a = constant_op.constant([2.0], name="a")
with ops.colocate_with(a.op):
# 'b' is created in the scope of /cpu:0, but it is
# colocated with 'a', which is on '/device:GPU:0'. colocate_with
# overrides devices because it is a stronger constraint.
b = constant_op.constant(3.0)
self.assertEqual([b"loc:@a"], b.op.colocation_groups())
self.assertEqual(a.op.device, b.op.device)
@test_util.run_deprecated_v1
def testColocationCanonicalization(self):
with ops.device("/device:GPU:0"):
_ = constant_op.constant(2.0)
with ops.device(lambda op: "/device:GPU:0"):
b = constant_op.constant(3.0)
with ops.get_default_graph().colocate_with(b):
with ops.device("/device:GPU:0"):
c = constant_op.constant(4.0)
# A's device will be /device:GPU:0
# B's device will be /device:GPU:0
# C's device will be /device:GPU:0 because it
# inherits B's device name, after canonicalizing the names.
self.assertEqual(b.op.device, c.op.device)
@test_util.run_deprecated_v1
def testLocationOverrides(self):
with ops.device("/cpu:0"):
with ops.device("/device:GPU:0"):
a = constant_op.constant([2.0], name="a")
# Note that this colocation is "redundant", since we are
# within the scope of "/device:GPU:0". However, we would like to
# preserve in the GraphDef that these two ops should be
# colocated in a portable way.
with ops.colocate_with(a.op):
b = constant_op.constant(3.0)
c = constant_op.constant(4.0)
d = constant_op.constant(5.0)
self.assertEqual([b"loc:@a"], b.op.colocation_groups())
self.assertEqual("/device:GPU:0", a.op.device)
self.assertEqual(a.op.device, b.op.device)
# Test that device function stack is restored.
self.assertEqual("/device:GPU:0", c.op.device)
self.assertEqual("/device:CPU:0", d.op.device)
@test_util.run_deprecated_v1
def testNestedColocateWith(self):
a = constant_op.constant([2.0], name="a")
with ops.colocate_with(a.op):
b = constant_op.constant(3.0)
with ops.colocate_with(b.op):
c = constant_op.constant(4.0)
self.assertEqual([b"loc:@a"], b.op.colocation_groups())
self.assertEqual([b"loc:@a"], c.op.colocation_groups())
@test_util.run_deprecated_v1
def testMultiColocationGroups(self):
a = constant_op.constant([2.0], name="a")
b = constant_op.constant(3.0, name="b")
with ops.colocate_with(a.op):
with ops.colocate_with(b.op):
c = constant_op.constant(4.0)
self.assertEqual(set([b"loc:@a", b"loc:@b"]), set(c.op.colocation_groups()))
@test_util.run_deprecated_v1
def testColocationIgnoreStack(self):
a = constant_op.constant([2.0], name="a")
b = constant_op.constant(3.0, name="b")
with ops.colocate_with(a.op):
with ops.colocate_with(b.op, ignore_existing=True):
c = constant_op.constant(4.0)
self.assertEqual(set([b"loc:@b"]), set(c.op.colocation_groups()))
@test_util.run_deprecated_v1
def testColocateWithReset(self):
a = constant_op.constant([2.0], name="a")
with ops.colocate_with(a.op):
b = constant_op.constant(3.0, name="b")
with ops.colocate_with(None, ignore_existing=True):
c = constant_op.constant(4.0, name="c")
self.assertEqual([b"loc:@a"], b.op.colocation_groups())
self.assertEqual([b"loc:@c"], c.op.colocation_groups())
@test_util.run_deprecated_v1
def testColocateWithInitialNoneThenNested(self):
a = constant_op.constant([2.0], name="a")
with ops.colocate_with(a.op):
with ops.colocate_with(None, ignore_existing=True):
b = constant_op.constant(3.0, name="b")
with ops.colocate_with(b.op):
c = constant_op.constant(4.0, name="c")
self.assertEqual([b"loc:@b"], b.op.colocation_groups())
self.assertEqual([b"loc:@b"], c.op.colocation_groups())
@test_util.run_deprecated_v1
def testColocateVariables(self):
a = variables.Variable([2.0], name="a")
with ops.colocate_with(a.op):
b = variables.Variable([3.0], name="b")
self.assertEqual([b"loc:@a"], b.op.colocation_groups())
def testColocateWithVariableInFunction(self):
v = variables.Variable(1.)
@def_function.function
def f():
with ops.colocate_with(v):
return array_ops.ones([], name="output")
f()
graph_def = f.get_concrete_function().graph.as_graph_def()
wrap_function.function_from_graph_def(graph_def, [], ["output"])
class DeprecatedTest(test_util.TensorFlowTestCase):
def testSuccess(self):
with ops.Graph().as_default() as g:
test_util.set_producer_version(g, 7)
old = test_ops.old()
with self.session(graph=g):
old.run()
def _error(self):
return ((r"Op Old is not available in GraphDef version %d\. "
r"It has been removed in version 8\. For reasons\.") %
versions.GRAPH_DEF_VERSION)
def testGraphConstructionFail(self):
with ops.Graph().as_default():
with self.assertRaisesRegexp(NotImplementedError, self._error()):
test_ops.old()
class DenseTensorLikeTypeTest(test_util.TensorFlowTestCase):
def testSuccess(self):
op = ops.Operation(
ops._NodeDef("FloatOutput", "myop"), ops.Graph(), [], [dtypes.float32])
t = op.outputs[0]
self.assertTrue(ops.is_dense_tensor_like(t))
v = variables.Variable([17])
self.assertTrue(ops.is_dense_tensor_like(v))
class BadClassNoName(object):
pass
class BadClassBadName(object):
def name(self):
pass
class BadClassNoDtype(object):
@property
def name(self):
pass
class BadClassBadDtype(object):
@property
def name(self):
pass
def dtype(self):
pass
def testBadClass(self):
with self.assertRaisesRegexp(TypeError, "`name`"):
ops.register_dense_tensor_like_type(
DenseTensorLikeTypeTest.BadClassNoName)
with self.assertRaisesRegexp(TypeError, "`name`"):
ops.register_dense_tensor_like_type(
DenseTensorLikeTypeTest.BadClassBadName)
with self.assertRaisesRegexp(TypeError, "`dtype`"):
ops.register_dense_tensor_like_type(
DenseTensorLikeTypeTest.BadClassNoDtype)
with self.assertRaisesRegexp(TypeError, "`dtype`"):
ops.register_dense_tensor_like_type(
DenseTensorLikeTypeTest.BadClassBadDtype)
class NameScopeTest(test_util.TensorFlowTestCase):
def testStripAndPrependScope(self):
strs = [
"hidden1/hidden1/weights", # Same prefix. Should strip.
"hidden1///hidden1/weights", # Extra "/". Should strip.
"^hidden1/hidden1/weights", # Same prefix. Should strip.
"loc:@hidden1/hidden1/weights", # Same prefix. Should strip.
"hhidden1/hidden1/weights", # Different prefix. Should keep.
"hidden1"
] # Not a prefix. Should keep.
expected_striped = [
"hidden1/weights", "hidden1/weights", "^hidden1/weights",
"loc:@hidden1/weights", "hhidden1/hidden1/weights", "hidden1"
]
expected_prepended = [
"hidden2/hidden1/weights", "hidden2/hidden1/weights",
"^hidden2/hidden1/weights", "loc:@hidden2/hidden1/weights",
"hidden2/hhidden1/hidden1/weights", "hidden2/hidden1"
]
name_scope_to_strip = "hidden1"
name_scope_to_add = "hidden2"
for es, ep, s in zip(expected_striped, expected_prepended, strs):
striped = ops.strip_name_scope(s, name_scope_to_strip)
self.assertEqual(es, striped)
self.assertEqual(ep, ops.prepend_name_scope(striped, name_scope_to_add))
def testGetNameScope(self):
with ops.Graph().as_default() as g:
with ops.name_scope("scope1"):
with ops.name_scope("scope2"):
with ops.name_scope("scope3"):
self.assertEqual("scope1/scope2/scope3", g.get_name_scope())
self.assertEqual("scope1/scope2", g.get_name_scope())
self.assertEqual("scope1", g.get_name_scope())
self.assertEqual("", g.get_name_scope())
def testTwoGraphs(self):
def f():
g1 = ops.Graph()
g2 = ops.Graph()
with g1.as_default():
with g2.as_default():
with ops.name_scope("_"):
pass
self.assertRaisesRegexp(ValueError, "'_' is not a valid scope name", f)
class EnableEagerExecutionTest(test_util.TensorFlowTestCase):
@test_util.run_v1_only("b/120545219")
def testBadArgumentsToEnableEagerExecution(self):
with self.assertRaisesRegexp(TypeError, "config must be a tf.ConfigProto"):
ops.enable_eager_execution(context.DEVICE_PLACEMENT_SILENT)
with self.assertRaisesRegexp(ValueError, "device_policy must be one of"):
c = config_pb2.ConfigProto()
ops.enable_eager_execution(c, c)
with self.assertRaisesRegexp(ValueError, "execution_mode must be one of"):
c = config_pb2.ConfigProto()
ops.enable_eager_execution(c, execution_mode=c)
class _TupleTensor(composite_tensor.CompositeTensor):
"""`Tensor`-like `tuple`-like for custom `Tensor` conversion masquerading."""
def __init__(self, components):
super(_TupleTensor, self).__init__()
self._components = tuple(ops.convert_to_tensor(c) for c in components)
@property
def _type_spec(self):
return _TupleTensorSpec(type_spec.from_value(c) for c in self._components)
def __getitem__(self, key):
return self._components[key]
def __len__(self):
return len(self._components)
def __iter__(self):
return iter(self._components)
class _TupleTensorSpec(type_spec.TypeSpec):
def __init__(self, specs):
self._specs = specs
value_type = property(lambda self: _TupleTensor)
_component_specs = property(lambda self: self._specs)
def _to_components(self, value):
return value._components
def _from_components(self, components):
return _TupleTensor(*components)
def _serialize(self):
return (self._specs,)
class _MyTuple(object):
"""Pretend user-side class for `ConvertToCompositeTensorTest ."""
def __init__(self, components):
super(_MyTuple, self).__init__()
self._components = tuple(components)
def __getitem__(self, key):
return self._components[key]
def __len__(self):
return len(self._components)
def __iter__(self):
return iter(self._components)
ops.register_tensor_conversion_function(
_MyTuple, conversion_func=lambda x, *_, **__: _TupleTensor(x))
class CustomConvertToCompositeTensorTest(test_util.TensorFlowTestCase):
def testCompositeTensorConversion(self):
"""Tests that a user can register a CompositeTensor converter."""
x = _MyTuple((1, [2., 3.], [[4, 5], [6, 7]]))
y = ops.convert_to_tensor_or_composite(x)
self.assertFalse(tensor_util.is_tensor(y))
self.assertIsInstance(y, _TupleTensor)
self.assertLen(y, len(x))
for x_, y_ in zip(x, y):
self.assertIsInstance(y_, ops.Tensor)
self.assertTrue(tensor_util.is_tensor(y_))
self.assertAllEqual(x_, tensor_util.constant_value(y_))
if __name__ == "__main__":
googletest.main()
| apache-2.0 | -3,855,448,766,385,486,000 | 36.618647 | 106 | 0.630099 | false |
EdDev/vdsm | tests/network/nmdbus_test.py | 1 | 7506 | # Copyright 2016-2017 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
from nose.plugins.attrib import attr
from dbus.exceptions import DBusException
from testlib import VdsmTestCase
from testValidation import ValidateRunningAsRoot
from .nettestlib import dummy_devices
from .nettestlib import requires_systemctl
from .nmnettestlib import iface_name, TEST_LINK_TYPE, NMService, nm_connections
from vdsm.network.nm.errors import NMDeviceNotFoundError
from vdsm.network.nm.nmdbus import NMDbus
from vdsm.network.nm.nmdbus import types
from vdsm.network.nm.nmdbus.active import NMDbusActiveConnections
from vdsm.network.nm.nmdbus.device import NMDbusDevice
from vdsm.network.nm.nmdbus.settings import NMDbusSettings
IPV4ADDR = '10.1.1.1/29'
_nm_service = None
@ValidateRunningAsRoot
@requires_systemctl
def setup_module():
global _nm_service
_nm_service = NMService()
_nm_service.setup()
try:
NMDbus.init()
except DBusException as ex:
# Unfortunately, nose labeling does not operate on module fixtures.
# We let the test fail if init was not successful.
if 'Failed to connect to socket' not in ex.args[0]:
raise
def teardown_module():
_nm_service.teardown()
@attr(type='integration')
class TestNMConnectionSettings(VdsmTestCase):
def setUp(self):
self.nm_settings = NMDbusSettings()
self.iface = iface_name()
def test_configured_connections_attributes_existence(self):
with dummy_devices(1) as nics:
with nm_connections(self.iface, IPV4ADDR, slaves=nics) as connames:
nm_con = self._get_connection(connames[0])
self.assertEqual(connames[0], nm_con.connection.id)
self.assertIsNotNone(nm_con.connection.uuid)
self.assertIsNotNone(nm_con.connection.type)
def test_delete_one_of_two_connections(self):
with dummy_devices(1) as nics:
with nm_connections(self.iface, IPV4ADDR,
slaves=nics, con_count=2) as connames:
con0 = self._get_connection(connames[0])
con0.delete()
self.assertIsNone(self._get_connection(connames[0]))
con1 = self._get_connection(connames[1])
self.assertEqual(connames[1], con1.connection.id)
def _get_connection(self, con_name):
for nm_con in self.nm_settings.connections():
if nm_con.connection.id == con_name:
return nm_con
@attr(type='integration')
class TestNMActiveConnections(VdsmTestCase):
def test_active_connections_properties_existence(self):
nm_active_cons = NMDbusActiveConnections()
iface = iface_name()
with dummy_devices(1) as nics:
with nm_connections(iface, IPV4ADDR, slaves=nics):
con_count = 0
for connection in nm_active_cons.connections():
assert connection.id is not None
assert connection.uuid is not None
assert connection.type is not None
assert connection.master_con_path is not None
con_count += 1
self.assertGreaterEqual(con_count, 1)
def test_active_connections_properties_vs_connection_settings(self):
nm_active_cons = NMDbusActiveConnections()
nm_settings = NMDbusSettings()
iface = iface_name()
with dummy_devices(1) as nics:
with nm_connections(iface, IPV4ADDR, slaves=nics):
for active_con in nm_active_cons.connections():
settings_con = nm_settings.connection(active_con.con_path)
assert active_con.uuid == settings_con.connection.uuid
assert active_con.type == settings_con.connection.type
assert active_con.id == settings_con.connection.id
@attr(type='integration')
class TestNMDevice(VdsmTestCase):
def test_device_attributes_existence(self):
nm_device = NMDbusDevice()
nm_settings = NMDbusSettings()
device_count = 0
for device in nm_device.devices():
assert device.interface is not None
assert device.state is not None
assert device.active_connection_path is not None
assert device.connections_path is not None
for connection_path in device.connections_path:
settings_con = nm_settings.connection(connection_path)
assert settings_con.connection.uuid is not None
device_count += 1
self.assertGreaterEqual(device_count, 1)
def test_device_with_single_connection(self):
self._test_device_with_n_connections(1)
def test_device_with_multiple_connections(self):
self._test_device_with_n_connections(2)
def _test_device_with_n_connections(self, con_count):
nm_device = NMDbusDevice()
nm_settings = NMDbusSettings()
nm_act_cons = NMDbusActiveConnections()
configured_connections = set()
active_connections = set()
iface = iface_name()
with dummy_devices(1) as nics:
with nm_connections(iface, IPV4ADDR, slaves=nics,
con_count=con_count):
device = nm_device.device(iface)
for connection_path in device.connections_path:
settings_con = nm_settings.connection(connection_path)
configured_connections.add(settings_con.connection.id)
ac = nm_act_cons.connection(device.active_connection_path)
active_connections.add(ac.id)
self.assertEqual(con_count, len(configured_connections))
self.assertEqual(set([iface + '0']), active_connections)
@attr(type='integration')
class TestNMConnectionCreation(VdsmTestCase):
def test_nm_connection_lifetime(self):
nm_act_cons = NMDbusActiveConnections()
nm_device = NMDbusDevice()
iface = iface_name()
with dummy_devices(1) as nics:
with nm_connections(iface, IPV4ADDR, slaves=nics):
device = nm_device.device(iface)
active_con_path = device.active_connection_path
active_con = nm_act_cons.connection(active_con_path)
self.assertEqual(TEST_LINK_TYPE, str(active_con.type))
self.assertEqual(types.NMActiveConnectionState.ACTIVATED,
active_con.state)
self._assert_no_device(iface)
def _assert_no_device(self, iface):
nm_device = NMDbusDevice()
with self.assertRaises(NMDeviceNotFoundError):
nm_device.device(iface)
| gpl-2.0 | 2,538,314,882,646,192,000 | 35.086538 | 79 | 0.650413 | false |
dpgaspar/Flask-AppBuilder | examples/simpleform/config.py | 1 | 3697 | import os
from flask_appbuilder.security.manager import (
AUTH_OID,
AUTH_REMOTE_USER,
AUTH_DB,
AUTH_LDAP,
AUTH_OAUTH,
)
basedir = os.path.abspath(os.path.dirname(__file__))
# Your App secret key
SECRET_KEY = "\2\1thisismyscretkey\1\2\e\y\y\h"
# The SQLAlchemy connection string.
SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join(basedir, "app.db")
# SQLALCHEMY_DATABASE_URI = 'mysql://myapp@localhost/myapp'
# SQLALCHEMY_DATABASE_URI = 'postgresql://root:password@localhost/myapp'
# Flask-WTF flag for CSRF
CSRF_ENABLED = True
# ------------------------------
# GLOBALS FOR APP Builder
# ------------------------------
# Uncomment to setup Your App name
# APP_NAME = "My App Name"
# Uncomment to setup Setup an App icon
# APP_ICON = "static/img/logo.jpg"
# ----------------------------------------------------
# AUTHENTICATION CONFIG
# ----------------------------------------------------
# The authentication type
# AUTH_OID : Is for OpenID
# AUTH_DB : Is for database (username/password)
# AUTH_LDAP : Is for LDAP
# AUTH_REMOTE_USER : Is for using REMOTE_USER from web server
AUTH_TYPE = AUTH_DB
# Uncomment to setup Full admin role name
# AUTH_ROLE_ADMIN = 'Admin'
# Uncomment to setup Public role name, no authentication needed
# AUTH_ROLE_PUBLIC = 'Public'
# Will allow user self registration
# AUTH_USER_REGISTRATION = True
# The default user self registration role
# AUTH_USER_REGISTRATION_ROLE = "Public"
# When using LDAP Auth, setup the ldap server
# AUTH_LDAP_SERVER = "ldap://ldapserver.new"
# AUTH_LDAP_USE_TLS = False
# Uncomment to setup OpenID providers example for OpenID authentication
# OPENID_PROVIDERS = [
# { 'name': 'Yahoo', 'url': 'https://me.yahoo.com' },
# { 'name': 'AOL', 'url': 'http://openid.aol.com/<username>' },
# { 'name': 'Flickr', 'url': 'http://www.flickr.com/<username>' },
# { 'name': 'MyOpenID', 'url': 'https://www.myopenid.com' }]
# ---------------------------------------------------
# Babel config for translations
# ---------------------------------------------------
# Setup default language
BABEL_DEFAULT_LOCALE = "en"
# Your application default translation path
BABEL_DEFAULT_FOLDER = "translations"
# The allowed translation for you app
LANGUAGES = {
"en": {"flag": "gb", "name": "English"},
"pt": {"flag": "pt", "name": "Portuguese"},
"pt_BR": {"flag": "br", "name": "Pt Brazil"},
"es": {"flag": "es", "name": "Spanish"},
"de": {"flag": "de", "name": "German"},
"zh": {"flag": "cn", "name": "Chinese"},
"ru": {"flag": "ru", "name": "Russian"},
"pl": {"flag": "pl", "name": "Polish"},
}
# ---------------------------------------------------
# Image and file configuration
# ---------------------------------------------------
# The file upload folder, when using models with files
UPLOAD_FOLDER = basedir + "/app/static/uploads/"
# The image upload folder, when using models with images
IMG_UPLOAD_FOLDER = basedir + "/app/static/uploads/"
# The image upload url, when using models with images
IMG_UPLOAD_URL = "/static/uploads/"
# Setup image size default is (300, 200, True)
# IMG_SIZE = (300, 200, True)
# Theme configuration
# these are located on static/appbuilder/css/themes
# you can create your own and easily use them placing them on the same dir structure to override
# APP_THEME = "bootstrap-theme.css" # default bootstrap
# APP_THEME = "cerulean.css"
# APP_THEME = "amelia.css"
# APP_THEME = "cosmo.css"
# APP_THEME = "cyborg.css"
# APP_THEME = "flatly.css"
# APP_THEME = "journal.css"
# APP_THEME = "readable.css"
# APP_THEME = "simplex.css"
# APP_THEME = "slate.css"
# APP_THEME = "spacelab.css"
# APP_THEME = "united.css"
# APP_THEME = "yeti.css"
| bsd-3-clause | -7,382,459,781,208,628,000 | 32.306306 | 96 | 0.603192 | false |
mganeva/mantid | scripts/Reflectometry/isis_reflectometry/settings.py | 1 | 2193 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)
import xml.etree.ElementTree as XML
import os.path
class MissingSettings(Exception):
pass
class Settings(object):
__contents = None
__filename = None
def __init__(self, filename = None):
self.__filename = filename
if not filename:
filename = os.path.join( os.path.dirname(os.path.realpath(__file__)), "settings.xml")
self.__check_file(filename)
doc = None
try:
tree = XML.parse(filename)
doc = tree.getroot()
self.__extract_to_dictionary(doc)
except:
raise ValueError("The file %s does not contain valid XML" % filename)
def __check_file(self, filename):
path, extension = os.path.splitext(filename)
if extension.lower().strip() != ".xml":
raise ValueError("Wrong file extension. *.xml expected not %s." % extension)
if not os.path.isfile(filename):
''' Deliberately swallow and re-throw at this point. Concise reinterpreted error, will be much nicer for client code.'''
raise MissingSettings("Settings file %s does not exist so no manual settings will be applied." % filename)
def __extract_to_dictionary(self, doc):
temp = dict()
for elem in doc:
key = elem.attrib.get('name').strip()
value = elem.text.strip()
if not key:
raise ValueError("Missing name attribute on Setting element")
if not value:
raise ValueError("Missing value for Setting element")
temp[key] = value
self.__contents = dict(frozenset(list(temp.items())))
def get_all_entries(self):
return self.__contents
def get_named_setting(self, name):
return self.__contents[name]
def get_contents_file(self):
return self.__filename
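# Added illustrative note (not part of the original file): the parser above
# only reads the 'name' attribute and the text of each child element of the
# document root, so a minimal settings.xml could look like this (element tags
# and values below are hypothetical):
#
#   <Settings>
#     <setting name="data_root">/archive/instruments</setting>
#     <setting name="default_instrument">INTER</setting>
#   </Settings>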
| gpl-3.0 | -1,148,494,473,465,273,000 | 33.809524 | 132 | 0.620155 | false |
krzjoa/sciquence | sciquence/representation/sax.py | 1 | 3735 | # -*- coding: utf-8 -*-
# Krzysztof Joachimiak 2017
# sciquence: Time series & sequences in Python
#
# Symbolic Aggregate Approximation
# Author: Krzysztof Joachimiak
#
# License: MIT
import numpy as np
import scipy.stats
from sklearn.preprocessing import scale, StandardScaler
from paa import paa
from operator import add
def sax(sequence, window, alphabet_size=5, adjust=True):
'''
Symbolic Aggregate Approximation.
Transform time series into a string.
Parameters
----------
sequence: numpy.ndarray
One-dimensional numpy array of arbitrary length
window: int
Length of sliding window
alphabet_size: int
Number of Gaussian breakpoints
adjust: bool, default True
Compute only for equal-size chunks
Returns
-------
sax_representation: str
A SAX representation
Examples
--------
>>> import numpy as np
>>> from sciquence.representation import sax
>>> np.random.seed(42)
>>> random_time_series = np.random.rand(50)
>>> print sax(random_time_series, 10, alphabet_size=5)
dcccc
References
----------
.. [1] Lin, J., Keogh, E., Lonardi, S., & Chiu, B. (2003).
A Symbolic Representation of Time Series,
with Implications for Streaming Algorithms.
In proceedings of the 8th ACM SIGMOD Workshop
on Research Issues in Data Mining and Knowledge Discovery.
http://www.cs.ucr.edu/~eamonn/SAX.pdf
.. [2] http://www.cs.ucr.edu/~eamonn/SAX.htm
.. [3] https://jmotif.github.io/sax-vsm_site/morea/algorithm/SAX.html
'''
# TODO: check dimensionality, size, aphabet size etc.
# Pre-step: checking if all arguments have proper values
# First step: Standardization ( aka normalization, z-normalization or standard score)
scaled = scale(sequence)
# Second step: PAA
paa_repr = paa(scaled, window=window, adjust=adjust)
# Last step:
breakpoints = gauss_breakpoints(alphabet_size)
letters = _alphabet(alphabet_size)
breakpoints= np.array(breakpoints)
symbols = np.array(letters)
return reduce(add, symbols[np.digitize(paa_repr, breakpoints)])
# =========== SAX object ============ #
# TODO: consider: some classes should be both transformers and processors
class SAX(object):
def __init__(self, n_ranges=5, keep_scale=True):
self.scaler = StandardScaler()
self.breakpoints = gauss_breakpoints(n_ranges)
def fit(self, X, y):
return self
def transform(self, X, y):
pass
def fit_transform(self, X, y):
return self.fit(X, y).transform(X, y)
# ================ UTILS ================ #
def gauss_breakpoints(n_ranges):
# TODO: move this function to utilities
'''
Get quantiles of Gaussian distribution.
Parameters
----------
n_ranges: int
Number of equal ranges in Gaussian distribution
Returns
-------
breakpoints: list of float
List of Gaussian quantiles
'''
quantile_range = 1. / n_ranges
quantiles = [quantile_range*i for i in range(1, n_ranges)]
return [round(scipy.stats.norm.ppf(q), 2) for q in quantiles]
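# Added illustrative values (computed with scipy.stats.norm.ppf as above):
#
#     >>> gauss_breakpoints(4)
#     [-0.67, 0.0, 0.67]
#     >>> gauss_breakpoints(5)
#     [-0.84, -0.25, 0.25, 0.84]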
def _alphabet(n_letters):
import string
return np.array(list(string.ascii_lowercase)[:n_letters])
def get_bins(sequence, breakpoints, symbols):
breakpoints = np.array(breakpoints)
symbols = np.array(symbols)
# Index the symbol array by the bin of each value (matches the usage in sax())
return symbols[np.digitize(sequence, breakpoints)]
if __name__ == '__main__':
# rts = np.random.rand(20)*10
# saxed = sax(rts, 3)
# print saxed
#print gauss_breakpoints(10)
#import scipy.stats
#print scipy.stats.norm.ppf(1. / 3)
random_ts = np.random.rand(30, 100)
print random_ts
| mit | 257,375,761,614,011,420 | 24.236486 | 89 | 0.638554 | false |
luster/oldnyc | generate_static_site.py | 1 | 5564 | #!/usr/bin/env python
'''Generate a static version of oldnyc.org consisting entirely of JSON.'''
import chardet
from collections import defaultdict, OrderedDict
import csv
import json
import record
import re
from distutils.dir_util import copy_tree
from shutil import copyfile
import subprocess
import sys
import time
import os
from ocr import cleaner
import title_cleaner
# Make sure the oldnyc.github.io repo is in a clean state.
git_status = subprocess.check_output('git -C ../oldnyc.github.io status --porcelain'.split(' '))
if git_status.strip():
sys.stderr.write('Make sure the ../oldnyc.github.io repo exists and is clean.\n')
sys.exit(1)
# strip leading 'var popular_photos = ' and trailing ';'
popular_photos = json.loads(open('viewer/static/js/popular-photos.js', 'rb').read()[20:-2])
pop_ids = {x['id'] for x in popular_photos}
# strip leading 'var lat_lons = ' and trailing ';'
lat_lon_to_ids = json.loads(open('viewer/static/js/nyc-lat-lons-ny.js', 'rb').read()[15:-1])
rs = record.AllRecords('nyc/photos.pickle')
id_to_record = {r.photo_id(): r for r in rs}
id_to_dims = {}
for photo_id, width, height in csv.reader(open('nyc-image-sizes.txt')):
id_to_dims[photo_id] = (int(width), int(height))
# rotated images based on user feedback
user_rotations = json.load(open('analysis/rotations/rotations.json'))
id_to_rotation = user_rotations['fixes']
# Load the previous iteration of OCR. Corrections are applied on top of
# this.
old_data = json.load(open('../oldnyc.github.io/data.json', 'rb'))
old_photo_id_to_text = {r['photo_id']: r['text'] for r in old_data['photos'] if r['text']}
manual_ocr_fixes = json.load(open('ocr/feedback/fixes.json', 'rb'))
back_id_to_correction = manual_ocr_fixes['fixes']
id_to_text = {}
for photo_id in id_to_record.iterkeys():
back_id = re.sub(r'f?(?:-[a-z])?$', 'b', photo_id)
if photo_id in old_photo_id_to_text:
id_to_text[photo_id] = old_photo_id_to_text[photo_id]
if back_id in back_id_to_correction:
id_to_text[photo_id] = back_id_to_correction[back_id]
# (This was only helpful on the initial run, when data came straight from
# Ocropus.)
# for k, txt in id_to_text.iteritems():
# id_to_text[k] = cleaner.clean(txt)
back_id_to_text = None # clear
def image_url(photo_id, is_thumb):
degrees = id_to_rotation.get(photo_id)
if not degrees:
return 'http://oldnyc-assets.nypl.org/%s/%s.jpg' % (
'thumb' if is_thumb else '600px', photo_id)
else:
return 'http://www.oldnyc.org/rotated-assets/%s/%s.%s.jpg' % (
'thumb' if is_thumb else '600px', photo_id, degrees)
def decode(b):
try:
return b.decode('utf8')
except UnicodeDecodeError:
return b.decode(chardet.detect(b)['encoding'])
def make_response(photo_ids):
response = OrderedDict()
for photo_id in photo_ids:
r = id_to_record[photo_id]
w, h = id_to_dims[photo_id]
ocr_text = id_to_text.get(photo_id)
# See also viewer/app.py
title = decode(r.title())
original_title = None
if title_cleaner.is_pure_location(title):
original_title = title
title = ''
assert r.description() == ''
assert r.note() == ''
rotation = id_to_rotation.get(photo_id)
if rotation and (rotation % 180 == 90):
w, h = h, w
response[photo_id] = {
'title': title,
'date': re.sub(r'\s+', ' ', r.date()),
'folder': decode(r.location()),
'width': w,
'height': h,
'text': ocr_text,
'image_url': image_url(photo_id, is_thumb=False),
'thumb_url': image_url(photo_id, is_thumb=True)
}
if original_title:
response[photo_id]['original_title'] = original_title
if rotation:
response[photo_id]['rotation'] = rotation
return response
all_photos = []
latlon_to_count = {}
id4_to_latlon = defaultdict(lambda: {}) # first 4 of id -> id -> latlon
for latlon, photo_ids in lat_lon_to_ids.iteritems():
outfile = '../oldnyc.github.io/by-location/%s.json' % latlon.replace(',', '')
response = make_response(photo_ids)
latlon_to_count[latlon] = len(response)
json.dump(response, open(outfile, 'wb'), indent=2)
for id_ in photo_ids:
id4_to_latlon[id_[:4]][id_] = latlon
for photo_id, response in response.iteritems():
lat, lon = [float(x) for x in latlon.split(',')]
response['photo_id'] = photo_id
response['location'] = {
'lat': lat,
'lon': lon
}
response['width'] = int(response['width'])
response['height'] = int(response['height'])
all_photos.append(response)
json.dump(make_response(pop_ids),
open('../oldnyc.github.io/popular.json', 'wb'), indent=2)
with open('../oldnyc.github.io/lat-lon-counts.js', 'wb') as f:
f.write('var lat_lons = %s;' % json.dumps(latlon_to_count, indent=2))
for id4, id_to_latlon in id4_to_latlon.iteritems():
json.dump(id_to_latlon,
open('../oldnyc.github.io/id4-to-location/%s.json' % id4, 'wb'),
indent=2)
# Complete data dump
all_photos.sort(key=lambda photo: photo['photo_id'])
json.dump({
'photos': all_photos,
'timestamp': time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime()),
'rotation_time': user_rotations['last_date'],
'ocr_time': manual_ocr_fixes['last_date']
},
open('../oldnyc.github.io/data.json', 'wb'),
indent=2)
| apache-2.0 | -9,218,727,179,228,314,000 | 32.721212 | 96 | 0.612689 | false |
VisualComputingInstitute/TrackR-CNN | datasets/Mapillary/Mapillary_instance.py | 1 | 1930 | from datasets.Loader import register_dataset
from datasets.Mapillary.MapillaryLike_instance import MapillaryLikeInstanceDataset
from datasets.util.Util import username
DEFAULT_PATH = "/fastwork/" + username() + "/mywork/data/mapillary/"
NAME = "mapillary_instance"
@register_dataset("mapillary_instance_full", resolution="full")
@register_dataset("mapillary_instance_half", resolution="half")
@register_dataset("mapillary_instance_quarter", resolution="quarter")
class MapillaryInstanceDataset(MapillaryLikeInstanceDataset):
def __init__(self, config, subset, resolution):
assert resolution in ("quarter", "half", "full"), resolution
if resolution == "full":
default_path = DEFAULT_PATH
else:
default_path = DEFAULT_PATH.replace("/mapillary/", "/mapillary_{}/".format(resolution))
# there are 37 classes with instances in total
# we excluded the following:
# 8: construction--flat--crosswalk-plain -> doesn't really look like a useful object category
# 34: object--bike-rack -> holes*
# 45: object--support--pole -> very large and thin -> bounding box does not capture it well
# 46: object--support--traffic-sign-frame -> holes*
# 47: object--support--utility-pole -> holes*
# further candidate for exclusion:
# 0: animal--bird -> usually very small
# *: holes means that there are large "holes" in the object which usually are still annotated as part of the object
# this will not work well together with laser, so we exclude them
vehicle_ids = [52, 53, 54, 55, 56, 57, 59, 60, 61, 62]
human_ids = [19, 20, 21, 22]
animal_ids = [0, 1]
object_ids = [32, 33, 35, 36, 37, 38, 39, 40, 41, 42, 44, 48, 49, 50, 51]
crosswalk_zebra_id = [23]
cat_ids_to_use = vehicle_ids + human_ids + animal_ids + object_ids + crosswalk_zebra_id
super().__init__(config, subset, NAME, default_path, "datasets/Mapillary/", 256, cat_ids_to_use)
| mit | 4,730,217,750,433,129,000 | 46.073171 | 119 | 0.691192 | false |
evamy/learning_to_see | VAProject/eyes.py | 1 | 1174 |
"""
@author: Antriksh Agarwal
Version 0: 04/29/2018
"""
import cv2
import numpy as np
from utils import *
import time
eyeCascade = cv2.CascadeClassifier('models/eyes.xml')
def detect_eyes(image):
image = cv2.resize(image, (0, 0), fx=4, fy=4)
# start = time.time()
eyes = eyeCascade.detectMultiScale(
image, scaleFactor=2.5, minNeighbors=5)
# print "Eye Time: ", time.time() - start
eyes = non_max_suppression(eyes, overlapThresh=0.5)
# cv2.rectangle(image, (x, y), (x + w, y + h), (69, 165, 255), 2)
return eyes
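# Added note: the boxes returned above are (x, y, w, h) tuples expressed in
# the coordinates of the 4x-upscaled copy created at the top of detect_eyes(),
# not of the original `image` argument.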
def video_capture():
cap = cv2.VideoCapture(0)
while(1):
# for frame in imgs:
_, frame = cap.read()
# frame = cv2.imread(frame)
image = cv2.resize(frame, (0, 0), fx=0.5, fy=0.5)
eyes = detect_eyes(image)
for eye in eyes:
(xe, ye, we, he) = eye
cv2.rectangle(image, (xe, ye), (xe + we, ye + he),
(255, 0, 255), 3)
cv2.imshow("Eye detection", image)
# cv2.waitKey(0)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
if __name__ == '__main__':
video_capture()
| gpl-3.0 | 7,997,997,112,329,743,000 | 21.576923 | 73 | 0.539182 | false |
avr-aics-riken/SURFACE | glsl/sexps.py | 1 | 4142 | # coding=utf-8
#
# Copyright © 2011 Intel Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# This file contains helper functions for manipulating sexps in Python.
#
# We represent a sexp in Python using nested lists containing strings.
# So, for example, the sexp (constant float (1.000000)) is represented
# as ['constant', 'float', ['1.000000']].
import re
def check_sexp(sexp):
"""Verify that the argument is a proper sexp.
That is, raise an exception if the argument is not a string or a
list, or if it contains anything that is not a string or a list at
any nesting level.
"""
if isinstance(sexp, list):
for s in sexp:
check_sexp(s)
elif not isinstance(sexp, basestring):
raise Exception('Not a sexp: {0!r}'.format(sexp))
def parse_sexp(sexp):
"""Convert a string, of the form that would be output by mesa,
into a sexp represented as nested lists containing strings.
"""
#sexp_token_regexp = re.compile(
# '[a-zA-Z_]+(@[0-9]+)?|[0-9]+(\\.[0-9]+)?|[^ \n]')
# symbol may include '@'
sexp_token_regexp = re.compile(
'[a-zA-Z_][a-zA-Z_@0-9]*|[-+]*[0-9]+(\\.[0-9]+)?|[\|\&\+\-<>=]+|[^ \r\n]')
stack = [[]]
for match in sexp_token_regexp.finditer(sexp):
token = match.group(0)
if token == '\t':
continue # skip
elif token == '(':
stack.append([])
elif token == ')':
if len(stack) == 1:
raise Exception('Unmatched )')
sexp = stack.pop()
stack[-1].append(sexp)
else:
# escape '@' to '__'
t = re.sub('@', '__', token)
stack[-1].append(t)
if len(stack) != 1:
raise Exception('Unmatched (')
if len(stack[0]) != 1:
# flatten last element
sexp = stack[0].pop()
for exp in sexp:
stack[0].append(exp)
return stack[0]
else:
return stack[0][0]
def sexp_to_string(sexp):
"""Convert a sexp, represented as nested lists containing strings,
into a single string of the form parseable by mesa.
"""
if isinstance(sexp, basestring):
return sexp
assert isinstance(sexp, list)
result = ''
for s in sexp:
sub_result = sexp_to_string(s)
if result == '':
result = sub_result
elif '\n' not in result and '\n' not in sub_result and \
len(result) + len(sub_result) + 1 <= 70:
result += ' ' + sub_result
else:
result += '\n' + sub_result
return '({0})'.format(result.replace('\n', '\n '))
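# Added illustrative round trip (the example sexp is the one quoted in the
# module header comment):
#
#     >>> parse_sexp('(constant float (1.000000))')
#     ['constant', 'float', ['1.000000']]
#     >>> sexp_to_string(['constant', 'float', ['1.000000']])
#     '(constant float (1.000000))'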
def sort_decls(sexp):
"""Sort all toplevel variable declarations in sexp.
This is used to work around the fact that
ir_reader::read_instructions reorders declarations.
"""
assert isinstance(sexp, list)
decls = []
other_code = []
for s in sexp:
if isinstance(s, list) and len(s) >= 4 and s[0] == 'declare':
decls.append(s)
else:
other_code.append(s)
return sorted(decls) + other_code
| bsd-2-clause | 3,785,125,142,359,206,000 | 33.798319 | 82 | 0.612171 | false |
nischalsheth/contrail-controller | src/config/common/utils.py | 1 | 6932 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2015 Juniper Networks
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Numan Siddique, eNovance.
import os
import errno
import urllib
from collections import OrderedDict
import sys
import cStringIO
import logging
from ConfigParser import NoOptionError
from cfgm_common import vnc_cgitb
_DEFAULT_USER_DOMAIN_NAME = 'Default'
_DEFAULT_DOMAIN_ID = 'default'
def cgitb_hook(info=None, **kwargs):
vnc_cgitb.Hook(**kwargs).handle(info or sys.exc_info())
# end cgitb_hook
def detailed_traceback():
buf = cStringIO.StringIO()
cgitb_hook(format="text", file=buf)
tb_txt = buf.getvalue()
buf.close()
return tb_txt
# end detailed_traceback
def encode_string(enc_str, encoding='utf-8'):
"""Encode the string using urllib.quote_plus
Eg. @input:
enc_str = 'netéù'
type - 'unicode' or 'str'
@retval
enc_str = 'net%C3%A9%C3%B9'
type - str
"""
try:
enc_str.encode()
except (UnicodeDecodeError, UnicodeEncodeError):
if type(enc_str) is unicode:
enc_str = enc_str.encode(encoding)
enc_str = urllib.quote_plus(enc_str)
except Exception:
pass
return enc_str
def decode_string(dec_str, encoding='utf-8'):
"""Decode the string previously encoded using urllib.quote_plus.
Eg. If dec_str = 'net%C3%A9%C3%B9'
type - 'unicode' or 'str'
@retval
ret_dec_str = 'netéù'
type - unicode
"""
ret_dec_str = dec_str
try:
if type(ret_dec_str) is unicode:
ret_dec_str = str(ret_dec_str)
ret_dec_str = urllib.unquote_plus(ret_dec_str)
return ret_dec_str.decode(encoding)
except Exception:
return dec_str
class CacheContainer(object):
def __init__(self, size):
self.container_size = size
self.dictionary = OrderedDict()
def __getitem__(self, key, default=None):
value = self.dictionary[key]
# item accessed - put it in the front
del self.dictionary[key]
self.dictionary[key] = value
return value
def __setitem__(self, key, value):
self.dictionary[key] = value
if len(self.dictionary.keys()) > self.container_size:
# container is full, loose the least used item
self.dictionary.popitem(last=False)
def __contains__(self, key):
return key in self.dictionary
def __repr__(self):
return str(self.dictionary)
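# Added illustrative usage of the LRU-style container above (values are
# hypothetical):
#
#     >>> cache = CacheContainer(2)
#     >>> cache['a'] = 1
#     >>> cache['b'] = 2
#     >>> _ = cache['a']   # accessing 'a' makes it the most recently used
#     >>> cache['c'] = 3   # container is full: least recently used 'b' is evicted
#     >>> 'b' in cache
#     False
#     >>> 'a' in cache
#     True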
def CamelCase(input):
words = input.replace('_', '-').split('-')
name = ''
for w in words:
name += w.capitalize()
return name
# end CamelCase
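# Added illustrative values for CamelCase():
#
#     >>> CamelCase('virtual_network')
#     'VirtualNetwork'
#     >>> CamelCase('virtual-machine-interface')
#     'VirtualMachineInterface'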
def str_to_class(class_name, module_name):
try:
return reduce(getattr, class_name.split("."), sys.modules[module_name])
except Exception as e:
logger = logging.getLogger(module_name)
logger.warn("Exception: %s", str(e))
return None
# end str_to_class
def obj_type_to_vnc_class(obj_type, module_name):
return str_to_class(CamelCase(obj_type), module_name)
# end obj_type_to_vnc_class
def getCertKeyCaBundle(bundle, certs):
if os.path.isfile(bundle):
# Check if bundle needs to be replaced if
# constituent files were updated
bundle_is_stale = False
bundle_mod_time = os.path.getmtime(bundle)
for cert in certs:
if os.path.getmtime(cert) > bundle_mod_time:
bundle_is_stale = True
break
if not bundle_is_stale:
return bundle
try:
os.makedirs(os.path.dirname(bundle))
except OSError as e:
if e.errno != errno.EEXIST:
raise
with open(bundle, 'w') as ofile:
for cert in certs:
with open(cert) as ifile:
for line in ifile:
ofile.write(line)
os.chmod(bundle,0o777)
return bundle
# end CreateCertKeyCaBundle
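# Added illustrative call (paths are hypothetical): the bundle file is
# (re)written only when it is missing or older than any of its inputs, and the
# bundle path itself is returned.
#
#     >>> getCertKeyCaBundle('/var/lib/contrail/ca-bundle.pem',
#     ...                    ['/etc/contrail/ssl/certs/cert.pem',
#     ...                     '/etc/contrail/ssl/private/key.pem',
#     ...                     '/etc/contrail/ssl/certs/ca-cert.pem'])
#     '/var/lib/contrail/ca-bundle.pem'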
# <uuid> | "tenant-"<uuid> | "domain-"<uuid>
def shareinfo_from_perms2_tenant(field):
x = field.split(":")
if len(x) == 1:
x.insert(0, "tenant")
return x
# end
def shareinfo_from_perms2(field):
x = field.split(":")
if len(x) == 2:
x.insert(0, "tenant")
return x
# end
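# Added illustrative values (hypothetical short UUIDs): the helpers above
# default the share scope to "tenant" when no explicit scope is present.
#
#     >>> shareinfo_from_perms2_tenant('6fa1c9d2')
#     ['tenant', '6fa1c9d2']
#     >>> shareinfo_from_perms2_tenant('domain:6fa1c9d2')
#     ['domain', '6fa1c9d2']
#     >>> shareinfo_from_perms2('6fa1c9d2:7')
#     ['tenant', '6fa1c9d2', '7']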
def compare_refs(old_refs, new_refs):
# compare refs in an object
old_ref_dict = dict((':'.join(ref['to']), ref.get('attr')) for ref in old_refs or [])
new_ref_dict = dict((':'.join(ref['to']), ref.get('attr')) for ref in new_refs or [])
return old_ref_dict == new_ref_dict
# end compare_refs
def get_arg(args, name, default=None):
try:
kwarg = {name: eval('args.%s' % name)}
except AttributeError:
try:
kwarg = {name: args.get('KEYSTONE', name)}
except (NoOptionError, AttributeError):
kwarg = {name: default}
return kwarg
# end get_arg
def get_user_domain_kwargs(args):
user_domain = get_arg(args, 'user_domain_id')
if not user_domain.get('user_domain_id'):
user_domain = get_arg(args, 'user_domain_name', _DEFAULT_USER_DOMAIN_NAME)
return user_domain
# end get_user_domain_kwargs
def get_project_scope_kwargs(args):
scope_kwargs = {}
project_domain_name = get_arg(args, 'project_domain_name')
project_domain_id = get_arg(args, 'project_domain_id')
if project_domain_name.get('project_domain_name'):
# use project domain name
scope_kwargs.update(**project_domain_name)
elif project_domain_id.get('project_domain_id'):
# use project domain id
scope_kwargs.update(**project_domain_id)
if scope_kwargs:
admin_tenant_name = get_arg(args, 'admin_tenant_name')['admin_tenant_name']
project_name = get_arg(args, 'project_name', admin_tenant_name)
scope_kwargs.update(project_name)
return scope_kwargs
# end get_project_scope_kwargs
def get_domain_scope_kwargs(args):
scope_kwargs = {}
domain_name = get_arg(args, 'domain_name')
domain_id = get_arg(args, 'domain_id', _DEFAULT_DOMAIN_ID)
if domain_name.get('domain_name'):
# use domain name
scope_kwargs.update(**domain_name)
elif domain_id.get('domain_id'):
# use domain id
scope_kwargs.update(**domain_id)
return scope_kwargs
# end get_domain_scope_kwargs
| apache-2.0 | 4,898,338,699,762,080,000 | 27.285714 | 89 | 0.622655 | false |
chop-dbhi/varify-data-warehouse | vdw/variants/migrations/0004_auto__chg_field_evs_aa_ac_alt__chg_field_evs_ea_ac_alt__chg_field_evs_.py | 1 | 18632 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'EVS.aa_ac_alt'
db.alter_column('evs', 'aa_ac_alt', self.gf('django.db.models.fields.CharField')(max_length=20, null=True))
# Changing field 'EVS.ea_ac_alt'
db.alter_column('evs', 'ea_ac_alt', self.gf('django.db.models.fields.CharField')(max_length=20, null=True))
# Changing field 'EVS.aa_ac_ref'
db.alter_column('evs', 'aa_ac_ref', self.gf('django.db.models.fields.CharField')(max_length=20, null=True))
# Changing field 'EVS.all_ac_ref'
db.alter_column('evs', 'all_ac_ref', self.gf('django.db.models.fields.CharField')(max_length=20, null=True))
# Changing field 'EVS.all_ac_alt'
db.alter_column('evs', 'all_ac_alt', self.gf('django.db.models.fields.CharField')(max_length=20, null=True))
def backwards(self, orm):
# Changing field 'EVS.aa_ac_alt'
db.alter_column('evs', 'aa_ac_alt', self.gf('django.db.models.fields.IntegerField')(max_length=20, null=True))
# Changing field 'EVS.ea_ac_alt'
db.alter_column('evs', 'ea_ac_alt', self.gf('django.db.models.fields.IntegerField')(max_length=20, null=True))
# Changing field 'EVS.aa_ac_ref'
db.alter_column('evs', 'aa_ac_ref', self.gf('django.db.models.fields.IntegerField')(max_length=20, null=True))
# Changing field 'EVS.all_ac_ref'
db.alter_column('evs', 'all_ac_ref', self.gf('django.db.models.fields.IntegerField')(max_length=20, null=True))
# Changing field 'EVS.all_ac_alt'
db.alter_column('evs', 'all_ac_alt', self.gf('django.db.models.fields.IntegerField')(max_length=20, null=True))
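    # Usage sketch (not part of the generated migration): with South installed,
    # this schema change is applied or reverted from the command line, e.g.
    #   ./manage.py migrate variants 0004   # forwards
    #   ./manage.py migrate variants 0003   # backwards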
models = {
'genes.exon': {
'Meta': {'object_name': 'Exon', 'db_table': "'exon'"},
'end': ('django.db.models.fields.IntegerField', [], {}),
'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Gene']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index': ('django.db.models.fields.IntegerField', [], {}),
'start': ('django.db.models.fields.IntegerField', [], {})
},
'genes.gene': {
'Meta': {'object_name': 'Gene', 'db_table': "'gene'"},
'articles': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['literature.PubMed']", 'db_table': "'gene_pubmed'", 'symmetrical': 'False'}),
'chr': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genome.Chromosome']"}),
'families': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['genes.GeneFamily']", 'symmetrical': 'False', 'blank': 'True'}),
'hgnc_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'phenotypes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['phenotypes.Phenotype']", 'through': "orm['genes.GenePhenotype']", 'symmetrical': 'False'}),
'symbol': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'synonyms': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['genes.Synonym']", 'db_table': "'gene_synonym'", 'symmetrical': 'False'})
},
'genes.genefamily': {
'Meta': {'object_name': 'GeneFamily', 'db_table': "'gene_family'"},
'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tag': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True'})
},
'genes.genephenotype': {
'Meta': {'object_name': 'GenePhenotype', 'db_table': "'gene_phenotype'"},
'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Gene']"}),
'hgmd_id': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phenotype': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['phenotypes.Phenotype']"})
},
'genes.synonym': {
'Meta': {'object_name': 'Synonym', 'db_table': "'synonym'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'genes.transcript': {
'Meta': {'object_name': 'Transcript', 'db_table': "'transcript'"},
'coding_end': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'coding_end_status': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'coding_start': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'coding_start_status': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'end': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'exon_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'exons': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['genes.Exon']", 'db_table': "'transcript_exon'", 'symmetrical': 'False'}),
'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Gene']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'refseq_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'start': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'strand': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'})
},
'genome.chromosome': {
'Meta': {'ordering': "['order']", 'object_name': 'Chromosome', 'db_table': "'chromosome'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '2', 'db_index': 'True'})
},
'literature.pubmed': {
'Meta': {'object_name': 'PubMed', 'db_table': "'pubmed'"},
'pmid': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'})
},
'phenotypes.phenotype': {
'Meta': {'object_name': 'Phenotype', 'db_table': "'phenotype'"},
'articles': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['literature.PubMed']", 'symmetrical': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'hpo_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '1000'})
},
'variants.effect': {
'Meta': {'ordering': "['order']", 'object_name': 'Effect', 'db_table': "'effect'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'impact': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.EffectImpact']", 'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.EffectRegion']", 'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'variants.effectimpact': {
'Meta': {'ordering': "['order']", 'object_name': 'EffectImpact', 'db_table': "'effect_impact'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'variants.effectregion': {
'Meta': {'ordering': "['order']", 'object_name': 'EffectRegion', 'db_table': "'effect_region'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'variants.evs': {
'Meta': {'object_name': 'EVS', 'db_table': "'evs'"},
'aa_ac_alt': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'aa_ac_ref': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'aa_gtc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'aa_maf': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'all_ac_alt': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'all_ac_ref': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'all_gtc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'all_maf': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'clinical_association': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'ea_ac_alt': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'ea_ac_ref': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'ea_gtc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'ea_maf': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'gts': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'read_depth': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'variant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'evs'", 'to': "orm['variants.Variant']"})
},
'variants.functionalclass': {
'Meta': {'ordering': "['order']", 'object_name': 'FunctionalClass', 'db_table': "'variant_functional_class'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'variants.polyphen2': {
'Meta': {'object_name': 'PolyPhen2', 'db_table': "'polyphen2'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'refaa': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True'}),
'score': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'variant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polyphen2'", 'to': "orm['variants.Variant']"})
},
'variants.sift': {
'Meta': {'object_name': 'Sift', 'db_table': "'sift'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'refaa': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True'}),
'score': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'varaa': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True'}),
'variant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sift'", 'to': "orm['variants.Variant']"})
},
'variants.thousandg': {
'Meta': {'object_name': 'ThousandG', 'db_table': "'1000g'"},
'aa': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'ac': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'afr_af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'amr_af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'an': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'asn_af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'eur_af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'variant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'thousandg'", 'to': "orm['variants.Variant']"})
},
'variants.variant': {
'Meta': {'unique_together': "(('chr', 'pos', 'ref', 'alt'),)", 'object_name': 'Variant', 'db_table': "'variant'"},
'alt': ('django.db.models.fields.TextField', [], {'db_index': 'True'}),
'articles': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['literature.PubMed']", 'db_table': "'variant_pubmed'", 'symmetrical': 'False'}),
'chr': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genome.Chromosome']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'liftover': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'md5': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'phenotypes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['phenotypes.Phenotype']", 'through': "orm['variants.VariantPhenotype']", 'symmetrical': 'False'}),
'pos': ('django.db.models.fields.IntegerField', [], {}),
'ref': ('django.db.models.fields.TextField', [], {'db_index': 'True'}),
'rsid': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.VariantType']", 'null': 'True'})
},
'variants.varianteffect': {
'Meta': {'object_name': 'VariantEffect', 'db_table': "'variant_effect'"},
'amino_acid_change': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'codon_change': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'effect': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.Effect']", 'null': 'True', 'blank': 'True'}),
'exon': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Exon']", 'null': 'True', 'blank': 'True'}),
'functional_class': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.FunctionalClass']", 'null': 'True', 'blank': 'True'}),
'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Gene']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'transcript': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Transcript']", 'null': 'True', 'blank': 'True'}),
'variant': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'effects'", 'null': 'True', 'to': "orm['variants.Variant']"})
},
'variants.variantphenotype': {
'Meta': {'object_name': 'VariantPhenotype', 'db_table': "'variant_phenotype'"},
'hgmd_id': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phenotype': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['phenotypes.Phenotype']"}),
'variant': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.Variant']"})
},
'variants.varianttype': {
'Meta': {'ordering': "['order']", 'object_name': 'VariantType', 'db_table': "'variant_type'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '20'})
}
}
complete_apps = ['variants']
| bsd-2-clause | -7,884,365,182,716,240,000 | 75.04898 | 192 | 0.543581 | false |
dkrisman/Traipse | mercurial/portable_hgweb/wsgicgi.py | 1 | 2280 | # hgweb/wsgicgi.py - CGI->WSGI translator
#
# Copyright 2006 Eric Hopper <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.
#
# This was originally copied from the public domain code at
# http://www.python.org/dev/peps/pep-0333/#the-server-gateway-side
import os, sys
from upmana.mercurial import util
def launch(application):
util.set_binary(sys.stdin)
util.set_binary(sys.stdout)
environ = dict(os.environ.iteritems())
environ.setdefault('PATH_INFO', '')
if '.cgi' in environ['PATH_INFO']:
environ['PATH_INFO'] = environ['PATH_INFO'].split('.cgi', 1)[1]
environ['wsgi.input'] = sys.stdin
environ['wsgi.errors'] = sys.stderr
environ['wsgi.version'] = (1, 0)
environ['wsgi.multithread'] = False
environ['wsgi.multiprocess'] = True
environ['wsgi.run_once'] = True
if environ.get('HTTPS','off').lower() in ('on','1','yes'):
environ['wsgi.url_scheme'] = 'https'
else:
environ['wsgi.url_scheme'] = 'http'
headers_set = []
headers_sent = []
out = sys.stdout
def write(data):
if not headers_set:
raise AssertionError("write() before start_response()")
elif not headers_sent:
# Before the first output, send the stored headers
status, response_headers = headers_sent[:] = headers_set
out.write('Status: %s\r\n' % status)
for header in response_headers:
out.write('%s: %s\r\n' % header)
out.write('\r\n')
out.write(data)
out.flush()
def start_response(status, response_headers, exc_info=None):
if exc_info:
try:
if headers_sent:
# Re-raise original exception if headers sent
raise exc_info[0](exc_info[1], exc_info[2])
finally:
exc_info = None # avoid dangling circular ref
elif headers_set:
raise AssertionError("Headers already set!")
headers_set[:] = [status, response_headers]
return write
content = application(environ, start_response)
for chunk in content:
write(chunk)
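# Usage sketch (hypothetical, not part of the original module): launch() accepts
# any WSGI application callable, e.g.
#   def app(environ, start_response):
#       start_response('200 OK', [('Content-Type', 'text/plain')])
#       return ['hello world\n']
#   launch(app)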
| gpl-2.0 | 6,717,463,400,556,089,000 | 31.571429 | 73 | 0.605263 | false |
blxlrsmb/myap.ml | apml-client/pack.py | 1 | 1076 | #!/usr/bin/env python2
# -*- coding: UTF-8 -*-
# File: pack.py
# Date: Sat Jun 06 16:35:44 2015 +0800
# Author: Yuxin Wu <[email protected]>
from collections import defaultdict
class EventPacker(object):
""" Only record and pack result in a time interval"""
def __init__(self):
self.key_cnt = defaultdict(int)
self.mouse_cnt = defaultdict(int)
self.last_time = None
self.start = None
def count(self):
return sum(self.key_cnt.itervalues()) \
+ sum(self.mouse_cnt.itervalues())
def add_key(self, time, window):
if not self.start:
self.start = time
self.last_time = time
self.key_cnt[window] += 1
def add_mouse(self, time, window):
if not self.start:
self.start = time
self.last_time = time
self.mouse_cnt[window] += 1
def dump(self):
dic = {'mouse': dict(self.mouse_cnt),
'key': dict(self.key_cnt),
'start': self.start,
'end': self.last_time}
return dic
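# Illustrative usage (hypothetical window names and timestamps, not part of the
# original module):
#   packer = EventPacker()
#   packer.add_key(1433577600.0, 'urxvt')
#   packer.add_mouse(1433577601.0, 'firefox')
#   packer.count()  # -> 2
#   packer.dump()   # -> {'mouse': {'firefox': 1}, 'key': {'urxvt': 1},
#                   #     'start': 1433577600.0, 'end': 1433577601.0}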
| mit | 7,399,868,955,988,437,000 | 25.243902 | 57 | 0.557621 | false |
vsajip/django | django/test/testcases.py | 1 | 46844 | from __future__ import unicode_literals
import difflib
import json
import os
import re
import sys
from copy import copy
from functools import wraps
try:
from urllib.parse import urlsplit, urlunsplit
except ImportError: # Python 2
from urlparse import urlsplit, urlunsplit
from xml.dom.minidom import parseString, Node
import select
import socket
import threading
import errno
from django.conf import settings
from django.contrib.staticfiles.handlers import StaticFilesHandler
from django.core import mail
from django.core.exceptions import ValidationError, ImproperlyConfigured
from django.core.handlers.wsgi import WSGIHandler
from django.core.management import call_command
from django.core.management.color import no_style
from django.core.signals import request_started
from django.core.servers.basehttp import (WSGIRequestHandler, WSGIServer,
WSGIServerException)
from django.core.urlresolvers import clear_url_caches
from django.core.validators import EMPTY_VALUES
from django.db import (transaction, connection, connections, DEFAULT_DB_ALIAS,
reset_queries)
from django.forms.fields import CharField
from django.http import QueryDict
from django.test import _doctest as doctest
from django.test.client import Client
from django.test.html import HTMLParseError, parse_html
from django.test.signals import template_rendered
from django.test.utils import (get_warnings_state, restore_warnings_state,
override_settings)
from django.test.utils import ContextList
from django.utils import unittest as ut2
from django.utils.encoding import smart_str, force_unicode
from django.utils import six
from django.utils.unittest.util import safe_repr
from django.views.static import serve
__all__ = ('DocTestRunner', 'OutputChecker', 'TestCase', 'TransactionTestCase',
'SimpleTestCase', 'skipIfDBFeature', 'skipUnlessDBFeature')
normalize_long_ints = lambda s: re.sub(r'(?<![\w])(\d+)L(?![\w])', '\\1', s)
normalize_decimals = lambda s: re.sub(r"Decimal\('(\d+(\.\d*)?)'\)",
lambda m: "Decimal(\"%s\")" % m.groups()[0], s)
def to_list(value):
"""
Puts value into a list if it's not already one.
Returns an empty list if value is None.
"""
if value is None:
value = []
elif not isinstance(value, list):
value = [value]
return value
real_commit = transaction.commit
real_rollback = transaction.rollback
real_enter_transaction_management = transaction.enter_transaction_management
real_leave_transaction_management = transaction.leave_transaction_management
real_managed = transaction.managed
def nop(*args, **kwargs):
return
def disable_transaction_methods():
transaction.commit = nop
transaction.rollback = nop
transaction.enter_transaction_management = nop
transaction.leave_transaction_management = nop
transaction.managed = nop
def restore_transaction_methods():
transaction.commit = real_commit
transaction.rollback = real_rollback
transaction.enter_transaction_management = real_enter_transaction_management
transaction.leave_transaction_management = real_leave_transaction_management
transaction.managed = real_managed
def assert_and_parse_html(self, html, user_msg, msg):
try:
dom = parse_html(html)
except HTMLParseError as e:
standardMsg = '%s\n%s' % (msg, e.msg)
self.fail(self._formatMessage(user_msg, standardMsg))
return dom
class OutputChecker(doctest.OutputChecker):
def check_output(self, want, got, optionflags):
"""
The entry method for doctest output checking. Defers to a sequence of
child checkers
"""
checks = (self.check_output_default,
self.check_output_numeric,
self.check_output_xml,
self.check_output_json)
for check in checks:
if check(want, got, optionflags):
return True
return False
def check_output_default(self, want, got, optionflags):
"""
The default comparator provided by doctest - not perfect, but good for
most purposes
"""
return doctest.OutputChecker.check_output(self, want, got, optionflags)
def check_output_numeric(self, want, got, optionflags):
"""Doctest does an exact string comparison of output, which means that
some numerically equivalent values aren't equal. This check normalizes
* long integers (22L) so that they equal normal integers. (22)
* Decimals so that they are comparable, regardless of the change
made to __repr__ in Python 2.6.
"""
return doctest.OutputChecker.check_output(self,
normalize_decimals(normalize_long_ints(want)),
normalize_decimals(normalize_long_ints(got)),
optionflags)
def check_output_xml(self, want, got, optionsflags):
"""Tries to do a 'xml-comparision' of want and got. Plain string
comparision doesn't always work because, for example, attribute
ordering should not be important.
Based on http://codespeak.net/svn/lxml/trunk/src/lxml/doctestcompare.py
"""
_norm_whitespace_re = re.compile(r'[ \t\n][ \t\n]+')
def norm_whitespace(v):
return _norm_whitespace_re.sub(' ', v)
def child_text(element):
return ''.join([c.data for c in element.childNodes
if c.nodeType == Node.TEXT_NODE])
def children(element):
return [c for c in element.childNodes
if c.nodeType == Node.ELEMENT_NODE]
def norm_child_text(element):
return norm_whitespace(child_text(element))
def attrs_dict(element):
return dict(element.attributes.items())
def check_element(want_element, got_element):
if want_element.tagName != got_element.tagName:
return False
if norm_child_text(want_element) != norm_child_text(got_element):
return False
if attrs_dict(want_element) != attrs_dict(got_element):
return False
want_children = children(want_element)
got_children = children(got_element)
if len(want_children) != len(got_children):
return False
for want, got in zip(want_children, got_children):
if not check_element(want, got):
return False
return True
want, got = self._strip_quotes(want, got)
want = want.replace('\\n','\n')
got = got.replace('\\n','\n')
# If the string is not a complete xml document, we may need to add a
# root element. This allow us to compare fragments, like "<foo/><bar/>"
if not want.startswith('<?xml'):
wrapper = '<root>%s</root>'
want = wrapper % want
got = wrapper % got
# Parse the want and got strings, and compare the parsings.
try:
want_root = parseString(want).firstChild
got_root = parseString(got).firstChild
except Exception:
return False
return check_element(want_root, got_root)
def check_output_json(self, want, got, optionsflags):
"""
Tries to compare want and got as if they were JSON-encoded data
"""
want, got = self._strip_quotes(want, got)
try:
want_json = json.loads(want)
got_json = json.loads(got)
except Exception:
return False
return want_json == got_json
def _strip_quotes(self, want, got):
"""
Strip quotes of doctests output values:
>>> o = OutputChecker()
>>> o._strip_quotes("'foo'")
"foo"
>>> o._strip_quotes('"foo"')
"foo"
"""
def is_quoted_string(s):
s = s.strip()
return (len(s) >= 2
and s[0] == s[-1]
and s[0] in ('"', "'"))
def is_quoted_unicode(s):
s = s.strip()
return (len(s) >= 3
and s[0] == 'u'
and s[1] == s[-1]
and s[1] in ('"', "'"))
if is_quoted_string(want) and is_quoted_string(got):
want = want.strip()[1:-1]
got = got.strip()[1:-1]
elif is_quoted_unicode(want) and is_quoted_unicode(got):
want = want.strip()[2:-1]
got = got.strip()[2:-1]
return want, got
class DocTestRunner(doctest.DocTestRunner):
def __init__(self, *args, **kwargs):
doctest.DocTestRunner.__init__(self, *args, **kwargs)
self.optionflags = doctest.ELLIPSIS
def report_unexpected_exception(self, out, test, example, exc_info):
doctest.DocTestRunner.report_unexpected_exception(self, out, test,
example, exc_info)
# Rollback, in case of database errors. Otherwise they'd have
# side effects on other tests.
for conn in connections:
transaction.rollback_unless_managed(using=conn)
class _AssertNumQueriesContext(object):
def __init__(self, test_case, num, connection):
self.test_case = test_case
self.num = num
self.connection = connection
def __enter__(self):
self.old_debug_cursor = self.connection.use_debug_cursor
self.connection.use_debug_cursor = True
self.starting_queries = len(self.connection.queries)
request_started.disconnect(reset_queries)
return self
def __exit__(self, exc_type, exc_value, traceback):
self.connection.use_debug_cursor = self.old_debug_cursor
request_started.connect(reset_queries)
if exc_type is not None:
return
final_queries = len(self.connection.queries)
executed = final_queries - self.starting_queries
self.test_case.assertEqual(
executed, self.num, "%d queries executed, %d expected" % (
executed, self.num
)
)
class _AssertTemplateUsedContext(object):
def __init__(self, test_case, template_name):
self.test_case = test_case
self.template_name = template_name
self.rendered_templates = []
self.rendered_template_names = []
self.context = ContextList()
def on_template_render(self, sender, signal, template, context, **kwargs):
self.rendered_templates.append(template)
self.rendered_template_names.append(template.name)
self.context.append(copy(context))
def test(self):
return self.template_name in self.rendered_template_names
def message(self):
return '%s was not rendered.' % self.template_name
def __enter__(self):
template_rendered.connect(self.on_template_render)
return self
def __exit__(self, exc_type, exc_value, traceback):
template_rendered.disconnect(self.on_template_render)
if exc_type is not None:
return
if not self.test():
message = self.message()
if len(self.rendered_templates) == 0:
message += ' No template was rendered.'
else:
message += ' Following templates were rendered: %s' % (
', '.join(self.rendered_template_names))
self.test_case.fail(message)
class _AssertTemplateNotUsedContext(_AssertTemplateUsedContext):
def test(self):
return self.template_name not in self.rendered_template_names
def message(self):
return '%s was rendered.' % self.template_name
class SimpleTestCase(ut2.TestCase):
def save_warnings_state(self):
"""
Saves the state of the warnings module
"""
self._warnings_state = get_warnings_state()
def restore_warnings_state(self):
"""
Restores the state of the warnings module to the state
saved by save_warnings_state()
"""
restore_warnings_state(self._warnings_state)
def settings(self, **kwargs):
"""
A context manager that temporarily sets a setting and reverts
back to the original value when exiting the context.
"""
return override_settings(**kwargs)
def assertRaisesMessage(self, expected_exception, expected_message,
callable_obj=None, *args, **kwargs):
"""
Asserts that the message in a raised exception matches the passed
value.
Args:
expected_exception: Exception class expected to be raised.
expected_message: expected error message string value.
callable_obj: Function to be called.
args: Extra args.
kwargs: Extra kwargs.
"""
return self.assertRaisesRegexp(expected_exception,
re.escape(expected_message), callable_obj, *args, **kwargs)
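    # Illustrative usage (not part of the original module):
    #   with self.assertRaisesMessage(ValueError, "invalid literal"):
    #       int("abc")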
def assertFieldOutput(self, fieldclass, valid, invalid, field_args=None,
field_kwargs=None, empty_value=''):
"""
Asserts that a form field behaves correctly with various inputs.
Args:
fieldclass: the class of the field to be tested.
valid: a dictionary mapping valid inputs to their expected
cleaned values.
invalid: a dictionary mapping invalid inputs to one or more
raised error messages.
field_args: the args passed to instantiate the field
field_kwargs: the kwargs passed to instantiate the field
empty_value: the expected clean output for inputs in EMPTY_VALUES
"""
if field_args is None:
field_args = []
if field_kwargs is None:
field_kwargs = {}
required = fieldclass(*field_args, **field_kwargs)
optional = fieldclass(*field_args,
**dict(field_kwargs, required=False))
# test valid inputs
for input, output in valid.items():
self.assertEqual(required.clean(input), output)
self.assertEqual(optional.clean(input), output)
# test invalid inputs
for input, errors in invalid.items():
with self.assertRaises(ValidationError) as context_manager:
required.clean(input)
self.assertEqual(context_manager.exception.messages, errors)
with self.assertRaises(ValidationError) as context_manager:
optional.clean(input)
self.assertEqual(context_manager.exception.messages, errors)
# test required inputs
error_required = [force_unicode(required.error_messages['required'])]
for e in EMPTY_VALUES:
with self.assertRaises(ValidationError) as context_manager:
required.clean(e)
self.assertEqual(context_manager.exception.messages,
error_required)
self.assertEqual(optional.clean(e), empty_value)
# test that max_length and min_length are always accepted
if issubclass(fieldclass, CharField):
field_kwargs.update({'min_length':2, 'max_length':20})
self.assertTrue(isinstance(fieldclass(*field_args, **field_kwargs),
fieldclass))
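    # Illustrative usage (the error message shown is an assumption, not taken
    # from this file):
    #   self.assertFieldOutput(EmailField,
    #       valid={'a@a.com': 'a@a.com'},
    #       invalid={'aaa': ['Enter a valid e-mail address.']})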
def assertHTMLEqual(self, html1, html2, msg=None):
"""
Asserts that two HTML snippets are semantically the same.
Whitespace in most cases is ignored, and attribute ordering is not
significant. The passed-in arguments must be valid HTML.
"""
dom1 = assert_and_parse_html(self, html1, msg,
'First argument is not valid HTML:')
dom2 = assert_and_parse_html(self, html2, msg,
'Second argument is not valid HTML:')
if dom1 != dom2:
standardMsg = '%s != %s' % (
safe_repr(dom1, True), safe_repr(dom2, True))
diff = ('\n' + '\n'.join(difflib.ndiff(
six.text_type(dom1).splitlines(),
six.text_type(dom2).splitlines())))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertHTMLNotEqual(self, html1, html2, msg=None):
"""Asserts that two HTML snippets are not semantically equivalent."""
dom1 = assert_and_parse_html(self, html1, msg,
'First argument is not valid HTML:')
dom2 = assert_and_parse_html(self, html2, msg,
'Second argument is not valid HTML:')
if dom1 == dom2:
standardMsg = '%s == %s' % (
safe_repr(dom1, True), safe_repr(dom2, True))
self.fail(self._formatMessage(msg, standardMsg))
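    # Illustrative usage (not part of the original module): attribute order and
    # most whitespace are ignored, so this assertion passes:
    #   self.assertHTMLEqual('<p id="x" class="a">hi</p>',
    #                        '<p class="a" id="x">hi</p>')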
class TransactionTestCase(SimpleTestCase):
# The class we'll use for the test client self.client.
# Can be overridden in derived classes.
client_class = Client
# Subclasses can ask for resetting of auto increment sequence before each
# test case
reset_sequences = False
def _pre_setup(self):
"""Performs any pre-test setup. This includes:
* Flushing the database.
* If the Test Case class has a 'fixtures' member, installing the
named fixtures.
* If the Test Case class has a 'urls' member, replace the
ROOT_URLCONF with it.
* Clearing the mail test outbox.
"""
self._fixture_setup()
self._urlconf_setup()
mail.outbox = []
def _reset_sequences(self, db_name):
conn = connections[db_name]
if conn.features.supports_sequence_reset:
sql_list = \
conn.ops.sequence_reset_by_name_sql(no_style(),
conn.introspection.sequence_list())
if sql_list:
try:
cursor = conn.cursor()
for sql in sql_list:
cursor.execute(sql)
except Exception:
transaction.rollback_unless_managed(using=db_name)
raise
transaction.commit_unless_managed(using=db_name)
def _fixture_setup(self):
# If the test case has a multi_db=True flag, act on all databases.
# Otherwise, just on the default DB.
db_names = connections if getattr(self, 'multi_db', False) else [DEFAULT_DB_ALIAS]
for db_name in db_names:
# Reset sequences
if self.reset_sequences:
self._reset_sequences(db_name)
if hasattr(self, 'fixtures'):
# We have to use this slightly awkward syntax due to the fact
# that we're using *args and **kwargs together.
call_command('loaddata', *self.fixtures,
**{'verbosity': 0, 'database': db_name, 'skip_validation': True})
def _urlconf_setup(self):
if hasattr(self, 'urls'):
self._old_root_urlconf = settings.ROOT_URLCONF
settings.ROOT_URLCONF = self.urls
clear_url_caches()
def __call__(self, result=None):
"""
Wrapper around default __call__ method to perform common Django test
set up. This means that user-defined Test Cases aren't required to
include a call to super().setUp().
"""
testMethod = getattr(self, self._testMethodName)
skipped = (getattr(self.__class__, "__unittest_skip__", False) or
getattr(testMethod, "__unittest_skip__", False))
if not skipped:
self.client = self.client_class()
try:
self._pre_setup()
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
result.addError(self, sys.exc_info())
return
super(TransactionTestCase, self).__call__(result)
if not skipped:
try:
self._post_teardown()
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
result.addError(self, sys.exc_info())
return
def _post_teardown(self):
""" Performs any post-test things. This includes:
* Putting back the original ROOT_URLCONF if it was changed.
* Force closing the connection, so that the next test gets
a clean cursor.
"""
self._fixture_teardown()
self._urlconf_teardown()
# Some DB cursors include SQL statements as part of cursor
# creation. If you have a test that does rollback, the effect
        # of these statements is lost, which can affect the operation
# of tests (e.g., losing a timezone setting causing objects to
# be created with the wrong time).
# To make sure this doesn't happen, get a clean connection at the
# start of every test.
for conn in connections.all():
conn.close()
def _fixture_teardown(self):
# If the test case has a multi_db=True flag, flush all databases.
# Otherwise, just flush default.
databases = connections if getattr(self, 'multi_db', False) else [DEFAULT_DB_ALIAS]
for db in databases:
call_command('flush', verbosity=0, interactive=False, database=db,
skip_validation=True, reset_sequences=False)
def _urlconf_teardown(self):
if hasattr(self, '_old_root_urlconf'):
settings.ROOT_URLCONF = self._old_root_urlconf
clear_url_caches()
def assertRedirects(self, response, expected_url, status_code=302,
target_status_code=200, host=None, msg_prefix=''):
"""Asserts that a response redirected to a specific URL, and that the
redirect URL can be loaded.
Note that assertRedirects won't work for external links since it uses
TestClient to do a request.
"""
if msg_prefix:
msg_prefix += ": "
if hasattr(response, 'redirect_chain'):
# The request was a followed redirect
self.assertTrue(len(response.redirect_chain) > 0,
msg_prefix + "Response didn't redirect as expected: Response"
" code was %d (expected %d)" %
(response.status_code, status_code))
self.assertEqual(response.redirect_chain[0][1], status_code,
msg_prefix + "Initial response didn't redirect as expected:"
" Response code was %d (expected %d)" %
(response.redirect_chain[0][1], status_code))
url, status_code = response.redirect_chain[-1]
self.assertEqual(response.status_code, target_status_code,
msg_prefix + "Response didn't redirect as expected: Final"
" Response code was %d (expected %d)" %
(response.status_code, target_status_code))
else:
# Not a followed redirect
self.assertEqual(response.status_code, status_code,
msg_prefix + "Response didn't redirect as expected: Response"
" code was %d (expected %d)" %
(response.status_code, status_code))
url = response['Location']
scheme, netloc, path, query, fragment = urlsplit(url)
redirect_response = response.client.get(path, QueryDict(query))
# Get the redirection page, using the same client that was used
# to obtain the original response.
self.assertEqual(redirect_response.status_code, target_status_code,
msg_prefix + "Couldn't retrieve redirection page '%s':"
" response code was %d (expected %d)" %
(path, redirect_response.status_code, target_status_code))
e_scheme, e_netloc, e_path, e_query, e_fragment = urlsplit(
expected_url)
if not (e_scheme or e_netloc):
expected_url = urlunsplit(('http', host or 'testserver', e_path,
e_query, e_fragment))
self.assertEqual(url, expected_url,
msg_prefix + "Response redirected to '%s', expected '%s'" %
(url, expected_url))
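    # Illustrative usage (hypothetical URLs, not part of the original module):
    #   response = self.client.get('/accounts/profile/')
    #   self.assertRedirects(response, '/accounts/login/?next=/accounts/profile/')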
def assertContains(self, response, text, count=None, status_code=200,
msg_prefix='', html=False):
"""
Asserts that a response indicates that some content was retrieved
successfully, (i.e., the HTTP status code was as expected), and that
``text`` occurs ``count`` times in the content of the response.
If ``count`` is None, the count doesn't matter - the assertion is true
if the text occurs at least once in the response.
"""
# If the response supports deferred rendering and hasn't been rendered
# yet, then ensure that it does get rendered before proceeding further.
if (hasattr(response, 'render') and callable(response.render)
and not response.is_rendered):
response.render()
if msg_prefix:
msg_prefix += ": "
self.assertEqual(response.status_code, status_code,
msg_prefix + "Couldn't retrieve content: Response code was %d"
" (expected %d)" % (response.status_code, status_code))
content = response.content
if not html:
btext = smart_str(text, response._charset)
else:
if six.PY3 and not isinstance(content, six.text_type):
content = content.decode(response._charset)
content = assert_and_parse_html(self, content, None,
"Response's content is not valid HTML:")
btext = assert_and_parse_html(self, text, None,
"Second argument is not valid HTML:")
real_count = content.count(btext)
if count is not None:
self.assertEqual(real_count, count,
msg_prefix + "Found %d instances of '%s' in response"
" (expected %d)" % (real_count, text, count))
else:
self.assertTrue(real_count != 0,
msg_prefix + "Couldn't find '%s' in response" % text)
def assertNotContains(self, response, text, status_code=200,
msg_prefix='', html=False):
"""
Asserts that a response indicates that some content was retrieved
successfully, (i.e., the HTTP status code was as expected), and that
        ``text`` doesn't occur in the content of the response.
"""
# If the response supports deferred rendering and hasn't been rendered
# yet, then ensure that it does get rendered before proceeding further.
if (hasattr(response, 'render') and callable(response.render)
and not response.is_rendered):
response.render()
if msg_prefix:
msg_prefix += ": "
self.assertEqual(response.status_code, status_code,
msg_prefix + "Couldn't retrieve content: Response code was %d"
" (expected %d)" % (response.status_code, status_code))
btext = smart_str(text, response._charset)
content = response.content
if html:
if six.PY3 and not isinstance(content, six.text_type):
content = content.decode(response._charset)
content = assert_and_parse_html(self, content, None,
'Response\'s content is not valid HTML:')
text = assert_and_parse_html(self, text, None,
'Second argument is not valid HTML:')
self.assertEqual(content.count(btext), 0,
msg_prefix + "Response should not contain '%s'" % text)
def assertFormError(self, response, form, field, errors, msg_prefix=''):
"""
Asserts that a form used to render the response has a specific field
error.
"""
if msg_prefix:
msg_prefix += ": "
# Put context(s) into a list to simplify processing.
contexts = to_list(response.context)
if not contexts:
self.fail(msg_prefix + "Response did not use any contexts to "
"render the response")
# Put error(s) into a list to simplify processing.
errors = to_list(errors)
# Search all contexts for the error.
found_form = False
for i,context in enumerate(contexts):
if form not in context:
continue
found_form = True
for err in errors:
if field:
if field in context[form].errors:
field_errors = context[form].errors[field]
self.assertTrue(err in field_errors,
msg_prefix + "The field '%s' on form '%s' in"
" context %d does not contain the error '%s'"
" (actual errors: %s)" %
(field, form, i, err, repr(field_errors)))
elif field in context[form].fields:
self.fail(msg_prefix + "The field '%s' on form '%s'"
" in context %d contains no errors" %
(field, form, i))
else:
self.fail(msg_prefix + "The form '%s' in context %d"
" does not contain the field '%s'" %
(form, i, field))
else:
non_field_errors = context[form].non_field_errors()
self.assertTrue(err in non_field_errors,
msg_prefix + "The form '%s' in context %d does not"
" contain the non-field error '%s'"
" (actual errors: %s)" %
(form, i, err, non_field_errors))
if not found_form:
self.fail(msg_prefix + "The form '%s' was not used to render the"
" response" % form)
def assertTemplateUsed(self, response=None, template_name=None, msg_prefix=''):
"""
Asserts that the template with the provided name was used in rendering
the response. Also usable as context manager.
"""
if response is None and template_name is None:
raise TypeError('response and/or template_name argument must be provided')
if msg_prefix:
msg_prefix += ": "
# Use assertTemplateUsed as context manager.
if not hasattr(response, 'templates') or (response is None and template_name):
if response:
template_name = response
response = None
context = _AssertTemplateUsedContext(self, template_name)
return context
template_names = [t.name for t in response.templates]
if not template_names:
self.fail(msg_prefix + "No templates used to render the response")
self.assertTrue(template_name in template_names,
msg_prefix + "Template '%s' was not a template used to render"
" the response. Actual template(s) used: %s" %
(template_name, ', '.join(template_names)))
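    # Illustrative usage (hypothetical template name, not part of the original
    # module):
    #   response = self.client.get('/')
    #   self.assertTemplateUsed(response, 'index.html')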
def assertTemplateNotUsed(self, response=None, template_name=None, msg_prefix=''):
"""
Asserts that the template with the provided name was NOT used in
rendering the response. Also usable as context manager.
"""
if response is None and template_name is None:
raise TypeError('response and/or template_name argument must be provided')
if msg_prefix:
msg_prefix += ": "
# Use assertTemplateUsed as context manager.
if not hasattr(response, 'templates') or (response is None and template_name):
if response:
template_name = response
response = None
context = _AssertTemplateNotUsedContext(self, template_name)
return context
template_names = [t.name for t in response.templates]
self.assertFalse(template_name in template_names,
msg_prefix + "Template '%s' was used unexpectedly in rendering"
" the response" % template_name)
def assertQuerysetEqual(self, qs, values, transform=repr, ordered=True):
if not ordered:
return self.assertEqual(set(map(transform, qs)), set(values))
return self.assertEqual(six.lmap(transform, qs), values)
def assertNumQueries(self, num, func=None, *args, **kwargs):
using = kwargs.pop("using", DEFAULT_DB_ALIAS)
conn = connections[using]
context = _AssertNumQueriesContext(self, num, conn)
if func is None:
return context
with context:
func(*args, **kwargs)
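    # Illustrative usage (hypothetical model, not part of the original module):
    #   with self.assertNumQueries(2):
    #       Person.objects.create(name="a")
    #       Person.objects.create(name="b")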
def connections_support_transactions():
"""
Returns True if all connections support transactions.
"""
return all(conn.features.supports_transactions
for conn in connections.all())
class TestCase(TransactionTestCase):
"""
Does basically the same as TransactionTestCase, but surrounds every test
with a transaction, monkey-patches the real transaction management routines
    to do nothing, and rolls back the test transaction at the end of the test.
You have to use TransactionTestCase, if you need transaction management
inside a test.
"""
def _fixture_setup(self):
if not connections_support_transactions():
return super(TestCase, self)._fixture_setup()
assert not self.reset_sequences, 'reset_sequences cannot be used on TestCase instances'
# If the test case has a multi_db=True flag, setup all databases.
# Otherwise, just use default.
db_names = connections if getattr(self, 'multi_db', False) else [DEFAULT_DB_ALIAS]
for db_name in db_names:
transaction.enter_transaction_management(using=db_name)
transaction.managed(True, using=db_name)
disable_transaction_methods()
from django.contrib.sites.models import Site
Site.objects.clear_cache()
for db in db_names:
if hasattr(self, 'fixtures'):
call_command('loaddata', *self.fixtures,
**{
'verbosity': 0,
'commit': False,
'database': db,
'skip_validation': True,
})
def _fixture_teardown(self):
if not connections_support_transactions():
return super(TestCase, self)._fixture_teardown()
# If the test case has a multi_db=True flag, teardown all databases.
# Otherwise, just teardown default.
if getattr(self, 'multi_db', False):
databases = connections
else:
databases = [DEFAULT_DB_ALIAS]
restore_transaction_methods()
for db in databases:
transaction.rollback(using=db)
transaction.leave_transaction_management(using=db)
def _deferredSkip(condition, reason):
def decorator(test_func):
if not (isinstance(test_func, type) and
issubclass(test_func, TestCase)):
@wraps(test_func)
def skip_wrapper(*args, **kwargs):
if condition():
raise ut2.SkipTest(reason)
return test_func(*args, **kwargs)
test_item = skip_wrapper
else:
test_item = test_func
test_item.__unittest_skip_why__ = reason
return test_item
return decorator
def skipIfDBFeature(feature):
"""
Skip a test if a database has the named feature
"""
return _deferredSkip(lambda: getattr(connection.features, feature),
"Database has feature %s" % feature)
def skipUnlessDBFeature(feature):
"""
Skip a test unless a database has the named feature
"""
return _deferredSkip(lambda: not getattr(connection.features, feature),
"Database doesn't support feature %s" % feature)
class QuietWSGIRequestHandler(WSGIRequestHandler):
"""
Just a regular WSGIRequestHandler except it doesn't log to the standard
output any of the requests received, so as to not clutter the output for
the tests' results.
"""
def log_message(*args):
pass
if sys.version_info[:2] < (3, 3):
_EventBase = threading._Event
else:
_EventBase = threading.Event
class _ImprovedEvent(_EventBase):
"""
Does the same as `threading.Event` except it overrides the wait() method
with some code borrowed from Python 2.7 to return the set state of the
event (see: http://hg.python.org/cpython/rev/b5aa8aa78c0f/). This allows
to know whether the wait() method exited normally or because of the
timeout. This class can be removed when Django supports only Python >= 2.7.
"""
if sys.version_info[:2] < (2, 7):
def wait(self, timeout=None):
self._Event__cond.acquire()
try:
if not self._Event__flag:
self._Event__cond.wait(timeout)
return self._Event__flag
finally:
self._Event__cond.release()
class StoppableWSGIServer(WSGIServer):
"""
The code in this class is borrowed from the `SocketServer.BaseServer` class
in Python 2.6. The important functionality here is that the server is non-
blocking and that it can be shut down at any moment. This is made possible
by the server regularly polling the socket and checking if it has been
asked to stop.
Note for the future: Once Django stops supporting Python 2.6, this class
can be removed as `WSGIServer` will have this ability to shutdown on
demand and will not require the use of the _ImprovedEvent class whose code
is borrowed from Python 2.7.
"""
def __init__(self, *args, **kwargs):
super(StoppableWSGIServer, self).__init__(*args, **kwargs)
self.__is_shut_down = _ImprovedEvent()
self.__serving = False
def serve_forever(self, poll_interval=0.5):
"""
Handle one request at a time until shutdown.
Polls for shutdown every poll_interval seconds.
"""
self.__serving = True
self.__is_shut_down.clear()
while self.__serving:
r, w, e = select.select([self], [], [], poll_interval)
if r:
self._handle_request_noblock()
self.__is_shut_down.set()
def shutdown(self):
"""
Stops the serve_forever loop.
Blocks until the loop has finished. This must be called while
serve_forever() is running in another thread, or it will
deadlock.
"""
self.__serving = False
if not self.__is_shut_down.wait(2):
raise RuntimeError(
"Failed to shutdown the live test server in 2 seconds. The "
"server might be stuck or generating a slow response.")
def handle_request(self):
"""Handle one request, possibly blocking.
"""
fd_sets = select.select([self], [], [], None)
if not fd_sets[0]:
return
self._handle_request_noblock()
def _handle_request_noblock(self):
"""
Handle one request, without blocking.
I assume that select.select has returned that the socket is
readable before this function was called, so there should be
no risk of blocking in get_request().
"""
try:
request, client_address = self.get_request()
except socket.error:
return
if self.verify_request(request, client_address):
try:
self.process_request(request, client_address)
except Exception:
self.handle_error(request, client_address)
self.close_request(request)
class _MediaFilesHandler(StaticFilesHandler):
"""
Handler for serving the media files. This is a private class that is
meant to be used solely as a convenience by LiveServerThread.
"""
def get_base_dir(self):
return settings.MEDIA_ROOT
def get_base_url(self):
return settings.MEDIA_URL
def serve(self, request):
relative_url = request.path[len(self.base_url[2]):]
return serve(request, relative_url, document_root=self.get_base_dir())
class LiveServerThread(threading.Thread):
"""
Thread for running a live http server while the tests are running.
"""
def __init__(self, host, possible_ports, connections_override=None):
self.host = host
self.port = None
self.possible_ports = possible_ports
self.is_ready = threading.Event()
self.error = None
self.connections_override = connections_override
super(LiveServerThread, self).__init__()
def run(self):
"""
Sets up the live server and databases, and then loops over handling
http requests.
"""
if self.connections_override:
from django.db import connections
# Override this thread's database connections with the ones
# provided by the main thread.
for alias, conn in self.connections_override.items():
connections[alias] = conn
try:
# Create the handler for serving static and media files
handler = StaticFilesHandler(_MediaFilesHandler(WSGIHandler()))
# Go through the list of possible ports, hoping that we can find
# one that is free to use for the WSGI server.
for index, port in enumerate(self.possible_ports):
try:
self.httpd = StoppableWSGIServer(
(self.host, port), QuietWSGIRequestHandler)
except WSGIServerException as e:
if (index + 1 < len(self.possible_ports) and
e.args[0].errno == errno.EADDRINUSE):
# This port is already in use, so we go on and try with
# the next one in the list.
continue
else:
# Either none of the given ports are free or the error
# is something else than "Address already in use". So
# we let that error bubble up to the main thread.
raise
else:
# A free port was found.
self.port = port
break
self.httpd.set_app(handler)
self.is_ready.set()
self.httpd.serve_forever()
except Exception as e:
self.error = e
self.is_ready.set()
def join(self, timeout=None):
if hasattr(self, 'httpd'):
# Stop the WSGI server
self.httpd.shutdown()
self.httpd.server_close()
super(LiveServerThread, self).join(timeout)
class LiveServerTestCase(TransactionTestCase):
"""
Does basically the same as TransactionTestCase but also launches a live
http server in a separate thread so that the tests may use another testing
framework, such as Selenium for example, instead of the built-in dummy
client.
Note that it inherits from TransactionTestCase instead of TestCase because
    the threads do not share the same transactions (unless using in-memory
sqlite) and each thread needs to commit all their transactions so that the
other thread can see the changes.
"""
@property
def live_server_url(self):
return 'http://%s:%s' % (
self.server_thread.host, self.server_thread.port)
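    # Illustrative usage (hypothetical Selenium setup configured by the test
    # class itself; not part of the original module):
    #   self.selenium.get('%s%s' % (self.live_server_url, '/login/'))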
@classmethod
def setUpClass(cls):
connections_override = {}
for conn in connections.all():
# If using in-memory sqlite databases, pass the connections to
# the server thread.
if (conn.settings_dict['ENGINE'] == 'django.db.backends.sqlite3'
and conn.settings_dict['NAME'] == ':memory:'):
# Explicitly enable thread-shareability for this connection
conn.allow_thread_sharing = True
connections_override[conn.alias] = conn
# Launch the live server's thread
specified_address = os.environ.get(
'DJANGO_LIVE_TEST_SERVER_ADDRESS', 'localhost:8081')
# The specified ports may be of the form '8000-8010,8080,9200-9300'
# i.e. a comma-separated list of ports or ranges of ports, so we break
# it down into a detailed list of all possible ports.
possible_ports = []
try:
host, port_ranges = specified_address.split(':')
for port_range in port_ranges.split(','):
# A port range can be of either form: '8000' or '8000-8010'.
extremes = six.lmap(int, port_range.split('-'))
assert len(extremes) in [1, 2]
if len(extremes) == 1:
# Port range of the form '8000'
possible_ports.append(extremes[0])
else:
# Port range of the form '8000-8010'
for port in range(extremes[0], extremes[1] + 1):
possible_ports.append(port)
except Exception:
raise ImproperlyConfigured('Invalid address ("%s") for live '
'server.' % specified_address)
cls.server_thread = LiveServerThread(
host, possible_ports, connections_override)
cls.server_thread.daemon = True
cls.server_thread.start()
# Wait for the live server to be ready
cls.server_thread.is_ready.wait()
if cls.server_thread.error:
raise cls.server_thread.error
super(LiveServerTestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
# There may not be a 'server_thread' attribute if setUpClass() for some
# reasons has raised an exception.
if hasattr(cls, 'server_thread'):
# Terminate the live server's thread
cls.server_thread.join()
# Restore sqlite connections' non-sharability
for conn in connections.all():
if (conn.settings_dict['ENGINE'] == 'django.db.backends.sqlite3'
and conn.settings_dict['NAME'] == ':memory:'):
conn.allow_thread_sharing = False
super(LiveServerTestCase, cls).tearDownClass()
| bsd-3-clause | -7,669,179,324,741,329,000 | 38.49747 | 95 | 0.591303 | false |
cooljeanius/emacs | build-aux/vcstocl/vcs_git.py | 1 | 6308 | # Git repo support.
# Copyright (C) 2019-2020 Free Software Foundation, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import subprocess
import re
from vcstocl.misc_util import *
class GitRepo:
def __init__(self, ignore_list, debug):
self.ignore_list = ignore_list
self.debug = debug
def exec_git_cmd(self, args):
''' Execute a git command and return its result as a list of strings.
'''
args.insert(0, 'git')
self.debug.print(args)
proc = subprocess.Popen(args, stdout=subprocess.PIPE)
# Clean up the output by removing trailing spaces, newlines and dropping
# blank lines.
op = [decode(x[:-1]).strip() for x in proc.stdout]
op = [re.sub(r'[\s\f]+', ' ', x) for x in op]
op = [x for x in op if x]
return op
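    # Example (an illustrative sketch, not part of the original script):
    # self.exec_git_cmd(['log', '--oneline', '-5']) runs `git log --oneline -5`
    # and returns the last five commit subjects as a list of cleaned-up strings.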
def list_changes(self, commit, frontends):
''' List changes in a single commit.
For the input commit id COMMIT, identify the files that have changed and the
nature of their changes. Print commit information in the ChangeLog format,
calling into helper functions as necessary.
'''
op = self.exec_git_cmd(['show', '--pretty=fuller', '--date=short',
'--raw', commit])
authors = []
date = ''
merge = False
copyright_exempt=''
subject= ''
for l in op:
if l.lower().find('copyright-paperwork-exempt:') == 0 \
and 'yes' in l.lower():
copyright_exempt=' (tiny change)'
elif l.lower().find('co-authored-by:') == 0 or \
l.find('Author:') == 0:
author = l.split(':')[1]
author = re.sub(r'([^ ]*)\s*(<.*)', r'\1 \2', author.strip())
authors.append(author)
elif l.find('CommitDate:') == 0:
date = l[11:].strip()
elif l.find('Merge:') == 0:
merge = True
elif not subject and date:
subject = l.strip()
# Find raw commit information for all non-ChangeLog files.
op = [x[1:] for x in op if len(x) > 0 and re.match(r'^:[0-9]+', x)]
# Skip all ignored files.
for ign in self.ignore_list:
op = [x for x in op if ign not in x]
# It was only the ChangeLog, ignore.
if len(op) == 0:
return
print('%s %s' % (date, authors[0]))
if (len(authors) > 1):
authors = authors[1:]
for author in authors:
print(' %s' % author)
print()
if merge:
print('\t MERGE COMMIT: %s\n' % commit)
return
print('\tCOMMIT%s: %s\n\t%s\n' % (copyright_exempt, commit, subject))
# Changes across a large number of files are typically mechanical (URL
# updates, copyright notice changes, etc.) and likely not interesting
# enough to produce a detailed ChangeLog entry.
if len(op) > 100:
print('\t* Suppressing diff as too many files differ.\n')
return
# Each of these lines has a space separated format like so:
# :<OLD MODE> <NEW MODE> <OLD REF> <NEW REF> <OPERATION> <FILE1> <FILE2>
#
# where OPERATION can be one of the following:
# A: File added
# D: File removed
# M[0-9]{3}: File modified
# R[0-9]{3}: File renamed, with the 3 digit number following it indicating
# what percentage of the file is intact.
# C[0-9]{3}: File copied. Same semantics as R.
# T: The permission bits of the file changed
        # U: Unmerged. We should not encounter this, so we ignore it.
# X, or anything else: Most likely a bug. Report it.
#
# FILE2 is set only when OPERATION is R or C, to indicate the new file name.
#
# Also note that merge commits have a different format here, with three
# entries each for the modes and refs, but we don't bother with it for now.
#
# For more details: https://git-scm.com/docs/diff-format
for f in op:
data = f.split()
if data[4] == 'A':
print('\t* %s: New file.' % data[5])
elif data[4] == 'D':
print('\t* %s: Delete file.' % data[5])
elif data[4] == 'T':
print('\t* %s: Changed file permission bits from %s to %s' % \
(data[5], data[0], data[1]))
elif data[4][0] == 'M':
print('\t* %s: Modified.' % data[5])
analyze_diff(data[5],
self.exec_git_cmd(['show', data[2]]),
self.exec_git_cmd(['show', data[3]]), frontends)
elif data[4][0] == 'R' or data[4][0] == 'C':
change = int(data[4][1:])
print('\t* %s: Move to...' % data[5])
print('\t* %s: ... here.' % data[6])
if change < 100:
analyze_diff(data[6],
self.exec_git_cmd(['show', data[2]]),
self.exec_git_cmd(['show', data[3]]), frontends)
# We should never encounter this, so ignore for now.
elif data[4] == 'U':
pass
else:
eprint('%s: Unknown line format %s' % (commit, data[4]))
sys.exit(42)
print('')
def list_commits(self, revs):
''' List commit IDs between the two revs in the REVS list.
'''
ref = revs[0] + '..' + revs[1]
return self.exec_git_cmd(['log', '--pretty=%H', ref])
| gpl-3.0 | -7,940,138,127,483,870,000 | 37.699387 | 84 | 0.527267 | false |
tclim/your | urscript/urscript/comm.py | 1 | 3567 | """ comm.py module manages Robot communication using sockets.
It contains functions for sending and listening to the robot
"""
import socket
from struct import unpack
PORT_DASH = 29999
PORT = 30002
PORT_RT = 30003
def send_script(ur_program, robot_ip, port=PORT):
    """Send a script to robot via a socket
    Args:
        ur_program: Formatted UR Script program to send (string)
        robot_ip: IP address of robot (string)
        port: TCP port to connect to; defaults to the secondary interface (int)
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(2)
    try:
        # add an extra new line
        ur_program += '\n'
        s.connect((robot_ip, port))
        s.send(ur_program)
    except socket.timeout:
        print "Time out connecting to {0} Port:{1}".format(robot_ip, port)
    except socket.error, e:
        print e
    s.close()
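# Example usage (illustrative sketch; the IP address below is a placeholder,
# not a value defined in this module):
#   send_script('set_digital_out(0, True)', '192.168.1.100')
#   send_script('pause', '192.168.1.100', PORT_DASH)  # dashboard-level command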
def stop_program(robot_ip):
""" Pauses a running program by sending a command to the Dashboard
Args:
robot_ip: IP address of robot (string)
"""
    send_script('pause', robot_ip, PORT_DASH)
def listen(robot_ip):
"""Returns robot data received through a socket in dictionary format.
Args:
robot_ip: IP address of robot (string)
Returns:
dict_data: A dictionary containing robot data in readable format
"""
data = _receive_data(robot_ip)
dict_data = _format_data(data)
return dict_data
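# Example (illustrative sketch; the IP address is a placeholder and a reachable
# controller is assumed):
#   state = listen('192.168.1.100')
#   print state['tool_pose']          # Cartesian TCP pose (x, y, z, rx, ry, rz)
#   print state['actual_joints_pos']  # current joint positions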
def _receive_data(robot_ip):
"""Receives unformatted data from robot using the realtime interface (Port 30003)
Args:
robot_ip: ip address of robot (string)
Returns:
data: Robot data (byte[])
"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(.1)
try:
s.connect((robot_ip, PORT_RT))
except socket.timeout:
print "Time out connecting to {0} Port:{1}".format(robot_ip,PORT_RT)
except socket.error, e:
print e
data = s.recv(1024)
s.close()
return data
def _format_data(data):
"""Formats robot data into dictionary
    Received byte array is formatted as a dictionary. For more info on the
    data layout, see the UR real-time (client) interface documentation.
Args:
data: Raw data from robot (byte[])
Returns:
dict_data: A dictionary containing data in readable format
"""
dict_data = {}
fmt_int = "!i"
#fmt_uInt = "!Q"
fmt_double1 = "!d"
fmt_double3 = "!ddd"
fmt_double6 = "!dddddd"
dict_data["message_length"] = unpack(fmt_int, data[0:4])
dict_data["time"] = unpack(fmt_double1, data[4:12])
dict_data["target_joints_pos"] = unpack(fmt_double6, data[12:60])
dict_data["target_joints_vel"] = unpack(fmt_double6, data[60:108])
dict_data["target_joints_accel"] = unpack(fmt_double6, data[108:156])
dict_data["target_joints_current"] = unpack(fmt_double6, data[156:204])
dict_data["target_joints_torque"] = unpack(fmt_double6, data[204:252])
dict_data["actual_joints_pos"] = unpack(fmt_double6, data[252:300])
dict_data["actual_joints_vel"] = unpack(fmt_double6, data[300:348])
dict_data["actual_joints_current"] = unpack(fmt_double6, data[348:396])
dict_data["xyz_accelerometer"] = unpack(fmt_double3, data[396:420])
dict_data["tcp_force"] = unpack(fmt_double6, data[540:588])
dict_data["tool_pose"] = unpack(fmt_double6, data[588:636])
dict_data["tool_speed"] = unpack(fmt_double6, data[636:684])
#dict_data["digital_input"] = unpack(fmt_double6, data[636:684])
dict_data["joint_temperatures"] = unpack(fmt_double6, data[692:740])
return dict_data | mit | 9,156,183,232,854,302,000 | 32.990196 | 85 | 0.626857 | false |
igorgai/django-custom-user | custom_user/models.py | 1 | 4588 | """User models."""
import django
from django.contrib.auth.models import (
AbstractBaseUser, BaseUserManager, PermissionsMixin)
from django.core.mail import send_mail
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
class EmailUserManager(BaseUserManager):
"""Custom manager for EmailUser."""
def _create_user(self, email, password,
is_staff, is_superuser, **extra_fields):
"""Create and save an EmailUser with the given email and password.
:param str email: user email
:param str password: user password
:param bool is_staff: whether user staff or not
:param bool is_superuser: whether user admin or not
:return custom_user.models.EmailUser user: user
:raise ValueError: email is not set
"""
now = timezone.now()
if not email:
raise ValueError('The given email must be set')
email = self.normalize_email(email)
is_active = extra_fields.pop("is_active", True)
user = self.model(email=email, is_staff=is_staff, is_active=is_active,
is_superuser=is_superuser, last_login=now,
date_joined=now, **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, email, password=None, **extra_fields):
"""Create and save an EmailUser with the given email and password.
:param str email: user email
:param str password: user password
:return custom_user.models.EmailUser user: regular user
"""
is_staff = extra_fields.pop("is_staff", False)
return self._create_user(email, password, is_staff, False,
**extra_fields)
def create_superuser(self, email, password, **extra_fields):
"""Create and save an EmailUser with the given email and password.
:param str email: user email
:param str password: user password
:return custom_user.models.EmailUser user: admin user
"""
return self._create_user(email, password, True, True,
**extra_fields)
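# Example usage (illustrative sketch; assumes EmailUser, defined below, is the
# active AUTH_USER_MODEL):
#   user = EmailUser.objects.create_user('user@example.com', 's3cr3t')
#   admin = EmailUser.objects.create_superuser('admin@example.com', 's3cr3t')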
class AbstractEmailUser(AbstractBaseUser, PermissionsMixin):
"""Abstract User with the same behaviour as Django's default User.
AbstractEmailUser does not have username field. Uses email as the
USERNAME_FIELD for authentication.
Use this if you need to extend EmailUser.
Inherits from both the AbstractBaseUser and PermissionMixin.
The following attributes are inherited from the superclasses:
* password
* last_login
* is_superuser
"""
email = models.EmailField(_('email address'), max_length=255,
unique=True, db_index=True)
is_staff = models.BooleanField(
_('staff status'), default=False, help_text=_(
'Designates whether the user can log into this admin site.'))
is_active = models.BooleanField(_('active'), default=True, help_text=_(
'Designates whether this user should be treated as '
'active. Unselect this instead of deleting accounts.'))
date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
objects = EmailUserManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = []
class Meta:
verbose_name = _('user')
verbose_name_plural = _('users')
abstract = True
def get_full_name(self):
"""Return the email."""
return self.email
def get_short_name(self):
"""Return the email."""
return self.email
def email_user(self, subject, message, from_email=None, **kwargs):
"""Send an email to this User."""
send_mail(subject, message, from_email, [self.email], **kwargs)
# Monkey patch Django 1.7 to avoid detecting migrations
if django.VERSION[:2] == (1, 7):
last_login = AbstractEmailUser._meta.get_field('last_login')
last_login.blank = True
last_login.null = True
last_login.default = models.fields.NOT_PROVIDED
groups = AbstractEmailUser._meta.get_field('groups')
groups.help_text = _('The groups this user belongs to. A user will get '
'all permissions granted to each of their groups.')
class EmailUser(AbstractEmailUser):
"""
Concrete class of AbstractEmailUser.
Use this if you don't need to extend EmailUser.
"""
class Meta(AbstractEmailUser.Meta):
swappable = 'AUTH_USER_MODEL'
| bsd-3-clause | 7,541,121,175,570,798,000 | 32.985185 | 78 | 0.637533 | false |
JaneliaSciComp/Neuroptikon | Source/lib/CrossPlatform/networkx/algorithms/tests/test_degree_centrality.py | 1 | 3070 | """
Unit tests for degree centrality.
"""
from nose.tools import *
import networkx as nx
class TestDegreeCentrality:
def __init__(self):
self.K = nx.krackhardt_kite_graph()
self.P3 = nx.path_graph(3)
self.K5 = nx.complete_graph(5)
F = nx.Graph() # Florentine families
F.add_edge('Acciaiuoli','Medici')
F.add_edge('Castellani','Peruzzi')
F.add_edge('Castellani','Strozzi')
F.add_edge('Castellani','Barbadori')
F.add_edge('Medici','Barbadori')
F.add_edge('Medici','Ridolfi')
F.add_edge('Medici','Tornabuoni')
F.add_edge('Medici','Albizzi')
F.add_edge('Medici','Salviati')
F.add_edge('Salviati','Pazzi')
F.add_edge('Peruzzi','Strozzi')
F.add_edge('Peruzzi','Bischeri')
F.add_edge('Strozzi','Ridolfi')
F.add_edge('Strozzi','Bischeri')
F.add_edge('Ridolfi','Tornabuoni')
F.add_edge('Tornabuoni','Guadagni')
F.add_edge('Albizzi','Ginori')
F.add_edge('Albizzi','Guadagni')
F.add_edge('Bischeri','Guadagni')
F.add_edge('Guadagni','Lamberteschi')
self.F = F
G = nx.DiGraph()
G.add_edge(0,5)
G.add_edge(1,5)
G.add_edge(2,5)
G.add_edge(3,5)
G.add_edge(4,5)
G.add_edge(5,6)
G.add_edge(5,7)
G.add_edge(5,8)
self.G = G
def test_degree_centrality_1(self):
d = nx.degree_centrality(self.K5)
exact = dict(zip(range(5), [1]*5))
for n,dc in d.iteritems():
assert_almost_equal(exact[n], dc)
def test_degree_centrality_2(self):
d = nx.degree_centrality(self.P3)
exact = {0:0.5, 1:1, 2:0.5}
for n,dc in d.iteritems():
assert_almost_equal(exact[n], dc)
def test_degree_centrality_3(self):
d = nx.degree_centrality(self.K)
exact = {0:.444, 1:.444, 2:.333, 3:.667, 4:.333,
5:.556, 6:.556, 7:.333, 8:.222, 9:.111}
for n,dc in d.iteritems():
assert_almost_equal(exact[n], float("%5.3f" % dc))
def test_degree_centrality_4(self):
d = nx.degree_centrality(self.F)
names = sorted(self.F.nodes())
dcs = [0.071, 0.214, 0.143, 0.214, 0.214, 0.071, 0.286,
0.071, 0.429, 0.071, 0.214, 0.214, 0.143, 0.286, 0.214]
exact = dict(zip(names, dcs))
for n,dc in d.iteritems():
assert_almost_equal(exact[n], float("%5.3f" % dc))
def test_indegree_centrality(self):
d = nx.in_degree_centrality(self.G)
exact = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0,
5: 0.625, 6: 0.125, 7: 0.125, 8: 0.125}
for n,dc in d.iteritems():
assert_almost_equal(exact[n], dc)
def test_outdegree_centrality(self):
d = nx.out_degree_centrality(self.G)
exact = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125,
4: 0.125, 5: 0.375, 6: 0.0, 7: 0.0, 8: 0.0}
for n,dc in d.iteritems():
assert_almost_equal(exact[n], dc)
| bsd-3-clause | -7,407,244,194,374,798,000 | 32.369565 | 70 | 0.529967 | false |
LevinJ/SSD_tensorflow_VOC | exercise/data_generator.py | 1 | 8416 | import tensorflow as tf
from datasets import dataset_utils
from datasets import flowers
import matplotlib.pyplot as plt
import tensorflow.contrib.slim as slim
from datasets import download_and_convert_flowers
import numpy as np
from preprocessing import inception_preprocessing
flowers_data_dir = '../../data/flower'
train_dir = '/tmp/tfslim_model/'
print('Will save model to %s' % train_dir)
def display_data():
with tf.Graph().as_default():
dataset = flowers.get_split('train', flowers_data_dir)
data_provider = slim.dataset_data_provider.DatasetDataProvider(
dataset, common_queue_capacity=32, common_queue_min=1)
image, label = data_provider.get(['image', 'label'])
with tf.Session() as sess:
with slim.queues.QueueRunners(sess):
for i in range(4):
np_image, np_label = sess.run([image, label])
height, width, _ = np_image.shape
class_name = name = dataset.labels_to_names[np_label]
plt.figure()
plt.imshow(np_image)
plt.title('%s, %d x %d' % (name, height, width))
plt.axis('off')
plt.show()
return
def download_convert():
dataset_dir = flowers_data_dir
download_and_convert_flowers.run(dataset_dir)
return
def disp_data():
with tf.Graph().as_default():
dataset = flowers.get_split('train', flowers_data_dir)
data_provider = slim.dataset_data_provider.DatasetDataProvider(
dataset, common_queue_capacity=32, common_queue_min=1)
image, label,format = data_provider.get(['image', 'label', 'format'])
with tf.Session() as sess:
with slim.queues.QueueRunners(sess):
for i in range(4):
np_image, np_label,np_format = sess.run([image, label,format])
height, width, _ = np_image.shape
class_name = name = dataset.labels_to_names[np_label]
plt.figure()
plt.imshow(np_image)
plt.title('%s, %d x %d' % (name, height, width))
plt.axis('off')
plt.show()
return
def my_cnn(images, num_classes, is_training): # is_training is not used...
with slim.arg_scope([slim.max_pool2d], kernel_size=[3, 3], stride=2):
net = slim.conv2d(images, 64, [5, 5])
net = slim.max_pool2d(net)
net = slim.conv2d(net, 64, [5, 5])
net = slim.max_pool2d(net)
net = slim.flatten(net)
net = slim.fully_connected(net, 192)
net = slim.fully_connected(net, num_classes, activation_fn=None)
return net
def apply_random_image():
with tf.Graph().as_default():
# The model can handle any input size because the first layer is convolutional.
# The size of the model is determined when image_node is first passed into the my_cnn function.
# Once the variables are initialized, the size of all the weight matrices is fixed.
# Because of the fully connected layers, this means that all subsequent images must have the same
# input size as the first image.
batch_size, height, width, channels = 3, 28, 28, 3
images = tf.random_uniform([batch_size, height, width, channels], maxval=1)
# Create the model.
num_classes = 10
logits = my_cnn(images, num_classes, is_training=True)
probabilities = tf.nn.softmax(logits)
# Initialize all the variables (including parameters) randomly.
init_op = tf.global_variables_initializer()
with tf.Session() as sess:
# Run the init_op, evaluate the model outputs and print the results:
sess.run(init_op)
probabilities = sess.run(probabilities)
print('Probabilities Shape:')
print(probabilities.shape) # batch_size x num_classes
print('\nProbabilities:')
print(probabilities)
print('\nSumming across all classes (Should equal 1):')
print(np.sum(probabilities, 1)) # Each row sums to 1
return
def load_batch(dataset, batch_size=32, height=299, width=299, is_training=False):
"""Loads a single batch of data.
Args:
dataset: The dataset to load.
batch_size: The number of images in the batch.
height: The size of each image after preprocessing.
width: The size of each image after preprocessing.
is_training: Whether or not we're currently training or evaluating.
Returns:
images: A Tensor of size [batch_size, height, width, 3], image samples that have been preprocessed.
images_raw: A Tensor of size [batch_size, height, width, 3], image samples that can be used for visualization.
labels: A Tensor of size [batch_size], whose values range between 0 and dataset.num_classes.
"""
data_provider = slim.dataset_data_provider.DatasetDataProvider(
dataset, common_queue_capacity=32,
common_queue_min=8)
image_raw, label = data_provider.get(['image', 'label'])
# Preprocess image for usage by Inception.
image = inception_preprocessing.preprocess_image(image_raw, height, width, is_training=is_training)
# Preprocess the image for display purposes.
image_raw = tf.expand_dims(image_raw, 0)
image_raw = tf.image.resize_images(image_raw, [height, width])
image_raw = tf.squeeze(image_raw)
# Batch it up.
images, images_raw, labels = tf.train.batch(
[image, image_raw, label],
batch_size=batch_size,
num_threads=1,
capacity=2 * batch_size)
return images, images_raw, labels
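# Example usage (illustrative sketch; assumes the flowers TFRecords exist in
# flowers_data_dir and that slim queue runners are started in the session):
#   dataset = flowers.get_split('train', flowers_data_dir)
#   images, images_raw, labels = load_batch(dataset, batch_size=16)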
def train_save_model():
with tf.Graph().as_default():
tf.logging.set_verbosity(tf.logging.INFO)
dataset = flowers.get_split('train', flowers_data_dir)
images, _, labels = load_batch(dataset)
# Create the model:
logits = my_cnn(images, num_classes=dataset.num_classes, is_training=True)
# Specify the loss function:
one_hot_labels = slim.one_hot_encoding(labels, dataset.num_classes)
slim.losses.softmax_cross_entropy(logits, one_hot_labels)
total_loss = slim.losses.get_total_loss()
# Create some summaries to visualize the training process:
tf.summary.scalar('losses/Total Loss', total_loss)
# Specify the optimizer and create the train op:
optimizer = tf.train.AdamOptimizer(learning_rate=0.01)
train_op = slim.learning.create_train_op(total_loss, optimizer)
# Run the training:
final_loss = slim.learning.train(
train_op,
logdir=train_dir,
number_of_steps=1, # For speed, we just do 1 epoch
save_summaries_secs=1)
print('Finished training. Final batch loss %d' % final_loss)
return
def evaluate_model():
with tf.Graph().as_default():
tf.logging.set_verbosity(tf.logging.DEBUG)
dataset = flowers.get_split('train', flowers_data_dir)
images, _, labels = load_batch(dataset)
logits = my_cnn(images, num_classes=dataset.num_classes, is_training=False)
predictions = tf.argmax(logits, 1)
# Define the metrics:
names_to_values, names_to_updates = slim.metrics.aggregate_metric_map({
'eval/Accuracy': slim.metrics.streaming_accuracy(predictions, labels),
'eval/Recall@5': slim.metrics.streaming_recall_at_k(logits, labels, 5),
})
print('Running evaluation Loop...')
checkpoint_path = tf.train.latest_checkpoint(train_dir)
metric_values = slim.evaluation.evaluate_once(
master='',
checkpoint_path=checkpoint_path,
logdir=train_dir,
eval_op=names_to_updates.values(),
final_op=names_to_values.values())
names_to_values = dict(zip(names_to_values.keys(), metric_values))
for name in names_to_values:
print('%s: %f' % (name, names_to_values[name]))
return
def main():
# download_convert()
# disp_data()
# apply_random_image()
# train_save_model()
evaluate_model()
return
main()
| apache-2.0 | 5,897,464,082,682,168,000 | 36.73991 | 116 | 0.606702 | false |
gnocchixyz/gnocchi | gnocchi/rest/api.py | 1 | 87732 | # -*- encoding: utf-8 -*-
#
# Copyright © 2016-2018 Red Hat, Inc.
# Copyright © 2014-2015 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import functools
import itertools
import logging
import operator
import uuid
import jsonpatch
import pecan
from pecan import rest
import pyparsing
import six
from six.moves.urllib import parse as urllib_parse
import tenacity
import tooz
import voluptuous
import werkzeug.http
import gnocchi
from gnocchi import archive_policy
from gnocchi import calendar
from gnocchi import chef
from gnocchi.cli import metricd
from gnocchi import incoming
from gnocchi import indexer
from gnocchi import json
from gnocchi import resource_type
from gnocchi.rest.aggregates import exceptions
from gnocchi.rest.aggregates import processor
from gnocchi.rest import exceptions as rest_exceptions
from gnocchi import storage
from gnocchi import utils
try:
from gnocchi.rest.prometheus import remote_pb2
import snappy
PROMETHEUS_SUPPORTED = True
except ImportError:
PROMETHEUS_SUPPORTED = False
ATTRGETTER_GRANULARITY = operator.attrgetter("granularity")
LOG = logging.getLogger(__name__)
def arg_to_list(value):
if isinstance(value, list):
return value
elif value:
return [value]
return []
def abort(status_code, detail=''):
"""Like pecan.abort, but make sure detail is a string."""
if status_code == 404 and not detail:
raise RuntimeError("http code 404 must have 'detail' set")
if isinstance(detail, voluptuous.Invalid):
detail = {
'cause': 'Invalid input',
'reason': six.text_type(detail),
'detail': [six.text_type(path) for path in detail.path],
}
elif isinstance(detail, Exception):
detail = detail.jsonify()
LOG.debug("Aborting request. Code [%s]. Details [%s]", status_code, detail)
return pecan.abort(status_code, detail)
def flatten_dict_to_keypairs(d, separator=':'):
"""Generator that produces sequence of keypairs for nested dictionaries.
:param d: dictionaries which may be nested
:param separator: symbol between names
"""
for name, value in sorted(six.iteritems(d)):
if isinstance(value, dict):
for subname, subvalue in flatten_dict_to_keypairs(value,
separator):
yield ('%s%s%s' % (name, separator, subname), subvalue)
else:
yield name, value
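# Illustrative example (not part of the API): flattening {"a": {"b": 1}, "c": 2}
# yields the pairs ("a:b", 1) and ("c", 2), ordered by key.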
def enforce(rule, target):
"""Return the user and project the request should be limited to.
:param rule: The rule name
:param target: The target to enforce on.
"""
creds = pecan.request.auth_helper.get_auth_info(pecan.request)
if not isinstance(target, dict):
if hasattr(target, "jsonify"):
target = target.jsonify()
else:
target = target.__dict__
# Flatten dict
target = dict(flatten_dict_to_keypairs(d=target, separator='.'))
if not pecan.request.policy_enforcer.enforce(rule, target, creds):
abort(403)
def set_resp_location_hdr(location):
location = '%s%s' % (pecan.request.script_name, location)
# NOTE(sileht): according the pep-3333 the headers must be
# str in py2 and py3 even this is not the same thing in both
# version
# see: http://legacy.python.org/dev/peps/pep-3333/#unicode-issues
if six.PY2 and isinstance(location, six.text_type):
location = location.encode('utf-8')
location = urllib_parse.quote(location)
pecan.response.headers['Location'] = location
def set_resp_link_hdr(marker, *args):
# NOTE(sileht): This comes from rfc5988.
# Setting prev, last is too costly/complicated, so just set next for now.
options = {}
for arg in args:
options.update(arg)
if "sorts" in options:
options["sort"] = options["sorts"]
del options["sorts"]
options["marker"] = marker
# NOTE(sileht): To always have the same orders
options = sorted(options.items())
params = urllib_parse.urlencode(options, doseq=True)
pecan.response.headers.add("Link", '<%s?%s>; rel="next"' %
(pecan.request.path_url, params))
def deserialize(expected_content_types=None):
if expected_content_types is None:
expected_content_types = ("application/json", )
mime_type, options = werkzeug.http.parse_options_header(
pecan.request.headers.get('Content-Type'))
if mime_type not in expected_content_types:
abort(415)
try:
params = json.load(pecan.request.body_file)
except Exception as e:
details = rest_exceptions.UnableToDecodeBody(e,
pecan.request.body_file)
LOG.warning(details.jsonify())
abort(400, details)
return params
def validate(schema, data, required=True):
try:
return voluptuous.Schema(schema, required=required)(data)
except voluptuous.Invalid as e:
abort(400, e)
def deserialize_and_validate(schema, required=True,
expected_content_types=None):
return validate(schema,
deserialize(expected_content_types=expected_content_types),
required)
def Timespan(value):
try:
return utils.to_timespan(value)
except ValueError as e:
raise voluptuous.Invalid(e)
def get_bool_param(name, params, default='false'):
return strtobool(name, params.get(name, default))
def strtobool(varname, v):
"""Convert a string to a boolean."""
try:
return utils.strtobool(v)
except ValueError as e:
abort(400, "Unable to parse `%s': %s" % (varname, six.text_type(e)))
RESOURCE_DEFAULT_PAGINATION = [u'revision_start:asc',
u'started_at:asc']
METRIC_DEFAULT_PAGINATION = [u'id:asc']
def get_pagination_options(params, default):
try:
opts = voluptuous.Schema({
voluptuous.Required(
"limit", default=pecan.request.conf.api.max_limit):
voluptuous.All(voluptuous.Coerce(int),
voluptuous.Range(min=1),
voluptuous.Clamp(
min=1, max=pecan.request.conf.api.max_limit)),
"marker": six.text_type,
voluptuous.Required("sort", default=default):
voluptuous.All(
voluptuous.Coerce(arg_to_list),
[six.text_type]),
}, extra=voluptuous.REMOVE_EXTRA)(params)
except voluptuous.Invalid as e:
abort(400, {"cause": "Argument value error",
"reason": str(e)})
opts['sorts'] = opts['sort']
del opts['sort']
return opts
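# Illustrative query string handled by this helper (values are placeholders):
#   ?limit=100&marker=<id-of-last-item>&sort=started_at:asc&sort=id:asc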
ArchivePolicyDefinitionSchema = voluptuous.Schema(
voluptuous.All([{
"granularity": Timespan,
"points": voluptuous.All(
voluptuous.Coerce(int),
voluptuous.Range(min=1),
),
"timespan": Timespan,
}], voluptuous.Length(min=1)),
)
class ArchivePolicyController(rest.RestController):
def __init__(self, archive_policy):
self.archive_policy = archive_policy
@pecan.expose('json')
def get(self):
ap = pecan.request.indexer.get_archive_policy(self.archive_policy)
if ap:
enforce("get archive policy", ap)
return ap
abort(404, six.text_type(
indexer.NoSuchArchivePolicy(self.archive_policy)))
@pecan.expose('json')
def patch(self):
ap = pecan.request.indexer.get_archive_policy(self.archive_policy)
if not ap:
abort(404, six.text_type(
indexer.NoSuchArchivePolicy(self.archive_policy)))
enforce("update archive policy", ap)
body = deserialize_and_validate(voluptuous.Schema({
voluptuous.Required("definition"): ArchivePolicyDefinitionSchema,
}))
# Validate the data
try:
ap_items = [archive_policy.ArchivePolicyItem(**item) for item in
body['definition']]
except ValueError as e:
abort(400, six.text_type(e))
try:
return pecan.request.indexer.update_archive_policy(
self.archive_policy, ap_items)
except indexer.UnsupportedArchivePolicyChange as e:
abort(400, six.text_type(e))
@pecan.expose('json')
def delete(self):
# NOTE(jd) I don't think there's any point in fetching and passing the
# archive policy here, as the rule is probably checking the actual role
# of the user, not the content of the AP.
enforce("delete archive policy", {})
try:
pecan.request.indexer.delete_archive_policy(self.archive_policy)
except indexer.NoSuchArchivePolicy as e:
abort(404, six.text_type(e))
except indexer.ArchivePolicyInUse as e:
abort(400, six.text_type(e))
class ArchivePoliciesController(rest.RestController):
@pecan.expose()
def _lookup(self, archive_policy, *remainder):
return ArchivePolicyController(archive_policy), remainder
@pecan.expose('json')
def post(self):
enforce("create archive policy", {})
# NOTE(jd): Initialize this one at run-time because we rely on conf
conf = pecan.request.conf
valid_agg_methods = list(
archive_policy.ArchivePolicy.VALID_AGGREGATION_METHODS_VALUES
)
ArchivePolicySchema = voluptuous.Schema({
voluptuous.Required("name"): six.text_type,
voluptuous.Required("back_window", default=0): voluptuous.All(
voluptuous.Coerce(int),
voluptuous.Range(min=0),
),
voluptuous.Required(
"aggregation_methods",
default=list(conf.archive_policy.default_aggregation_methods)):
valid_agg_methods,
voluptuous.Required("definition"): ArchivePolicyDefinitionSchema,
})
body = deserialize_and_validate(ArchivePolicySchema)
# Validate the data
try:
ap = archive_policy.ArchivePolicy.from_dict(body)
except ValueError as e:
abort(400, six.text_type(e))
enforce("create archive policy", ap)
try:
ap = pecan.request.indexer.create_archive_policy(ap)
except indexer.ArchivePolicyAlreadyExists as e:
abort(409, six.text_type(e))
location = "/archive_policy/" + ap.name
set_resp_location_hdr(location)
pecan.response.status = 201
return ap
@pecan.expose('json')
def get_all(self):
enforce("list archive policy", {})
return pecan.request.indexer.list_archive_policies()
class ArchivePolicyRulesController(rest.RestController):
@pecan.expose()
def _lookup(self, archive_policy_rule, *remainder):
apr = pecan.request.indexer.get_archive_policy_rule(
archive_policy_rule
)
if apr:
return ArchivePolicyRuleController(apr), remainder
abort(404, six.text_type(
indexer.NoSuchArchivePolicyRule(archive_policy_rule)))
@pecan.expose('json')
def post(self):
enforce("create archive policy rule", {})
ArchivePolicyRuleSchema = voluptuous.Schema({
voluptuous.Required("name"): six.text_type,
voluptuous.Required("metric_pattern"): six.text_type,
voluptuous.Required("archive_policy_name"): six.text_type,
})
body = deserialize_and_validate(ArchivePolicyRuleSchema)
enforce("create archive policy rule", body)
try:
ap = pecan.request.indexer.create_archive_policy_rule(
body['name'], body['metric_pattern'],
body['archive_policy_name']
)
except indexer.ArchivePolicyRuleAlreadyExists as e:
abort(409, six.text_type(e))
except indexer.NoSuchArchivePolicy as e:
abort(400, e)
location = "/archive_policy_rule/" + ap.name
set_resp_location_hdr(location)
pecan.response.status = 201
return ap
@pecan.expose('json')
def get_all(self):
enforce("list archive policy rule", {})
return pecan.request.indexer.list_archive_policy_rules()
class ArchivePolicyRuleController(rest.RestController):
def __init__(self, archive_policy_rule):
self.archive_policy_rule = archive_policy_rule
@pecan.expose('json')
def get(self):
enforce("get archive policy rule", self.archive_policy_rule)
return self.archive_policy_rule
@pecan.expose('json')
def patch(self):
ArchivePolicyRuleSchema = voluptuous.Schema({
voluptuous.Required("name"): six.text_type,
})
body = deserialize_and_validate(ArchivePolicyRuleSchema)
enforce("update archive policy rule", {})
try:
return pecan.request.indexer.update_archive_policy_rule(
self.archive_policy_rule.name, body["name"])
except indexer.UnsupportedArchivePolicyRuleChange as e:
abort(400, six.text_type(e))
@pecan.expose('json')
def delete(self):
# NOTE(jd) I don't think there's any point in fetching and passing the
# archive policy rule here, as the rule is probably checking the actual
# role of the user, not the content of the AP rule.
enforce("delete archive policy rule", {})
try:
pecan.request.indexer.delete_archive_policy_rule(
self.archive_policy_rule.name
)
except indexer.NoSuchArchivePolicyRule as e:
abort(404, six.text_type(e))
def MeasuresListSchema(measures):
try:
times = utils.to_timestamps([m['timestamp'] for m in measures])
except TypeError:
raise voluptuous.Invalid("unexpected measures format")
except ValueError as e:
raise voluptuous.Invalid("unexpected timestamp '%s'" % e)
try:
values = [float(i['value']) for i in measures]
except Exception:
raise voluptuous.Invalid("unexpected measures value")
return (incoming.Measure(t, v) for t, v in six.moves.zip(times, values))
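# An accepted measures payload looks like the following illustrative sketch
# (timestamps may be ISO 8601 strings or epoch numbers):
#   [{"timestamp": "2017-01-01T12:00:00", "value": 4.2},
#    {"timestamp": "2017-01-01T12:05:00", "value": 5.0}]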
class MetricController(rest.RestController):
_custom_actions = {
'measures': ['POST', 'GET']
}
def __init__(self, metric):
self.metric = metric
def enforce_metric(self, rule):
enforce(rule, json.to_primitive(self.metric))
@pecan.expose('json')
def get_all(self):
self.enforce_metric("get metric")
return self.metric
@pecan.expose('json')
def post_measures(self):
self.enforce_metric("post measures")
measures = deserialize_and_validate(MeasuresListSchema)
if measures:
pecan.request.incoming.add_measures(self.metric.id, measures)
pecan.response.status = 202
@pecan.expose('json')
def get_measures(self, start=None, stop=None, aggregation='mean',
granularity=None, resample=None, refresh=False,
**param):
self.enforce_metric("get measures")
if resample:
if not granularity:
abort(400, 'A granularity must be specified to resample')
try:
resample = (resample if calendar.GROUPINGS.get(resample) else
utils.to_timespan(resample))
except ValueError as e:
abort(400, six.text_type(e))
if granularity is None:
granularity = [d.granularity
for d in self.metric.archive_policy.definition]
start, stop, _, _, _ = validate_qs(
start=start, stop=stop)
else:
start, stop, granularity, _, _ = validate_qs(
start=start, stop=stop, granularity=granularity)
if aggregation not in self.metric.archive_policy.aggregation_methods:
abort(404, {
"cause": "Aggregation method does not exist for this metric",
"detail": {
"metric": self.metric.id,
"aggregation_method": aggregation,
},
})
aggregations = []
for g in sorted(granularity, reverse=True):
agg = self.metric.archive_policy.get_aggregation(
aggregation, g)
if agg is None:
abort(404, six.text_type(
storage.AggregationDoesNotExist(
self.metric, aggregation, g)
))
aggregations.append(agg)
if (strtobool("refresh", refresh) and
pecan.request.incoming.has_unprocessed(self.metric.id)):
try:
pecan.request.chef.refresh_metrics(
[self.metric],
pecan.request.conf.api.operation_timeout)
except chef.SackAlreadyLocked:
abort(503, 'Unable to refresh metric: %s. Metric is locked. '
'Please try again.' % self.metric.id)
try:
results = pecan.request.storage.get_aggregated_measures(
{self.metric: aggregations},
start, stop, resample)[self.metric]
return [(timestamp, results[key].aggregation.granularity, value)
for key in sorted(results.keys(),
reverse=True)
for timestamp, value in results[key]]
except storage.AggregationDoesNotExist as e:
abort(404, six.text_type(e))
except storage.MetricDoesNotExist:
return []
@pecan.expose('json')
def delete(self):
self.enforce_metric("delete metric")
try:
pecan.request.indexer.delete_metric(self.metric.id)
except indexer.NoSuchMetric as e:
abort(404, six.text_type(e))
class MetricsController(rest.RestController):
@pecan.expose()
def _lookup(self, id, *remainder):
try:
metric_id = uuid.UUID(id)
except ValueError:
abort(404, six.text_type(indexer.NoSuchMetric(id)))
# Load details for ACL
metrics = pecan.request.indexer.list_metrics(
attribute_filter={"=": {"id": metric_id}}, details=True)
if not metrics:
abort(404, six.text_type(indexer.NoSuchMetric(id)))
return MetricController(metrics[0]), remainder
    # NOTE(jd) Define this method as if it were a voluptuous schema – it's just a
# smarter version of a voluptuous schema, no?
@staticmethod
def MetricSchema(definition):
creator = pecan.request.auth_helper.get_current_user(
pecan.request)
# First basic validation
schema = voluptuous.Schema({
"archive_policy_name": six.text_type,
"resource_id": functools.partial(ResourceID, creator=creator),
"name": six.text_type,
voluptuous.Optional("unit"):
voluptuous.All(six.text_type, voluptuous.Length(max=31)),
})
definition = schema(definition)
archive_policy_name = definition.get('archive_policy_name')
name = definition.get('name')
if name and '/' in name:
abort(400, "'/' is not supported in metric name")
if archive_policy_name is None:
try:
ap = pecan.request.indexer.get_archive_policy_for_metric(name)
except indexer.NoArchivePolicyRuleMatch:
# NOTE(jd) Since this is a schema-like function, we
# should/could raise ValueError, but if we do so, voluptuous
# just returns a "invalid value" with no useful message – so we
# prefer to use abort() to make sure the user has the right
# error message
abort(400, "No archive policy name specified "
"and no archive policy rule found matching "
"the metric name %s" % name)
else:
definition['archive_policy_name'] = ap.name
resource_id = definition.get('resource_id')
if resource_id is None:
original_resource_id = None
else:
if name is None:
abort(400,
{"cause": "Attribute value error",
"detail": "name",
"reason": "Name cannot be null "
"if resource_id is not null"})
original_resource_id, resource_id = resource_id
enforce("create metric", {
"creator": creator,
"archive_policy_name": archive_policy_name,
"resource_id": resource_id,
"original_resource_id": original_resource_id,
"name": name,
"unit": definition.get('unit'),
})
return definition
@pecan.expose('json')
def post(self):
creator = pecan.request.auth_helper.get_current_user(
pecan.request)
body = deserialize_and_validate(self.MetricSchema)
resource_id = body.get('resource_id')
if resource_id is not None:
resource_id = resource_id[1]
try:
m = pecan.request.indexer.create_metric(
uuid.uuid4(),
creator,
resource_id=resource_id,
name=body.get('name'),
unit=body.get('unit'),
archive_policy_name=body['archive_policy_name'])
except indexer.NoSuchArchivePolicy as e:
abort(400, six.text_type(e))
except indexer.NamedMetricAlreadyExists as e:
abort(400, e)
set_resp_location_hdr("/metric/" + str(m.id))
pecan.response.status = 201
return m
MetricListSchema = voluptuous.Schema({
"user_id": six.text_type,
"project_id": six.text_type,
"creator": six.text_type,
"name": six.text_type,
"id": six.text_type,
"unit": six.text_type,
"archive_policy_name": six.text_type,
"status": voluptuous.Any("active", "delete"),
}, extra=voluptuous.REMOVE_EXTRA)
@classmethod
@pecan.expose('json')
def get_all(cls, **kwargs):
filtering = cls.MetricListSchema(kwargs)
# Compat with old user/project API
provided_user_id = filtering.pop('user_id', None)
provided_project_id = filtering.pop('project_id', None)
if provided_user_id is None and provided_project_id is None:
provided_creator = filtering.pop('creator', None)
else:
provided_creator = (
(provided_user_id or "")
+ ":"
+ (provided_project_id or "")
)
pagination_opts = get_pagination_options(kwargs,
METRIC_DEFAULT_PAGINATION)
attr_filters = []
if provided_creator is not None:
attr_filters.append({"=": {"creator": provided_creator}})
for k, v in six.iteritems(filtering):
attr_filters.append({"=": {k: v}})
policy_filter = pecan.request.auth_helper.get_metric_policy_filter(
pecan.request, "list metric")
resource_policy_filter = (
pecan.request.auth_helper.get_resource_policy_filter(
pecan.request, "list metric", resource_type=None,
prefix="resource")
)
try:
metrics = pecan.request.indexer.list_metrics(
attribute_filter={"and": attr_filters},
policy_filter=policy_filter,
resource_policy_filter=resource_policy_filter,
**pagination_opts)
if metrics and len(metrics) >= pagination_opts['limit']:
set_resp_link_hdr(str(metrics[-1].id), kwargs, pagination_opts)
return metrics
except indexer.InvalidPagination as e:
abort(400, six.text_type(e))
_MetricsSchema = voluptuous.Schema({
six.text_type: voluptuous.Any(utils.UUID,
MetricsController.MetricSchema),
})
def MetricsSchema(data):
# NOTE(jd) Before doing any kind of validation, copy the metric name
# into the metric definition. This is required so we have the name
# available when doing the metric validation with its own MetricSchema,
# and so we can do things such as applying archive policy rules.
if isinstance(data, dict):
for metric_name, metric_def in six.iteritems(data):
if isinstance(metric_def, dict):
metric_def['name'] = metric_name
return _MetricsSchema(data)
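# Illustrative example of the structure accepted above (names and values are
# placeholders): {"cpu.util": {"archive_policy_name": "low", "unit": "%"}}, or
# {"cpu.util": "<existing-metric-uuid>"} to reference a metric by UUID.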
class NamedMetricController(rest.RestController):
def __init__(self, resource_id, resource_type):
self.resource_id = resource_id
self.resource_type = resource_type
@pecan.expose()
def _lookup(self, name, *remainder):
m = pecan.request.indexer.list_metrics(
details=True,
attribute_filter={"and": [
{"=": {"name": name}},
{"=": {"resource_id": self.resource_id}},
]})
if m:
return MetricController(m[0]), remainder
resource = pecan.request.indexer.get_resource(self.resource_type,
self.resource_id)
if resource:
abort(404, six.text_type(indexer.NoSuchMetric(name)))
else:
abort(404, six.text_type(indexer.NoSuchResource(self.resource_id)))
@pecan.expose('json')
def post(self):
resource = pecan.request.indexer.get_resource(
self.resource_type, self.resource_id)
if not resource:
abort(404, six.text_type(indexer.NoSuchResource(self.resource_id)))
enforce("update resource", resource)
metrics = deserialize_and_validate(MetricsSchema)
try:
r = pecan.request.indexer.update_resource(
self.resource_type,
self.resource_id,
metrics=metrics,
append_metrics=True,
create_revision=False)
except (indexer.NoSuchMetric,
indexer.NoSuchArchivePolicy,
ValueError) as e:
abort(400, six.text_type(e))
except indexer.NamedMetricAlreadyExists as e:
abort(409, six.text_type(e))
except indexer.NoSuchResource as e:
abort(404, six.text_type(e))
return r.metrics
@pecan.expose('json')
def get_all(self):
resource = pecan.request.indexer.get_resource(
self.resource_type, self.resource_id)
if not resource:
abort(404, six.text_type(indexer.NoSuchResource(self.resource_id)))
enforce("get resource", resource)
return pecan.request.indexer.list_metrics(
attribute_filter={"=": {"resource_id": self.resource_id}})
class ResourceHistoryController(rest.RestController):
def __init__(self, resource_id, resource_type):
self.resource_id = resource_id
self.resource_type = resource_type
@pecan.expose('json')
def get(self, **kwargs):
details = get_bool_param('details', kwargs)
pagination_opts = get_pagination_options(
kwargs, RESOURCE_DEFAULT_PAGINATION)
resource = pecan.request.indexer.get_resource(
self.resource_type, self.resource_id)
if not resource:
abort(404, six.text_type(indexer.NoSuchResource(self.resource_id)))
enforce("get resource", resource)
try:
resources = pecan.request.indexer.list_resources(
self.resource_type,
attribute_filter={"=": {"id": self.resource_id}},
details=details,
history=True,
**pagination_opts
)
if resources and len(resources) >= pagination_opts['limit']:
marker = "%s@%s" % (resources[-1].id, resources[-1].revision)
set_resp_link_hdr(marker, kwargs, pagination_opts)
return resources
except indexer.IndexerException as e:
abort(400, six.text_type(e))
def etag_precondition_check(obj):
etag, lastmodified = obj.etag, obj.lastmodified
# NOTE(sileht): Checks and order come from rfc7232
    # in webob, the '*' and the absence of the header are handled by
    # if_match.__contains__() and if_none_match.__contains__()
    # and are identical...
if etag not in pecan.request.if_match:
abort(412)
elif (not pecan.request.environ.get("HTTP_IF_MATCH")
and pecan.request.if_unmodified_since
and pecan.request.if_unmodified_since < lastmodified):
abort(412)
if etag in pecan.request.if_none_match:
if pecan.request.method in ['GET', 'HEAD']:
abort(304)
else:
abort(412)
elif (not pecan.request.environ.get("HTTP_IF_NONE_MATCH")
and pecan.request.if_modified_since
and (pecan.request.if_modified_since >=
lastmodified)
and pecan.request.method in ['GET', 'HEAD']):
abort(304)
def etag_set_headers(obj):
pecan.response.etag = obj.etag
pecan.response.last_modified = obj.lastmodified
def AttributesPath(value):
if value.startswith("/attributes"):
return value
raise ValueError("Only attributes can be modified")
ResourceTypeJsonPatchSchema = voluptuous.Schema([{
"op": voluptuous.Any("add", "remove"),
"path": AttributesPath,
voluptuous.Optional("value"): dict,
}])
class ResourceTypeController(rest.RestController):
def __init__(self, name):
self._name = name
@pecan.expose('json')
def get(self):
try:
rt = pecan.request.indexer.get_resource_type(self._name)
except indexer.NoSuchResourceType as e:
abort(404, six.text_type(e))
enforce("get resource type", rt)
return rt
@pecan.expose('json')
def patch(self):
# NOTE(sileht): should we check for "application/json-patch+json"
# Content-Type ?
try:
rt = pecan.request.indexer.get_resource_type(self._name)
except indexer.NoSuchResourceType as e:
abort(404, six.text_type(e))
enforce("update resource type", rt)
# Ensure this is a valid jsonpatch dict
patch = deserialize_and_validate(
ResourceTypeJsonPatchSchema,
expected_content_types=["application/json-patch+json"])
# Add new attributes to the resource type
rt_json_current = rt.jsonify()
try:
rt_json_next = jsonpatch.apply_patch(rt_json_current, patch)
except jsonpatch.JsonPatchException as e:
abort(400, six.text_type(e))
del rt_json_next['state']
# Validate that the whole new resource_type is valid
schema = pecan.request.indexer.get_resource_type_schema()
try:
rt_json_next = voluptuous.Schema(schema.for_update, required=True)(
rt_json_next)
except voluptuous.Error as e:
abort(400, "Invalid input: %s" % e)
# Get only newly formatted and deleted attributes
add_attrs = {k: v for k, v in rt_json_next["attributes"].items()
if k not in rt_json_current["attributes"]}
del_attrs = [k for k in rt_json_current["attributes"]
if k not in rt_json_next["attributes"]]
update_attrs = self.retrieve_update_attrs(rt_json_current,
rt_json_next)
if update_attrs:
LOG.debug("Updating attributes [%s] for resource-type [%s]",
update_attrs, self._name)
if not add_attrs and not del_attrs and not update_attrs:
# NOTE(sileht): just returns the resource, the asked changes
# just do nothing
return rt
try:
add_attrs = schema.attributes_from_dict(add_attrs)
update_attrs = self.create_update_attrs(schema, update_attrs)
except resource_type.InvalidResourceAttribute as e:
abort(400, "Invalid input: %s" % e)
try:
return pecan.request.indexer.update_resource_type(
self._name, add_attributes=add_attrs,
del_attributes=del_attrs, update_attributes=update_attrs)
except indexer.NoSuchResourceType as e:
abort(400, six.text_type(e))
def create_update_attrs(self, schema, update_attrs):
new_attrs = dict(map(lambda entry: (entry[0], entry[1][1]),
update_attrs.items()))
old_attrs = dict(map(lambda entry: (entry[0], entry[1][0]),
update_attrs.items()))
update_attrs_new = schema.attributes_from_dict(new_attrs)
update_attrs_new.sort(key=lambda attr: attr.name)
update_attrs_old = schema.attributes_from_dict(old_attrs)
update_attrs_old.sort(key=lambda attr: attr.name)
update_attrs = []
for i in range(len(update_attrs_new)):
update_attrs.append((update_attrs_new[i],
update_attrs_old[i]))
return update_attrs
def retrieve_update_attrs(self, rt_json_current, rt_json_next):
update_attrs = {}
for k, v in rt_json_current["attributes"].items():
if k in rt_json_next["attributes"]:
self.validate_types(k, rt_json_next, v)
should_be_updated = False
for kc, vc in v.items():
if vc != rt_json_next["attributes"][k][kc]:
should_be_updated = True
break
if should_be_updated:
update_attrs[k] = (v, rt_json_next["attributes"][k])
return update_attrs
def validate_types(self, attribute, new_json, old_json):
old_type = old_json['type']
new_type = new_json["attributes"][attribute]['type']
if new_type != old_type:
msg = "Type update is not available yet. Changing %s to %s " \
"for attribute %s of resource %s" % (old_type, new_type,
attribute, self._name)
abort(400, msg)
@pecan.expose('json')
def delete(self):
try:
pecan.request.indexer.get_resource_type(self._name)
except indexer.NoSuchResourceType as e:
abort(404, six.text_type(e))
enforce("delete resource type", resource_type)
try:
pecan.request.indexer.delete_resource_type(self._name)
except (indexer.NoSuchResourceType,
indexer.ResourceTypeInUse) as e:
abort(400, six.text_type(e))
class ResourceTypesController(rest.RestController):
@pecan.expose()
def _lookup(self, name, *remainder):
return ResourceTypeController(name), remainder
@pecan.expose('json')
def post(self):
schema = pecan.request.indexer.get_resource_type_schema()
body = deserialize_and_validate(schema)
body["state"] = "creating"
try:
rt = schema.resource_type_from_dict(**body)
except resource_type.InvalidResourceAttribute as e:
abort(400, "Invalid input: %s" % e)
enforce("create resource type", body)
try:
rt = pecan.request.indexer.create_resource_type(rt)
except indexer.ResourceTypeAlreadyExists as e:
abort(409, six.text_type(e))
set_resp_location_hdr("/resource_type/" + rt.name)
pecan.response.status = 201
return rt
@pecan.expose('json')
def get_all(self, **kwargs):
enforce("list resource type", {})
try:
return pecan.request.indexer.list_resource_types()
except indexer.IndexerException as e:
abort(400, six.text_type(e))
def ResourceSchema(schema):
base_schema = {
voluptuous.Optional('started_at'): utils.to_datetime,
voluptuous.Optional('ended_at'): utils.to_datetime,
voluptuous.Optional('user_id'): voluptuous.Any(None, six.text_type),
voluptuous.Optional('project_id'): voluptuous.Any(None, six.text_type),
voluptuous.Optional('metrics'): MetricsSchema,
}
base_schema.update(schema)
return base_schema
class ResourceController(rest.RestController):
def __init__(self, resource_type, id):
self._resource_type = resource_type
creator = pecan.request.auth_helper.get_current_user(
pecan.request)
try:
self.id = utils.ResourceUUID(id, creator)
except ValueError:
abort(404, six.text_type(indexer.NoSuchResource(id)))
self.metric = NamedMetricController(str(self.id), self._resource_type)
self.history = ResourceHistoryController(str(self.id),
self._resource_type)
@pecan.expose('json')
def get(self):
resource = pecan.request.indexer.get_resource(
self._resource_type, self.id, with_metrics=True)
if resource:
enforce("get resource", resource)
etag_precondition_check(resource)
etag_set_headers(resource)
return resource
abort(404, six.text_type(indexer.NoSuchResource(self.id)))
@pecan.expose('json')
def patch(self):
resource = pecan.request.indexer.get_resource(
self._resource_type, self.id, with_metrics=True)
if not resource:
abort(404, six.text_type(indexer.NoSuchResource(self.id)))
enforce("update resource", resource)
etag_precondition_check(resource)
body = deserialize_and_validate(
schema_for(self._resource_type),
required=False)
if len(body) == 0:
etag_set_headers(resource)
return resource
for k, v in six.iteritems(body):
if k != 'metrics' and getattr(resource, k) != v:
create_revision = True
break
else:
if 'metrics' not in body:
# No need to go further, we assume the db resource
# doesn't change between the get and update
return resource
create_revision = False
try:
resource = pecan.request.indexer.update_resource(
self._resource_type,
self.id,
create_revision=create_revision,
**body)
except (indexer.NoSuchMetric,
indexer.NoSuchArchivePolicy,
ValueError) as e:
abort(400, six.text_type(e))
except indexer.NoSuchResource as e:
abort(404, six.text_type(e))
etag_set_headers(resource)
return resource
@pecan.expose('json')
def delete(self):
resource = pecan.request.indexer.get_resource(
self._resource_type, self.id)
if not resource:
abort(404, six.text_type(indexer.NoSuchResource(self.id)))
enforce("delete resource", resource)
etag_precondition_check(resource)
try:
pecan.request.indexer.delete_resource(self.id)
except indexer.NoSuchResource as e:
abort(404, six.text_type(e))
def schema_for(resource_type):
resource_type = pecan.request.indexer.get_resource_type(resource_type)
return ResourceSchema(resource_type.schema)
def ResourceUUID(value, creator):
try:
return utils.ResourceUUID(value, creator)
except ValueError as e:
raise voluptuous.Invalid(e)
def ResourceID(value, creator):
"""Convert value to a resource ID.
:return: A tuple (original_resource_id, resource_id)
"""
return (six.text_type(value), ResourceUUID(value, creator))
class ResourcesController(rest.RestController):
def __init__(self, resource_type):
self._resource_type = resource_type
@pecan.expose()
def _lookup(self, id, *remainder):
return ResourceController(self._resource_type, id), remainder
@pecan.expose('json')
def post(self):
        # NOTE(sileht): we need to copy the dict because we modify it below,
        # and we don't want later calls to see the extra "id" key
schema = dict(schema_for(self._resource_type))
creator = pecan.request.auth_helper.get_current_user(
pecan.request)
schema["id"] = functools.partial(ResourceID, creator=creator)
body = deserialize_and_validate(schema)
body["original_resource_id"], body["id"] = body["id"]
target = {
"resource_type": self._resource_type,
}
target.update(body)
enforce("create resource", target)
rid = body['id']
del body['id']
try:
resource = pecan.request.indexer.create_resource(
self._resource_type, rid, creator,
**body)
except (ValueError,
indexer.NoSuchMetric,
indexer.NoSuchArchivePolicy) as e:
abort(400, six.text_type(e))
except indexer.ResourceAlreadyExists as e:
abort(409, six.text_type(e))
set_resp_location_hdr("/resource/"
+ self._resource_type + "/"
+ six.text_type(resource.id))
etag_set_headers(resource)
pecan.response.status = 201
return resource
@pecan.expose('json')
def get_all(self, **kwargs):
details = get_bool_param('details', kwargs)
history = get_bool_param('history', kwargs)
pagination_opts = get_pagination_options(
kwargs, RESOURCE_DEFAULT_PAGINATION)
json_attrs = arg_to_list(kwargs.get('attrs', None))
policy_filter = pecan.request.auth_helper.get_resource_policy_filter(
pecan.request, "list resource", self._resource_type)
try:
# FIXME(sileht): next API version should returns
# {'resources': [...], 'links': [ ... pagination rel ...]}
resources = pecan.request.indexer.list_resources(
self._resource_type,
attribute_filter=policy_filter,
details=details,
history=history,
**pagination_opts
)
if resources and len(resources) >= pagination_opts['limit']:
if history:
marker = "%s@%s" % (resources[-1].id,
resources[-1].revision)
else:
marker = str(resources[-1].id)
set_resp_link_hdr(marker, kwargs, pagination_opts)
return [r.jsonify(json_attrs) for r in resources]
except indexer.IndexerException as e:
abort(400, six.text_type(e))
@pecan.expose('json')
def delete(self, **kwargs):
# NOTE(sileht): Don't allow empty filter, this is going to delete
# the entire database.
if pecan.request.body:
attr_filter = deserialize_and_validate(ResourceSearchSchema)
elif kwargs.get("filter"):
attr_filter = QueryStringSearchAttrFilter.parse(kwargs["filter"])
else:
attr_filter = None
        # The voluptuous schema validates the filter contents, but an empty
        # filter still has to be rejected explicitly here.
if not attr_filter:
abort(400, "caution: the query can not be empty, or it will \
delete entire database")
policy_filter = pecan.request.auth_helper.get_resource_policy_filter(
pecan.request,
"delete resources", self._resource_type)
if policy_filter:
attr_filter = {"and": [policy_filter, attr_filter]}
try:
delete_num = pecan.request.indexer.delete_resources(
self._resource_type, attribute_filter=attr_filter)
except indexer.IndexerException as e:
abort(400, six.text_type(e))
return {"deleted": delete_num}
class ResourcesByTypeController(rest.RestController):
@pecan.expose('json')
def get_all(self):
return dict(
(rt.name,
pecan.request.application_url + '/resource/' + rt.name)
for rt in pecan.request.indexer.list_resource_types())
@pecan.expose()
def _lookup(self, resource_type, *remainder):
try:
pecan.request.indexer.get_resource_type(resource_type)
except indexer.NoSuchResourceType as e:
abort(404, six.text_type(e))
return ResourcesController(resource_type), remainder
class QueryStringSearchAttrFilter(object):
uninary_operators = ("not", )
binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq",
u"ne", u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠",
u"≥", u"≤")
multiple_operators = (u"and", u"or", u"∧", u"∨")
operator = pyparsing.Regex(u"|".join(binary_operator))
null = pyparsing.Regex("None|none|null").setParseAction(
pyparsing.replaceWith(None))
boolean = "False|True|false|true"
boolean = pyparsing.Regex(boolean).setParseAction(
lambda t: t[0].lower() == "true")
hex_string = lambda n: pyparsing.Word(pyparsing.hexnums, exact=n)
uuid_string = pyparsing.Combine(
hex_string(8) + (pyparsing.Optional("-") + hex_string(4)) * 3 +
pyparsing.Optional("-") + hex_string(12))
    number = r"[+-]?\d+(?:\.\d*)?(?:[eE][+-]?\d+)?"
number = pyparsing.Regex(number).setParseAction(lambda t: float(t[0]))
identifier = pyparsing.Word(pyparsing.alphas, pyparsing.alphanums + "_")
quoted_string = pyparsing.QuotedString('"') | pyparsing.QuotedString("'")
comparison_term = pyparsing.Forward()
in_list = pyparsing.Group(
pyparsing.Suppress('[') +
pyparsing.Optional(pyparsing.delimitedList(comparison_term)) +
pyparsing.Suppress(']'))("list")
comparison_term << (null | boolean | uuid_string | identifier | number |
quoted_string | in_list)
condition = pyparsing.Group(comparison_term + operator + comparison_term)
expr = pyparsing.infixNotation(condition, [
("not", 1, pyparsing.opAssoc.RIGHT, ),
("and", 2, pyparsing.opAssoc.LEFT, ),
("∧", 2, pyparsing.opAssoc.LEFT, ),
("or", 2, pyparsing.opAssoc.LEFT, ),
("∨", 2, pyparsing.opAssoc.LEFT, ),
])
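    # Illustrative example (assumption, not from the original source): a
    # filter string such as  flavor_id!=2 and server_group=foo  parses and
    # is converted by _parsed_query2dict below into roughly
    # {"and": [{"!=": {"flavor_id": 2.0}}, {"=": {"server_group": "foo"}}]}
    # (the ordering of operands inside the list may differ).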
@classmethod
def _parsed_query2dict(cls, parsed_query):
result = None
while parsed_query:
part = parsed_query.pop()
if part in cls.binary_operator:
result = {part: {parsed_query.pop(): result}}
elif part in cls.multiple_operators:
if result.get(part):
result[part].append(
cls._parsed_query2dict(parsed_query.pop()))
else:
result = {part: [result]}
elif part in cls.uninary_operators:
result = {part: result}
elif isinstance(part, pyparsing.ParseResults):
kind = part.getName()
if kind == "list":
res = part.asList()
else:
res = cls._parsed_query2dict(part)
if result is None:
result = res
elif isinstance(result, dict):
list(result.values())[0].append(res)
else:
result = part
return result
@classmethod
def _parse(cls, query):
try:
parsed_query = cls.expr.parseString(query, parseAll=True)[0]
except pyparsing.ParseException as e:
raise abort(400, "Invalid filter: %s" % str(e))
return cls._parsed_query2dict(parsed_query)
@classmethod
def parse(cls, query):
attr_filter = cls._parse(query)
return validate(ResourceSearchSchema, attr_filter, required=True)
def ResourceSearchSchema(v):
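    """Helper to indirect the recursivity of the search schema."""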
return _ResourceSearchSchema()(v)
# NOTE(sileht): the indexer will cast this value to the real attribute
# type; here we just want to be sure it is not a dict or a list
ResourceSearchSchemaAttributeValue = voluptuous.Any(
six.text_type, float, int, bool, None)
NotIDKey = voluptuous.All(six.text_type, voluptuous.NotIn(["id"]))
def _ResourceSearchSchema():
user = pecan.request.auth_helper.get_current_user(
pecan.request)
_ResourceUUID = functools.partial(ResourceUUID, creator=user)
return voluptuous.Schema(
voluptuous.All(
voluptuous.Length(min=0, max=1),
{
voluptuous.Any(
u"=", u"==", u"eq",
u"<", u"lt",
u">", u"gt",
u"<=", u"≤", u"le",
u">=", u"≥", u"ge",
u"!=", u"≠", u"ne",
): voluptuous.All(
voluptuous.Length(min=1, max=1),
{"id": _ResourceUUID,
NotIDKey: ResourceSearchSchemaAttributeValue},
),
u"like": voluptuous.All(
voluptuous.Length(min=1, max=1),
{NotIDKey: ResourceSearchSchemaAttributeValue},
),
u"in": voluptuous.All(
voluptuous.Length(min=1, max=1),
{"id": voluptuous.All(
[_ResourceUUID],
voluptuous.Length(min=1)),
NotIDKey: voluptuous.All(
[ResourceSearchSchemaAttributeValue],
voluptuous.Length(min=1))}
),
voluptuous.Any(
u"and", u"∨",
u"or", u"∧",
): voluptuous.All(
[ResourceSearchSchema], voluptuous.Length(min=1)
),
u"not": ResourceSearchSchema,
}
)
)
class SearchResourceTypeController(rest.RestController):
def __init__(self, resource_type):
self._resource_type = resource_type
def _search(self, **kwargs):
if pecan.request.body:
attr_filter = deserialize_and_validate(ResourceSearchSchema)
elif kwargs.get("filter"):
attr_filter = QueryStringSearchAttrFilter.parse(kwargs["filter"])
else:
attr_filter = None
details = get_bool_param('details', kwargs)
history = get_bool_param('history', kwargs)
pagination_opts = get_pagination_options(
kwargs, RESOURCE_DEFAULT_PAGINATION)
policy_filter = pecan.request.auth_helper.get_resource_policy_filter(
pecan.request, "search resource", self._resource_type)
if policy_filter:
if attr_filter:
attr_filter = {"and": [
policy_filter,
attr_filter
]}
else:
attr_filter = policy_filter
resources = pecan.request.indexer.list_resources(
self._resource_type,
attribute_filter=attr_filter,
details=details,
history=history,
**pagination_opts)
if resources and len(resources) >= pagination_opts['limit']:
if history:
marker = "%s@%s" % (resources[-1].id,
resources[-1].revision)
else:
marker = str(resources[-1].id)
set_resp_link_hdr(marker, kwargs, pagination_opts)
return resources
@pecan.expose('json')
def post(self, **kwargs):
json_attrs = arg_to_list(kwargs.get('attrs', None))
try:
return [r.jsonify(json_attrs) for r in self._search(**kwargs)]
except indexer.IndexerException as e:
abort(400, six.text_type(e))
class SearchResourceController(rest.RestController):
@pecan.expose()
def _lookup(self, resource_type, *remainder):
try:
pecan.request.indexer.get_resource_type(resource_type)
except indexer.NoSuchResourceType as e:
abort(404, six.text_type(e))
return SearchResourceTypeController(resource_type), remainder
def _MetricSearchSchema(v):
"""Helper method to indirect the recursivity of the search schema"""
return SearchMetricController.MetricSearchSchema(v)
def _MetricSearchOperationSchema(v):
"""Helper method to indirect the recursivity of the search schema"""
return SearchMetricController.MetricSearchOperationSchema(v)
class SearchMetricController(rest.RestController):
MetricSearchOperationSchema = voluptuous.Schema(
voluptuous.All(
voluptuous.Length(min=1, max=1),
{
voluptuous.Any(
u"=", u"==", u"eq",
u"<", u"lt",
u">", u"gt",
u"<=", u"≤", u"le",
u">=", u"≥", u"ge",
u"!=", u"≠", u"ne",
u"%", u"mod",
u"+", u"add",
u"-", u"sub",
u"*", u"×", u"mul",
u"/", u"÷", u"div",
u"**", u"^", u"pow",
): voluptuous.Any(
float, int,
voluptuous.All(
[float, int,
voluptuous.Any(_MetricSearchOperationSchema)],
voluptuous.Length(min=2, max=2),
),
),
},
)
)
MetricSearchSchema = voluptuous.Schema(
voluptuous.Any(
MetricSearchOperationSchema,
voluptuous.All(
voluptuous.Length(min=1, max=1),
{
voluptuous.Any(
u"and", u"∨",
u"or", u"∧",
u"not",
): [_MetricSearchSchema],
}
)
)
)
class MeasureQuery(object):
binary_operators = {
u"=": operator.eq,
u"==": operator.eq,
u"eq": operator.eq,
u"<": operator.lt,
u"lt": operator.lt,
u">": operator.gt,
u"gt": operator.gt,
u"<=": operator.le,
u"≤": operator.le,
u"le": operator.le,
u">=": operator.ge,
u"≥": operator.ge,
u"ge": operator.ge,
u"!=": operator.ne,
u"≠": operator.ne,
u"ne": operator.ne,
u"%": operator.mod,
u"mod": operator.mod,
u"+": operator.add,
u"add": operator.add,
u"-": operator.sub,
u"sub": operator.sub,
u"*": operator.mul,
u"×": operator.mul,
u"mul": operator.mul,
u"/": operator.truediv,
u"÷": operator.truediv,
u"div": operator.truediv,
u"**": operator.pow,
u"^": operator.pow,
u"pow": operator.pow,
}
multiple_operators = {
u"or": any,
u"∨": any,
u"and": all,
u"∧": all,
}
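        # Illustrative examples (not from the original source):
        #   MeasureQuery({"and": [{">=": 10}, {"<=": 20}]})(15) -> True
        #   MeasureQuery({">": [{"%": 5}, 2]})(8) -> True, i.e. 8 % 5 > 2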
def __init__(self, tree):
self._eval = self.build_evaluator(tree)
def __call__(self, value):
return self._eval(value)
def build_evaluator(self, tree):
try:
operator, nodes = list(tree.items())[0]
except Exception:
return lambda value: tree
try:
op = self.multiple_operators[operator]
except KeyError:
try:
op = self.binary_operators[operator]
except KeyError:
raise self.InvalidQuery("Unknown operator %s" % operator)
return self._handle_binary_op(op, nodes)
return self._handle_multiple_op(op, nodes)
def _handle_multiple_op(self, op, nodes):
elements = [self.build_evaluator(node) for node in nodes]
return lambda value: op((e(value) for e in elements))
def _handle_binary_op(self, op, node):
try:
iterator = iter(node)
except Exception:
return lambda value: op(value, node)
nodes = list(iterator)
if len(nodes) != 2:
raise self.InvalidQuery(
"Binary operator %s needs 2 arguments, %d given" %
(op, len(nodes)))
node0 = self.build_evaluator(node[0])
node1 = self.build_evaluator(node[1])
return lambda value: op(node0(value), node1(value))
class InvalidQuery(Exception):
pass
@pecan.expose('json')
def post(self, metric_id, start=None, stop=None, aggregation='mean',
granularity=None):
metrics = pecan.request.indexer.list_metrics(
attribute_filter={"in": {"id": arg_to_list(metric_id)}})
for metric in metrics:
enforce("search metric", metric)
if not pecan.request.body:
abort(400, "No query specified in body")
query = deserialize_and_validate(self.MetricSearchSchema)
if start is not None:
try:
start = utils.to_timestamp(start)
except Exception:
abort(400, "Invalid value for start")
if stop is not None:
try:
stop = utils.to_timestamp(stop)
except Exception:
abort(400, "Invalid value for stop")
try:
predicate = self.MeasureQuery(query)
except self.MeasureQuery.InvalidQuery as e:
abort(400, six.text_type(e))
if granularity is not None:
granularity = sorted(
map(utils.to_timespan, arg_to_list(granularity)),
reverse=True)
metrics_and_aggregations = collections.defaultdict(list)
for metric in metrics:
if granularity is None:
granularity = sorted((
d.granularity
for d in metric.archive_policy.definition),
reverse=True)
for gr in granularity:
agg = metric.archive_policy.get_aggregation(
aggregation, gr)
if agg is None:
abort(400,
storage.AggregationDoesNotExist(
metric, aggregation, gr))
metrics_and_aggregations[metric].append(agg)
try:
timeseries = pecan.request.storage.get_aggregated_measures(
metrics_and_aggregations, start, stop)
except storage.MetricDoesNotExist as e:
# This can happen if all the metrics have been created but one
# doesn't have any measures yet.
abort(400, e)
return {
str(metric.id): [
(timestamp, aggregation.granularity, value)
for aggregation, ts in six.iteritems(aggregations_and_ts)
for timestamp, value in ts
if predicate(value)
]
for metric, aggregations_and_ts in six.iteritems(timeseries)
}
class ResourcesMetricsMeasuresBatchController(rest.RestController):
@staticmethod
def BackwardCompatibleMeasuresList(v):
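        """Accept both the current dict payload and the legacy bare list.
        Either {"archive_policy_name": ..., "unit": ..., "measures": [...]}
        (all keys except "measures" optional) or, in the old format, a bare
        list of measures which is wrapped into {"measures": [...]}.
        """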
v = voluptuous.Schema(
voluptuous.Any(MeasuresListSchema,
{voluptuous.Optional("archive_policy_name"):
six.text_type,
voluptuous.Optional("unit"):
six.text_type,
"measures": MeasuresListSchema}),
required=True)(v)
if isinstance(v, dict):
return v
else:
# Old format
return {"measures": v}
@pecan.expose('json')
def post(self, create_metrics=False):
creator = pecan.request.auth_helper.get_current_user(
pecan.request)
MeasuresBatchSchema = voluptuous.Schema(
{functools.partial(ResourceID, creator=creator):
{six.text_type: self.BackwardCompatibleMeasuresList}})
body = deserialize_and_validate(MeasuresBatchSchema)
known_metrics = []
unknown_metrics = []
unknown_resources = []
body_by_rid = {}
attribute_filter = {"or": []}
for original_resource_id, resource_id in body:
names = list(body[(original_resource_id, resource_id)].keys())
if names:
attribute_filter["or"].append({"and": [
{"=": {"resource_id": resource_id}},
{"in": {"name": names}}]})
if not attribute_filter["or"]:
pecan.response.status = 202
return
all_metrics = collections.defaultdict(list)
for metric in pecan.request.indexer.list_metrics(
attribute_filter=attribute_filter):
all_metrics[metric.resource_id].append(metric)
for original_resource_id, resource_id in body:
r = body[(original_resource_id, resource_id)]
body_by_rid[resource_id] = r
names = list(r.keys())
metrics = all_metrics[resource_id]
known_names = [m.name for m in metrics]
if strtobool("create_metrics", create_metrics):
already_exists_names = []
for name in names:
if name not in known_names:
metric_data = {"name": name}
for attr in ["archive_policy_name", "unit"]:
if attr in r[name]:
metric_data[attr] = r[name][attr]
metric = MetricsController.MetricSchema(metric_data)
try:
m = pecan.request.indexer.create_metric(
uuid.uuid4(),
creator=creator,
resource_id=resource_id,
name=metric.get('name'),
unit=metric.get('unit'),
archive_policy_name=metric[
'archive_policy_name'])
except indexer.NamedMetricAlreadyExists as e:
already_exists_names.append(e.metric_name)
except indexer.NoSuchResource:
unknown_resources.append({
'resource_id': six.text_type(resource_id),
'original_resource_id': original_resource_id})
break
except indexer.IndexerException as e:
                            # This catches NoSuchArchivePolicy, which is
                            # unlikely but still possible
abort(400, six.text_type(e))
else:
known_metrics.append(m)
if already_exists_names:
# Add metrics created in the meantime
known_names.extend(already_exists_names)
known_metrics.extend(
pecan.request.indexer.list_metrics(
attribute_filter={"and": [
{"=": {"resource_id": resource_id}},
{"in": {"name": already_exists_names}},
]}))
elif len(names) != len(metrics):
unknown_metrics.extend(
["%s/%s" % (six.text_type(resource_id), m)
for m in names if m not in known_names])
known_metrics.extend(metrics)
if unknown_resources:
abort(400, {"cause": "Unknown resources",
"detail": unknown_resources})
if unknown_metrics:
abort(400, "Unknown metrics: %s" % ", ".join(
sorted(unknown_metrics)))
for metric in known_metrics:
enforce("post measures", metric)
pecan.request.incoming.add_measures_batch(
dict((metric.id,
body_by_rid[metric.resource_id][metric.name]["measures"])
for metric in known_metrics))
pecan.response.status = 202
class MetricsMeasuresBatchController(rest.RestController):
    # NOTE(sileht): we don't allow mixing both formats, so that we don't
    # have to deal with id collisions that can occur between a metric_id
    # and a resource_id. While JSON allows duplicate keys in a dict
    # payload, only the last key is retained by the Python json module
    # when building the dict.
MeasuresBatchSchema = voluptuous.Schema(
{utils.UUID: MeasuresListSchema}
)
@pecan.expose("json")
def post(self):
body = deserialize_and_validate(self.MeasuresBatchSchema)
metrics = pecan.request.indexer.list_metrics(
attribute_filter={"in": {"id": list(body.keys())}})
if len(metrics) != len(body):
missing_metrics = sorted(set(body) - set(m.id for m in metrics))
abort(400, "Unknown metrics: %s" % ", ".join(
six.moves.map(str, missing_metrics)))
for metric in metrics:
enforce("post measures", metric)
pecan.request.incoming.add_measures_batch(
dict((metric.id, body[metric.id]) for metric in
metrics))
pecan.response.status = 202
class SearchController(object):
resource = SearchResourceController()
metric = SearchMetricController()
class AggregationResourceController(rest.RestController):
def __init__(self, resource_type, metric_name):
self.resource_type = resource_type
self.metric_name = metric_name
@pecan.expose('json')
def post(self, start=None, stop=None, aggregation='mean',
reaggregation=None, granularity=None, needed_overlap=100.0,
groupby=None, fill=None, refresh=False, resample=None,
**kwargs):
# First, set groupby in the right format: a sorted list of unique
# strings.
groupby = sorted(set(arg_to_list(groupby)))
# NOTE(jd) Sort by groupby so we are sure we do not return multiple
# groups when using itertools.groupby later.
try:
resources = SearchResourceTypeController(
self.resource_type)._search(sort=groupby,
filter=kwargs.get("filter"))
except indexer.InvalidPagination:
abort(400, "Invalid groupby attribute")
except indexer.IndexerException as e:
abort(400, six.text_type(e))
if resources is None:
return []
if not groupby:
metrics = list(filter(None,
(r.get_metric(self.metric_name)
for r in resources)))
return AggregationController.get_cross_metric_measures_from_objs(
metrics, start, stop, aggregation, reaggregation,
granularity, needed_overlap, fill, refresh, resample)
def groupper(r):
return tuple((attr, r[attr]) for attr in groupby)
results = []
for key, resources in itertools.groupby(resources, groupper):
metrics = list(filter(None,
(r.get_metric(self.metric_name)
for r in resources)))
results.append({
"group": dict(key),
"measures": AggregationController.get_cross_metric_measures_from_objs( # noqa
metrics, start, stop, aggregation, reaggregation,
granularity, needed_overlap, fill, refresh, resample)
})
return results
FillSchema = voluptuous.Schema(
voluptuous.Any(voluptuous.Coerce(float), "null", "dropna",
msg="Must be a float, 'dropna' or 'null'"))
def validate_qs(start=None, stop=None, granularity=None,
needed_overlap=None, fill=None):
if needed_overlap is not None:
try:
needed_overlap = float(needed_overlap)
except ValueError:
abort(400, {"cause": "Argument value error",
"detail": "needed_overlap",
"reason": "Must be a number"})
if start is not None:
try:
start = utils.to_timestamp(start)
except Exception:
abort(400, {"cause": "Argument value error",
"detail": "start",
"reason": "Must be a datetime or a timestamp"})
if stop is not None:
try:
stop = utils.to_timestamp(stop)
except Exception:
abort(400, {"cause": "Argument value error",
"detail": "stop",
"reason": "Must be a datetime or a timestamp"})
if granularity is not None:
try:
granularity = [utils.to_timespan(granularity)]
except ValueError as e:
abort(400, {"cause": "Argument value error",
"detail": "granularity",
"reason": six.text_type(e)})
if fill is not None:
try:
fill = FillSchema(fill)
except voluptuous.Error as e:
abort(400, {"cause": "Argument value error",
"detail": "fill",
"reason": str(e)})
return start, stop, granularity, needed_overlap, fill
class AggregationController(rest.RestController):
_custom_actions = {
'metric': ['POST', 'GET'],
}
@pecan.expose()
def _lookup(self, object_type, resource_type, key, metric_name,
*remainder):
if object_type != "resource" or key != "metric":
# NOTE(sileht): we want the raw 404 message here
            # so use pecan directly
pecan.abort(404)
try:
pecan.request.indexer.get_resource_type(resource_type)
except indexer.NoSuchResourceType as e:
abort(404, six.text_type(e))
return AggregationResourceController(resource_type,
metric_name), remainder
@staticmethod
def get_cross_metric_measures_from_objs(metrics, start=None, stop=None,
aggregation='mean',
reaggregation=None,
granularity=None,
needed_overlap=100.0, fill=None,
refresh=False, resample=None):
start, stop, granularity, needed_overlap, fill = validate_qs(
start, stop, granularity, needed_overlap, fill)
if reaggregation is None:
reaggregation = aggregation
for metric in metrics:
enforce("get metric", metric)
number_of_metrics = len(metrics)
if number_of_metrics == 0:
return []
if resample:
if not granularity:
abort(400, 'A granularity must be specified to resample')
try:
resample = (resample if calendar.GROUPINGS.get(resample) else
utils.to_timespan(resample))
except ValueError as e:
abort(400, six.text_type(e))
if granularity is None:
granularities = (
definition.granularity
for m in metrics
for definition in m.archive_policy.definition
)
            # keep only the granularities common to every metric
granularity = [
g
for g, occurrence in six.iteritems(
collections.Counter(granularities))
if occurrence == len(metrics)
]
if not granularity:
abort(400, exceptions.UnAggregableTimeseries(
list((metric.id, aggregation)
for metric in metrics),
'No granularity match'))
aggregations = set()
for metric in metrics:
for g in granularity:
agg = metric.archive_policy.get_aggregation(
aggregation, g)
if agg is None:
abort(404, six.text_type(
storage.AggregationDoesNotExist(metric, aggregation, g)
))
aggregations.add(agg)
aggregations = sorted(aggregations, key=ATTRGETTER_GRANULARITY,
reverse=True)
operations = ["aggregate", reaggregation, []]
if resample:
operations[2].extend(
["resample", aggregation, resample,
["metric"] + [[str(m.id), aggregation]
for m in metrics]]
)
else:
operations[2].extend(
["metric"] + [[str(m.id), aggregation]
for m in metrics]
)
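        # Resulting structure (illustrative): ["aggregate", <reaggregation>,
        #   ["metric", [<metric id>, <aggregation>], ...]], with an optional
        #   "resample" wrapper around the "metric" node when resample is set.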
try:
if strtobool("refresh", refresh):
metrics_to_update = [
m for m in metrics
if pecan.request.incoming.has_unprocessed(m.id)]
for m in metrics_to_update:
try:
pecan.request.chef.refresh_metrics(
[m], pecan.request.conf.api.operation_timeout)
except chef.SackAlreadyLocked:
abort(503, 'Unable to refresh metric: %s. '
'Metric is locked. '
'Please try again.' % m.id)
if number_of_metrics == 1:
# NOTE(sileht): don't do the aggregation if we only have one
# metric
metric = metrics[0]
if (aggregation
not in metric.archive_policy.aggregation_methods):
abort(404, {
"cause":
"Aggregation method does not exist for this metric",
"detail": {
"metric": str(metric.id),
"aggregation_method": aggregation,
},
})
try:
results = pecan.request.storage.get_aggregated_measures(
{metric: aggregations}, start, stop, resample)[metric]
return [(timestamp, results[key].aggregation.granularity,
value)
for key in sorted(results.keys(),
reverse=True)
for timestamp, value in results[key]]
except storage.MetricDoesNotExist:
return []
return processor.get_measures(
pecan.request.storage,
[processor.MetricReference(m, aggregation) for m in metrics],
operations, start, stop,
granularity, needed_overlap, fill)["aggregated"]
except exceptions.UnAggregableTimeseries as e:
abort(400, e)
except storage.AggregationDoesNotExist as e:
abort(404, six.text_type(e))
MetricIDsSchema = [utils.UUID]
@pecan.expose('json')
def get_metric(self, metric=None, start=None, stop=None,
aggregation='mean', reaggregation=None, granularity=None,
needed_overlap=100.0, fill=None,
refresh=False, resample=None):
if pecan.request.method == 'GET':
try:
metric_ids = voluptuous.Schema(
self.MetricIDsSchema, required=True)(arg_to_list(metric))
except voluptuous.Error as e:
abort(400, "Invalid input: %s" % e)
else:
self._workaround_pecan_issue_88()
metric_ids = deserialize_and_validate(self.MetricIDsSchema)
metric_ids = [six.text_type(m) for m in metric_ids]
# Check RBAC policy
metrics = pecan.request.indexer.list_metrics(
attribute_filter={"in": {"id": metric_ids}})
missing_metric_ids = (set(metric_ids)
- set(six.text_type(m.id) for m in metrics))
if missing_metric_ids:
            # Return one of the missing ones in the error
abort(404, six.text_type(storage.MetricDoesNotExist(
missing_metric_ids.pop())))
return self.get_cross_metric_measures_from_objs(
metrics, start, stop, aggregation, reaggregation,
granularity, needed_overlap, fill, refresh, resample)
post_metric = get_metric
def _workaround_pecan_issue_88(self):
# FIXME(sileht): https://github.com/pecan/pecan/pull/88
if pecan.request.path_info.startswith("/aggregation/resource"):
pecan.abort(405)
class CapabilityController(rest.RestController):
@staticmethod
@pecan.expose('json')
def get():
return dict(aggregation_methods=set(
archive_policy.ArchivePolicy.VALID_AGGREGATION_METHODS))
class StatusController(rest.RestController):
@staticmethod
@pecan.expose('json')
def get(details=True):
enforce("get status", {})
try:
members_req = pecan.request.coordinator.get_members(
metricd.MetricProcessor.GROUP_ID)
except tooz.NotImplemented:
members_req = None
try:
report = pecan.request.incoming.measures_report(
strtobool("details", details))
except incoming.ReportGenerationError:
abort(503, 'Unable to generate status. Please retry.')
report_dict = {"storage": {"summary": report['summary']}}
if 'details' in report:
report_dict["storage"]["measures_to_process"] = report['details']
report_dict['metricd'] = {}
if members_req:
members = members_req.get()
caps = [
pecan.request.coordinator.get_member_capabilities(
metricd.MetricProcessor.GROUP_ID, member)
for member in members
]
report_dict['metricd']['processors'] = [
member.decode() for member in members
]
members_data = {}
for member, cap in six.moves.zip(members, caps):
caps_data = {
six.ensure_str(k): v
for k, v in six.iteritems(cap.get())
}
members_data[member.decode()] = caps_data
report_dict['metricd']['statistics'] = members_data
else:
report_dict['metricd']['processors'] = None
report_dict['metricd']['statistics'] = {}
return report_dict
class MetricsBatchController(object):
measures = MetricsMeasuresBatchController()
class ResourcesMetricsBatchController(object):
measures = ResourcesMetricsMeasuresBatchController()
class ResourcesBatchController(object):
metrics = ResourcesMetricsBatchController()
class BatchController(object):
metrics = MetricsBatchController()
resources = ResourcesBatchController()
# Retry with exponential backoff for up to 1 minute
@tenacity.retry(
wait=tenacity.wait_exponential(multiplier=0.5, max=60),
retry=tenacity.retry_if_exception_type(
(indexer.NoSuchResource, indexer.ResourceAlreadyExists,
indexer.ResourceTypeAlreadyExists,
indexer.NamedMetricAlreadyExists)))
def get_or_create_resource_and_metrics(
creator, rid, original_resource_id, metric_names,
resource_attributes,
resource_type, resource_type_attributes=None):
try:
r = pecan.request.indexer.get_resource(resource_type, rid,
with_metrics=True)
except indexer.NoSuchResourceType:
if resource_type_attributes:
enforce("create resource type", {
'name': resource_type,
'state': 'creating',
'attributes': resource_type_attributes,
})
schema = pecan.request.indexer.get_resource_type_schema()
rt = schema.resource_type_from_dict(
resource_type, resource_type_attributes, 'creating')
pecan.request.indexer.create_resource_type(rt)
raise tenacity.TryAgain
else:
raise
except indexer.UnexpectedResourceTypeState as e:
# NOTE(sileht): Currently created by another thread
if not e.state.endswith("_error"):
raise tenacity.TryAgain
if r:
enforce("update resource", r)
exists_metric_names = [m.name for m in r.metrics]
metrics = MetricsSchema(dict(
(m, {}) for m in metric_names
if m not in exists_metric_names
))
if metrics:
return pecan.request.indexer.update_resource(
resource_type, rid,
metrics=metrics,
append_metrics=True,
create_revision=False
).metrics
else:
return r.metrics
else:
metrics = MetricsSchema(dict((m, {}) for m in metric_names))
target = {
"id": rid,
"resource_type": resource_type,
"creator": creator,
"original_resource_id": original_resource_id,
"metrics": metrics,
}
target.update(resource_attributes)
enforce("create resource", target)
        kwargs = resource_attributes  # no copy needed, not reused afterwards
kwargs['metrics'] = metrics
kwargs['original_resource_id'] = original_resource_id
try:
return pecan.request.indexer.create_resource(
resource_type, rid, creator, **kwargs
).metrics
except indexer.ResourceAlreadyExists as e:
            # NOTE(sileht): ensure the rid is not registered within another
# resource type.
r = pecan.request.indexer.get_resource('generic', rid)
if r.type != resource_type:
abort(409, e)
raise
class PrometheusWriteController(rest.RestController):
PROMETHEUS_RESOURCE_TYPE = {
"instance": {"type": "string",
"min_length": 1,
"max_length": 512,
"required": True},
"job": {"type": "string",
"min_length": 1,
"max_length": 512,
"required": True}
}
@pecan.expose()
def post(self):
buf = snappy.uncompress(pecan.request.body)
f = remote_pb2.WriteRequest()
f.ParseFromString(buf)
measures_by_rid = collections.defaultdict(dict)
for ts in f.timeseries:
attrs = dict((label.name, label.value) for label in ts.labels)
original_rid = (attrs.get("job", "none"),
attrs.get("instance", "none"))
name = attrs['__name__']
if ts.samples:
data = [{'timestamp': s.timestamp_ms / 1000.0,
'value': s.value} for s in ts.samples]
measures_by_rid[original_rid][name] = validate(
MeasuresListSchema, data)
creator = pecan.request.auth_helper.get_current_user(pecan.request)
measures_to_batch = {}
for (job, instance), measures in measures_by_rid.items():
original_rid = '%s@%s' % (job, instance)
rid = ResourceUUID(original_rid, creator=creator)
metric_names = list(measures.keys())
timeout = pecan.request.conf.api.operation_timeout
metrics = get_or_create_resource_and_metrics.retry_with(
stop=tenacity.stop_after_delay(timeout))(
creator, rid, original_rid, metric_names,
dict(job=job, instance=instance),
"prometheus", self.PROMETHEUS_RESOURCE_TYPE)
for metric in metrics:
enforce("post measures", metric)
measures_to_batch.update(
dict((metric.id, measures[metric.name]) for metric in
metrics if metric.name in measures))
pecan.request.incoming.add_measures_batch(measures_to_batch)
pecan.response.status = 202
class PrometheusController(object):
write = PrometheusWriteController()
class V1Controller(object):
def __init__(self):
# FIXME(sileht): split controllers to avoid lazy loading
from gnocchi.rest.aggregates import api as agg_api
from gnocchi.rest import influxdb
self.sub_controllers = {
"search": SearchController(),
"archive_policy": ArchivePoliciesController(),
"archive_policy_rule": ArchivePolicyRulesController(),
"metric": MetricsController(),
"batch": BatchController(),
"resource": ResourcesByTypeController(),
"resource_type": ResourceTypesController(),
"aggregation": AggregationController(),
"capabilities": CapabilityController(),
"status": StatusController(),
"aggregates": agg_api.AggregatesController(),
"influxdb": influxdb.InfluxDBController(),
}
for name, ctrl in self.sub_controllers.items():
setattr(self, name, ctrl)
if PROMETHEUS_SUPPORTED:
setattr(self, "prometheus", PrometheusController())
@pecan.expose('json')
def index(self):
return {
"version": "1.0",
"links": [
{"rel": "self",
"href": pecan.request.application_url}
] + [
{"rel": name,
"href": pecan.request.application_url + "/" + name}
for name in sorted(self.sub_controllers)
]
}
class VersionsController(object):
@staticmethod
@pecan.expose('json')
def index():
return {
"build": gnocchi.__version__,
"versions": [
{
"status": "CURRENT",
"links": [
{
"rel": "self",
"href": pecan.request.application_url + "/v1/"
}
],
"id": "v1.0",
"updated": "2015-03-19"
}
]
}
| apache-2.0 | -4,405,489,096,522,094,000 | 35.578223 | 94 | 0.557312 | false |
lakrahn-de/pwebs | pwebs/tests/test_main.py | 1 | 1478 | # Copyright (C) 2014-2014 Project
# License: http://www.gnu.org/licenses/gpl.html GPL version 2 or higher
import unittest
from pwebs import main
class TestMain(unittest.TestCase):
def test_threshold(self):
import argparse
self.assertSequenceEqual(main.threshold('i25,o30,i20,o40'),
((0, 25), (0, 30)))
with self.assertRaises(argparse.ArgumentTypeError):
main.threshold('iab,ocd')
self.assertSequenceEqual(main.threshold('i25:30,o30:35'),
((25, 30), (30, 35)))
with self.assertRaises(argparse.ArgumentTypeError):
main.threshold('i24:20,o30:40')
with self.assertRaises(argparse.ArgumentTypeError):
main.threshold('i24:30,o30:25')
self.assertSequenceEqual(main.threshold('i~:30,o30:35'),
(('~', 30), (30, 35)))
self.assertSequenceEqual(main.threshold('i20:30,o~:35'),
((20, 30), ('~', 35)))
self.assertSequenceEqual(main.threshold('in~:30,out~:35'),
(('~', 30), ('~', 35)))
self.assertSequenceEqual(main.threshold('i22'), ((0, 22), None))
def test_is_in_range(self):
self.assertTrue(main._is_in_range(20, '~', 20))
self.assertTrue(main._is_in_range(20, 15, 20))
self.assertTrue(main._is_in_range(0, '~', 10))
self.assertFalse(main._is_in_range(0, 10, 20))
| gpl-2.0 | 9,065,309,815,581,866,000 | 43.787879 | 72 | 0.558187 | false |
Royal-Society-of-New-Zealand/NZ-ORCID-Hub | orcid_api_v3/models/employment_v30.py | 1 | 13727 | # coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from orcid_api_v3.models.created_date_v30 import CreatedDateV30 # noqa: F401,E501
from orcid_api_v3.models.external_i_ds_v30 import ExternalIDsV30 # noqa: F401,E501
from orcid_api_v3.models.fuzzy_date_v30 import FuzzyDateV30 # noqa: F401,E501
from orcid_api_v3.models.last_modified_date_v30 import LastModifiedDateV30 # noqa: F401,E501
from orcid_api_v3.models.organization_v30 import OrganizationV30 # noqa: F401,E501
from orcid_api_v3.models.source_v30 import SourceV30 # noqa: F401,E501
from orcid_api_v3.models.url_v30 import UrlV30 # noqa: F401,E501
class EmploymentV30(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'created_date': 'CreatedDateV30',
'last_modified_date': 'LastModifiedDateV30',
'source': 'SourceV30',
'put_code': 'int',
'path': 'str',
'department_name': 'str',
'role_title': 'str',
'start_date': 'FuzzyDateV30',
'end_date': 'FuzzyDateV30',
'organization': 'OrganizationV30',
'url': 'UrlV30',
'external_ids': 'ExternalIDsV30',
'display_index': 'str',
'visibility': 'str'
}
attribute_map = {
'created_date': 'created-date',
'last_modified_date': 'last-modified-date',
'source': 'source',
'put_code': 'put-code',
'path': 'path',
'department_name': 'department-name',
'role_title': 'role-title',
'start_date': 'start-date',
'end_date': 'end-date',
'organization': 'organization',
'url': 'url',
'external_ids': 'external-ids',
'display_index': 'display-index',
'visibility': 'visibility'
}
def __init__(self, created_date=None, last_modified_date=None, source=None, put_code=None, path=None, department_name=None, role_title=None, start_date=None, end_date=None, organization=None, url=None, external_ids=None, display_index=None, visibility=None): # noqa: E501
"""EmploymentV30 - a model defined in Swagger""" # noqa: E501
self._created_date = None
self._last_modified_date = None
self._source = None
self._put_code = None
self._path = None
self._department_name = None
self._role_title = None
self._start_date = None
self._end_date = None
self._organization = None
self._url = None
self._external_ids = None
self._display_index = None
self._visibility = None
self.discriminator = None
if created_date is not None:
self.created_date = created_date
if last_modified_date is not None:
self.last_modified_date = last_modified_date
if source is not None:
self.source = source
if put_code is not None:
self.put_code = put_code
if path is not None:
self.path = path
if department_name is not None:
self.department_name = department_name
if role_title is not None:
self.role_title = role_title
if start_date is not None:
self.start_date = start_date
if end_date is not None:
self.end_date = end_date
if organization is not None:
self.organization = organization
if url is not None:
self.url = url
if external_ids is not None:
self.external_ids = external_ids
if display_index is not None:
self.display_index = display_index
if visibility is not None:
self.visibility = visibility
@property
def created_date(self):
"""Gets the created_date of this EmploymentV30. # noqa: E501
:return: The created_date of this EmploymentV30. # noqa: E501
:rtype: CreatedDateV30
"""
return self._created_date
@created_date.setter
def created_date(self, created_date):
"""Sets the created_date of this EmploymentV30.
:param created_date: The created_date of this EmploymentV30. # noqa: E501
:type: CreatedDateV30
"""
self._created_date = created_date
@property
def last_modified_date(self):
"""Gets the last_modified_date of this EmploymentV30. # noqa: E501
:return: The last_modified_date of this EmploymentV30. # noqa: E501
:rtype: LastModifiedDateV30
"""
return self._last_modified_date
@last_modified_date.setter
def last_modified_date(self, last_modified_date):
"""Sets the last_modified_date of this EmploymentV30.
:param last_modified_date: The last_modified_date of this EmploymentV30. # noqa: E501
:type: LastModifiedDateV30
"""
self._last_modified_date = last_modified_date
@property
def source(self):
"""Gets the source of this EmploymentV30. # noqa: E501
:return: The source of this EmploymentV30. # noqa: E501
:rtype: SourceV30
"""
return self._source
@source.setter
def source(self, source):
"""Sets the source of this EmploymentV30.
:param source: The source of this EmploymentV30. # noqa: E501
:type: SourceV30
"""
self._source = source
@property
def put_code(self):
"""Gets the put_code of this EmploymentV30. # noqa: E501
:return: The put_code of this EmploymentV30. # noqa: E501
:rtype: int
"""
return self._put_code
@put_code.setter
def put_code(self, put_code):
"""Sets the put_code of this EmploymentV30.
:param put_code: The put_code of this EmploymentV30. # noqa: E501
:type: int
"""
self._put_code = put_code
@property
def path(self):
"""Gets the path of this EmploymentV30. # noqa: E501
:return: The path of this EmploymentV30. # noqa: E501
:rtype: str
"""
return self._path
@path.setter
def path(self, path):
"""Sets the path of this EmploymentV30.
:param path: The path of this EmploymentV30. # noqa: E501
:type: str
"""
self._path = path
@property
def department_name(self):
"""Gets the department_name of this EmploymentV30. # noqa: E501
:return: The department_name of this EmploymentV30. # noqa: E501
:rtype: str
"""
return self._department_name
@department_name.setter
def department_name(self, department_name):
"""Sets the department_name of this EmploymentV30.
:param department_name: The department_name of this EmploymentV30. # noqa: E501
:type: str
"""
self._department_name = department_name
@property
def role_title(self):
"""Gets the role_title of this EmploymentV30. # noqa: E501
:return: The role_title of this EmploymentV30. # noqa: E501
:rtype: str
"""
return self._role_title
@role_title.setter
def role_title(self, role_title):
"""Sets the role_title of this EmploymentV30.
:param role_title: The role_title of this EmploymentV30. # noqa: E501
:type: str
"""
self._role_title = role_title
@property
def start_date(self):
"""Gets the start_date of this EmploymentV30. # noqa: E501
:return: The start_date of this EmploymentV30. # noqa: E501
:rtype: FuzzyDateV30
"""
return self._start_date
@start_date.setter
def start_date(self, start_date):
"""Sets the start_date of this EmploymentV30.
:param start_date: The start_date of this EmploymentV30. # noqa: E501
:type: FuzzyDateV30
"""
if start_date is None:
raise ValueError("Invalid value for `start_date`, must not be `None`") # noqa: E501
self._start_date = start_date
@property
def end_date(self):
"""Gets the end_date of this EmploymentV30. # noqa: E501
:return: The end_date of this EmploymentV30. # noqa: E501
:rtype: FuzzyDateV30
"""
return self._end_date
@end_date.setter
def end_date(self, end_date):
"""Sets the end_date of this EmploymentV30.
:param end_date: The end_date of this EmploymentV30. # noqa: E501
:type: FuzzyDateV30
"""
self._end_date = end_date
@property
def organization(self):
"""Gets the organization of this EmploymentV30. # noqa: E501
:return: The organization of this EmploymentV30. # noqa: E501
:rtype: OrganizationV30
"""
return self._organization
@organization.setter
def organization(self, organization):
"""Sets the organization of this EmploymentV30.
:param organization: The organization of this EmploymentV30. # noqa: E501
:type: OrganizationV30
"""
if organization is None:
raise ValueError("Invalid value for `organization`, must not be `None`") # noqa: E501
self._organization = organization
@property
def url(self):
"""Gets the url of this EmploymentV30. # noqa: E501
:return: The url of this EmploymentV30. # noqa: E501
:rtype: UrlV30
"""
return self._url
@url.setter
def url(self, url):
"""Sets the url of this EmploymentV30.
:param url: The url of this EmploymentV30. # noqa: E501
:type: UrlV30
"""
self._url = url
@property
def external_ids(self):
"""Gets the external_ids of this EmploymentV30. # noqa: E501
:return: The external_ids of this EmploymentV30. # noqa: E501
:rtype: ExternalIDsV30
"""
return self._external_ids
@external_ids.setter
def external_ids(self, external_ids):
"""Sets the external_ids of this EmploymentV30.
:param external_ids: The external_ids of this EmploymentV30. # noqa: E501
:type: ExternalIDsV30
"""
self._external_ids = external_ids
@property
def display_index(self):
"""Gets the display_index of this EmploymentV30. # noqa: E501
:return: The display_index of this EmploymentV30. # noqa: E501
:rtype: str
"""
return self._display_index
@display_index.setter
def display_index(self, display_index):
"""Sets the display_index of this EmploymentV30.
:param display_index: The display_index of this EmploymentV30. # noqa: E501
:type: str
"""
self._display_index = display_index
@property
def visibility(self):
"""Gets the visibility of this EmploymentV30. # noqa: E501
:return: The visibility of this EmploymentV30. # noqa: E501
:rtype: str
"""
return self._visibility
@visibility.setter
def visibility(self, visibility):
"""Sets the visibility of this EmploymentV30.
:param visibility: The visibility of this EmploymentV30. # noqa: E501
:type: str
"""
allowed_values = ["LIMITED", "REGISTERED_ONLY", "PUBLIC", "PRIVATE", "public", "private",
"limited", "registered-only"] # noqa: E501
if visibility not in allowed_values:
raise ValueError(
"Invalid value for `visibility` ({0}), must be one of {1}" # noqa: E501
.format(visibility, allowed_values)
)
self._visibility = visibility
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(EmploymentV30, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, EmploymentV30):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| mit | -2,466,509,074,726,212,000 | 28.394004 | 276 | 0.585197 | false |
Agicia/lpod-python | utils.py | 1 | 17649 | # -*- coding: UTF-8 -*-
#
# Copyright (c) 2009-2010 Ars Aperta, Itaapy, Pierlis, Talend.
#
# Authors: David Versmisse <[email protected]>
# Hervé Cauwelier <[email protected]>
# Romain Gauthier <[email protected]>
#
# This file is part of Lpod (see: http://lpod-project.org).
# Lpod is free software; you can redistribute it and/or modify it under
# the terms of either:
#
# a) the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option)
# any later version.
# Lpod is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Lpod. If not, see <http://www.gnu.org/licenses/>.
#
# b) the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Import from the Standard Library
from datetime import date, datetime, timedelta
from decimal import Decimal as dec
from os import getcwd
from os.path import splitdrive, join, sep
from re import search
from sys import _getframe, modules
from warnings import warn
# Import from lpod
from datatype import Boolean, Date, DateTime, Duration
CELL_TYPES = ('boolean', 'currency', 'date', 'float', 'percentage', 'string',
'time')
STYLE_FAMILIES = ('paragraph', 'text', 'section', 'table', 'table-column',
'table-row', 'table-cell', 'table-page', 'chart',
'default', 'drawing-page', 'graphic', 'presentation',
'control', 'ruby', 'list', 'number', 'page-layout',
'presentation-page-layout', 'font-face', 'master-page')
NOTE_CLASSES = ('footnote', 'endnote')
# This DPI is computed to have:
# 640 px (width of your wiki) <==> 17 cm (width of a normal ODT page)
DPI = 640 * dec('2.54') / 17
######################################################################
# Private API
######################################################################
def _get_abspath(local_path):
"""Returns the absolute path to the required file.
"""
mname = _getframe(1).f_globals.get('__name__')
if mname == '__main__' or mname == '__init__':
mpath = getcwd()
else:
module = modules[mname]
if hasattr(module, '__path__'):
mpath = module.__path__[0]
elif '.' in mname:
mpath = modules[mname[:mname.rfind('.')]].__path__[0]
else:
mpath = mname
drive, mpath = splitdrive(mpath)
mpath = drive + join(mpath, local_path)
# Make it working with Windows. Internally we use always the "/".
if sep == '\\':
mpath = mpath.replace(sep, '/')
return mpath
def _make_xpath_query(element_name, family=None, text_style=None,
draw_id=None, draw_name=None, draw_style=None, draw_text_style=None,
table_name=None, table_style=None, style_name=None,
display_name=None, note_class=None, text_id=None, text_name=None,
office_name=None, office_title=None, outline_level=None, level=None,
page_layout=None, master_page=None, parent_style=None,
presentation_class=None, position=None, **kw):
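    # Illustrative example (assumption, not from the original source):
    #   _make_xpath_query('descendant::text:p', text_style='Standard', position=0)
    #   returns u'(descendant::text:p[@text:style-name="Standard"])[1]'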
query = [element_name]
attributes = kw
if text_style:
attributes['text:style-name'] = text_style
if family:
attributes['style:family'] = family
if draw_id:
attributes['draw:id'] = draw_id
if draw_name:
attributes['draw:name'] = draw_name
if draw_style:
attributes['draw:style-name'] = draw_style
if draw_text_style:
attributes['draw:text-style-name'] = draw_text_style
if table_name:
attributes['table:name'] = table_name
if table_style:
attributes['table:style-name'] = table_style
if style_name:
attributes['style:name'] = style_name
if display_name:
attributes['style:display-name'] = display_name
if note_class:
attributes['text:note-class'] = note_class
if text_id:
attributes['text:id'] = text_id
if text_name:
attributes['text:name'] = text_name
if office_name:
attributes['office:name'] = office_name
if office_title:
attributes['office:title'] = office_title
if outline_level:
attributes['text:outline-level'] = outline_level
if level:
attributes['text:level'] = level
if page_layout:
attributes['style:page-layout-name'] = page_layout
if master_page:
attributes['draw:master-page-name'] = master_page
if parent_style:
attributes['style:parent-style-name'] = parent_style
if presentation_class:
attributes['presentation:class'] = presentation_class
# Sort attributes for reproducible test cases
for qname in sorted(attributes):
value = attributes[qname]
if value is True:
query.append(u'[@%s]' % qname)
else:
query.append(u'[@%s="%s"]' % (qname, unicode(value)))
query = ''.join(query)
if position is not None:
        # A position argument that mimics the behaviour of a Python list
if position >= 0:
position = str(position + 1)
elif position == -1:
position = 'last()'
else:
position = 'last()-%d' % (abs(position) - 1)
query = u'(%s)[%s]' % (query, position)
return query
# These are listed exhaustively for keeping count of
# implemented style types
family_mapping = {
'paragraph': ('style:style', 'paragraph'),
'text': ('style:style', 'text'),
'section': ('style:style', 'section'),
'table': ('style:style', 'table'),
'table-column': ('style:style', 'table-column'),
'table-row': ('style:style', 'table-row'),
'table-cell': ('style:style', 'table-cell'),
'drawing-page': ('style:style', 'drawing-page'),
'graphic': ('style:style', 'graphic'),
'presentation': ('style:style', 'presentation'),
# False families
'list': ('text:list-style', None),
'outline': ('text:outline-style', None),
'page-layout': ('style:page-layout', None),
'presentation-page-layout': ('style:presentation-page-layout', None),
'master-page': ('style:master-page', None),
'font-face': ('style:font-face', None),
'number': ('number:number-style', None),
'percentage': ('number:percentage-style', None),
'time': ('number:time-style', None),
'date': ('number:date-style', None),
}
def _get_style_tagname(family):
if family not in family_mapping:
raise ValueError, "unknown family: " + family
return family_mapping[family]
def _get_style_family(name):
for family, (tagname, famattr) in family_mapping.iteritems():
if tagname == name:
return family
return None
def _expand_properties(properties):
# This mapping is not exhaustive, it only contains cases where replacing
# '_' with '-' and adding the "fo:" prefix is not enough
mapping = {# text
'font': 'style:font-name',
'size': 'fo:font-size',
'weight': 'fo:font-weight',
'style': 'fo:font-style',
'underline': 'style:text-underline-style',
'display': 'text:display',
'outline': 'style:text-outline',
'family_generic': 'style:font-family-generic',
'pitch': 'style:font-pitch',
# compliance with office suites
'font_style_name': 'style:font-style-name',
# paragraph
'align': 'fo:text-align',
'align-last': 'fo:text-align-last',
'indent': 'fo:text-indent',
'together': 'fo:keep-together',
# TODO 'page-break-before': 'fo:page-break-before',
# TODO 'page-break-after': 'fo:page-break-after',
'shadow': 'fo:text-shadow',
# Graphic
'stroke': 'draw:stroke',
'fill_color': 'draw:fill-color',
'fill_image_width': 'draw:fill-image-width',
'fill_image_height': 'draw:fill-image-height',
'textarea_vertical_align': 'draw:textarea-vertical-align',
'line_distance': 'draw:line-distance',
'guide_overhang': 'draw:guide-overhang',
'guide_distance': 'draw:guide-distance'
}
def map_key(key):
key = mapping.get(key, key).replace('_', '-')
if ":" not in key:
key = "fo:" + key
return key
if type(properties) is dict:
expanded = {}
for key, value in properties.iteritems():
key = map_key(key)
expanded[key] = value
elif type(properties) is list:
expanded = []
for key in properties:
key = map_key(key)
expanded.append(key)
return expanded
def _merge_dicts(d, *args, **kw):
"""Merge two or more dictionaries into a new dictionary object.
"""
new_d = d.copy()
for dic in args:
new_d.update(dic)
new_d.update(kw)
return new_d
#
# Non-public yet useful helpers
#
def _get_elements(context, element_name, content=None, url=None,
svg_title=None, svg_desc=None, dc_creator=None, dc_date=None, **kw):
query = _make_xpath_query(element_name, **kw)
elements = context.get_elements(query)
# Filter the elements with the regex (TODO use XPath)
if content is not None:
elements = [element for element in elements if element.match(content)]
if url is not None:
filtered = []
for element in elements:
url_attr = element.get_attribute('xlink:href')
if search(url, url_attr) is not None:
filtered.append(element)
elements = filtered
if dc_date is not None:
# XXX Date or DateTime?
dc_date = DateTime.encode(dc_date)
for variable, childname in [
(svg_title, 'svg:title'),
(svg_desc, 'svg:desc'),
(dc_creator, 'descendant::dc:creator'),
(dc_date, 'descendant::dc:date')]:
if not variable:
continue
filtered = []
for element in elements:
child = element.get_element(childname)
if child and child.match(variable):
filtered.append(element)
elements = filtered
return elements
def _get_element(context, element_name, position, **kw):
    # TODO Transmit position so as not to load the whole list
result = _get_elements(context, element_name, **kw)
try:
return result[position]
except IndexError:
return None
def _set_value_and_type(element, value=None, value_type=None, text=None,
currency=None):
# Remove possible previous value and type
for name in ('office:value-type', 'office:boolean-value',
'office:value', 'office:date-value', 'office:string-value',
'office:time-value', 'table:formula'):
try:
element.del_attribute(name)
except KeyError:
pass
if type(value) is bool:
if value_type is None:
value_type = 'boolean'
if text is None:
text = u'true' if value else u'false'
value = Boolean.encode(value)
elif isinstance(value, (int, float, long, dec)):
if value_type is None:
value_type = 'float'
if text is None:
text = unicode(value)
value = str(value)
elif type(value) is date:
if value_type is None:
value_type = 'date'
if text is None:
text = unicode(Date.encode(value))
value = Date.encode(value)
elif type(value) is datetime:
if value_type is None:
value_type = 'date'
if text is None:
text = unicode(DateTime.encode(value))
value = DateTime.encode(value)
elif type(value) is str:
if value_type is None:
value_type = 'string'
if text is None:
text = unicode(value)
elif type(value) is unicode:
if value_type is None:
value_type = 'string'
if text is None:
text = value
elif type(value) is timedelta:
if value_type is None:
value_type = 'time'
if text is None:
text = unicode(Duration.encode(value))
value = Duration.encode(value)
elif value is not None:
raise TypeError, 'type "%s" is unknown' % type(value)
if value_type is not None:
element.set_attribute('office:value-type', value_type)
if value_type == 'boolean':
element.set_attribute('office:boolean-value', value)
elif value_type == 'currency':
element.set_attribute('office:value', value)
element.set_attribute('office:currency', currency)
elif value_type == 'date':
element.set_attribute('office:date-value', value)
elif value_type in ('float', 'percentage'):
element.set_attribute('office:value', value)
elif value_type == 'string':
element.set_attribute('office:string-value', value)
elif value_type == 'time':
element.set_attribute('office:time-value', value)
return text
######################################################################
# Public API
######################################################################
def get_value(element, value_type=None, try_get_text=True):
"""Only for "with office:value-type" elements
"""
if value_type is None:
value_type = element.get_attribute('office:value-type')
if value_type == 'boolean':
value = element.get_attribute('office:boolean-value')
return Boolean.decode(value)
elif value_type in ('float', 'percentage', 'currency'):
value = dec(element.get_attribute('office:value'))
# Return 3 instead of 3.0 if possible
if int(value) == value:
return int(value)
return value
elif value_type == 'date':
value = element.get_attribute('office:date-value')
if 'T' in value:
return DateTime.decode(value)
else:
return Date.decode(value)
elif value_type == 'string':
value = element.get_attribute('office:string-value')
if value is not None:
return unicode(value)
if try_get_text:
value = []
for para in element.get_elements('text:p'):
value.append(para.get_text(recursive=True))
if value:
return u"\n".join(value)
return None
elif value_type == 'time':
value = element.get_attribute('office:time-value')
return Duration.decode(value)
elif value_type is None:
return None
raise ValueError, 'unexpected value type "%s"' % value_type
def set_value(element, value):
"""Only for "with office:value-type" elements
"""
tag = element.get_tag()
# A table:cell ?
if tag == 'table:table-cell':
element.clear()
text = _set_value_and_type(element, value=value)
element.set_text_content(text)
return
# A text:variable-set ?
if tag == 'text:variable-set':
name = element.get_attribute('text:name')
display = element.get_attribute('text:display')
element.clear()
text = _set_value_and_type(element, value=value)
element.set_attribute('text:name', name)
if display is not None:
element.set_attribute('text:display', display)
element.set_text(text)
return
# A text:user-field-decl ?
if tag == 'text:user-field-decl':
name = element.get_attribute('text:name')
element.clear()
_set_value_and_type(element, value=value)
element.set_attribute('text:name', name)
return
# Else => error
raise ValueError, 'set_value: unexpected element "%s"' % tag
def convert_unicode(text):
"""Mostly used to compare lxml serialization to what is expected.
"""
result = []
for c in text:
code = ord(c)
if code >= 128:
result.append('&#%d;' % code)
else:
result.append(c)
return ''.join(result)
def oooc_to_ooow(formula):
"""Convert (proprietary) formula from calc format to writer format.
Arguments:
formula -- unicode
Return: unicode
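    Example (illustrative, not from the original docstring):
        oooc_to_ooow(u'oooc:=SUM([.A1:.A3])') returns u'ooow:sum <A1:A3>'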
"""
prefix, formula = formula.split(":=", 1)
assert "oooc" in prefix
# Convert cell addresses
formula = formula.replace("[.", "<").replace(":.", ":").replace("]", ">")
# Convert functions
formula = formula.replace("SUM(", "sum ").replace(")", "")
return "ooow:" + formula
def obsolete(old_name, new_func, *args, **kw):
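    """Wrap new_func so that calling it through old_name emits a
    DeprecationWarning pointing at the new API, then forwards the call.
    Hypothetical usage (names made up): old_getter = obsolete('old_getter', new_getter)
    """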
def decorate(*dec_args, **dec_kw):
new_name = new_func.__name__
if args:
new_name += '(' + ', '.join(repr(x) for x in args) + ')'
message = '"%s" is obsolete, call "%s" instead' % (old_name,
new_name)
warn(message, category=DeprecationWarning)
return new_func(*(dec_args + args), **dec_kw)
return decorate
def isiterable(obj):
if isinstance(obj, basestring):
return False
try:
iter(obj)
except TypeError:
return False
return True
| apache-2.0 | -884,333,073,897,304,000 | 32.808429 | 78 | 0.576609 | false |
L33thium/xu4fanctl | xu4fanctl-1/sbin/fanctl.py | 1 | 2490 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
##########
## Fan control for odroid xu4
## When the temperature hits hiTmp, manage fan speed until it drops to loTmp, then stop.
## Steps make the fan prefer slowing down over speeding up, for silence.
## Recommended governor: conservative
############################
import os, sys, signal, re, time, collections
# settings
hiTmp = 90
loTmp = 50
stepUp = 20
stepDown = 5
minSpd = 22 # in percent
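# note (assumption, not in the original): the 58 pwm floor applied in
# fan.setSpd() is roughly minSpd percent of the 0-255 duty range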
# files location
if os.path.isdir("/sys/devices/odroid_fan.14"):
fanctl = "/sys/devices/odroid_fan.14"
elif os.path.isdir("/sys/devices/odroid_fan.13"):
fanctl = "/sys/devices/odroid_fan.13"
fTmp = "/sys/devices/10060000.tmu/temp"
fMode = fanctl+"/fan_mode"
fSpd = fanctl+"/pwm_duty"
class fan():
def __init__(self):
self.tmpLst = collections.deque(maxlen=300)
def setManual(self):
with open(fMode, "w") as f:
f.write("0")
def setAuto(self):
with open(fMode, "w") as f:
f.write("1")
def getTmp(self):
with open(fTmp, "r") as f:
t = f.read()
tmps = re.findall("[0-9]{5}", t)
tmps = map(int, tmps)
#temp = max(tmps) / 1000
temp = sum(tmps) / len(tmps) / 1000
self.tmpLst.append(temp)
tmpAvg = float(sum(self.tmpLst) / len(self.tmpLst))
return [temp, tmpAvg]
def cool(self):
delta = hiTmp - loTmp + 20
tmps = self.getTmp()
temp = tmps[0]
tmpAvg = tmps[1]
time.sleep(1)
while tmpAvg > loTmp:
tmps = self.getTmp()
temp = tmps[0]
tmpAvg = tmps[1]
diff = tmpAvg - loTmp
percent = int(float(diff) / float(delta) * 100)
if temp >= hiTmp:
self.setSpd(100)
else:
self.setSpd(percent)
time.sleep(1)
def setSpd(self, percent=0):
if percent > 100:
percent = 100
pwm = int(float(percent) * 255 / 100)
if pwm < 58 and pwm > 1:
pwm = 58
if pwm < 1: pwm = 1
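        # Resulting mapping (illustrative): setSpd(100) -> pwm 255, setSpd(50) -> 127,
        # setSpd(10) -> 58 (minimum duty cycle that keeps the fan spinning), setSpd(0) -> 1.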
with open(fSpd, "r") as f:
curPwm = int(f.read())
if not pwm == curPwm:
with open(fSpd, "w") as f:
f.write(str(pwm))
class GracefulKiller:
kill_now = False
def __init__(self):
signal.signal(signal.SIGINT, self.exit_gracefully)
signal.signal(signal.SIGTERM, self.exit_gracefully)
def exit_gracefully(self,signum, frame):
self.kill_now = True
def main():
killer = GracefulKiller()
done = False
fan.setManual()
fan.setSpd(0)
while not done:
if killer.kill_now:
fan.setAuto()
break
if fan.getTmp()[0] > hiTmp:
fan.cool()
time.sleep(1)
if __name__ == "__main__":
fan = fan()
try:
main()
except Exception as error:
print('caught this error: ' + repr(error))
fan.setAuto()
| gpl-3.0 | 5,299,237,560,384,031,000 | 21.232143 | 70 | 0.626908 | false |
openstack/heat | heat/tests/openstack/heat/test_random_string.py | 1 | 12486 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from unittest import mock
from testtools import matchers
from heat.common import exception
from heat.common import template_format
from heat.engine import node_data
from heat.engine import stack as parser
from heat.engine import template
from heat.tests import common
from heat.tests import utils
class TestRandomString(common.HeatTestCase):
template_random_string = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
secret1:
Type: OS::Heat::RandomString
secret2:
Type: OS::Heat::RandomString
Properties:
length: 10
secret3:
Type: OS::Heat::RandomString
Properties:
length: 32
character_classes:
- class: digits
min: 1
- class: uppercase
min: 1
- class: lowercase
min: 20
character_sequences:
- sequence: (),[]{}
min: 1
- sequence: $_
min: 2
- sequence: '@'
min: 5
secret4:
Type: OS::Heat::RandomString
Properties:
length: 25
character_classes:
- class: digits
min: 1
- class: uppercase
min: 1
- class: lowercase
min: 20
secret5:
Type: OS::Heat::RandomString
Properties:
length: 10
character_sequences:
- sequence: (),[]{}
min: 1
- sequence: $_
min: 2
- sequence: '@'
min: 5
'''
def create_stack(self, templ):
self.stack = self.parse_stack(template_format.parse(templ))
self.assertIsNone(self.stack.create())
return self.stack
def parse_stack(self, t):
stack_name = 'test_stack'
tmpl = template.Template(t)
stack = parser.Stack(utils.dummy_context(), stack_name, tmpl)
stack.validate()
stack.store()
return stack
def assert_min(self, pattern, string, minimum):
self.assertGreaterEqual(len(re.findall(pattern, string)), minimum)
def test_random_string(self):
stack = self.create_stack(self.template_random_string)
secret1 = stack['secret1']
random_string = secret1.FnGetAtt('value')
self.assert_min('[a-zA-Z0-9]', random_string, 32)
self.assertRaises(exception.InvalidTemplateAttribute,
secret1.FnGetAtt, 'foo')
self.assertEqual(secret1.FnGetRefId(), random_string)
secret2 = stack['secret2']
random_string = secret2.FnGetAtt('value')
self.assert_min('[a-zA-Z0-9]', random_string, 10)
self.assertEqual(secret2.FnGetRefId(), random_string)
secret3 = stack['secret3']
random_string = secret3.FnGetAtt('value')
self.assertEqual(32, len(random_string))
self.assert_min('[0-9]', random_string, 1)
self.assert_min('[A-Z]', random_string, 1)
self.assert_min('[a-z]', random_string, 20)
self.assert_min(r'[(),\[\]{}]', random_string, 1)
self.assert_min('[$_]', random_string, 2)
self.assert_min('@', random_string, 5)
self.assertEqual(secret3.FnGetRefId(), random_string)
secret4 = stack['secret4']
random_string = secret4.FnGetAtt('value')
self.assertEqual(25, len(random_string))
self.assert_min('[0-9]', random_string, 1)
self.assert_min('[A-Z]', random_string, 1)
self.assert_min('[a-z]', random_string, 20)
self.assertEqual(secret4.FnGetRefId(), random_string)
secret5 = stack['secret5']
random_string = secret5.FnGetAtt('value')
self.assertEqual(10, len(random_string))
self.assert_min(r'[(),\[\]{}]', random_string, 1)
self.assert_min('[$_]', random_string, 2)
self.assert_min('@', random_string, 5)
self.assertEqual(secret5.FnGetRefId(), random_string)
# Prove the name is returned before create sets the ID
secret5.resource_id = None
self.assertEqual('secret5', secret5.FnGetRefId())
def test_hidden_sequence_property(self):
hidden_prop_templ = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
secret:
Type: OS::Heat::RandomString
Properties:
length: 100
sequence: octdigits
'''
stack = self.create_stack(hidden_prop_templ)
secret = stack['secret']
random_string = secret.FnGetAtt('value')
self.assert_min('[0-7]', random_string, 100)
self.assertEqual(secret.FnGetRefId(), random_string)
        # check that the property was translated according to the TranslationRule
self.assertIsNone(secret.properties['sequence'])
expected = [{'class': u'octdigits', 'min': 1}]
self.assertEqual(expected, secret.properties['character_classes'])
def test_random_string_refid_convergence_cache_data(self):
t = template_format.parse(self.template_random_string)
cache_data = {'secret1': node_data.NodeData.from_dict({
'uuid': mock.ANY,
'id': mock.ANY,
'action': 'CREATE',
'status': 'COMPLETE',
'reference_id': 'xyz'
})}
stack = utils.parse_stack(t, cache_data=cache_data)
rsrc = stack.defn['secret1']
self.assertEqual('xyz', rsrc.FnGetRefId())
def test_invalid_length(self):
template_random_string = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
secret:
Type: OS::Heat::RandomString
Properties:
length: 5
character_classes:
- class: digits
min: 5
character_sequences:
- sequence: (),[]{}
min: 1
'''
exc = self.assertRaises(exception.StackValidationFailed,
self.create_stack, template_random_string)
self.assertEqual("Length property cannot be smaller than combined "
"character class and character sequence minimums",
str(exc))
def test_max_length(self):
template_random_string = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
secret:
Type: OS::Heat::RandomString
Properties:
length: 512
'''
stack = self.create_stack(template_random_string)
secret = stack['secret']
random_string = secret.FnGetAtt('value')
self.assertEqual(512, len(random_string))
self.assertEqual(secret.FnGetRefId(), random_string)
def test_exceeds_max_length(self):
template_random_string = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
secret:
Type: OS::Heat::RandomString
Properties:
length: 513
'''
exc = self.assertRaises(exception.StackValidationFailed,
self.create_stack, template_random_string)
self.assertIn('513 is out of range (min: 1, max: 512)',
str(exc))
class TestGenerateRandomString(common.HeatTestCase):
scenarios = [
('lettersdigits', dict(
length=1, seq='lettersdigits', pattern='[a-zA-Z0-9]')),
('letters', dict(
length=10, seq='letters', pattern='[a-zA-Z]')),
('lowercase', dict(
length=100, seq='lowercase', pattern='[a-z]')),
('uppercase', dict(
length=50, seq='uppercase', pattern='[A-Z]')),
('digits', dict(
length=512, seq='digits', pattern='[0-9]')),
('hexdigits', dict(
length=16, seq='hexdigits', pattern='[A-F0-9]')),
('octdigits', dict(
length=32, seq='octdigits', pattern='[0-7]'))
]
template_rs = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
secret:
Type: OS::Heat::RandomString
'''
def parse_stack(self, t):
stack_name = 'test_stack'
tmpl = template.Template(t)
stack = parser.Stack(utils.dummy_context(), stack_name, tmpl)
stack.validate()
stack.store()
return stack
# test was saved to test backward compatibility with old behavior
def test_generate_random_string_backward_compatible(self):
stack = self.parse_stack(template_format.parse(self.template_rs))
secret = stack['secret']
char_classes = secret.properties['character_classes']
for char_cl in char_classes:
char_cl['class'] = self.seq
# run each test multiple times to confirm random generator
# doesn't generate a matching pattern by chance
for i in range(1, 32):
r = secret._generate_random_string([], char_classes, self.length)
self.assertThat(r, matchers.HasLength(self.length))
regex = '%s{%s}' % (self.pattern, self.length)
self.assertThat(r, matchers.MatchesRegex(regex))
class TestGenerateRandomStringDistribution(common.HeatTestCase):
def run_test(self, tmpl, iterations=5):
stack = utils.parse_stack(template_format.parse(tmpl))
secret = stack['secret']
secret.data_set = mock.Mock()
for i in range(iterations):
secret.handle_create()
return [call[1][1] for call in secret.data_set.mock_calls]
def char_counts(self, random_strings, char):
return [rs.count(char) for rs in random_strings]
def check_stats(self, char_counts, expected_mean, allowed_variance,
expected_minimum=0):
mean = float(sum(char_counts)) / len(char_counts)
self.assertLess(mean, expected_mean + allowed_variance)
self.assertGreater(mean, max(0, expected_mean - allowed_variance))
if expected_minimum:
self.assertGreaterEqual(min(char_counts), expected_minimum)
def test_class_uniformity(self):
template_rs = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
secret:
Type: OS::Heat::RandomString
Properties:
length: 66
character_classes:
- class: lettersdigits
character_sequences:
- sequence: "*$"
'''
results = self.run_test(template_rs, 10)
for char in '$*':
self.check_stats(self.char_counts(results, char), 1.5, 2)
def test_repeated_sequence(self):
template_rs = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
secret:
Type: OS::Heat::RandomString
Properties:
length: 40
character_classes: []
character_sequences:
- sequence: "**********$*****************************"
'''
results = self.run_test(template_rs)
for char in '$*':
self.check_stats(self.char_counts(results, char), 20, 6)
def test_overlapping_classes(self):
template_rs = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
secret:
Type: OS::Heat::RandomString
Properties:
length: 624
character_classes:
- class: lettersdigits
- class: digits
- class: octdigits
- class: hexdigits
'''
results = self.run_test(template_rs, 20)
self.check_stats(self.char_counts(results, '0'), 10.3, 3)
def test_overlapping_sequences(self):
template_rs = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
secret:
Type: OS::Heat::RandomString
Properties:
length: 60
character_classes: []
character_sequences:
- sequence: "01"
- sequence: "02"
- sequence: "03"
- sequence: "04"
- sequence: "05"
- sequence: "06"
- sequence: "07"
- sequence: "08"
- sequence: "09"
'''
results = self.run_test(template_rs)
self.check_stats(self.char_counts(results, '0'), 10, 5)
def test_overlapping_class_sequence(self):
template_rs = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
secret:
Type: OS::Heat::RandomString
Properties:
length: 402
character_classes:
- class: octdigits
character_sequences:
- sequence: "0"
'''
results = self.run_test(template_rs, 10)
self.check_stats(self.char_counts(results, '0'), 51.125, 8, 1)
| apache-2.0 | -5,297,821,706,911,309,000 | 30.933504 | 78 | 0.603476 | false |
ninowalker/mapnik-trunk | tests/python_tests/save_map_test.py | 1 | 1782 | #!/usr/bin/env python
from nose.tools import *
from utilities import Todo
from utilities import execution_path
import tempfile
import os, sys, glob, mapnik2
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test():
# TODO: Write a better test
# 1. Construct map in memory
# 2. Save map as XML
# 3. Load map to a second object
# 4. Compare both map objects
map = mapnik2.Map(256, 256)
raise Todo("map comparison is currently broken dou to lacking relative paths support (#324,#340")
def compare_map(in_map):
mapnik2.load_map(map, in_map)
(handle, test_map) = tempfile.mkstemp(suffix='.xml', prefix='mapnik-temp-map1-')
os.close(handle)
(handle, test_map2) = tempfile.mkstemp(suffix='.xml', prefix='mapnik-temp-map2-')
os.close(handle)
if os.path.exists(test_map):
os.remove(test_map)
mapnik2.save_map(map, test_map)
new_map = mapnik2.Map(256, 256)
mapnik2.load_map(new_map, test_map)
open(test_map2,'w').write(mapnik2.save_map_to_string(new_map))
diff = ' diff %s %s' % (os.path.abspath(test_map),os.path.abspath(test_map2))
try:
eq_(open(test_map).read(),open(test_map2).read())
except AssertionError, e:
raise AssertionError('serialized map "%s" not the same after being reloaded, \ncompare with command:\n\n$%s' % (in_map,diff))
if os.path.exists(test_map):
os.remove(test_map)
else:
# Fail, the map wasn't written
return False
for m in glob.glob("../data/good_maps/*.xml"):
compare_map(m)
| lgpl-2.1 | 4,557,533,603,966,494,000 | 30.263158 | 137 | 0.606061 | false |
peterloron/archive | archive.py | 1 | 5459 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Migrates files older than specified date from source to destination.
"""
import argparse
import os
import logging
import datetime
import time
import shutil
import random
from threading import Thread
from Queue import Queue
parser = argparse.ArgumentParser(description='Moves old files to a new location.')
parser.add_argument('-a', action="store", dest="age", default=90, type=int)
parser.add_argument('-s', action="store", dest="source_root")
parser.add_argument('-d', action="store", dest="dest_root")
parser.add_argument('-n', action="store_true", dest="no_op", default=False)
parser.add_argument('-t', action="store", dest="num_worker_threads", default=5, type=int)
parser.add_argument('--debug', action="store_true", dest="debug_mode", default=False)
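# Example invocation (paths and values are illustrative only):
#   ./archive.py -s /data/projects -d /mnt/archive -a 180 -t 8 -n
# -n is a dry run: actions are logged but nothing is moved or removed.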
shouldIKeepGoing = True
random.seed()
LOG_FILENAME = './archive.log'
logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
# Thread worker. Handles copying the file
def fileHandler(thread_id, args, q):
global shouldIKeepGoing
while shouldIKeepGoing:
(source_target, dest_target) = q.get()
if not args.no_op:
try:
shutil.move(source_target, dest_target)
except Exception, err:
logging.error("Failure while moving file -- %s" % err)
exit()
logging.info("[%d]Moved: %s to %s" % (thread_id, source_target, dest_target))
if args.debug_mode:
print("[%d]Moved: %s to %s" % (thread_id, source_target, dest_target))
q.task_done()
def main():
global shouldIKeepGoing
args = parser.parse_args()
count = 0
AGE_INTERVAL = datetime.timedelta(days=args.age)
NOW = datetime.datetime.now()
file_queue = Queue()
logging.info("***************************************************************")
logging.info("Starting archive run at %s" % time.strftime("%c"))
logging.info("Source: %s" % args.source_root)
logging.info("Dest: %s" % args.dest_root)
logging.info("Age cutoff: %d" % args.age)
# Go through the files in the directory and see if any need to be moved
try:
# fire up some worker threads
for i in range(args.num_worker_threads):
worker = Thread(target=fileHandler, args=(i, args, file_queue,))
worker.setDaemon(True)
worker.start()
for root, dirs, files in os.walk(str(args.source_root), topdown=False):
logging.info("Checking %s..." % root)
for thefile in files:
count = count + 1
source_target = os.path.join(root, thefile)
if os.path.islink(source_target):
break
stats = os.stat(source_target)
mod_date = datetime.datetime.fromtimestamp(stats.st_mtime)
                acc_date = datetime.datetime.fromtimestamp(stats.st_atime)
if args.debug_mode:
print("Source: %s" % source_target)
print("ATIME: %s" % acc_date.strftime("%c"))
print("MTIME: %s" % mod_date.strftime("%c"))
if (NOW - acc_date) > AGE_INTERVAL:
dest_target_path = os.path.join(args.dest_root, os.path.relpath(root, args.source_root))
dest_target = os.path.join(dest_target_path, thefile)
# create the directory if needed
if not os.path.exists(dest_target_path):
if not args.no_op:
os.makedirs(dest_target_path)
logging.info("Created dir: %s" % (dest_target_path))
if args.debug_mode:
print("Created dir: %s" % (dest_target_path))
# add to queue
file_queue.put((source_target, dest_target))
# wait for threads to be done processing the queue items
while not file_queue.empty():
time.sleep(0.1)
# Go through the directories and remove them if we can
for thedir in dirs:
target = os.path.join(root, thedir)
try:
if args.debug_mode:
print("Removing directory: %s" % target)
if not args.no_op:
os.rmdir(target)
logging.info("Removed directory: %s" % target)
except OSError, err:
if args.debug_mode:
print("RMDIR Failed: %s" % err)
continue
# finally, check the root source directory to see if it is now empty and can be removed.
try:
if args.debug_mode:
print("Removing directory: %s" % root)
if not args.no_op:
os.rmdir(root)
logging.info("Removed directory: %s" % root)
except OSError, err:
if args.debug_mode:
print("RMDIR Failed: %s" % err)
logging.info("Processed %d files in %d seconds." % (count, (datetime.datetime.now() - NOW).seconds))
logging.info("Done.")
except KeyboardInterrupt:
shouldIKeepGoing = False
raise
except Exception, err:
logging.error("Failure -- %s" % err)
exit()
# Start program
if __name__ == "__main__":
main()
| mit | -2,543,240,342,558,028,300 | 35.885135 | 108 | 0.549551 | false |
tanium/pytan | EXAMPLES/PYTAN_API/ask_manual_question_complex_query1.py | 1 | 5859 | #!/usr/bin/env python
"""
Ask a manual question using human strings by referencing the name of a two sensors sensor.
Supply 3 parameters for the second sensor, one of which is not a valid parameter (and will be ignored).
Supply one option to the second sensor.
Supply two question filters that limit the rows returned in the result to computers that match the sensor Operating System that contains Windows and does not contain Windows.
Supply two question options that 'or' the two question filters and ignore the case of any values while matching the question filters.
"""
# import the basic python packages we need
import os
import sys
import tempfile
import pprint
import traceback
# disable python from generating a .pyc file
sys.dont_write_bytecode = True
# change me to the path of pytan if this script is not running from EXAMPLES/PYTAN_API
pytan_loc = "~/gh/pytan"
pytan_static_path = os.path.join(os.path.expanduser(pytan_loc), 'lib')
# Determine our script name, script dir
my_file = os.path.abspath(sys.argv[0])
my_dir = os.path.dirname(my_file)
# try to automatically determine the pytan lib directory by assuming it is in '../../lib/'
parent_dir = os.path.dirname(my_dir)
pytan_root_dir = os.path.dirname(parent_dir)
lib_dir = os.path.join(pytan_root_dir, 'lib')
# add pytan_loc and lib_dir to the PYTHONPATH variable
path_adds = [lib_dir, pytan_static_path]
[sys.path.append(aa) for aa in path_adds if aa not in sys.path]
# import pytan
import pytan
# create a dictionary of arguments for the pytan handler
handler_args = {}
# establish our connection info for the Tanium Server
handler_args['username'] = "Administrator"
handler_args['password'] = "Tanium2015!"
handler_args['host'] = "10.0.1.240"
handler_args['port'] = "443" # optional
handler_args['trusted_certs'] = "certs"
# optional, level 0 is no output except warnings/errors
# level 1 through 12 are more and more verbose
handler_args['loglevel'] = 1
# optional, use a debug format for the logging output (uses two lines per log entry)
handler_args['debugformat'] = False
# optional, this saves all response objects to handler.session.ALL_REQUESTS_RESPONSES
# very useful for capturing the full exchange of XML requests and responses
handler_args['record_all_requests'] = True
# instantiate a handler using all of the arguments in the handler_args dictionary
print "...CALLING: pytan.handler() with args: {}".format(handler_args)
handler = pytan.Handler(**handler_args)
# print out the handler string
print "...OUTPUT: handler string: {}".format(handler)
# setup the arguments for the handler() class
kwargs = {}
kwargs["question_filters"] = [u'Operating System, that contains:Windows',
u'Operating System, that does not contain:Windows']
kwargs["sensors"] = [u'Computer Name',
u'Folder Contents{folderPath=C:\\Program Files, invalidparam=test}, that regex match:.*Shared.*, opt:max_data_age:3600']
kwargs["question_options"] = [u'ignore_case', u'or']
kwargs["qtype"] = u'manual'
print "...CALLING: handler.ask with args: {}".format(kwargs)
response = handler.ask(**kwargs)
print "...OUTPUT: Type of response: ", type(response)
print "...OUTPUT: Pretty print of response:"
print pprint.pformat(response)
print "...OUTPUT: Equivalent Question if it were to be asked in the Tanium Console: "
print response['question_object'].query_text
if response['question_results']:
# call the export_obj() method to convert response to CSV and store it in out
export_kwargs = {}
export_kwargs['obj'] = response['question_results']
export_kwargs['export_format'] = 'csv'
print "...CALLING: handler.export_obj() with args {}".format(export_kwargs)
out = handler.export_obj(**export_kwargs)
# trim the output if it is more than 15 lines long
if len(out.splitlines()) > 15:
out = out.splitlines()[0:15]
out.append('..trimmed for brevity..')
out = '\n'.join(out)
print "...OUTPUT: CSV Results of response: "
print out
'''STDOUT from running this:
...CALLING: pytan.handler() with args: {'username': 'Administrator', 'record_all_requests': True, 'loglevel': 1, 'debugformat': False, 'host': '10.0.1.240', 'password': 'Tanium2015!', 'port': '443'}
...OUTPUT: handler string: PyTan v2.1.4 Handler for Session to 10.0.1.240:443, Authenticated: True, Platform Version: 6.5.314.4301
...CALLING: handler.ask with args: {'question_filters': [u'Operating System, that contains:Windows', u'Operating System, that does not contain:Windows'], 'sensors': [u'Computer Name', u'Folder Contents{folderPath=C:\\Program Files, invalidparam=test}, that regex match:.*Shared.*, opt:max_data_age:3600'], 'question_options': [u'ignore_case', u'or'], 'qtype': u'manual'}
2015-09-14 20:14:17,578 INFO pytan.pollers.QuestionPoller: ID 809: Reached Threshold of 99% (3 of 3)
...OUTPUT: Type of response: <type 'dict'>
...OUTPUT: Pretty print of response:
{'poller_object': <pytan.pollers.QuestionPoller object at 0x11b347e90>,
'poller_success': True,
'question_object': <taniumpy.object_types.question.Question object at 0x11b34d590>,
'question_results': <taniumpy.object_types.result_set.ResultSet object at 0x11b34ddd0>}
...OUTPUT: Equivalent Question if it were to be asked in the Tanium Console:
Get Computer Name and Folder Contents[C:\Program Files, test] containing "Shared" from all machines with ( Operating System containing "Windows" or any Operating System not containing "Windows" )
...CALLING: handler.export_obj() with args {'export_format': 'csv', 'obj': <taniumpy.object_types.result_set.ResultSet object at 0x11b34ddd0>}
...OUTPUT: CSV Results of response:
Computer Name,"Folder Contents[C:\Program Files, test]"
c1u14-virtual-machine.(none),[current result unavailable]
WIN-6U71ED4M23D,[current result unavailable]
TPT1.pytanlab.com,[current result unavailable]
'''
'''STDERR from running this:
'''
| mit | 7,895,586,940,574,294,000 | 42.69403 | 370 | 0.731866 | false |
Ricyteach/parmatter | src/parmatter/group/meta.py | 1 | 7243 | from ..utilities import args_kwargs_from_args
from collections import OrderedDict as od, namedtuple as nt
import parse
class SpecialAttrsMeta(type):
'''A base metaclass that removes special attribute names from the namespace
prior to passing them for initialization.
Special attributes are designated by the attribute "_special".
Any _special attributes not defined at runtime are ignored.'''
def __new__(mcls, name, bases, mapping):
cls = super().__new__(mcls,name,bases,mapping)
sentinel = object()
reserved_mapping = {n:mapping.pop(n, sentinel) for n in mcls._special}
for k,v in ((k,v) for k,v in reserved_mapping.items() if v is not sentinel):
setattr(cls, k, v)
return cls
@classmethod
def special_check(meta, **kwargs):
'''Check to make sure there are no conflicts with special attribute names.'''
try:
special = meta._special
# check for reserved names
for n in special:
try:
raise ValueError('The attribute name "{}" is reserved.'.format(kwargs[n]))
except KeyError:
continue
# no special names
except AttributeError:
pass
class FormatGroupMeta(SpecialAttrsMeta):
'''A metaclass that produces classes defining lines composed of
formatting members with optional line prefixes and separators between members.
Formatter type must provide a static args_parse() method with a signature of:
args, kwargs = FormatterType.args_parse(*args)
f = FormatterType(*args, **kwargs)
Usage:
class LineDef(metaclass=FormatGroupMeta):
_formatter_type = CustomStaticFormatter
_prefix = 'my prefix'
_sep = ', '
a = '{: 5>s}', 'foo'
b = '{: 10>f}', 0
c = '{}'
'''
_special = '_prefix _sep _formatter_type _formatters'.split()
def __init__(cls, name, bases, mapping):
formatter_type = cls._formatter_type
formatter_defs = {k:v for k,v in mapping.items() if not k.startswith('_') and not callable(v)}
formatter_args = {}
formatter_kwargs = {}
# build the formatter args, kwargs using formatter_type.args_parse
for k,args in formatter_defs.items():
args = [args] if isinstance(args, str) else args
formatter_args[k], formatter_kwargs[k] = formatter_type.args_parse(*args)
formatters = (formatter_type(*formatter_args[k], **formatter_kwargs[k]) for k in formatter_defs)
# pass each set of args and kwargs to the formatter type
cls._formatters = {k:formatter for k,formatter in zip(formatter_defs,formatters)}
# attempt to grab extra types dict from an existing compiler (assume all of them are identical)
try:
cls._extra_types = next(iter(cls._formatters.values()))._parser._extra_types
# no existing compiler
except (AttributeError, StopIteration):
pass
        super().__init__(name, bases, mapping)
def format(cls, *args, _asdict=True, _popmappings=True, **unified_namespace):
'''Return a combined formatted string using joined formatter members.
Mapping objects can represent individual member argslists/namespaces and the values
will be appended to the args of the member name matching the key.
Additional keyword arguments are passed to all formatteras as a "universal namespace".
_popmappings:
If True any Mapping object at the end of the args list is a member namespace. It will
be spun out as the args via the name of that member or method as a key.
_asdict:
If True any object in args list that includes an .asdict or ._asdict attribute will
be treated as a Mapping object via the name of that member or method as a key.'''
# optionally remove any mappings from the args list
if _popmappings:
# the slice of args in which to look for mappings (end to beginning)
slc=slice(-1,None,-1)
# spin out any Mapping (or optionally .asdict/._asdict) objects starting from the end of args
args, kwargs_from_args = args_kwargs_from_args(args, slc=slc, asdict=_asdict, ignore_conflicts=True, terminate_on_failure=True)
else:
args, kwargs_from_args = args, {}
# argslist to be passed to each formatter member on a per-member basis
try:
# use unpacking to disallow multiple argslists to same member name
format_args = od(**kwargs_from_args, **od((k,a) for k,a in zip(cls._formatters, args)))
except TypeError as exc:
if 'multiple values for keyword argument' in str(exc):
key_conflict = next(k for k,_ in zip(cls._formatters, args) if k in kwargs_from_args)
raise TypeError('Multiple argument sets provided under member name: {}.'.format(key_conflict)) from None
else:
raise
# convert any single namespace arguments to an args list
format_args = od((k,(a if not isinstance(a,str) and hasattr(a, '__iter__') else [a])) for k,a in format_args.items())
return cls._prefix + cls._sep.join(formatter.format(*format_args.get(member,[]), **unified_namespace) for member,formatter in cls._formatters.items())
def unformat(cls, string, evaluate_result=True):
'''Inverse of format. Match my format group to the string exactly.
Return a parse.Result or parse.Match instance or None if there's no match.
'''
fmat_str = (cls._sep if cls._sep else ' ').join(member._format_str for member in cls)
# try to get extra type from precompiled parser set at initialization
try:
extra_types = cls._extra_types
# parser wasn't precompiled so just assume the default
except AttributeError:
extra_types = dict(s=str)
print('fmat_str:\n', fmat_str, 'string:\n', string[len(cls._prefix):], sep='\n')
result = parse.parse(fmat_str, string[len(cls._prefix):], extra_types, evaluate_result=evaluate_result)
# replace default output tuple with namedtuple
if result is not None and result.fixed:
result.fixed=list(result.fixed)
def is_positional_field(member_parse):
return member_parse[1:3]!=(None,None) and (member_parse[1] == '' or parse.parse('{:d}',member_parse[1]) is not None or parse.parse('{:d}{}',member_parse[1]) is not None)
fixed_counts=[len([member_parse for member_parse in member.parse(member._format_str) if is_positional_field(member_parse)]) for member in cls]
results=[]
for count in fixed_counts:
r=[]
for _ in range(count):
r.append(result.fixed.pop(0))
results.append(r)
NT=nt(cls.__name__+'Data', ' '.join(cls._formatters))
result.fixed=NT(*(r if len(r)>1 else r[0] for r in results))
return result
def __iter__(cls):
yield from cls._formatters.values() | bsd-2-clause | 441,881,972,316,338,240 | 51.875912 | 185 | 0.619219 | false |
FireBladeNooT/Medusa_1_6 | medusa/providers/torrent/html/scc.py | 1 | 7435 | # coding=utf-8
# Author: Idan Gutman
# Modified by jkaberg, https://github.com/jkaberg for SceneAccess
#
# This file is part of Medusa.
#
# Medusa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Medusa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Medusa. If not, see <http://www.gnu.org/licenses/>.
"""Provider code for SCC."""
from __future__ import unicode_literals
import re
import traceback
from requests.compat import urljoin
from requests.utils import dict_from_cookiejar
from ..torrent_provider import TorrentProvider
from .... import logger, tv_cache
from ....bs4_parser import BS4Parser
from ....helper.common import convert_size, try_int
class SCCProvider(TorrentProvider):
"""SceneAccess Torrent provider."""
def __init__(self):
"""Initialize the class."""
super(self.__class__, self).__init__('SceneAccess')
# Credentials
self.username = None
self.password = None
# URLs
self.url = 'https://sceneaccess.eu'
self.urls = {
'login': urljoin(self.url, 'login'),
'search': urljoin(self.url, 'all?search={string}&method=1&{cats}'),
}
# Proper Strings
self.proper_strings = ['PROPER', 'REPACK', 'REAL']
# Miscellaneous Options
self.categories = {
# Archive, non-scene HD, non-scene SD;
# need to include non-scene because WEB-DL packs get added to those categories
'Season': 'c26=26&c44=44&c45=45',
# TV HD, TV SD, non-scene HD, non-scene SD, foreign XviD, foreign x264
'Episode': 'c11=11&c17=17&c27=27&c33=33&c34=34&c44=44&c45=45',
# Season + Episode
'RSS': 'c11=11&c17=17&c26=26&c27=27&c33=33&c34=34&c44=44&c45=45',
}
# Torrent Stats
self.minseed = None
self.minleech = None
# Cache
self.cache = tv_cache.TVCache(self, min_time=20)
def search(self, search_strings, age=0, ep_obj=None):
"""
Search a provider and parse the results.
:param search_strings: A dict with mode (key) and the search value (value)
:param age: Not used
:param ep_obj: Not used
:returns: A list of search results (structure)
"""
results = []
if not self.login():
return results
for mode in search_strings:
logger.log('Search mode: {0}'.format(mode), logger.DEBUG)
for search_string in search_strings[mode]:
if mode != 'RSS':
logger.log('Search string: {search}'.format
(search=search_string), logger.DEBUG)
search_url = self.urls['search'].format(string=self._strip_year(search_string),
cats=self.categories[mode])
response = self.get_url(search_url, returns='response')
if not response or not response.text:
logger.log('No data returned from provider', logger.DEBUG)
continue
results += self.parse(response.text, mode)
return results
def parse(self, data, mode):
"""
Parse search results for items.
:param data: The raw response from a search
:param mode: The current mode used to search, e.g. RSS
:return: A list of items found
"""
items = []
with BS4Parser(data, 'html5lib') as html:
torrent_table = html.find('table', attrs={'id': 'torrents-table'})
torrent_rows = torrent_table('tr') if torrent_table else []
# Continue only if at least one release is found
if len(torrent_rows) < 2:
logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
return items
for row in torrent_rows[1:]:
try:
title = row.find('td', class_='ttr_name').find('a').get('title')
torrent_url = row.find('td', class_='td_dl').find('a').get('href')
download_url = urljoin(self.url, torrent_url)
if not all([title, torrent_url]):
continue
seeders = try_int(row.find('td', class_='ttr_seeders').get_text(), 1)
leechers = try_int(row.find('td', class_='ttr_leechers').get_text())
# Filter unseeded torrent
if seeders < min(self.minseed, 1):
if mode != 'RSS':
logger.log("Discarding torrent because it doesn't meet the "
"minimum seeders: {0}. Seeders: {1}".format
(title, seeders), logger.DEBUG)
continue
torrent_size = row.find('td', class_='ttr_size').contents[0]
size = convert_size(torrent_size) or -1
item = {
'title': title,
'link': download_url,
'size': size,
'seeders': seeders,
'leechers': leechers,
'pubdate': None,
}
if mode != 'RSS':
logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
(title, seeders, leechers), logger.DEBUG)
items.append(item)
except (AttributeError, TypeError, KeyError, ValueError, IndexError):
logger.log('Failed parsing provider. Traceback: {0!r}'.format
(traceback.format_exc()), logger.ERROR)
return items
def login(self):
"""Login method used for logging in before doing search and torrent downloads."""
if any(dict_from_cookiejar(self.session.cookies).values()):
return True
login_params = {
'username': self.username,
'password': self.password,
'submit': 'come on in',
}
response = self.get_url(self.urls['login'], post_data=login_params, returns='response')
if not response or not response.text:
logger.log('Unable to connect to provider', logger.WARNING)
return False
if any([re.search(r'Username or password incorrect', response.text),
re.search(r'<title>SceneAccess \| Login</title>', response.text), ]):
logger.log('Invalid username or password. Check your settings', logger.WARNING)
return False
return True
@staticmethod
def _strip_year(search_string):
"""Remove brackets from search string year."""
if not search_string:
return search_string
return re.sub(r'\((?P<year>\d{4})\)', '\g<year>', search_string)
provider = SCCProvider()
| gpl-3.0 | -8,627,801,519,026,012,000 | 36.550505 | 101 | 0.551715 | false |
strus38/WPaaS | wpars/glance.py | 1 | 5431 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Code inspired from Docker and modified to fit our needs
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import os
import flask
import glanceclient
from keystoneclient.v2_0 import client as keystoneclient
class GlanceStorage(object):
"""
This class stores the image layers into OpenStack Glance.
"""
disk_format = 'raw'
container_format = 'wparrip'
def __init__(self, config):
self._config = config
def _get_auth_token(self):
args = {}
for arg in ['username', 'password', 'tenant_name', 'auth_url']:
env_name = 'OS_{0}'.format(arg.upper())
if env_name not in os.environ:
raise ValueError('Cannot find env var "{0}"'.format(env_name))
args[arg] = os.environ[env_name]
keystone = keystoneclient.Client(**args)
return keystone.auth_token
def _get_endpoint(self):
if 'OS_GLANCE_URL' not in os.environ:
raise ValueError('Cannot find env var "OS_GLANCE_URL"')
return os.environ['OS_GLANCE_URL']
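    # Authentication is driven entirely by environment variables; for example
    # (values below are illustrative only):
    #   OS_USERNAME=admin OS_PASSWORD=secret OS_TENANT_NAME=service
    #   OS_AUTH_URL=http://keystone:5000/v2.0 OS_GLANCE_URL=http://glance:9292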
def _create_glance_client(self):
token = flask.request.headers.get('X-Meta-Auth-Token')
endpoint = flask.request.headers.get('X-Meta-Glance-Endpoint')
if not token:
token = self._get_auth_token()
if not endpoint:
endpoint = self._get_endpoint()
return glanceclient.Client('1', endpoint=endpoint, token=token)
    def _read_image_info_file(self, image_name):
try:
f = open(image_local+'/'+image_name, "r")
except IOError:
return None
else:
with f:
obj = json.loads(f.read())
return obj
def _init_path(self, path, create=True):
"""This resolve a standard Wparrip <image>.info file
and returns: glance_image obj, property_name
!The image_id should be in sync with what Glance has!
If property name is None, we want to reach the image_data
"""
localpath, filename = os.path.split(path)
        obj_res = self._read_image_info_file(path)
if not 'id' in obj_res:
raise ValueError('Invalid image info file: {0}'.format(path))
image_id = obj_res['id']
glance = self._create_glance_client()
image = self._find_image_by_id(glance, image_id)
if not image and create is True:
if 'X-Meta-Glance-Image-Id' in flask.request.headers:
try:
i = glance.images.get(
flask.request.headers['X-Meta-Glance-Image-Id'])
if i.status == 'queued':
# We allow taking existing images only when queued
image = i
image.update(properties={'id': image_id},
purge_props=False)
except Exception:
pass
if not image:
image = glance.images.create(
disk_format=self.disk_format,
container_format=self.container_format,
properties={'id': image_id})
try:
image.update(is_public=True, purge_props=False)
except Exception:
pass
propname = 'meta_{0}'.format(filename)
if filename == 'layer':
propname = None
return image, propname
def _find_image_by_id(self, glance, image_id):
filters = {
'disk_format': self.disk_format,
'container_format': self.container_format,
'properties': {'id': image_id}
}
images = [i for i in glance.images.list(filters=filters)]
if images:
return images[0]
def _clear_images_name(self, glance, image_name):
images = glance.images.list(filters={'name': image_name})
for image in images:
image.update(name=None, purge_props=False)
def get_content(self, path):
(image, propname) = self._init_path(path, False)
if not propname:
raise ValueError('Wrong call (should be stream_read)')
if not image or propname not in image.properties:
raise IOError('No such image {0}'.format(path))
return image.properties[propname]
def put_content(self, path, content):
(image, propname) = self._init_path(path)
if not propname:
raise ValueError('Wrong call (should be stream_write)')
props = {propname: content}
image.update(properties=props, purge_props=False)
def stream_read(self, path):
(image, propname) = self._init_path(path, False)
if propname:
raise ValueError('Wrong call (should be get_content)')
if not image:
raise IOError('No such image {0}'.format(path))
return image.data(do_checksum=False)
def stream_write(self, path, fp):
(image, propname) = self._init_path(path)
if propname:
raise ValueError('Wrong call (should be put_content)')
image.update(data=fp, purge_props=False)
def exists(self, path):
(image, propname) = self._init_path(path, False)
if not image:
return False
if not propname:
return True
return (propname in image.properties)
def remove(self, path):
(image, propname) = self._init_path(path, False)
if not image:
return
if propname:
# Delete only the image property
props = image.properties
if propname in props:
del props[propname]
image.update(properties=props)
return
image.delete()
def get_size(self, path):
(image, propname) = self._init_path(path, False)
if not image:
raise OSError('No such image: \'{0}\''.format(path))
return image.size
| apache-2.0 | -8,204,466,227,459,851,000 | 29.511236 | 75 | 0.690112 | false |
csala/zato | code/zato-common/src/zato/common/test/__init__.py | 1 | 11503 | # -*- coding: utf-8 -*-
"""
Copyright (C) 2012 Dariusz Suchojad <dsuch at zato.io>
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from datetime import datetime
from random import choice, randint
from unittest import TestCase
from uuid import uuid4
# anyjson
from anyjson import loads
# base32_crockford
from base32_crockford import decode
# Bunch
from bunch import Bunch
# mock
from mock import MagicMock, Mock
# nose
from nose.tools import eq_
# six
from six import string_types
# SQLAlchemy
from sqlalchemy import create_engine
# Zato
from zato.common import CHANNEL, DATA_FORMAT, SIMPLE_IO
from zato.common.log_message import CID_LENGTH
from zato.common.odb import model
from zato.common.util import new_cid
def rand_bool():
return choice((True, False))
def rand_csv(count=3):
return ','.join(str(elem) for elem in rand_int(count=count))
def rand_dict():
out = {}
funcs = [rand_bool, rand_int, rand_string]
for x in range(rand_int(30)):
out[choice(funcs)()] = choice(funcs)()
return out
def rand_list():
out = []
funcs = [rand_bool, rand_int, rand_string]
for x in range(rand_int(30)):
out.append(choice(funcs)())
return out
def rand_list_of_dicts():
out = []
for x in range(rand_int(30)):
out.append(rand_dict())
return out
def rand_opaque():
return rand_object()
rand_nested = rand_opaque
def rand_datetime():
return datetime.utcnow().isoformat() # Random in the sense of not repeating
def rand_int(start=1, stop=100, count=1):
if count == 1:
return randint(start, stop)
else:
return [randint(start, stop) for x in range(count)]
def rand_float(start=1.0, stop=100.0):
return float(rand_int(start, stop))
def rand_string(count=1):
if count == 1:
return 'a' + uuid4().hex
else:
return ['a' + uuid4().hex for x in range(count)]
def rand_unicode():
return u'ϠϡϢϣϤϥϦϧϨϩϪϫϬϭ'
def rand_object():
return object()
def rand_date_utc(as_string=False):
value = datetime.utcnow() # Now is as random as any other date
if as_string:
return value.isoformat()
return value
def is_like_cid(cid):
""" Raises ValueError if the cid given on input does not look like a genuine CID
produced by zato.common.util.new_cid
"""
if not isinstance(cid, string_types):
raise ValueError('CID `{}` should be string like instead of `{}`'.format(cid, type(cid)))
len_given = len(cid)
len_expected = CID_LENGTH + 1 # CID_LENGTH doesn't count 'K' in
if len_given != len_expected:
raise ValueError('CID `{}` should have length `{}` instead of `{}`'.format(cid, len_expected, len_given))
if not cid.startswith('K'):
raise ValueError('CID `{}` should start with `K`'.format(cid))
value = decode(cid[1:])
if(value >> 128) != 0:
raise ValueError('There aren\'t 128 bits in CID `{}`'.format(value))
return True
class Expected(object):
""" A container for the data a test expects the service to return.
"""
def __init__(self):
self.data = []
def add(self, item):
self.data.append(item)
def get_data(self):
if not self.data or len(self.data) > 1:
return self.data
else:
return self.data[0]
class FakeBrokerClient(object):
def __init__(self):
self.publish_args = []
self.publish_kwargs = []
self.invoke_async_args = []
self.invoke_async_kwargs = []
def publish(self, *args, **kwargs):
raise NotImplementedError()
def invoke_async(self, *args, **kwargs):
self.invoke_async_args.append(args)
self.invoke_async_kwargs.append(kwargs)
class FakeKVDB(object):
class FakeConn(object):
def __init__(self):
self.setnx_args = None
self.setnx_return_value = True
self.expire_args = None
self.delete_args = None
def return_none(self, *ignored_args, **ignored_kwargs):
return None
get = hget = return_none
def setnx(self, *args):
self.setnx_args = args
return self.setnx_return_value
def expire(self, *args):
self.expire_args = args
def delete(self, args):
self.delete_args = args
def __init__(self):
self.conn = self.FakeConn()
def translate(self, *ignored_args, **ignored_kwargs):
raise NotImplementedError()
class FakeServices(object):
def __getitem__(self, ignored):
return {'slow_threshold': 1234}
class FakeServiceStore(object):
def __init__(self, name_to_impl_name=None, impl_name_to_service=None):
self.services = FakeServices()
self.name_to_impl_name = name_to_impl_name or {}
self.impl_name_to_service = impl_name_to_service or {}
def new_instance(self, impl_name):
return self.impl_name_to_service[impl_name]()
class FakeServer(object):
""" A fake mock server used in test cases.
"""
def __init__(self, service_store_name_to_impl_name=None, service_store_impl_name_to_service=None):
self.kvdb = FakeKVDB()
self.service_store = FakeServiceStore(service_store_name_to_impl_name, service_store_impl_name_to_service)
self.fs_server_config = Bunch()
self.fs_server_config.misc = Bunch()
self.fs_server_config.misc.zeromq_connect_sleep = 0.1
self.fs_server_config.misc.internal_services_may_be_deleted = False
self.repo_location = rand_string()
self.delivery_store = None
self.user_config = Bunch()
class ForceTypeWrapper(object):
""" Makes comparison between two ForceType elements use their names.
"""
def __init__(self, value):
self.value = value
def __cmp__(self, other):
# Compare to either other's name or to other directly. In the latter case it means it's a plain string name
# of a SIO attribute.
return cmp(self.value.name, getattr(other, 'name', other))
class ServiceTestCase(TestCase):
def invoke(self, class_, request_data, expected, mock_data={}, channel=CHANNEL.HTTP_SOAP, job_type=None,
data_format=DATA_FORMAT.JSON, service_store_name_to_impl_name=None, service_store_impl_name_to_service=None):
""" Sets up a service's invocation environment, then invokes and returns
an instance of the service.
"""
instance = class_()
worker_store = MagicMock()
worker_store.worker_config = MagicMock
worker_store.worker_config.outgoing_connections = MagicMock(return_value=(None, None, None, None))
worker_store.worker_config.cloud_openstack_swift = MagicMock(return_value=None)
worker_store.worker_config.cloud_aws_s3 = MagicMock(return_value=None)
worker_store.invoke_matcher.is_allowed = MagicMock(return_value=True)
simple_io_config = {
'int_parameters': SIMPLE_IO.INT_PARAMETERS.VALUES,
'int_parameter_suffixes': SIMPLE_IO.INT_PARAMETERS.SUFFIXES,
'bool_parameter_prefixes': SIMPLE_IO.BOOL_PARAMETERS.SUFFIXES,
}
class_.update(
instance, channel, FakeServer(service_store_name_to_impl_name, service_store_impl_name_to_service),
None, worker_store, new_cid(), request_data, request_data, simple_io_config=simple_io_config,
data_format=data_format, job_type=job_type)
def get_data(self, *ignored_args, **ignored_kwargs):
return expected.get_data()
instance.get_data = get_data
for attr_name, mock_path_data_list in mock_data.iteritems():
setattr(instance, attr_name, Mock())
attr = getattr(instance, attr_name)
for mock_path_data in mock_path_data_list:
for path, value in mock_path_data.iteritems():
splitted = path.split('.')
new_path = '.return_value.'.join(elem for elem in splitted) + '.return_value'
attr.configure_mock(**{new_path:value})
broker_client_publish = getattr(self, 'broker_client_publish', None)
if broker_client_publish:
instance.broker_client = FakeBrokerClient()
instance.broker_client.publish = broker_client_publish
instance.call_hooks('before')
instance.handle()
instance.call_hooks('after')
return instance
def _check_sio_request_input(self, instance, request_data):
for k, v in request_data.iteritems():
self.assertEquals(getattr(instance.request.input, k), v)
sio_keys = set(getattr(instance.SimpleIO, 'input_required', []))
sio_keys.update(set(getattr(instance.SimpleIO, 'input_optional', [])))
given_keys = set(request_data.keys())
diff = sio_keys ^ given_keys
self.assertFalse(diff, 'There should be no difference between sio_keys {} and given_keys {}, diff {}'.format(
sio_keys, given_keys, diff))
def check_impl(self, service_class, request_data, response_data, response_elem, mock_data={}):
expected_data = sorted(response_data.items())
instance = self.invoke(service_class, request_data, None, mock_data)
self._check_sio_request_input(instance, request_data)
if response_data:
if not isinstance(instance.response.payload, basestring):
response = loads(instance.response.payload.getvalue())[response_elem] # Raises KeyError if 'response_elem' doesn't match
else:
response = loads(instance.response.payload)[response_elem]
self.assertEqual(sorted(response.items()), expected_data)
def check_impl_list(self, service_class, item_class, request_data, # noqa
response_data, request_elem, response_elem, mock_data={}): # noqa
expected_keys = response_data.keys()
expected_data = tuple(response_data for x in range(rand_int(10)))
expected = Expected()
for datum in expected_data:
item = item_class()
for key in expected_keys:
value = getattr(datum, key)
setattr(item, key, value)
expected.add(item)
instance = self.invoke(service_class, request_data, expected, mock_data)
response = loads(instance.response.payload.getvalue())[response_elem]
for idx, item in enumerate(response):
expected = expected_data[idx]
given = Bunch(item)
for key in expected_keys:
given_value = getattr(given, key)
expected_value = getattr(expected, key)
eq_(given_value, expected_value)
self._check_sio_request_input(instance, request_data)
def wrap_force_type(self, elem):
return ForceTypeWrapper(elem)
class ODBTestCase(TestCase):
def setUp(self):
self.engine = create_engine('sqlite:///:memory:')
model.Base.metadata.create_all(self.engine)
def tearDown(self):
model.Base.metadata.drop_all(self.engine)
| gpl-3.0 | -7,934,916,597,127,886,000 | 32.205202 | 136 | 0.61511 | false |
rarmknecht/nlpfun | basic_info.py | 1 | 6347 | #!/usr/bin/python2
# Randy Armknecht
# 19 Feb 2014
#
# Playing around with the Natural Language Processing Toolkit (nltk)
# http://www.nltk.org/
#
from __future__ import division
import sys
import nltk
from nltk.corpus import cmudict
from nltk.corpus import stopwords
from pprint import pprint
from hyphen import Hyphenator as hy
DICT = cmudict.dict()
SYLLABLE_AVG = 1.66
# START - Implemented from http://www.slideshare.net/pbpimpale/natural-language-toolkit-nltk-basics
def unusual_words(text):
text_vocab = set(w.lower() for w in text if w.isalpha())
english_vocab = set(w.lower() for w in nltk.corpus.words.words())
unusual = text_vocab.difference(english_vocab)
return sorted(unusual)
def problem_words(text):
return sorted(set(w.lower() for w in text if not w.isalpha()))
def content_fraction(text):
stopwords = nltk.corpus.stopwords.words('english')
content = [w for w in text if w.lower() not in stopwords]
return len(content) / len(text)
def plot_word_freq(text):
text_vocab = [w.lower() for w in text if w.isalpha()]
fdist = nltk.FreqDist(text_vocab)
fdist.plot()
def long_words(text,length=10):
text_vocab = [w.lower() for w in text if w.isalpha()]
return set([w for w in text_vocab if len(w) > length])
def topic_words(text,length=7,freq=7):
text_vocab = [w.lower() for w in text if w.isalpha()]
fdist = nltk.FreqDist(text_vocab)
return sorted([w for w in set(text_vocab) if len(w) > length and fdist[w] > freq])
def vocab_size(text):
return len(set(text))
def vocab_richness(text):
return len(text) / vocab_size(text)
def word_context(text,word):
return text.concordance(word)
# END - Implemented from http://www.slideshare.net/pbpimpale/natural-language-toolkit-nltk-basics
def get_raw(fname):
data = ""
with open(fname) as f:
data = f.read()
return data
def massage_raw(raw):
modified = ''.join([character for character in raw if ord(character) < 128])
sentences = nltk.sent_tokenize(modified)
words = nltk.word_tokenize(modified)
tokens = []
stops = [unicode(word) for word in stopwords.words('english')] + [',', '.', '?', '!', ':', ';', '-', ')', '(']
for w in words:
if w not in stops:
tokens.append(w)
return (nltk.Text(tokens), sentences)
def nsyl(word):
return len([i for i in DICT[word.lower()][0] if i[-1].isdigit()])
# return [len(list(y for y in x if y[-1].isdigit())) for x in DICT[word.lower()]][0]
# http://stackoverflow.com/a/5615724 translated to python
def count_syllables(word):
# Special Cases
if word in ['ll', 'noye', 'shae']:
return 1
# Back to Our Regular Scheduled Programming
vowels = ['a','e','i','o','u','y']
curword = word
syls = 0
lastWasVowel = False
for wc in curword:
foundVowel = False
for v in vowels:
# Don't Count Diphthongs
if v == wc and lastWasVowel:
foundVowel = True
lastWasVowel = True
break;
elif v == wc and not lastWasVowel:
syls += 1
foundVowel = True
lastWasVowel = True
break;
# If Fully cycle and no vowel found, set lastWasVowel to False
if not foundVowel:
lastWasVowel = False
# Remove es, it's usually silent
if len(curword) > 2 and curword[-2:] == "es":
syls -= 1
elif len(curword) > 1 and curword[-1] == "e":
syls -= 1
return syls
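# Heuristic examples (added for clarity): count_syllables("banana") -> 3,
# count_syllables("care") -> 1 (a trailing "e" is discounted as usually silent).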
# Modified form of https://gist.github.com/drinks/2483508
def flesch_kincaid(text,sentences):
syllables = []
misses = []
words = [word for word in text if (len(word) > 1) or (word.lower() in ['a', 'i'])]
for word in words:
try:
ns = nsyl(word)
syllables.append(ns)
except KeyError:
n = count_syllables(word.lower())
if n == 0:
misses.append(word.lower())
else:
syllables.append(n)
word_count = len(words) - len(misses)
sentence_count = len(sentences)
syllable_count = sum(syllables)
#m_dist = nltk.FreqDist(misses)
#for t in m_dist.keys():
# print m_dist[t], t, count_syllables(t)
#for m in set(misses):
# print "%s %d" % (m, m_dist[m])
words_sents = word_count / sentence_count
syl_words = syllable_count / word_count
if word_count > 0 and sentence_count > 0:
results = {
'words': word_count,
'syllables': syllable_count,
'missed_count': len(misses),
'missed_pct': len(misses) / (word_count + len(misses)),
'sentences': sentence_count,
'grade_level': (0.39 * words_sents) + (11.8 * syl_words) - 15.59,
'reading_ease': 206.835 - (1.015 * words_sents) - (84.6 * syl_words),
}
return results
# From: http://engineroom.trackmaven.com/blog/monthly-challenge-natural-language-processing/
def top10_bigrams(words):
bigram_measure = nltk.collocations.BigramAssocMeasures()
bigram_finder = nltk.collocations.BigramCollocationFinder.from_words(words)
# Filter to top 20 results; otherwise processing is long
bigram_finder.apply_freq_filter(20)
for bigram in bigram_finder.score_ngrams(bigram_measure.raw_freq)[:10]:
print(bigram)
# Modified the above to print trigrams, and look at words with a frequency of at least 10
def top10_trigrams(words):
trigram_measure = nltk.collocations.TrigramAssocMeasures()
trigram_finder = nltk.collocations.TrigramCollocationFinder.from_words(words)
# Filter at least 10 instances of each word, and measure based on pmi metric
# http://www.nltk.org/api/nltk.metrics.html#nltk.metrics.association.NgramAssocMeasures.pmi
trigram_finder.apply_freq_filter(10)
for trigram in trigram_finder.score_ngrams(trigram_measure.pmi)[:10]:
print(trigram)
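# Both helpers are used on the tokenized text below, e.g.:
#   top10_bigrams(text)   # ten most frequent two-word collocations (words seen >= 20 times)
#   top10_trigrams(text)  # ten highest-PMI three-word collocations (words seen >= 10 times)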
if __name__ == "__main__":
if len(sys.argv) is not 2:
print("Usage: %s <text_file>" % (sys.argv[0]))
sys.exit(0)
(text,sentences) = massage_raw(get_raw(sys.argv[1]))
pprint(flesch_kincaid(text,sentences))
print("\nBigrams\n====================")
top10_bigrams(text)
print("\nTrigrams\n====================")
top10_trigrams(text)
| mit | -6,004,009,814,675,770,000 | 31.382653 | 114 | 0.618245 | false |
by46/recipe | templates/python.lib/{{cookiecutter.project_safe_name}}/setup.py | 1 | 1964 | from __future__ import print_function
import io
import os.path
import re
from distutils.text_file import TextFile
from setuptools import find_packages, setup
home = os.path.abspath(os.path.dirname(__file__))
missing = object()
def read_description(*files, **kwargs):
encoding = kwargs.get('encoding', 'utf-8')
sep = kwargs.get('sep', '\n')
buf = [io.open(name, encoding=encoding).read() for name in files]
return sep.join(buf)
def read_dependencies(requirements=missing):
if requirements is None:
return []
if requirements is missing:
requirements = 'requirements.txt'
if not os.path.isfile(requirements):
return []
text = TextFile(requirements, lstrip_ws=True)
try:
return text.readlines()
finally:
text.close()
def read_version(version_file):
with open(version_file, 'rb') as fd:
result = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE)
return result.group(1) if result else '0.0.1'
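# For instance (hypothetical file contents, not part of this template): a
# module containing the line  __version__ = "1.2.3"  yields "1.2.3", while a
# module without a matching assignment falls back to "0.0.1".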
setup(
name='{{cookiecutter.project_slug}}',
version=read_version('{{cookiecutter.project_slug}}/__init__.py'),
license='The MIT License',
description='demo',
author='recipe',
author_email='[email protected]',
install_requires=read_dependencies(),
include_package_data=True,
packages=find_packages(),
classifiers=[
'Programming Language :: Python',
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
]
)
| mit | -6,274,117,673,014,789,000 | 29.215385 | 71 | 0.6222 | false |
DeepThoughtTeam/tensorflow | tensorflow/python/framework/tensor_shape_test.py | 1 | 13264 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for shape inference helper classes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.python.platform
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
class DimensionTest(test_util.TensorFlowTestCase):
def testDimension(self):
dim = tensor_shape.Dimension(12)
self.assertEqual(12, dim.value)
self.assertEqual(12, int(dim))
self.assertEqual(dim, tensor_shape.Dimension(12))
self.assertEqual(tensor_shape.Dimension(15),
dim + tensor_shape.Dimension(3))
self.assertEqual(tensor_shape.Dimension(15), dim + 3)
self.assertEqual(tensor_shape.Dimension(24),
dim * tensor_shape.Dimension(2))
self.assertEqual(tensor_shape.Dimension(24), dim * 2)
self.assertEqual(
tensor_shape.Dimension(6), dim // tensor_shape.Dimension(2))
self.assertEqual(tensor_shape.Dimension(6), dim // 2)
self.assertEqual(tensor_shape.Dimension(12),
dim.merge_with(tensor_shape.Dimension(12)))
self.assertEqual(tensor_shape.Dimension(12), dim.merge_with(12))
self.assertLess(tensor_shape.Dimension(12), tensor_shape.Dimension(13))
self.assertGreater(tensor_shape.Dimension(13), tensor_shape.Dimension(12))
self.assertLessEqual(tensor_shape.Dimension(12), tensor_shape.Dimension(12))
self.assertLessEqual(tensor_shape.Dimension(12), tensor_shape.Dimension(13))
self.assertGreater(tensor_shape.Dimension(13), tensor_shape.Dimension(12))
self.assertGreaterEqual(tensor_shape.Dimension(12),
tensor_shape.Dimension(12))
self.assertGreaterEqual(tensor_shape.Dimension(13),
tensor_shape.Dimension(12))
with self.assertRaises(ValueError):
dim.merge_with(tensor_shape.Dimension(13))
def testUnknownDimension(self):
dim = tensor_shape.Dimension(None)
self.assertIs(None, dim.value)
self.assertEqual(dim.value, tensor_shape.Dimension(None).value)
self.assertEqual(tensor_shape.Dimension(None).value,
(dim + tensor_shape.Dimension(None)).value)
self.assertEqual(tensor_shape.Dimension(None).value,
(dim * tensor_shape.Dimension(None)).value)
self.assertEqual(
tensor_shape.Dimension(None).value,
(dim // tensor_shape.Dimension(None)).value)
self.assertEqual(tensor_shape.Dimension(None).value,
dim.merge_with(tensor_shape.Dimension(None)).value)
self.assertIs(None,
tensor_shape.Dimension(None) < tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(None) <= tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(None) > tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(None) >= tensor_shape.Dimension(None))
def testKnownAndUnknownDimensions(self):
known = tensor_shape.Dimension(12)
unknown = tensor_shape.Dimension(None)
self.assertEqual(
tensor_shape.Dimension(None).value, (known + unknown).value)
self.assertEqual(
tensor_shape.Dimension(None).value, (unknown + known).value)
self.assertEqual(
tensor_shape.Dimension(None).value, (known * unknown).value)
self.assertEqual(
tensor_shape.Dimension(None).value, (unknown * known).value)
self.assertEqual(
tensor_shape.Dimension(None).value, (known // unknown).value)
self.assertEqual(
tensor_shape.Dimension(None).value, (unknown // known).value)
self.assertEqual(
tensor_shape.Dimension(12), known.merge_with(unknown))
self.assertEqual(
tensor_shape.Dimension(12), unknown.merge_with(known))
self.assertIs(None,
tensor_shape.Dimension(12) < tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(12) <= tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(12) > tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(12) >= tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(None) < tensor_shape.Dimension(12))
self.assertIs(None,
tensor_shape.Dimension(None) <= tensor_shape.Dimension(12))
self.assertIs(None,
tensor_shape.Dimension(None) > tensor_shape.Dimension(12))
self.assertIs(None,
tensor_shape.Dimension(None) >= tensor_shape.Dimension(12))
def testAsDimension(self):
self.assertEqual(tensor_shape.Dimension(12),
tensor_shape.as_dimension(tensor_shape.Dimension(12)))
self.assertEqual(tensor_shape.Dimension(12), tensor_shape.as_dimension(12))
self.assertEqual(
tensor_shape.Dimension(None).value,
tensor_shape.as_dimension(tensor_shape.Dimension(None)).value)
self.assertEqual(tensor_shape.Dimension(None).value,
tensor_shape.as_dimension(None).value)
def testEquality(self):
self.assertTrue(tensor_shape.Dimension(12) == tensor_shape.Dimension(12))
self.assertFalse(tensor_shape.Dimension(12) == tensor_shape.Dimension(13))
self.assertIs(None,
tensor_shape.Dimension(12) == tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(None) == tensor_shape.Dimension(12))
self.assertIs(None,
tensor_shape.Dimension(None) == tensor_shape.Dimension(None))
def testInequality(self):
self.assertTrue(tensor_shape.Dimension(12) != tensor_shape.Dimension(13))
self.assertFalse(tensor_shape.Dimension(12) != tensor_shape.Dimension(12))
self.assertIs(None,
tensor_shape.Dimension(12) != tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(None) != tensor_shape.Dimension(12))
self.assertIs(None,
tensor_shape.Dimension(None) != tensor_shape.Dimension(None))
class ShapeTest(test_util.TensorFlowTestCase):
def testUnknownShape(self):
s = tensor_shape.TensorShape(None)
with self.assertRaises(ValueError):
s.assert_is_fully_defined()
self.assertIs(None, s.ndims)
with self.assertRaises(ValueError):
len(s)
self.assertFalse(s)
self.assertIs(None, s.dims)
def testFullyDefinedShape(self):
s = tensor_shape.TensorShape([tensor_shape.Dimension(3),
tensor_shape.Dimension(4),
tensor_shape.Dimension(7)])
s.assert_is_fully_defined()
self.assertEqual(3, s.ndims)
self.assertEqual(3, len(s))
self.assertTrue(s)
s.assert_has_rank(3)
self.assertEqual([tensor_shape.Dimension(3),
tensor_shape.Dimension(4),
tensor_shape.Dimension(7)], s.dims)
self.assertEqual(tensor_shape.Dimension(3), s[0])
self.assertEqual(tensor_shape.Dimension(4), s[1])
self.assertEqual(tensor_shape.Dimension(7), s[2])
self.assertEqual([3, 4, 7], s.as_list())
s.assert_is_compatible_with([3, 4, 7])
s.assert_same_rank([6, 3, 7])
def testPartiallyDefinedShape(self):
s = tensor_shape.TensorShape([tensor_shape.Dimension(3),
tensor_shape.Dimension(None),
tensor_shape.Dimension(7)])
with self.assertRaises(ValueError):
s.assert_is_fully_defined()
self.assertEqual(3, s.ndims)
self.assertEqual(3, len(s))
self.assertTrue(s)
s.assert_has_rank(3)
self.assertEqual(tensor_shape.Dimension(3), s[0])
self.assertEqual(tensor_shape.Dimension(None).value, s[1].value)
self.assertEqual(tensor_shape.Dimension(7), s[2])
s.assert_same_rank([6, 3, 7])
def testMergeFullShapes(self):
self.assertEqual([3, 4, 7],
tensor_shape.TensorShape([3, 4, 7]).merge_with(
tensor_shape.TensorShape([3, 4, 7])).as_list())
with self.assertRaises(ValueError):
tensor_shape.TensorShape([3, 4, 7]).merge_with(
tensor_shape.TensorShape([6, 3, 7]))
def testMergePartialShapes(self):
s1 = tensor_shape.TensorShape([tensor_shape.Dimension(3),
tensor_shape.Dimension(None),
tensor_shape.Dimension(7)])
s2 = tensor_shape.TensorShape([tensor_shape.Dimension(None),
tensor_shape.Dimension(4),
tensor_shape.Dimension(7)])
self.assertEqual([3, 4, 7], s1.merge_with(s2).as_list())
def testMergeFullAndUnknownShape(self):
self.assertEqual([3, 4, 7],
tensor_shape.TensorShape([3, 4, 7]).merge_with(
tensor_shape.TensorShape(None)).as_list())
def testSlice(self):
known = tensor_shape.TensorShape([0, 1, 2, 3, 4])
self.assertEqual(tensor_shape.Dimension(2), known[2])
tensor_shape.TensorShape([1, 2, 3]).assert_is_compatible_with(known[1:4])
unknown = tensor_shape.TensorShape(None)
self.assertEqual(tensor_shape.Dimension(None).value, unknown[2].value)
tensor_shape.TensorShape(
[None, None, None]).assert_is_compatible_with(unknown[1:4])
def testConcatenate(self):
tensor_shape.TensorShape([1, 2, 3, 4]).assert_is_compatible_with(
tensor_shape.TensorShape([1, 2]).concatenate(
tensor_shape.TensorShape([3, 4])))
tensor_shape.TensorShape([1, 2, 3, 4]).assert_is_compatible_with(
tensor_shape.TensorShape([1, 2]).concatenate(
tensor_shape.TensorShape(None)))
tensor_shape.TensorShape([1, 2, 3, 4]).assert_is_compatible_with(
tensor_shape.TensorShape(None).concatenate(
tensor_shape.TensorShape([3, 4])))
tensor_shape.TensorShape([1, 2, 3, 4]).assert_is_compatible_with(
tensor_shape.TensorShape(None).concatenate(
tensor_shape.TensorShape(None)))
tensor_shape.TensorShape([1, 2, 3]).assert_is_compatible_with(
tensor_shape.TensorShape([1, 2]).concatenate(
tensor_shape.Dimension(3)))
def testHelpers(self):
tensor_shape.TensorShape([]).assert_is_compatible_with(
tensor_shape.scalar())
tensor_shape.TensorShape([37]).assert_is_compatible_with(
tensor_shape.vector(37))
tensor_shape.TensorShape(
[94, 43]).assert_is_compatible_with(tensor_shape.matrix(94, 43))
def testTruedivFails(self):
unknown = tensor_shape.Dimension(None)
self.assertEqual((unknown // unknown).value, None)
with self.assertRaisesRegexp(TypeError, r"unsupported operand type"):
unknown / unknown # pylint: disable=pointless-statement
def testConvertFromProto(self):
proto = tensor_util.make_tensor_shape_proto([])
self.assertEqual(tensor_shape.TensorShape([]),
tensor_shape.TensorShape(proto))
self.assertEqual(tensor_shape.TensorShape([]),
tensor_shape.as_shape(proto))
proto = tensor_util.make_tensor_shape_proto([1, 37, 42])
self.assertEqual(tensor_shape.TensorShape([1, 37, 42]),
tensor_shape.TensorShape(proto))
self.assertEqual(tensor_shape.TensorShape([1, 37, 42]),
tensor_shape.as_shape(proto))
partial_proto_shape = tensor_shape.as_shape(
tensor_util.make_tensor_shape_proto([-1, 37, 42]))
partial_shape = tensor_shape.TensorShape([None, 37, 42])
self.assertNotEqual(partial_proto_shape, partial_shape)
self.assertEqual(partial_proto_shape[0].value, None)
self.assertEqual(partial_proto_shape[1].value, 37)
self.assertEqual(partial_proto_shape[2].value, 42)
self.assertTrue(partial_shape.is_compatible_with(partial_proto_shape))
def testStr(self):
self.assertEqual("<unknown>", str(tensor_shape.unknown_shape()))
self.assertEqual("(?,)", str(tensor_shape.unknown_shape(ndims=1)))
self.assertEqual("(?, ?)", str(tensor_shape.unknown_shape(ndims=2)))
self.assertEqual("(?, ?, ?)", str(tensor_shape.unknown_shape(ndims=3)))
self.assertEqual("()", str(tensor_shape.scalar()))
self.assertEqual("(7,)", str(tensor_shape.vector(7)))
self.assertEqual("(3, 8)", str(tensor_shape.matrix(3, 8)))
self.assertEqual("(4, 5, 2)", str(tensor_shape.TensorShape([4, 5, 2])))
self.assertEqual("(32, ?, 1, 9)",
str(tensor_shape.TensorShape([32, None, 1, 9])))
if __name__ == "__main__":
googletest.main()
| apache-2.0 | 4,033,223,273,599,487,000 | 43.810811 | 80 | 0.656966 | false |
QuLogic/ocropy | tests/test_pageseg.py | 1 | 1522 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import unittest
import os
from PIL import Image
from nose.tools import raises
from kraken.pageseg import segment
from kraken.lib.exceptions import KrakenInputException
thisfile = os.path.abspath(os.path.dirname(__file__))
resources = os.path.abspath(os.path.join(thisfile, 'resources'))
class TestPageSeg(unittest.TestCase):
"""
Tests of the page segmentation functionality
"""
@raises(KrakenInputException)
def test_segment_color(self):
"""
Test correct handling of color input.
"""
with Image.open(os.path.join(resources, 'input.jpg')) as im:
segment(im)
def test_segment_bw(self):
"""
Tests segmentation of bi-level input.
"""
with Image.open(os.path.join(resources, 'bw.png')) as im:
lines = segment(im)
# test if line count is roughly correct
self.assertAlmostEqual(len(lines), 30, msg='Segmentation differs '
'wildly from true line count', delta=5)
# check if lines do not extend beyond image
for box in lines:
self.assertLess(0, box[0], msg='Line x0 < 0')
self.assertLess(0, box[1], msg='Line y0 < 0')
self.assertGreater(im.size[0], box[2], msg='Line x1 > {}'.format(im.size[0]))
self.assertGreater(im.size[1], box[3], msg='Line y1 > {}'.format(im.size[1]))
| apache-2.0 | 4,246,072,506,899,317,000 | 32.822222 | 93 | 0.604468 | false |
jeffersonfparil/GTWAS_POOL_RADseq_SIM | simulateVAR.py | 1 | 3264 | #!/usr/bin/env python
import os, sys, math, random
import numpy as np
from Bio import SeqIO
from Bio import Seq
work_DIR = sys.argv[1]
input_SEQ = sys.argv[2]
output_SEQ = sys.argv[3]
output_VCF = sys.argv[4]
varPerScaf = int(sys.argv[5]) #number of variants per scaffold
# #for testing:
# work_DIR = "/mnt/SIMULATED/DNA"
# input_SEQ = "Fixed.fasta"
# output_SEQ = "Variant.fasta"
# output_VCF = "Variant.vcf"
# varPerScaf = 20
os.chdir(work_DIR)
#(1->2) SIMULATE ALLELIC VARIANTS FROM A REFERENCE GENOME AND
# SPIT OUT THE VCF AND FASTA FILES WITH THE SIMULATED MUTATIONS
fasta_sequences = SeqIO.parse(input_SEQ,'fasta')
fasta_index = SeqIO.index(input_SEQ,'fasta')
NEW_FASTA = []
VCF = np.array([np.repeat(0, 9)])
for fasta in fasta_sequences:
name, sequence = fasta.id, str(fasta.seq)
LEN = len(sequence)
POS = np.random.choice(range(LEN), replace=False, size=varPerScaf)
for i in range(len(POS)):
availPOS = set(range(LEN)) - set(POS)
while sequence[POS[i]] != "A" and sequence[POS[i]] != "T" and sequence[POS[i]] != "C" and sequence[POS[i]] != "G":
POS[i] = np.random.choice(list(availPOS), replace=False, size=1)
POS.sort()
MUT = np.random.choice(["Substitution", "Duplication", "Deletion"], size=len(POS), p=[0.9, 0.01, 0.09])
#CH0 = np.random.chisquare(0.3, varPerScaf)
CH0 = np.random.choice([1], varPerScaf) #bcftools does not seem to like more than 1 base in the reference
CH1 = np.random.chisquare(0.3, varPerScaf)
CHR = []
for cha in CH0:
CHR.append(int(math.ceil(cha)))
CHA = []
for cha in CH1:
CHA.append(int(math.ceil(cha)))
REF=[]
ALT=[]
for i in range(len(POS)):
if MUT[i] == "Substitution":
SUB=[]
for s in range(int(CHA[i])):
SUB.append(random.choice(["A", "T", "C", "G"]))
while "".join(SUB) == sequence[POS[i]:POS[i]+CHR[i]]:
SUB=[]
for s in range(int(CHA[i])):
SUB.append(random.choice(["A", "T", "C", "G"]))
sequence2 = sequence[:POS[i]] + "".join(SUB) + sequence[POS[i]+CHR[i]:]
REF.append(sequence[POS[i]:POS[i]+CHR[i]])
ALT.append("".join(SUB))
else:
if MUT[i] == "Duplication":
sequence2 = sequence[:POS[i]+CHR[i]] + sequence[POS[i]:POS[i]+CHR[i]] + sequence[POS[i]+CHR[i]+1:]
REF.append(sequence[POS[i]:POS[i]+CHR[i]])
ALT.append(sequence[POS[i]:POS[i]+CHR[i]] + sequence[POS[i]:POS[i]+CHR[i]])
else:
sequence2 = sequence[:POS[i]] + sequence[POS[i]+1:]
#REF.append(sequence[POS[i]-1:POS[i]+CHR[i]])
#ALT.append(sequence2[POS[i]-1:POS[i]])
REF.append(sequence[POS[i]:POS[i]+CHR[i]])
ALT.append('<DEL>')
#fasta.seq = Seq.Seq(sequence2)
#NEW_FASTA.append(fasta)
CHROM = np.repeat(name, varPerScaf)
POS = POS + 1
ID = np.repeat(".", varPerScaf)
QUAL = np.repeat(".", varPerScaf)
FILTER = np.repeat("PASS", varPerScaf)
INFO = np.repeat(".", varPerScaf)
FORMAT = np.repeat("GT", varPerScaf)
vcf = np.stack((CHROM, POS, ID, REF, ALT, QUAL, FILTER, INFO, FORMAT), axis=-1)
VCF = np.concatenate((VCF, vcf), axis=0)
#FASTA OUTPUT:
#SeqIO.write(NEW_FASTA, output_SEQ, "fasta")
#WRITE VCF FILE:
VCF = VCF[1:len(VCF)]
np.savetxt("outPy.txt", VCF, fmt='%s' ,delimiter="\t")
os.system("cat VCF.header outPy.txt > " + output_VCF)
os.system("rm outPy.txt")
###:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::### | gpl-3.0 | 9,102,283,531,665,851,000 | 34.48913 | 116 | 0.630208 | false |
edosedgar/xs-pkg | deep_learning/HW2/notmnist.py | 1 | 2059 | import os
import numpy as np
from scipy.misc import imread,imresize
from urllib.request import urlretrieve
def load_notmnist(path=".", letters='ABCDEFGHIJ',
img_shape=(28,28),test_size=0.25,one_hot=False):
root = os.path.join(path, "notMNIST_small")
# download data if it's missing. If you have any problems, go to the urls and load it manually.
if not os.path.exists(root):
print("Downloading data...")
urlretrieve(
"http://yaroslavvb.com/upload/notMNIST/notMNIST_small.tar.gz",
"notMNIST_small.tar.gz")
print("Extracting ...")
import tarfile
with tarfile.open("notMNIST_small.tar.gz", "r:gz") as tar:
tar.extractall(path=path)
data,labels = [],[]
print("Parsing...")
for letter in sorted(os.listdir(root)):
if letter not in letters: continue
for img_name in sorted(os.listdir(os.path.join(root, letter))):
img_path = os.path.join(root, letter, img_name)
try:
data.append(imresize(imread(img_path), img_shape))
labels.append(letter,)
except:
print("found broken img: %s [it's ok if <10 images are broken]" % img_path)
data = np.stack(data)[:,None].astype('float32')
data = (data - np.mean(data)) / np.std(data)
#convert classes to ints
letter_to_i = {l:i for i,l in enumerate(letters)}
labels = np.array(list(map(letter_to_i.get, labels)))
if one_hot:
labels = (np.arange(np.max(labels) + 1)[None,:] == labels[:, None]).astype('float32')
#split into train/test
np.random.seed(666)
permutation = np.arange(len(data))
np.random.shuffle(permutation)
data = data[permutation]
labels = labels[permutation]
n_train_samples = int(round(len(data) * (1.0 - test_size)))
X_train, X_test = data[:n_train_samples], data[n_train_samples:]
y_train, y_test = labels[:n_train_samples], labels[n_train_samples:]
return X_train, y_train, X_test, y_test
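# Example usage (illustrative; the argument values below are assumptions):
#   X_train, y_train, X_test, y_test = load_notmnist(path=".", letters='AB',
#                                                    one_hot=True)
#   # X_train has shape (N, 1, 28, 28) after the [:, None] channel expansion,
#   # and y_train has shape (N, 2) because two letter classes were requested.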
| gpl-2.0 | 1,673,140,495,444,828,200 | 34.5 | 99 | 0.606605 | false |
arbuz001/sms-tools | workspace/A6/loadTestCases.py | 1 | 1114 | import pickle
PA = 'A6'
def load(partId, caseId=1):
"""
This function returns the example test-cases for a specific part of an assignment.
Input:
partId (int) = part number of the assignment (1 for A*Part1, 2 for A*Part2 and so on)
            caseId (int) = index of the test case to return; caseId = k returns the kth test case. Typically there are two per part.
Output:
testcase (dict) = {'input': <input test case>, 'output': <expected output for the input test case>}
"""
    data = pickle.load(open('testInput%s.pkl'%PA,'rb'))
part = u'%s-part-%d'%(PA, partId)
if not data['exampleInputs'].has_key(part):
print "There are no example test cases required for this part. You can keep on improving the pitch track and submit once you are satisfied. Plots provide you feedback on the accuracy of the pitch track."
return None
if caseId > len(data['exampleInputs'][part]) or caseId <=0:
print "Please provide a valid caseId (>=1), number of test cases in this assignment are %d"%(len(data['exampleInputs'][part]))
return None
return {'input': data['exampleInputs'][part][caseId-1], 'output': data['exampleOutputs'][part][caseId-1]}
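# Example usage (illustrative; `my_solution` is a hypothetical student function
# and the exact structure of case['input'] depends on the part being tested):
#   case = load(partId=1, caseId=1)
#   if case is not None:
#       result = my_solution(case['input'])
#       # compare `result` against case['output']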
| agpl-3.0 | -1,313,970,534,816,519,200 | 40.259259 | 205 | 0.704668 | false |
yobibyte/DeepFried2 | DeepFried2/utils.py | 1 | 2973 | import DeepFried2 as df
import numpy as _np
from warnings import warn as _warn
from numbers import Number as _Number
def create_param_state_as(other, initial_value=0, prefix='state_for_'):
return df.th.shared(other.get_value()*0 + initial_value,
broadcastable=other.broadcastable,
name=prefix + str(other.name)
)
def _check_dtype_mistake(dtype):
"""
It's a very common mistake (at least for me) to pass-in a float64 when I
really want to pass in a `floatX`, and it would go unnoticed and slow-down
the computations a lot if I wouldn't check it here.
"""
if _np.issubdtype(dtype, _np.floating) and dtype != df.floatX:
_warn("Input array of floating-point dtype {} != df.floatX detected. Is this really what you want?".format(dtype))
def make_tensor(dtype, ndim, name):
_check_dtype_mistake(dtype)
return df.th.tensor.TensorType(dtype, (False,) * ndim)(name)
def tensors_for_ndarrays(datas, basename):
if isinstance(datas, _np.ndarray):
return make_tensor(datas.dtype, datas.ndim, basename)
if isinstance(datas, (list, tuple)):
return [tensors_for_ndarrays(data, "{}_{}".format(basename, i)) for i, data in enumerate(datas)]
# Could potentially make it "any iterable" by removing above check.
# But would need to guarantee we never iterate over it twice, which is harder!
raise TypeError("I only understand lists or tuples of numpy arrays! (possibly nested)")
def count_params(module, learnable_only=True):
return sum(p.get_value().size for p in module.parameters(learnable_only=learnable_only))
def flatten(what, types=(list, tuple), none_to_empty=False):
if what is None and none_to_empty:
return []
if not isinstance(what, types):
return [what]
# NOTE: I actually timed that this is faster than the comprehension,
# even though it probably doesn't matter :)
# 350us vs 250us
ret = []
for sub in what:
ret += flatten(sub, types=types, none_to_empty=none_to_empty)
return ret
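# For example (values chosen purely for illustration):
#   flatten([1, [2, [3]], None], none_to_empty=True) == [1, 2, 3]
#   flatten(7) == [7]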
def expand(tup, ndim, name=None, expand_nonnum=False):
if isinstance(tup, (tuple, list)) and len(tup) == ndim:
return tup
if isinstance(tup, _Number) or expand_nonnum:
return (tup,) * ndim
if not expand_nonnum:
return tup
raise ValueError("Bad number of dimensions{}: is {} but should be {}.".format((" for " + name) if name else "", len(tup), ndim))
def typename(obj):
return type(obj).__name__
def pad(symb_input, padding):
assert symb_input.ndim == len(padding), "symb_input ({}d) and padding ({}d) must have the same dimensionality".format(symb_input.ndim, len(padding))
padded_shape = tuple((s+2*p) for s,p in zip(symb_input.shape, padding))
padded_input = df.T.zeros(padded_shape)
slicing = [slice(None) if p == 0 else slice(p,s+p) for s,p in zip(symb_input.shape, padding)]
return df.T.set_subtensor(padded_input[slicing], symb_input)
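# Example (illustrative shapes): for a symbolic batch of images of shape
# (batch, height, width), pad(x, (0, 2, 2)) yields a zero-padded tensor of
# shape (batch, height + 4, width + 4) with the original values centred.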
| mit | -2,337,813,472,281,590,300 | 34.392857 | 152 | 0.671712 | false |
adael/goldminer | goldminer/draw.py | 1 | 11274 | from math import ceil
from bearlibterminal import terminal
from goldminer import settings, texts, colors
from goldminer.actor import Actor
from goldminer.inventory import Inventory
from goldminer.history import History
from goldminer.geom import Rect
from goldminer.items import Item
from goldminer.worldmap import Tile
from goldminer.util import chunks
class Border:
def __init__(self, top, bottom, left, right, topLeft, topRight, bottomLeft, bottomRight):
self.top = top
self.bottom = bottom
self.left = left
self.right = right
self.topLeft = topLeft
self.topRight = topRight
self.bottomLeft = bottomLeft
self.bottomRight = bottomRight
color_stack = []
double_border = Border(
top=0x2550,
bottom=0x2550,
left=0x2551,
right=0x2551,
topLeft=0x2554,
topRight=0x2557,
bottomLeft=0x255A,
bottomRight=0x255D
)
single_border = Border(
top=0x2500,
bottom=0x2500,
left=0x2502,
right=0x2502,
topLeft=0x250C,
topRight=0x2510,
bottomLeft=0x2514,
bottomRight=0x2518
)
def push_colors():
color_stack.append((terminal.state(terminal.TK_COLOR), terminal.state(terminal.TK_BKCOLOR)))
def pop_colors():
(fg, bg) = color_stack.pop()
terminal.color(fg)
terminal.bkcolor(bg)
def color_for_value(value, colors=None):
if not colors:
colors = ["dark red", "red", "orange", "yellow", "dark green", "green"]
ncolors = len(colors) - 1
percent = round(value * ncolors / 100, 0)
index = int(min(ncolors, max(0, percent)))
return colors[index]
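# For example, with the default palette color_for_value(0) returns "dark red"
# and color_for_value(100) returns "green"; intermediate values map onto the
# colors in between.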
def draw_double_line(x, y, width):
draw_line(x, y, width, "[U+2550]")
def draw_line(x, y, width, code="[U+2500]"):
terminal.print_(x, y, code * width)
def draw_progress_label(x, y, label, value, max_value, color):
label += " [color={}]{}[color=white]/{}".format(color, value, max_value)
terminal.print_(x, y, label)
def draw_progress(x, y, width, percent, color, bkcolor="dark gray"):
fill_width = int(percent * width / 100)
terminal.print_(x, y, "[bkcolor={}]".format(bkcolor) + (" " * width))
terminal.print_(x, y, "[bkcolor={}]".format(color) + (" " * fill_width))
def draw_rect(rect_, border=double_border):
draw_box(rect_.left, rect_.top, rect_.right - 1, rect_.bottom - 1, border)
def draw_box(x1, y1, x2, y2, border=double_border):
for cx in range(x1, x2):
terminal.put(cx, y1, border.top)
terminal.put(cx, y2, border.bottom)
for cy in range(y1, y2):
terminal.put(x1, cy, border.left)
terminal.put(x2, cy, border.right)
terminal.put(x1, y1, border.topLeft)
terminal.put(x2, y1, border.topRight)
terminal.put(x2, y2, border.bottomRight)
terminal.put(x1, y2, border.bottomLeft)
def draw_corners(x1, y1, x2, y2, border=single_border):
terminal.put(x1, y1, border.topLeft)
terminal.put(x2, y1, border.topRight)
terminal.put(x2, y2, border.bottomRight)
terminal.put(x1, y2, border.bottomLeft)
def draw_window(rect_, caption, color="white", bkcolor="black"):
push_colors()
terminal.color(color)
terminal.bkcolor(bkcolor)
terminal.clear_area(rect_.x, rect_.y, rect_.width, rect_.height)
draw_line(rect_.x + 1, rect_.y + 2, rect_.width - 2, "[U+2594]")
draw_rect(rect_)
terminal.print_(rect_.center_x, rect_.y + 1, "[align=center]" + caption)
pop_colors()
def draw_select_box(control, x, y):
padding_left = 2
w, h = calculate_select_box_dimension(control)
w += padding_left
index = 0
py = 0
for item in control.items:
color = colors.white
if item.active and control.item_focused_index == index:
color = colors.yellow
elif not item.active:
color = colors.gray
box = "[bbox={}]".format(w - padding_left)
(_, height) = terminal.measure(box + item.label)
terminal.color(color)
terminal.print_(x + 2, y + py, box + item.label)
if index == control.item_focused_index:
terminal.color(color)
terminal.put(x, y + py, ">")
py += height
index += 1
def calculate_select_box_dimension(ctrl):
w, h = 3, 3
for item in ctrl.items:
w = max(len(item.label), w)
for item in ctrl.items:
box = "[bbox={}]".format(w)
(_, m) = terminal.measure(box + item.label)
h = max(m, h)
return w, h
# GenerateWorldState
def draw_generate_world():
terminal.color(colors.black)
terminal.bkcolor(colors.white_ice)
terminal.clear()
terminal.print_(10, 10, "Generating world...")
# PlayingState
def draw_game_layout():
terminal.color(colors.beige)
draw_rect(settings.screen_rect)
draw_rect(settings.map_window_rect)
draw_rect(settings.gui_rect)
draw_rect(settings.status_rect)
def draw_world(world):
terminal.clear()
draw_game_layout()
draw_world_map(world.camera, world.world_map)
draw_world_actors(world.camera, world.actors)
draw_world_player(world.camera, world.player)
draw_actor_stats(world.player)
draw_history(world.player.history)
world.player.history.trim()
terminal.refresh()
def draw_world_map(camera, world_map):
for x, y in settings.map_rect:
px, py = camera.camera_to_map(x, y)
if world_map.inside_map(px, py):
draw_tile(world_map.tile(px, py), x, y)
def draw_world_actors(camera, actors):
for actor in actors:
x, y = camera.map_to_camera(actor.x, actor.y)
draw_actor(actor, x, y)
def draw_world_player(camera, player):
x, y = camera.map_to_camera(player.x, player.y)
draw_player(player, x, y)
if player.orientation:
push_colors()
(px, py) = camera.map_to_camera(*player.looking_position())
terminal.color(terminal.pick_color(px, py))
terminal.bkcolor("#222222")
terminal.put(px, py, terminal.pick(px, py))
pop_colors()
def draw_tile(tile: Tile, x, y):
if not tile.explored:
return
draw_char(x, y, tile.char, tile.color if tile.in_sight else colors.not_in_sight)
def draw_actor(actor, x, y):
draw_entity(actor, x, y)
def draw_player(player: Actor, x, y):
draw_entity(player, x, y)
def draw_chest(chest, x, y):
draw_entity(chest, x, y)
def draw_entity(entity, x, y):
draw_char(x, y, entity.char, entity.color)
def draw_char(x, y, char, color):
terminal.color(color)
terminal.put(x, y, char)
def draw_actor_stats(actor):
r = settings.gui_rect
terminal.color('azure')
draw_rect(r)
x = r.left + 2
y = r.top + 2
width = r.width - 4
draw_gui_stat(actor.fighter.hp, x, y, width, settings.hp_colors)
y += 3
draw_gui_stat(actor.fighter.water, x, y, width, settings.water_colors)
y += 3
draw_gui_stat(actor.fighter.food, x, y, width, settings.food_colors)
y += 3
draw_gui_stat(actor.fighter.fatigue, x, y, width, colors.get_bright_range(colors.brown))
y += 3
terminal.print_(x, y, "Position: {}x{}".format(actor.x, actor.y))
y += 4
terminal.color("#AA6939")
terminal.print_(x, y, "Inventory:")
draw_double_line(x, y + 1, width)
draw_mini_inventory(actor.inventory, x, y + 3, width)
def draw_gui_stat(stat, x, y, width, colors, bkcolor="dark gray"):
color = color_for_value(stat.percent, colors)
draw_progress_label(x, y, stat.label, int(round(stat.value, 0)), stat.max_value, color)
draw_progress(x, y + 1, width, stat.percent, color, bkcolor)
def draw_mini_inventory(inventory: Inventory, x: int, y: int, width: int):
"""
It draws the in-game mini-inventory
"""
items = ["[color={}]{} [/color]".format(item.color, item.char) for item in inventory.items]
while len(items) < inventory.capacity:
items.append("[color=#404040]- [/color]")
lines = chunks(items, ceil(width/2))
for line_items in lines:
terminal.print_(x, y, "[bbox={}]".format(width) + "".join(line_items))
y += 1
def draw_history(history: History):
r = settings.status_rect
x, y = r.x + 1, r.bottom - 2
color = "white"
for msgtime, msg in reversed(history.messages):
if y <= r.y:
return
s = "{} [color={}][bbox={}]{}".format(msgtime.strftime("%H:%M:%S"), color, r.width, msg)
terminal.print_(x, y, s)
(_, mh) = terminal.measure(s)
y -= mh
color = "dark gray"
# MenuState
def draw_menu_state(lst):
terminal.clear()
caption = ".*{Gold Miner}*."
terminal.color("yellow")
terminal.print_(10, 10, caption)
draw_double_line(10, 11, len(caption))
draw_select_box(lst, 10, 13)
terminal.refresh()
def draw_menu_option_state(lst):
terminal.clear_area(30, 14, 60, 30)
terminal.color("yellow")
terminal.print_(30, 14, "Screen size")
draw_double_line(30, 15, len("Screen size"))
draw_select_box(lst, 30, 16)
terminal.refresh()
def draw_inventory_window(inventory: Inventory, selected_index):
draw_window(settings.gui_rect, "Inventory window", colors.inventory_item_hover_bg, colors.inventory_bk_color)
if inventory.is_empty():
inner_width = settings.gui_rect.width - 2
px = settings.gui_rect.x + 4
py = settings.gui_rect.y + 4
msg = texts.pick(texts.inventory_is_empty)
terminal.print_(px, py, "[bbox={}][color={}]{}".format(inner_width, colors.teal, msg))
terminal.print_(px, py + 2, "[bbox={}][color={}]<< {}".format(inner_width, colors.white, texts.press_back))
else:
draw_inventory_state_items(inventory.items, selected_index)
terminal.refresh()
# Inventory state
def draw_inventory_state_items(items, selected_index):
line_x = settings.gui_rect.x + 1
line_y = settings.gui_rect.y + 3
line_w = settings.gui_rect.width - 3
item_w = 2
item_h = 3
index = 0
for item in items:
text_x = line_x + 4
text_y = line_y + 1
if index == selected_index:
item_bg = colors.inventory_item_hover_bg
item_fg = colors.inventory_item_hover_fg
else:
item_bg = colors.inventory_bk_color
item_fg = colors.inventory_item_fg
label = "[bbox={}][color=white] {}[/color]".format(line_w, item.description)
_, mh = terminal.measure(label)
cy = mh
# draw icon
terminal.bkcolor(colors.inventory_bk_color)
terminal.color(colors.white)
draw_corners(line_x, line_y, line_x + item_w, line_y + item_w)
terminal.color(item.color)
terminal.put(line_x + 1, line_y + 1, item.char)
# draw highlight
terminal.bkcolor(item_bg)
terminal.clear_area(text_x, line_y, line_w - 4, item_h)
# draw text
terminal.print_(text_x, text_y, label)
# restore background color
terminal.bkcolor(colors.black)
# calculations
line_y += max(3, cy + 1)
index += 1
def draw_view_item_window(lst, item: Item):
rect = Rect.from_rect(settings.gui_rect)
draw_window(rect, item.description, colors.white, colors.inventory_bk_color)
terminal.bkcolor(colors.inventory_bk_color)
draw_select_box(lst, rect.x + 1, rect.y + 3)
terminal.refresh()
| mit | -7,870,676,331,743,460,000 | 26.700246 | 115 | 0.621873 | false |
EmanueleCannizzaro/scons | test/Repository/Local.py | 1 | 4031 | #!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/Repository/Local.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
import os.path
import TestSCons
test = TestSCons.TestSCons()
test.subdir('repository', ['repository', 'src'],
'work', ['work', 'src'])
repository_aaa_out = test.workpath('repository', 'aaa.out')
repository_build_bbb_1 = test.workpath('repository', 'build', 'bbb.1')
repository_build_bbb_2 = test.workpath('repository', 'build', 'bbb.2')
work_aaa_mid = test.workpath('work', 'aaa.mid')
work_aaa_out = test.workpath('work', 'aaa.out')
work_build_bbb_1 = test.workpath('work', 'build', 'bbb.1')
work_build_bbb_2 = test.workpath('work', 'build', 'bbb.2')
opts = "-Y " + test.workpath('repository')
#
test.write(['repository', 'SConstruct'], r"""
def copy(env, source, target):
source = str(source[0])
target = str(target[0])
print 'copy() < %s > %s' % (source, target)
open(target, "wb").write(open(source, "rb").read())
Build = Builder(action=copy)
env = Environment(BUILDERS={'Build':Build}, BBB='bbb')
env.Build('aaa.mid', 'aaa.in')
env.Build('aaa.out', 'aaa.mid')
Local('aaa.out')
Export("env")
VariantDir('build', 'src')
SConscript('build/SConscript')
""")
test.write(['repository', 'src', 'SConscript'], r"""
def bbb_copy(env, source, target):
target = str(target[0])
print 'bbb_copy()'
open(target, "wb").write(open('build/bbb.1', "rb").read())
Import("env")
env.Build('bbb.1', 'bbb.0')
env.Local('${BBB}.1')
env.Command('bbb.2', 'bbb.x', bbb_copy)
env.Depends('bbb.2', 'bbb.1')
""")
test.write(['repository', 'aaa.in'], "repository/aaa.in\n")
test.write(['repository', 'src', 'bbb.0'], "repository/src/bbb.0\n")
test.write(['repository', 'src', 'bbb.x'], "repository/src/bbb.x\n")
#
test.run(chdir = 'repository', options = opts, arguments = '.')
test.fail_test(test.read(repository_aaa_out) != "repository/aaa.in\n")
test.fail_test(test.read(repository_build_bbb_2) != "repository/src/bbb.0\n")
test.up_to_date(chdir = 'repository', options = opts, arguments = '.')
# Make the entire repository non-writable, so we'll detect
# if we try to write into it accidentally.
test.writable('repository', 0)
#
test.run(chdir = 'work', options = opts, arguments = 'aaa.out build/bbb.2')
test.fail_test(os.path.exists(work_aaa_mid))
test.fail_test(test.read(work_aaa_out) != "repository/aaa.in\n")
test.fail_test(test.read(work_build_bbb_1) != "repository/src/bbb.0\n")
test.fail_test(os.path.exists(work_build_bbb_2))
#
test.write(['work', 'aaa.in'], "work/aaa.in\n")
#
test.run(chdir = 'work', options = opts, arguments = '.')
test.fail_test(test.read(work_aaa_mid) != "work/aaa.in\n")
test.fail_test(test.read(work_aaa_out) != "work/aaa.in\n")
test.up_to_date(chdir = 'work', options = opts, arguments = '.')
#
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit | -804,789,594,159,497,100 | 32.591667 | 98 | 0.689159 | false |
igoumiri/pyMST | adhoc/cyleq.py | 1 | 7837 | # old16 is to preserve the code that was adding one element to all the
# radial arrays in adhoc.py and cyleq.py.
# Generic cylindrical equilibrium solutions
def zfunc(rho, bz, bq, lam, press):
return -lam * bq - press / (bz**2 + bq**2) * bz
def qfunc(rho, bz, bq, lam, press):
if rho == 0.0:
return (lam * bz)
else:
return (lam * bz) - (1.0/rho + press / (bz**2 + bq**2) ) * bq
#def press(rho, beta0):
def press_quadratic(rho, beta0):
"""Pressure function that returns quadratic p, gradp."""
p = (beta0 / 2.0)*(1 - rho**2)
gradp = (beta0 / 2.0) * (-2.0 * rho)
return p, gradp
def press_cubic(rho, beta0):
"""Pressure function that returns matched cubic p, gradp.
Found that
p/p0 = 1 - (4/3)rho**3+(1/3)rho**12
(matched polynomial with dp/dr(rho=1) = 0
closely matches measured p profile from Biewer's thesis
I like this type of polynomial since dp/dr = 0 at edge
is required physically.
Note the quartic profile case for completeness:
p = (beta0/2.0)*(-4*rho**3+3*rho**4)
gradp = ( beta0/2.0 ) * ( -12.0*rho**2 + 12.0*rho**3)
"""
p = (beta0 / 2.0)*(1.0 - (4.0/3.0)*rho**3+(1.0/3.0)*rho**12)
gradp = (beta0 / 2.0)*(-4.0*rho**2 + 4.0*rho**11)
return p, gradp
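# Sanity check of the matched cubic profile (worked by hand): at rho = 1,
# p/p0 = 1 - 4/3 + 1/3 = 0 and d(p/p0)/drho = -4 + 4 = 0, so both the pressure
# and its gradient vanish at the plasma edge, as intended.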
def lam_to_eq(lam, pars, ip,
pmodel='quadratic', beta=0.07, n=51, ret='all',
corr='cyl', d=0.01, a=0.52, Ra=1.50):
"""
Given 1D lambda-profile function and ip as a scaling quantity,
return various field quantities from the cylindrical equilibrium
model.
Note ip must be in mks for this to work right, while ip comes from
MDSplus in kA.
lam: a function that takes in radius x and parameters pars
and outputs lambda at that x. Note that lam in this file
always means lambda * b, the local inverse-scale length
times the minor radius at which the plasma current vanishes.
beta: the average pressure over Bpw**2/(2*mu0), i. e. 'poloidal beta'.
n: number of radial points.
a and Ra: minor and major radius of measurements in mks
ret='scalars': returns ip, btw, btave, b0, beta0 as a tuple,
for use in optimization codes like simplex.
ret='all' (default): returns things from 'scalars', as well as
Ra, a, d, Rb, b, rho, bq, bz, jq, jz, p, gradp, q, lam,
all as named in a dictionary.
"""
import numpy as np
import scipy.integrate as sig
mu0 = np.pi * 4E-7
# The original JSS value:
m_max = 4 # beta iterations
# KJM 2012-02 to use conditional loop with tolerance.
# m_max = 10 # beta iterations
h = 1.0 / n
hh = h / 2.0
# Normalized radial coordinate
rho = np.linspace(0.0, 1.0, n)
# Define B arrays.
bz = np.zeros(n)
bq = np.zeros(n)
# Integrate pressure gradient for profile and average pressure
# factor.
if pmodel == 'quadratic':
press = press_quadratic
elif pmodel == 'cubic':
press = press_cubic
p, gradp = press(rho, 2.0)
p = p - p[-1]
avg_p_fac = 0.5 / sig.simps(rho*p, rho)
# beta0_tol = 1E-3
# 1E-3 gives same number of iterations as m_max=4 with no condition.
for m in range(m_max): #loop for different beta
if m == 0: #first time, zero beta
beta0 = 0.0
else: #next times, derive beta0 for given beta
#general pressure profile
beta0 = avg_p_fac * beta * bq[-1]**2
# print beta0, abs(beta0 - beta0_old) / beta0
# if abs(beta0 - beta0_old) / beta0 < beta0_tol:
# break
# beta0_old = beta0
# print beta0
bz[0] = 1.0 #axis values of
bq[0] = 0.0 #field components
for i in range(n-1):
x = rho[i]
y = lam(x, *pars)
p, z = press(x, beta0)
t1_z = h * zfunc(x, bz[i], bq[i], y, z)
t1_q = h * qfunc(x, bz[i], bq[i], y, z)
x = rho[i] + hh
y = lam(x, *pars)
p, z = press(x, beta0)
t2_z = h * zfunc(x, bz[i]+t1_z/2.0, bq[i]+t1_q/2.0, y, z)
t2_q = h * qfunc(x, bz[i]+t1_z/2.0, bq[i]+t1_q/2.0, y, z)
t3_z = h * zfunc(x, bz[i]+t2_z/2.0, bq[i]+t2_q/2.0, y, z)
t3_q = h * qfunc(x, bz[i]+t2_z/2.0, bq[i]+t2_q/2.0, y, z)
x = rho[i+1]
y = lam(x, *pars)
p, z = press(x, beta0)
t4_z = h * zfunc(x, bz[i]+t3_z, bq[i]+t3_q, y, z)
t4_q = h * qfunc(x, bz[i]+t3_z, bq[i]+t3_q, y, z)
bz[i+1] = bz[i] + (t1_z + 2.0*t2_z + 2.0*t3_z + t4_z) / 6.0
bq[i+1] = bq[i] + (t1_q + 2.0*t2_q + 2.0*t3_q + t4_q) / 6.0
# print m
# Calculate corrections to fields.
#d = 0.01 # outboard gap between LCFS & shell, in meters
if corr == 'tor':
b = a - d / (1.0 - a / Ra) #LCFS plasma radius, in meters
Rb = Ra + a - b - d #LCFS plasma major radius, in meters
# Note b = 0.504694, Rb = 1.50531 for MST.
# Toroidal geometry factors
tg_a = Ra * (1.0 - np.sqrt(1.0 - (a / Ra)**2) )
tg_b = Rb * (1.0 - np.sqrt(1.0 - (b / Rb)**2) )
elif corr == 'cyl':
b = a - d #LCFS plasma radius, in meters
Rb = Ra + a - b - d #LCFS plasma major radius, in meters
# Note b = 0.51, Rb = Ra = 1.5 for MST.
# Get final field profiles, where bz is done before bq to avoid a bug.
bpw = mu0 * ip / 2.0 / np.pi / a
bpw_b = bpw * a / b
bz = bz * bpw_b / bq[-1]
bq = bq * bpw_b / bq[-1]
btave_b = 2.0 * sig.simps(rho * bz, rho)
# New beta0 value may be slightly inconsistent with fields,
# so recalculate it.
beta0 = avg_p_fac * beta * bq[-1]**2
# Find BTW and BTAVE using values at/inside LCFS
if corr == 'tor':
btw = bz[-1] / tg_b * tg_a / (a / b)**2
btave = ( btave_b + bz[-1] * (tg_a / tg_b - 1.0) ) / (a / b)**2
elif corr == 'cyl':
btw = bz[-1]
btave = ( btave_b * b**2 + btw * (a**2 - b**2) ) / a**2
if ret == 'scalars':
return ip, btw, btave, bz[0], beta0
elif ret == 'all':
# Get pressure and gradient in MKS.
p, gradp = press(rho, beta0)
p = bz[0] * bz[0] / mu0 * p
gradp = bz[0] * bz[0] / mu0 / b * gradp
# Safety factor q = r * bt / (Ra * bp)
#q = deriv(r * bz) / deriv(Ra * bq)
y = lam(0.0, *pars)
q = 2.0 * b / Rb / y + np.zeros(n)
q[1:] = rho[1:] * b * bz[1:] / Rb / bq[1:]
# Added 2015-10, KM
q[0] = np.polyval(np.polyfit(rho[1:4], q[1:4], 2), rho[0])
# Get parallel current in MKS.
y = lam(rho, *pars)
jq = y * bq / mu0 / b
jz = y * bz / mu0 / b
# Add perpendicular current for ASSUMED pressure profile.
bb = bz * bz + bq * bq
jq = jq + bz / bb * gradp
jz = jz - bq / bb * gradp
# Get total poloidal and toroidal fluxes (not per radian).
r = rho*b
psi = 2.0*np.pi*Ra*sig.cumtrapz(
np.append(bq, bpw), np.append(r, a), initial=0.0)
Psi = psi[-1]
psi = psi[:-1]
phi = 2.0*np.pi*sig.cumtrapz(
np.append(r, a)*np.append(bz, btw), np.append(r, a),
initial=0.0)
Phi = phi[-1]
phi = phi[:-1]
return {
'ip':ip, 'bpw':bpw, 'btw':btw, 'btave':btave, 'b0':bz[0],
'beta0':beta0, 'F':btw/btave, 'Theta':bpw/btave,
'bpw_b':bpw_b, 'btw_b':bz[-1], 'btave_b':btave_b,
'b0':bz[0], 'beta0':beta0,
'a':a, 'Ra':Ra, 'd':d, 'b':b, 'Rb':Rb, 'rho':rho, 'r':r,
'bq':bq, 'bz':bz, 'jq':jq, 'jz':jz,
'Psi':Psi, 'psi':psi, 'Phi':Phi, 'phi':phi,
'p':p, 'gradp':gradp,
'q':q, 'lam':y,
'pars':pars, 'pmodel':pmodel, 'beta':beta,
'corr':corr
}
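# Example usage (illustrative only; the flat lambda profile and the numbers
# below are assumptions, not values taken from any measurement):
#   flat_lam = lambda x, lam0: lam0 + 0.0 * x
#   eq = lam_to_eq(flat_lam, (3.0,), ip=250e3, pmodel='cubic', beta=0.07)
#   print(eq['F'], eq['Theta'], eq['b0'])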
| mit | -1,738,313,170,859,749,000 | 35.451163 | 74 | 0.511803 | false |
ownport/ansiblite | src/ansiblite/playbook/handler.py | 1 | 2018 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansiblite.errors import AnsibleError
from ansiblite.playbook.attribute import FieldAttribute
from ansiblite.playbook.task import Task
class Handler(Task):
_listen = FieldAttribute(isa='list')
def __init__(self, block=None, role=None, task_include=None):
self._flagged_hosts = []
super(Handler, self).__init__(block=block, role=role, task_include=task_include)
def __repr__(self):
''' returns a human readable representation of the handler '''
return "HANDLER: %s" % self.get_name()
@staticmethod
def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
t = Handler(block=block, role=role, task_include=task_include)
return t.load_data(data, variable_manager=variable_manager, loader=loader)
def flag_for_host(self, host):
#assert instanceof(host, Host)
if host not in self._flagged_hosts:
self._flagged_hosts.append(host)
def has_triggered(self, host):
return host in self._flagged_hosts
def serialize(self):
result = super(Handler, self).serialize()
result['is_handler'] = True
return result
| gpl-3.0 | -1,476,278,930,421,509,400 | 35.690909 | 97 | 0.701685 | false |
wavelets/GroundHog | groundhog/layers/rec_layers.py | 1 | 34050 | """
Recurrent layers.
TODO: write more documentation
"""
__docformat__ = 'restructedtext en'
__authors__ = ("Razvan Pascanu "
"KyungHyun Cho "
"Caglar Gulcehre ")
__contact__ = "Razvan Pascanu <r.pascanu@gmail>"
import numpy
import copy
import theano
import theano.tensor as TT
# Nicer interface of scan
from theano.sandbox.scan import scan
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
from groundhog import utils
from groundhog.utils import sample_weights, \
sample_weights_classic,\
init_bias, \
constant_shape
from basic import Layer
class RecurrentMultiLayer(Layer):
"""
Constructs a recurrent layer whose transition from h_tm1 to h_t is given
by an MLP or logistic regression. In our ICLR submission this is a
DT-RNN model.
"""
def __init__(self,
rng,
n_hids=[500,500],
activation = [TT.tanh, TT.tanh],
scale=.01,
sparsity = -1,
activ_noise=0.,
weight_noise=False,
dropout = 1.,
init_fn='sample_weights',
bias_fn='init_bias',
bias_scale = 0.,
grad_scale = 1.,
profile = 0,
name=None):
"""
:type rng: numpy random generator
:param rng: numpy random generator
:type n_in: int
:param n_in: number of inputs units
:type n_hids: list of ints
:param n_hids: Number of hidden units on each layer of the MLP
:type activation: string/function or list of
:param activation: Activation function for the embedding layers. If
a list it needs to have a value for each layer. If not, the same
activation will be applied to all layers
:type scale: float or list of
:param scale: depending on the initialization function, it can be
the standard deviation of the Gaussian from which the weights
are sampled or the largest singular value. If a single value it
will be used for each layer, otherwise it has to have one value
for each layer
:type sparsity: int or list of
:param sparsity: if a single value, it will be used for each layer,
otherwise it has to be a list with as many values as layers. If
negative, it means the weight matrix is dense. Otherwise it
means this many randomly selected input units are connected to
an output unit
:type weight_noise: bool
:param weight_noise: If true, the model is used with weight noise
(and the right shared variable are constructed, to keep track of the
noise)
:type dropout: float
:param dropout: the probability with which hidden units are dropped
from the hidden layer. If set to 1, dropout is not used
:type init_fn: string or function
:param init_fn: function used to initialize the weights of the
layer. We recommend using either `sample_weights_classic` or
`sample_weights` defined in the utils
:type bias_fn: string or function
:param bias_fn: function used to initialize the biases. We recommend
using `init_bias` defined in the utils
:type bias_scale: float
:param bias_scale: argument passed to `bias_fn`, depicting the scale
of the initial bias
:type grad_scale: float or theano scalar
:param grad_scale: factor with which the gradients with respect to
the parameters of this layer are scaled. It is used for
differentiating between the different parameters of a model.
:type name: string
:param name: name of the layer (used to name parameters). NB: in
this library names are very important because certain parts of the
code relies on name to disambiguate between variables, therefore
each layer should have a unique name.
"""
self.grad_scale = grad_scale
if type(n_hids) not in (list, tuple):
n_hids = [n_hids]
n_layers = len(n_hids)
if type(scale) not in (list, tuple):
scale = [scale] * n_layers
if type(sparsity) not in (list, tuple):
sparsity = [sparsity] * n_layers
for idx, sp in enumerate(sparsity):
if sp < 0: sparsity[idx] = n_hids[idx]
if type(activation) not in (list, tuple):
activation = [activation] * n_layers
if type(bias_scale) not in (list, tuple):
bias_scale = [bias_scale] * (n_layers-1)
if type(bias_fn) not in (list, tuple):
bias_fn = [bias_fn] * (n_layers-1)
if type(init_fn) not in (list, tuple):
init_fn = [init_fn] * n_layers
for dx in xrange(n_layers):
if dx < n_layers-1:
if type(bias_fn[dx]) is str or type(bias_fn[dx]) is unicode:
bias_fn[dx] = eval(bias_fn[dx])
if type(init_fn[dx]) is str or type(init_fn[dx]) is unicode:
init_fn[dx] = eval(init_fn[dx])
if type(activation[dx]) is str or type(activation[dx]) is unicode:
activation[dx] = eval(activation[dx])
self.scale = scale
self.n_layers = n_layers
self.sparsity = sparsity
self.activation = activation
self.n_hids = n_hids
self.bias_scale = bias_scale
self.bias_fn = bias_fn
self.init_fn = init_fn
self.weight_noise = weight_noise
self.activ_noise = activ_noise
self.profile = profile
self.dropout = dropout
assert rng is not None, "random number generator should not be empty!"
super(RecurrentMultiLayer, self).__init__(n_hids[0],
n_hids[-1],
rng,
name)
self.trng = RandomStreams(self.rng.randint(int(1e6)))
self.params = []
self._init_params()
def _init_params(self):
self.W_hhs = []
self.b_hhs = []
for dx in xrange(self.n_layers):
W_hh = self.init_fn[dx](self.n_hids[(dx-1)%self.n_layers],
self.n_hids[dx],
self.sparsity[dx],
self.scale[dx],
rng=self.rng)
self.W_hhs.append(theano.shared(value=W_hh, name="W%d_%s" %
(dx,self.name)))
if dx > 0:
self.b_hhs.append(theano.shared(
self.bias_fn[dx-1](self.n_hids[dx],
self.bias_scale[dx-1],
self.rng),
name='b%d_%s' %(dx, self.name)))
self.params = [x for x in self.W_hhs] + [x for x in self.b_hhs]
self.params_grad_scale = [self.grad_scale for x in self.params]
if self.weight_noise:
self.nW_hhs = [theano.shared(x.get_value()*0, name='noise_'+x.name) for x in self.W_hhs]
self.nb_hhs = [theano.shared(x.get_value()*0, name='noise_'+x.name) for x in self.b_hhs]
self.noise_params = [x for x in self.nW_hhs] + [x for x in self.nb_hhs]
self.noise_params_shape_fn = [constant_shape(x.get_value().shape)
for x in self.noise_params]
def step_fprop(self,
state_below,
mask=None,
dpmask=None,
state_before=None,
use_noise=True,
no_noise_bias=False):
"""
Constructs the computational graph of a single step of the recurrent
layer.
:type state_below: theano variable
:param state_below: the input to the layer
:type mask: None or theano variable
:param mask: mask describing the length of each sequence in a
minibatch
:type state_before: theano variable
:param state_before: the previous value of the hidden state of the
layer
:type use_noise: bool
:param use_noise: flag saying if weight noise should be used in
computing the output of this layer
:type no_noise_bias: bool
:param no_noise_bias: flag saying if weight noise should be added to
the bias as well
"""
rval = []
if self.weight_noise and use_noise and self.noise_params:
W_hhs = [(x+y) for x, y in zip(self.W_hhs, self.nW_hhs)]
if not no_noise_bias:
                b_hhs = [(x+y) for x, y in zip(self.b_hhs, self.nb_hhs)]
else:
b_hhs = self.b_hhs
else:
W_hhs = self.W_hhs
b_hhs = self.b_hhs
preactiv = TT.dot(state_before, W_hhs[0]) +state_below
h = self.activation[0](preactiv)
if self.activ_noise and use_noise:
h = h + self.trng.normal(h.shape, avg=0, std=self.activ_noise, dtype=h.dtype)
if self.dropout < 1.:
if use_noise:
if h.ndim == 2:
h = h * dpmask[:,:h.shape[1]]
dpidx = h.shape[1]
else:
h = h * dpmask[:h.shape[0]]
dpidx = h.shape[0]
else:
h = h * self.dropout
rval +=[h]
for dx in xrange(1, self.n_layers):
preactiv = TT.dot(h, W_hhs[dx]) + b_hhs[dx-1]
h = self.activation[dx](preactiv)
if self.activ_noise and use_noise:
h = h + self.trng.normal(h.shape, avg=0, std=self.activ_noise, dtype=h.dtype)
if self.dropout < 1.:
if use_noise:
if h.ndim == 2:
h = h * dpmask[:,dpidx:dpidx+h.shape[1]]
dpidx = dpidx + h.shape[1]
else:
h = h * dpmask[dpidx:dpidx+h.shape[0]]
dpidx = dpidx + h.shape[0]
else:
h = h * self.dropout
rval += [h]
if mask is not None:
if h.ndim ==2 and mask.ndim==1:
mask = mask.dimshuffle(0,'x')
h = mask * h + (1-mask) * state_before
rval[-1] = h
return rval
def fprop(self,
state_below,
mask=None,
init_state=None,
n_steps=None,
batch_size=None,
use_noise=True,
truncate_gradient=-1,
no_noise_bias = False):
"""
Evaluates the forward through a recurrent layer
:type state_below: theano variable
:param state_below: the input of the recurrent layer
:type mask: None or theano variable
:param mask: mask describing the length of each sequence in a
minibatch
:type init_state: theano variable or None
:param init_state: initial state for the hidden layer
:type n_steps: None or int or theano scalar
:param n_steps: Number of steps the recurrent netowrk does
:type batch_size: int
:param batch_size: the size of the minibatch over which scan runs
:type use_noise: bool
:param use_noise: flag saying if weight noise should be used in
computing the output of this layer
:type truncate_gradient: int
:param truncate_gradient: If negative, no truncation is used,
otherwise truncated BPTT is used, where you go backwards only this
amount of steps
:type no_noise_bias: bool
:param no_noise_bias: flag saying if weight noise should be added to
the bias as well
"""
if theano.config.floatX=='float32':
floatX = numpy.float32
else:
floatX = numpy.float64
if n_steps is None:
n_steps = state_below.shape[0]
if batch_size and batch_size != 1:
n_steps = n_steps / batch_size
if batch_size is None and state_below.ndim == 3:
batch_size = state_below.shape[1]
if state_below.ndim == 2 and \
(not isinstance(batch_size,int) or batch_size > 1):
state_below = state_below.reshape((n_steps, batch_size, self.nin))
if not init_state:
if not isinstance(batch_size, int) or batch_size != 1:
init_state = TT.alloc(floatX(0), batch_size, self.nhid)
else:
init_state = TT.alloc(floatX(0), self.nhid)
if mask:
inps = [state_below, mask]
fn = lambda x,y,z : self.step_fprop(x,y,None, z, use_noise=use_noise,
no_noise_bias=no_noise_bias)
else:
inps = [state_below]
fn = lambda tx, ty: self.step_fprop(tx, None, None, ty,
use_noise=use_noise,
no_noise_bias=no_noise_bias)
if self.dropout < 1. and use_noise:
# build dropout mask outside scan
allhid = numpy.sum(self.n_hids)
shape = state_below.shape
if state_below.ndim == 3:
alldpmask = self.trng.binomial(
(n_steps, batch_size, allhid),
n = 1, p = self.dropout, dtype=state_below.dtype)
else:
alldpmask = self.trng.binomial(
(n_steps, allhid),
n = 1, p = self.dropout, dtype=state_below.dtype)
inps.append(alldpmask)
if mask:
fn = lambda x,y,z,u : self.step_fprop(x,y,z,u,use_noise=use_noise)
else:
fn = lambda tx, ty, tu: self.step_fprop(tx,None,ty,tu,
use_noise=use_noise)
rval, updates = theano.scan(fn,
sequences = inps,
outputs_info = [None]*(self.n_layers-1) +
[init_state],
name='layer_%s'%self.name,
profile=self.profile,
truncate_gradient = truncate_gradient,
n_steps = n_steps)
if not isinstance(rval,(list, tuple)):
rval = [rval]
new_h = rval[-1]
self.out = rval[-1]
self.rval = rval
self.updates =updates
return self.out
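# Illustrative sketch (not from the original GroundHog code): a two-layer tanh
# DT-RNN transition could be instantiated roughly as
#   rec = RecurrentMultiLayer(rng, n_hids=[500, 500],
#                             activation=[TT.tanh, TT.tanh], name='rec0')
#   hidden = rec.fprop(state_below, mask=mask, batch_size=bs)
# where `state_below` is assumed to be the input already projected to the first
# hidden size, and `mask`/`bs` come from the surrounding training code.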
class RecurrentMultiLayerInp(RecurrentMultiLayer):
"""
Similar to the RecurrentMultiLayer, with the exception that the input is
fed into the top layer of the MLP (rather than being an input to the
MLP).
"""
def _init_params(self):
self.W_hhs = []
self.b_hhs = []
for dx in xrange(self.n_layers):
W_hh = self.init_fn[dx](self.n_hids[(dx-1)%self.n_layers],
self.n_hids[dx],
self.sparsity[dx],
self.scale[dx],
rng=self.rng)
self.W_hhs.append(theano.shared(value=W_hh, name="W%d_%s" %
(dx,self.name)))
if dx < self.n_layers-1:
self.b_hhs.append(theano.shared(
self.bias_fn[dx](self.n_hids[dx],
self.bias_scale[dx],
self.rng),
name='b%d_%s' %(dx, self.name)))
self.params = [x for x in self.W_hhs] + [x for x in self.b_hhs]
self.params_grad_scale = [self.grad_scale for x in self.params]
self.restricted_params = [x for x in self.params]
if self.weight_noise:
self.nW_hhs = [theano.shared(x.get_value()*0, name='noise_'+x.name) for x in self.W_hhs]
self.nb_hhs = [theano.shared(x.get_value()*0, name='noise_'+x.name) for x in self.b_hhs]
self.noise_params = [x for x in self.nW_hhs] + [x for x in self.nb_hhs]
self.noise_params_shape_fn = [constant_shape(x.get_value().shape)
for x in self.noise_params]
def step_fprop(self,
state_below,
mask=None,
dpmask=None,
state_before=None,
no_noise_bias=False,
use_noise=True):
"""
See parent class
"""
rval = []
if self.weight_noise and use_noise and self.noise_params:
            W_hhs = [(x+y) for x, y in zip(self.W_hhs,self.nW_hhs)]
if not no_noise_bias:
b_hhs = [(x+y) for x, y in zip(self.b_hhs,self.nb_hhs)]
else:
b_hhs = self.b_hhs
else:
W_hhs = self.W_hhs
b_hhs = self.b_hhs
h = self.activation[0](TT.dot(state_before,
W_hhs[0])+b_hhs[0])
if self.activ_noise and use_noise:
h = h + self.trng.normal(h.shape, avg=0, std=self.activ_noise, dtype=h.dtype)
if self.dropout < 1.:
if use_noise:
if h.ndim == 2:
h = h * dpmask[:,:h.shape[1]]
dpidx = h.shape[1]
else:
h = h * dpmask[:h.shape[0]]
dpidx = h.shape[0]
else:
h = h * self.dropout
rval += [h]
for dx in xrange(1, self.n_layers-1):
h = self.activation[dx](TT.dot(h,
W_hhs[dx])+b_hhs[dx])
if self.activ_noise and use_noise:
h = h + self.trng.normal(h.shape, avg=0, std=self.activ_noise, dtype=h.dtype)
if self.dropout < 1.:
if use_noise:
if h.ndim == 2:
h = h * dpmask[:,dpidx:dpidx+h.shape[1]]
dpidx = dpidx + h.shape[1]
else:
h = h * dpmask[dpidx:dpidx+h.shape[0]]
dpidx = dpidx + h.shape[0]
else:
h = h * self.dropout
rval += [h]
h = self.activation[-1](TT.dot(h, W_hhs[-1]) + state_below)
if self.activ_noise and use_noise:
h = h + self.trng.normal(h.shape, avg=0, std=self.activ_noise, dtype=h.dtype)
if self.dropout < 1.:
if use_noise:
if h.ndim == 2:
h = h * dpmask[:,dpidx:dpidx+h.shape[1]]
dpidx = dpidx + h.shape[1]
else:
h = h * dpmask[dpidx:dpidx+h.shape[0]]
dpidx = dpidx + h.shape[0]
else:
h = h * self.dropout
rval += [h]
if mask is not None:
if h.ndim ==2 and mask.ndim==1:
mask = mask.dimshuffle(0,'x')
h = mask * h + (1-mask) * state_before
rval[-1] = h
return rval
class RecurrentMultiLayerShortPath(RecurrentMultiLayer):
"""
A similar layer to RecurrentMultiLayer (the DT-RNN), with the difference
that we have shortcut connections in the MLP representing the transition
from previous hidden state to the next
"""
def _init_params(self):
self.W_hhs = []
self.b_hhs = []
self.W_shortp = []
for dx in xrange(self.n_layers):
W_hh = self.init_fn[dx](self.n_hids[(dx-1)%self.n_layers],
self.n_hids[dx],
self.sparsity[dx],
self.scale[dx],
rng=self.rng)
self.W_hhs.append(theano.shared(value=W_hh, name="W%d_%s" %
(dx,self.name)))
if dx > 0:
W_shp = self.init_fn[dx](self.n_hids[self.n_layers-1],
self.n_hids[dx],
self.sparsity[dx],
self.scale[dx],
rng=self.rng)
self.W_shortp.append(theano.shared(value=W_shp,
name='W_s%d_%s'%(dx,self.name)))
self.b_hhs.append(theano.shared(
self.bias_fn[dx-1](self.n_hids[dx],
self.bias_scale[dx-1],
self.rng),
name='b%d_%s' %(dx, self.name)))
self.params = [x for x in self.W_hhs] + [x for x in self.b_hhs] +\
[x for x in self.W_shortp]
self.params_grad_scale = [self.grad_scale for x in self.params]
self.restricted_params = [x for x in self.params]
if self.weight_noise:
self.nW_hhs = [theano.shared(x.get_value()*0, name='noise_'+x.name) for x in self.W_hhs]
self.nb_hhs = [theano.shared(x.get_value()*0, name='noise_'+x.name) for x in self.b_hhs]
self.nW_shortp = [theano.shared(x.get_value()*0, name='noise_'+x.name) for x in self.W_shortp]
self.noise_params = [x for x in self.nW_hhs] + [x for x in self.nb_hhs] + [x for x in self.nW_shortp]
self.noise_params_shape_fn = [constant_shape(x.get_value().shape) for x in self.noise_params]
def step_fprop(self,
state_below,
mask=None,
dpmask=None,
state_before=None,
no_noise_bias=False,
use_noise=True):
"""
See parent class
"""
rval = []
if self.weight_noise and use_noise and self.noise_params:
W_hhs = [(x+y) for x, y in zip(self.W_hhs,self.nW_hhs)]
if not no_noise_bias:
b_hhs = [(x+y) for x, y in zip(self.b_hhs,self.nb_hhs)]
else:
b_hhs = self.b_hhs
W_shp = [(x+y) for x, y in zip(self.W_shortp,self.nW_shortp)]
else:
W_hhs = self.W_hhs
b_hhs = self.b_hhs
W_shp = self.W_shortp
h = self.activation[0](TT.dot(state_before,
W_hhs[0])+state_below)
if self.activ_noise and use_noise:
h = h + self.trng.normal(h.shape, avg=0, std=self.activ_noise, dtype=h.dtype)
if self.dropout < 1.:
if use_noise:
if h.ndim == 2:
h = h * dpmask[:,:h.shape[1]]
dpidx = h.shape[1]
else:
h = h * dpmask[:h.shape[0]]
dpidx = h.shape[0]
else:
h = h * self.dropout
rval += [h]
for dx in xrange(1, self.n_layers):
h = self.activation[dx](TT.dot(h,
W_hhs[dx])+
TT.dot(state_before,
W_shp[dx-1])+b_hhs[dx-1])
if self.activ_noise and use_noise:
h = h + self.trng.normal(h.shape, avg=0, std=self.activ_noise, dtype=h.dtype)
if self.dropout < 1.:
if use_noise:
if h.ndim == 2:
h = h * dpmask[:,dpidx:dpidx+h.shape[1]]
dpidx = dpidx + h.shape[1]
else:
h = h * dpmask[dpidx:dpidx+h.shape[0]]
dpidx = dpidx + h.shape[0]
else:
h = h * self.dropout
rval += [h]
if mask is not None:
if h.ndim ==2 and mask.ndim==1:
mask = mask.dimshuffle(0,'x')
h = mask * h + (1-mask) * state_before
rval[-1] = h
return rval
class RecurrentMultiLayerShortPathInp(RecurrentMultiLayer):
"""
Similar to the RecurrentMultiLayerShortPath class, just that the input
is fed into the last layer of the MLP (similar to
RecurrentMultiLayerInp).
"""
def _init_params(self):
self.W_hhs = []
self.b_hhs = []
self.W_shortp = []
for dx in xrange(self.n_layers):
W_hh = self.init_fn[dx](self.n_hids[(dx-1)%self.n_layers],
self.n_hids[dx],
self.sparsity[dx],
self.scale[dx],
rng=self.rng)
self.W_hhs.append(theano.shared(value=W_hh, name="W%d_%s" %
(dx,self.name)))
if dx > 0:
W_shp = self.init_fn[dx](self.n_hids[self.n_layers-1],
self.n_hids[dx],
self.sparsity[dx],
self.scale[dx],
rng=self.rng)
self.W_shortp.append(theano.shared(value=W_shp,
name='W_s%d_%s'%(dx,self.name)))
if dx < self.n_layers-1:
self.b_hhs.append(theano.shared(
self.bias_fn[dx](self.n_hids[dx],
self.bias_scale[dx],
self.rng),
name='b%d_%s' %(dx, self.name)))
self.params = [x for x in self.W_hhs] + [x for x in self.b_hhs] +\
[x for x in self.W_shortp]
self.restricted_params = [x for x in self.params]
self.params_grad_scale = [self.grad_scale for x in self.params]
if self.weight_noise:
self.nW_hhs = [theano.shared(x.get_value()*0, name='noise_'+x.name) for x in self.W_hhs]
self.nb_hhs = [theano.shared(x.get_value()*0, name='noise_'+x.name) for x in self.b_hhs]
self.nW_shortp = [theano.shared(x.get_value()*0, name='noise_'+x.name) for x in self.W_shortp]
self.noise_params = [x for x in self.nW_hhs] + [x for x in self.nb_hhs] + [x for x in self.nW_shortp]
self.noise_params_shape_fn = [constant_shape(x.get_value().shape) for x in self.noise_params]
def step_fprop(self,
state_below,
mask=None,
dpmask=None,
state_before=None,
no_noise_bias=False,
use_noise=True):
"""
See parent class
"""
rval = []
if self.weight_noise and use_noise and self.noise_params:
W_hhs = [(x+y) for x, y in zip(self.W_hhs, self.nW_hhs)]
if not no_noise_bias:
b_hhs = [(x+y) for x, y in zip(self.b_hhs, self.nb_hhs)]
else:
b_hhs = self.b_hhs
W_shp = [(x+y) for x, y in zip(self.W_shortp, self.nW_shortp)]
else:
W_hhs = self.W_hhs
b_hhs = self.b_hhs
W_shp = self.W_shortp
h = self.activation[0](TT.dot(state_before,
W_hhs[0])+b_hhs[0])
if self.activ_noise and use_noise:
h = h + self.trng.normal(h.shape, avg=0, std=self.activ_noise, dtype=h.dtype)
if self.dropout < 1.:
if use_noise:
if h.ndim == 2:
h = h * dpmask[:,:h.shape[1]]
dpidx = h.shape[1]
else:
h = h * dpmask[:h.shape[0]]
dpidx = h.shape[0]
else:
h = h * self.dropout
rval += [h]
for dx in xrange(1, self.n_layers-1):
h = self.activation[dx](TT.dot(h,
W_hhs[dx])+
TT.dot(state_before,
W_shp[dx-1])+b_hhs[dx])
if self.activ_noise and use_noise:
h = h + self.trng.normal(h.shape, avg=0, std=self.activ_noise, dtype=h.dtype)
if self.dropout < 1.:
if use_noise:
if h.ndim == 2:
h = h * dpmask[:,dpidx:dpidx+h.shape[1]]
dpidx = dpidx + h.shape[1]
else:
h = h * dpmask[dpidx:dpidx+h.shape[0]]
dpidx = dpidx + h.shape[0]
else:
h = h * self.dropout
rval += [h]
h = self.activation[-1](TT.dot(h, W_hhs[-1]) +
TT.dot(state_before, W_shp[-1])+state_below)
if self.activ_noise and use_noise:
h = h + self.trng.normal(h.shape, avg=0, std=self.activ_noise, dtype=h.dtype)
if self.dropout < 1.:
if use_noise:
if h.ndim == 2:
h = h * dpmask[:,:h.shape[1]]
dpidx = h.shape[1]
else:
h = h * dpmask[:h.shape[0]]
dpidx = h.shape[0]
else:
h = h * self.dropout
rval +=[h]
if mask is not None:
if h.ndim ==2 and mask.ndim==1:
mask = mask.dimshuffle(0,'x')
h = mask * h + (1-mask) * state_before
rval += [h]
return rval
class RecurrentMultiLayerShortPathInpAll(RecurrentMultiLayer):
"""
    Similar to the RecurrentMultiLayerShortPathInp class, just that the input is
    fed to all layers of the MLP that forms the deep transition from h_tm1
    to h_t.
"""
def _init_params(self):
self.W_hhs = []
self.W_shortp = []
for dx in xrange(self.n_layers):
W_hh = self.init_fn[dx](self.n_hids[(dx-1)%self.n_layers],
self.n_hids[dx],
self.sparsity[dx],
self.scale[dx],
rng=self.rng)
self.W_hhs.append(theano.shared(value=W_hh, name="W%d_%s" %
(dx,self.name)))
if dx > 0:
W_shp = self.init_fn[dx](self.n_hids[self.n_layers-1],
self.n_hids[dx],
self.sparsity[dx],
self.scale[dx],
rng=self.rng)
self.W_shortp.append(theano.shared(value=W_shp,
name='W_s%d_%s'%(dx,self.name)))
self.params = [x for x in self.W_hhs] +\
[x for x in self.W_shortp]
self.params_grad_scale = [self.grad_scale for x in self.params]
self.restricted_params = [x for x in self.params]
if self.weight_noise:
self.nW_hhs = [theano.shared(x.get_value()*0, name='noise_'+x.name) for x in self.W_hhs]
self.nW_shortp = [theano.shared(x.get_value()*0, name='noise_'+x.name) for x in self.W_shortp]
self.noise_params = [x for x in self.nW_hhs] + [x for x in self.nW_shortp]
self.noise_params_shape_fn = [constant_shape(x.get_value().shape) for x in self.noise_params]
def step_fprop(self,
state_below,
mask=None,
dpmask=None,
state_before=None,
no_noise_bias=False,
use_noise=True):
"""
See parent class
"""
rval = []
if self.weight_noise and use_noise and self.noise_params:
W_hhs = [(x+y) for x, y in zip(self.W_hhs,self.nW_hhs)]
W_shp = [(x+y) for x, y in zip(self.W_shortp,self.nW_shortp)]
else:
W_hhs = self.W_hhs
W_shp = self.W_shortp
def slice_state_below(dx, sb = state_below):
st = 0
for p in xrange(dx):
st += self.n_hids[p]
ed = st + self.n_hids[dx]
if sb.ndim == 1:
return sb[st:ed]
else:
return sb[:,st:ed]
h = self.activation[0](TT.dot(state_before, W_hhs[0]) + slice_state_below(0))
if self.activ_noise and use_noise:
h = h + self.trng.normal(h.shape, avg=0, std=self.activ_noise, dtype=h.dtype)
if self.dropout < 1.:
if use_noise:
if h.ndim == 2:
h = h * dpmask[:,:h.shape[1]]
dpidx = h.shape[1]
else:
h = h * dpmask[:h.shape[0]]
dpidx = h.shape[0]
else:
h = h * self.dropout
rval += [h]
for dx in xrange(1, self.n_layers):
h = self.activation[dx](TT.dot(h, W_hhs[dx]) +
TT.dot(state_before, W_shp[dx-1]) +
slice_state_below(dx))
if self.activ_noise and use_noise:
h = h + self.trng.normal(h.shape, avg=0, std=self.activ_noise, dtype=h.dtype)
if self.dropout < 1.:
if use_noise:
if h.ndim == 2:
h = h * dpmask[:,dpidx:dpidx+h.shape[1]]
dpidx = dpidx + h.shape[1]
else:
h = h * dpmask[dpidx:dpidx+h.shape[0]]
dpidx = dpidx + h.shape[0]
else:
h = h * self.dropout
rval += [h]
if mask is not None:
if h.ndim ==2 and mask.ndim==1:
mask = mask.dimshuffle(0,'x')
h = mask * h + (1-mask) * state_before
rval[-1] = h
return rval
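# Note (illustrative, not in the original source): for this "InpAll" variant the
# state_below passed to fprop is expected to already contain one projected input slice
# per transition layer, concatenated along the last axis, so its width equals
# sum(self.n_hids). slice_state_below(dx) then extracts the slice for layer dx, e.g.
# with n_hids = [4, 4]:
#   columns [0, 4) -> layer 0
#   columns [4, 8) -> layer 1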
| bsd-3-clause | -443,794,965,629,435,000 | 39.729665 | 113 | 0.472041 | false |
mufaddalq/cloudstack-datera-driver | api/test/integration/api/test/account/testCreateAccount.py | 1 | 2136 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import factory
import marvin
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.cloudstackAPI import createAccount
from marvin.integration.lib.base import *
from marvin.integration.lib import utils
class AccountFactory(factory.Factory):
FACTORY_FOR = createAccount.createAccountCmd
    firstname = 'firstname-' + utils.random_gen()
    lastname = 'lastname-' + utils.random_gen()
email = factory.lazy_attribute(lambda e: '{0}.{1}@cloudstack.org'.format(e.firstname, e.lastname).lower())
class AdminAccountFactory(AccountFactory):
accounttype = 1
class UserAccountFactory(AccountFactory):
accounttype = 0
class TestCreateAccount(cloudstackTestCase):
def setUp(self):
self.apiClient = self.testClient.getApiClient()
        self.userApiClient = self.testClient.getUserApiClient('test' + utils.random_gen(), 'ROOT')
def test_createAccountAsAdmin(self):
"""
creates an account for a user as admin
"""
Account.create(self.apiClient, services=None)
from marvin.cloudstackAPI.createAccount import createAccountCmd
self.assertEqual(True, False)
def test_createAccountAsUser(self):
"""
negative: create account as a user
"""
self.assertEqual(True, False)
def tearDown(self):
self.apiClient.close()
self.userApiClient.close()
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 2,810,052,616,260,023,300 | 34.016393 | 110 | 0.722378 | false |
lrq3000/pyFileFixity | pyFileFixity/lib/gooey/gui/build_spec_validator.py | 1 | 1627 | '''
Validates that the json has meaningful keys
'''
import itertools
a = {
'required' : [
{
'component': 'TextField',
'data': {
'display_name': 'filename',
'help_text': 'path to file you want to process',
'command_args': ['-f', '--infile']
}
},
{
'component': 'FileChooser',
'data': {
'display_name': 'Output Location',
'help_text': 'Where to save the file',
'command_args': ['-o', '--outfile']
}
}
],
'optional' : [
{
'component': 'RadioGroup',
'data': [
{
'display_name': 'Output Location',
'help_text': 'Where to save the file',
'command_args': ['-o', '--outfile']
}, {
'display_name': 'Output Location',
'help_text': 'Where to save the file',
'command_args': ['-o', '--outfile']
}
]
}
]
}
VALID_WIDGETS = (
'FileChooser',
'DirChooser',
'DateChooser',
'TextField',
'Dropdown',
'Counter',
'RadioGroup'
)
class MalformedBuildSpecException(Exception):
pass
def validate(json_string):
required = json_string.get('required')
optional = json_string.get('optional')
if not required or not optional:
    raise MalformedBuildSpecException("All objects must be children of 'required' or 'optional'")
objects = [item for key in json_string for item in json_string[key]]
for obj in objects:
if obj['component'] not in VALID_WIDGETS:
raise MalformedBuildSpecException("Invalid Component name: {0}".format(obj['component']))
if __name__ == '__main__':
validate(a)
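# Illustrative failure case (not part of the original module): a build spec that is
# missing one of the two top-level keys is rejected before any component is checked.
#
#   bad_spec = {'required': [{'component': 'TextField', 'data': {}}]}  # no 'optional' key
#   validate(bad_spec)  # raises MalformedBuildSpecException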
| mit | 2,159,226,274,328,498,200 | 19.594937 | 98 | 0.559312 | false |
nblago/utils | src/utils/tns_query.py | 1 | 2954 | # -*- coding: utf-8 -*-
"""
Created on Wed Deb 14 14:21:41 2018
Script to query TNS with different parameters:
either for transients discovered between two different dates,
or a cone search radius around a given RA, DEC
@author: nadiablago
"""
from __future__ import print_function
try:
# For Python 3.0 and later
from urllib.request import urlopen
except ImportError:
# Fall back to Python 2's urllib2
from urllib2 import urlopen
import sys, os
#reload(sys)
#sys.setdefaultencoding('utf8')
import numpy as np
from astropy.table import Table
from astropy.coordinates import SkyCoord
from astropy.table import Column
import astropy.units as u
import sys
if sys.version_info[0] < 3:
from StringIO import StringIO
else:
from io import StringIO
def get_tns_date2date(date1, date2):
'''
Queries the TNS and obtains the targets reported between two dates.
    It parses the coordinates and transforms them into decimals.
It writes a csv table with RA, DEC in degrees, which is directly ingestable into a postresql file.
date1: in the format of: YYYY-MM-DD
date2: in the format of: YYYY-MM-DD
'''
url = "https://wis-tns.weizmann.ac.il/search?&date_start%5Bdate%5D={0}&date_end%5Bdate%5D={1}&format=csv&num_page=500".format(date1, date2)
cont_url = urlopen(url)
cont = cont_url.read()
t = Table.read(StringIO(cont), format='csv')
coords = np.array([t["RA"], t["DEC"]]).T
c = SkyCoord(coords, frame='icrs', unit=(u.hourangle, u.deg))
radeg = Column(c.ra, name='RA')
decdeg = Column(c.dec, name='DEC')
t.remove_column("RA")
t.remove_column("DEC")
t.add_column(radeg, index=1)
t.add_column(decdeg, index=2)
t.write("tns_query_%s_%s.csv"%(date1, date2), format="csv")
return t
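# Example usage (dates and coordinates are hypothetical, shown for illustration only):
#   t = get_tns_date2date('2018-01-01', '2018-01-31')  # also writes tns_query_2018-01-01_2018-01-31.csv
#   match = get_tns_ra_dec(188.73, 12.44, rad=10)      # closest TNS entry within 10 arcsec, or None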
def get_tns_ra_dec(ra, dec, rad=15):
'''
Queries the TNS and obtains the targets reported for the specified RA, DEC position.
Provided that ASASSN targets are there, a 7 arcsec position error is expected.
    By default we will use 15 arcsec.
ra: float
position in degrees
dec: float
position in degrees
rad: float, optional
Search radius in arcseconds.
'''
url = "https://wis-tns.weizmann.ac.il/search?&name=&ra={0}&decl={1}&radius={2}&coords_unit=arcsec&format=csv".format(ra, dec, rad)
cont_url = urlopen(url)
cont = cont_url.read()
t = Table.read(StringIO(cont), format='ascii.csv')
if len(t) > 0:
coords = np.array([t["RA"], t["DEC"]]).T
c = SkyCoord(coords, frame='icrs', unit=(u.hourangle, u.deg))
basecoord = SkyCoord(ra, dec, frame='icrs', unit=(u.deg, u.deg))
#In case there are several objects in the match radius, we select the closest one
dist = c.separation(basecoord)
closest = t[np.argmin(dist)]
else:
closest = None
return closest | mit | 9,166,153,378,906,148,000 | 28.55 | 143 | 0.646919 | false |
stanographer/plover | plover/gui_qt/main_window.py | 1 | 10334 |
from functools import partial
import json
from PyQt5.QtCore import QCoreApplication, Qt
from PyQt5.QtGui import QCursor, QIcon, QKeySequence
from PyQt5.QtWidgets import (
QMainWindow,
QMenu,
)
from plover import log
from plover.oslayer import wmctrl
from plover.registry import registry
from plover.resource import resource_filename
from plover.gui_qt.log_qt import NotificationHandler
from plover.gui_qt.main_window_ui import Ui_MainWindow
from plover.gui_qt.config_window import ConfigWindow
from plover.gui_qt.about_dialog import AboutDialog
from plover.gui_qt.trayicon import TrayIcon
from plover.gui_qt.utils import WindowState, find_menu_actions
class MainWindow(QMainWindow, Ui_MainWindow, WindowState):
ROLE = 'main'
def __init__(self, engine, use_qt_notifications):
super(MainWindow, self).__init__()
self.setupUi(self)
if hasattr(self, 'setUnifiedTitleAndToolBarOnMac'):
self.setUnifiedTitleAndToolBarOnMac(True)
self._engine = engine
self._active_dialogs = {}
self._dialog_class = {
'about' : AboutDialog,
'configuration' : ConfigWindow,
}
all_actions = find_menu_actions(self.menubar)
# Dictionaries.
self.dictionaries = self.scroll_area.widget()
self.dictionaries.add_translation.connect(self._add_translation)
self.dictionaries.setFocus()
edit_menu = all_actions['menu_Edit'].menu()
edit_menu.addAction(self.dictionaries.action_Undo)
edit_menu.addSeparator()
edit_menu.addMenu(self.dictionaries.menu_AddDictionaries)
edit_menu.addAction(self.dictionaries.action_EditDictionaries)
edit_menu.addAction(self.dictionaries.action_RemoveDictionaries)
edit_menu.addSeparator()
edit_menu.addAction(self.dictionaries.action_MoveDictionariesUp)
edit_menu.addAction(self.dictionaries.action_MoveDictionariesDown)
# Tray icon.
self._trayicon = TrayIcon()
self._trayicon.enable()
self._trayicon.clicked.connect(self._engine.toggle_output)
if use_qt_notifications:
handler = NotificationHandler()
handler.emitSignal.connect(self._trayicon.log)
log.add_handler(handler)
popup_menu = QMenu()
for action_name in (
'action_ToggleOutput',
'action_Reconnect',
'',
'menu_Tools',
'',
'action_Configure',
'',
'menu_Help',
'',
'action_Show',
'action_Quit',
):
if action_name:
popup_menu.addAction(all_actions[action_name])
else:
popup_menu.addSeparator()
self._trayicon.set_menu(popup_menu)
engine.signal_connect('machine_state_changed', self._trayicon.update_machine_state)
engine.signal_connect('quit', self.on_quit)
self.action_Quit.triggered.connect(engine.quit)
# Populate tools bar/menu.
tools_menu = all_actions['menu_Tools'].menu()
# Toolbar popup menu for selecting which tools are shown.
self.toolbar_menu = QMenu()
self.toolbar.setContextMenuPolicy(Qt.CustomContextMenu)
self.toolbar.customContextMenuRequested.connect(
lambda: self.toolbar_menu.popup(QCursor.pos())
)
for tool_plugin in registry.list_plugins('gui.qt.tool'):
tool = tool_plugin.obj
action_parameters = []
if tool.ICON is not None:
icon = tool.ICON
# Internal QT resources start with a `:`.
if not icon.startswith(':'):
icon = resource_filename(icon)
action_parameters.append(QIcon(icon))
action_parameters.append(tool.TITLE)
toolbar_action = None
for parent in (tools_menu, self.toolbar, self.toolbar_menu):
action = parent.addAction(*action_parameters)
action.setObjectName(tool_plugin.name)
if tool.__doc__ is not None:
action.setToolTip(tool.__doc__)
if tool.SHORTCUT is not None:
action.setShortcut(QKeySequence.fromString(tool.SHORTCUT))
if parent == self.toolbar_menu:
action.setCheckable(True)
action.setChecked(True)
assert toolbar_action is not None
action.toggled.connect(toolbar_action.setVisible)
else:
if parent == self.toolbar:
toolbar_action = action
action.triggered.connect(partial(self._activate_dialog,
tool_plugin.name,
args=()))
self._dialog_class[tool_plugin.name] = tool
engine.signal_connect('output_changed', self.on_output_changed)
# Machine.
self.machine_type.addItems(
_(plugin.name)
for plugin in registry.list_plugins('machine')
)
engine.signal_connect('config_changed', self.on_config_changed)
engine.signal_connect('machine_state_changed',
lambda machine, state:
self.machine_state.setText(_(state.capitalize()))
)
self.restore_state()
# Commands.
engine.signal_connect('add_translation', partial(self._add_translation, manage_windows=True))
engine.signal_connect('focus', self._focus)
engine.signal_connect('configure', partial(self._configure, manage_windows=True))
engine.signal_connect('lookup', partial(self._activate_dialog, 'lookup',
manage_windows=True))
# Load the configuration (but do not start the engine yet).
if not engine.load_config():
self.on_configure()
# Apply configuration settings.
config = self._engine.config
self.machine_type.setCurrentText(config['machine_type'])
self._configured = False
self.dictionaries.on_config_changed(config)
self.set_visible(not config['start_minimized'])
# Start the engine.
engine.start()
def set_visible(self, visible):
if visible:
self.show()
else:
if self._trayicon.is_enabled():
self.hide()
else:
self.showMinimized()
def _activate_dialog(self, name, args=(), manage_windows=False):
if manage_windows:
previous_window = wmctrl.GetForegroundWindow()
dialog = self._active_dialogs.get(name)
if dialog is None:
dialog_class = self._dialog_class[name]
dialog = self._active_dialogs[name] = dialog_class(self._engine, *args)
dialog.setWindowIcon(self.windowIcon())
def on_finished():
del self._active_dialogs[name]
dialog.deleteLater()
if manage_windows:
wmctrl.SetForegroundWindow(previous_window)
dialog.finished.connect(on_finished)
dialog.show()
dialog.activateWindow()
dialog.raise_()
def _add_translation(self, dictionary=None, manage_windows=False):
if not dictionary:
dictionary = None
self._activate_dialog('add_translation', args=(dictionary,),
manage_windows=manage_windows)
def _focus(self):
self.set_visible(True)
self.activateWindow()
self.raise_()
def _configure(self, manage_windows=False):
self._activate_dialog('configuration', manage_windows=manage_windows)
def _lookup(self, manage_windows=False):
self._activate_dialog('lookup', manage_windows=manage_windows)
def _restore_state(self, settings):
if settings.contains('hidden_toolbar_tools'):
hidden_toolbar_tools = json.loads(settings.value('hidden_toolbar_tools'))
for action in self.toolbar_menu.actions():
if action.objectName() in hidden_toolbar_tools:
action.setChecked(False)
def _save_state(self, settings):
hidden_toolbar_tools = set()
for action in self.toolbar_menu.actions():
if not action.isChecked():
hidden_toolbar_tools.add(action.objectName())
settings.setValue('hidden_toolbar_tools', json.dumps(list(sorted(hidden_toolbar_tools))))
def on_config_changed(self, config_update):
if 'machine_type' in config_update:
self.machine_type.setCurrentText(config_update['machine_type'])
if not self._configured:
self._configured = True
if config_update.get('show_suggestions_display', False):
self._activate_dialog('suggestions')
if config_update.get('show_stroke_display', False):
self._activate_dialog('paper_tape')
def on_machine_changed(self, machine_type):
self._engine.config = { 'machine_type': machine_type }
def on_output_changed(self, enabled):
self._trayicon.update_output(enabled)
self.output_enable.setChecked(enabled)
self.output_disable.setChecked(not enabled)
self.action_ToggleOutput.setChecked(enabled)
def on_toggle_output(self, enabled):
self._engine.output = enabled
def on_enable_output(self):
self.on_toggle_output(True)
def on_disable_output(self):
self.on_toggle_output(False)
def on_configure(self):
self._configure()
def on_reconnect(self):
self._engine.reset_machine()
def on_manage_dictionaries(self):
self._activate_dialog('dictionary_manager')
def on_about(self):
self._activate_dialog('about')
def on_quit(self):
for dialog in list(self._active_dialogs.values()):
dialog.close()
self.save_state()
self._trayicon.disable()
self.hide()
QCoreApplication.quit()
def on_show(self):
self._focus()
def closeEvent(self, event):
self.hide()
if not self._trayicon.is_enabled():
self._engine.quit()
event.ignore()
| gpl-2.0 | 4,994,969,487,663,809,000 | 37.996226 | 101 | 0.600832 | false |
Letractively/aha-gae | aha/wsgi/appinit.py | 1 | 3932 | # -*- coding: utf-8 -*-
# appinits.py
# Init functions for application
__author__ = 'Atsushi Shibata <[email protected]>'
__docformat__ = 'plaintext'
__licence__ = 'BSD'
__all__ = ['initConfig', 'initPlugins', 'run', 'get_app']
import os
import sys
import re
import logging
import wsgiref.handlers
def initConfig(basedir):
"""
Initialize config object
"""
# add the project's directory to the import path list.
sys.path = [basedir,
os.path.join(basedir, 'application'),
os.path.join(basedir, 'lib')]+sys.path
import aha
config = aha.Config()
# setup the templates location
config.application_dir = os.path.join(basedir, 'application')
config.messages_dir = os.path.join(config.application_dir, 'messages')
config.template_dirs = [os.path.join(config.application_dir, 'template'),
'plugin']
config.debug = False
config.useappstatus = False
if os.environ.get('SERVER_SOFTWARE', '').startswith('Dev'):
config.debug = True
return config
def initPlugins(basedir):
"""
Initialize the installed plugins
"""
plugin_root = os.path.join(basedir, 'plugin')
if os.path.exists(plugin_root):
plugins = os.listdir(plugin_root)
for plugin in plugins:
if not re.match('^__|^\.', plugin):
try:
exec('from plugin import %s' % plugin)
except ImportError, e:
from traceback import format_exc
logging.error('Unable to import %s' % (plugin))
logging.error(format_exc())
except SyntaxError, e:
from traceback import format_exc
logging.error('Unable to import name %s' % (plugin))
logging.error(format_exc())
_debugged_app = None
def run(debug = False, useappstatus = False, dispatcher = None):
"""
A function to run wsgi server
"""
from aha.wsgi.cwsgiapp import CustomHandler
app = get_app(debug, dispatcher)
if useappstatus:
from google.appengine.ext.appstats import recording
        app = recording.appstats_wsgi_middleware(app)
from google.appengine.ext.webapp.util import run_wsgi_app
run_wsgi_app(app)
else:
CustomHandler().run(app)
def get_app(debug = False, dispatcher = None):
"""
A function to get wsgi server object.
"""
if debug:
# use our debug.utils with Jinja2 templates
from aha.wsgi.cwsgiapp import (CWSGIApplication, MainHandler)
from aha.wsgi.debug import utils
app = CWSGIApplication(
[(r'.*', MainHandler)],
debug = debug)
sys.modules['werkzeug.debug.utils'] = utils
import inspect
inspect.getsourcefile = inspect.getfile
patch_werkzeug()
from werkzeug import DebuggedApplication
global _debugged_app
if not _debugged_app:
_debugged_app = app = DebuggedApplication(app, evalex = True)
else:
app = _debugged_app
return app
else:
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext.webapp import WSGIApplication
from aha.wsgi.cwsgiapp import MainHandler
app = WSGIApplication(
[(r'.*', MainHandler)],
debug = debug)
return app
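# Typical wiring from a project's WSGI entry point (hypothetical layout, shown for
# illustration only; the actual handler script belongs to the application, not here):
#   import os
#   from aha.wsgi.appinit import initConfig, initPlugins, run
#   base = os.path.dirname(os.path.abspath(__file__))
#   config = initConfig(base)
#   initPlugins(base)
#   run(debug=config.debug, useappstatus=config.useappstatus)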
def patch_werkzeug():
"""
A function to patch werkzeug to make it work on app engine
"""
from werkzeug.debug.console import HTMLStringO
def seek(self, n, mode=0):
pass
def readline(self):
if len(self._buffer) == 0:
return ''
ret = self._buffer[0]
del self._buffer[0]
return ret
# Apply all other patches.
HTMLStringO.seek = seek
HTMLStringO.readline = readline | bsd-3-clause | -6,219,672,598,750,610,000 | 26.893617 | 77 | 0.591302 | false |
fishjord/gsutil | gslib/commands/requesterpays.py | 1 | 6627 | # -*- coding: utf-8 -*-
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of requesterpays configuration command for buckets."""
from __future__ import absolute_import
from gslib import metrics
from gslib.command import Command
from gslib.command_argument import CommandArgument
from gslib.cs_api_map import ApiSelector
from gslib.exception import CommandException
from gslib.exception import NO_URLS_MATCHED_TARGET
from gslib.help_provider import CreateHelpText
from gslib.third_party.storage_apitools import storage_v1_messages as apitools_messages
from gslib.util import NO_MAX
_SET_SYNOPSIS = """
gsutil requesterpays set [on|off] bucket_url...
"""
_GET_SYNOPSIS = """
gsutil requesterpays get bucket_url...
"""
_SYNOPSIS = _SET_SYNOPSIS + _GET_SYNOPSIS.lstrip('\n')
_SET_DESCRIPTION = """
<B>SET</B>
The "set" sub-command requires an additional sub-command, either "on" or
"off", which, respectively, will enable or disable requester pays for the
specified bucket(s).
"""
_GET_DESCRIPTION = """
<B>GET</B>
The "get" sub-command gets the requester pays configuration for a
bucket and displays whether or not it is enabled.
"""
_DESCRIPTION = """
The Requester Pays Configuration feature enables you to configure a Google
Cloud Storage bucket to indicate that the requester will pay all costs
related to accessing the bucket and its objects.
The gsutil requesterpays command has two sub-commands:
""" + _SET_DESCRIPTION + _GET_DESCRIPTION
_DETAILED_HELP_TEXT = CreateHelpText(_SYNOPSIS, _DESCRIPTION)
_get_help_text = CreateHelpText(_GET_SYNOPSIS, _GET_DESCRIPTION)
_set_help_text = CreateHelpText(_SET_SYNOPSIS, _SET_DESCRIPTION)
class RequesterPaysCommand(Command):
"""Implementation of gsutil requesterpays command."""
# Command specification. See base class for documentation.
command_spec = Command.CreateCommandSpec(
'requesterpays',
usage_synopsis=_SYNOPSIS,
min_args=2,
max_args=NO_MAX,
supported_sub_args='',
file_url_ok=False,
provider_url_ok=False,
urls_start_arg=2,
gs_api_support=[
# ApiSelector.XML, # TODO: Uncomment once boto changes are added.
ApiSelector.JSON],
gs_default_api=ApiSelector.JSON,
argparse_arguments={
'set': [
CommandArgument('mode', choices=['on', 'off']),
CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
],
'get': [
CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
]
}
)
# Help specification. See help_provider.py for documentation.
help_spec = Command.HelpSpec(
help_name='requesterpays',
help_name_aliases=[],
help_type='command_help',
help_one_line_summary=(
'Enable or disable requester pays for one or more buckets'),
help_text=_DETAILED_HELP_TEXT,
subcommand_help_text={'get': _get_help_text, 'set': _set_help_text},
)
def _CalculateUrlsStartArg(self):
if not self.args:
self.RaiseWrongNumberOfArgumentsException()
if self.args[0].lower() == 'set':
return 2
else:
return 1
def _SetRequesterPays(self):
"""Gets requesterpays configuration for a bucket."""
requesterpays_arg = self.args[0].lower()
if requesterpays_arg not in ('on', 'off'):
raise CommandException('Argument to "%s set" must be either [on|off]'
% (self.command_name))
url_args = self.args[1:]
if not url_args:
self.RaiseWrongNumberOfArgumentsException()
# Iterate over URLs, expanding wildcards and set the requesterpays
# configuration on each.
some_matched = False
for url_str in url_args:
bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
for blr in bucket_iter:
url = blr.storage_url
some_matched = True
bucket_metadata = apitools_messages.Bucket(
billing=apitools_messages.Bucket.BillingValue())
if requesterpays_arg == 'on':
self.logger.info('Enabling requester pays for %s...', url)
bucket_metadata.billing.requesterPays = True
else:
self.logger.info('Disabling requester pays for %s...', url)
bucket_metadata.billing.requesterPays = False
self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
provider=url.scheme, fields=['id'])
if not some_matched:
raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
def _GetRequesterPays(self):
"""Gets requesterpays configuration for one or more buckets."""
url_args = self.args
# Iterate over URLs, expanding wildcards and getting the requesterpays
# configuration on each.
some_matched = False
for url_str in url_args:
bucket_iter = self.GetBucketUrlIterFromArg(url_str,
bucket_fields=['billing'])
for blr in bucket_iter:
some_matched = True
if blr.root_object.billing and blr.root_object.billing.requesterPays:
print '%s: Enabled' % blr.url_string.rstrip('/')
else:
print '%s: Disabled' % blr.url_string.rstrip('/')
if not some_matched:
raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
def RunCommand(self):
"""Command entry point for the requesterpays command."""
action_subcommand = self.args.pop(0)
if action_subcommand == 'get':
func = self._GetRequesterPays
metrics.LogCommandParams(subcommands=[action_subcommand])
elif action_subcommand == 'set':
func = self._SetRequesterPays
requesterpays_arg = self.args[0].lower()
if requesterpays_arg in ('on', 'off'):
metrics.LogCommandParams(
subcommands=[action_subcommand, requesterpays_arg])
else:
raise CommandException((
'Invalid subcommand "%s" for the %s command.\n'
'See "gsutil help %s".') % (
action_subcommand, self.command_name, self.command_name))
func()
return 0
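# Illustrative command lines, following the synopsis above (the bucket name is hypothetical):
#   gsutil requesterpays set on gs://my-bucket
#   gsutil requesterpays get gs://my-bucket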
| apache-2.0 | -6,606,186,444,595,795,000 | 35.412088 | 87 | 0.671194 | false |
shea256/coinrpc | coinrpc/config.py | 1 | 1552 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
coinrpc
~~~~~
:copyright: (c) 2014 by Halfmoon Labs
:license: MIT, see LICENSE for more details.
"""
import os
from commontools import log
NAMECOIND_ENABLED = True
BITCOIND_ENABLED = False
DEBUG = True
#--------------------------------------------------
if NAMECOIND_ENABLED:
NAMECOIND_USE_HTTPS = True
try:
NAMECOIND_PORT = os.environ['NAMECOIND_PORT']
NAMECOIND_SERVER = os.environ['NAMECOIND_SERVER']
NAMECOIND_USER = os.environ['NAMECOIND_USER']
NAMECOIND_PASSWD = os.environ['NAMECOIND_PASSWD']
except:
#log.debug("Namecoind not configured")
#default settings with a public server
NAMECOIND_PORT = 8332
NAMECOIND_SERVER = '107.170.167.141'
NAMECOIND_USER = 'opennamesystem'
NAMECOIND_PASSWD = 'opennamesystem'
try:
NAMECOIND_WALLET_PASSPHRASE = os.environ['NAMECOIND_WALLET_PASSPHRASE']
except:
NAMECOIND_WALLET_PASSPHRASE = ''
#--------------------------------------------------
if BITCOIND_ENABLED:
BITCOIND_USE_HTTPS = True
try:
BITCOIND_PORT = os.environ['BITCOIND_PORT']
BITCOIND_SERVER = os.environ['BITCOIND_SERVER']
BITCOIND_USER = os.environ['BITCOIND_USER']
BITCOIND_PASSWD = os.environ['BITCOIND_PASSWD']
BITCOIND_WALLET_PASSPHRASE = os.environ['BITCOIND_WALLET_PASSPHRASE']
except:
#log.debug("Bitcoind not configured")
BITCOIND_PORT = 5005
BITCOIND_SERVER = BITCOIND_USER = BITCOIND_PASSWD = ''
try:
BITCOIND_WALLET_PASSPHRASE = os.environ['BITCOIND_WALLET_PASSPHRASE']
except:
BITCOIND_WALLET_PASSPHRASE = '' | mit | -7,431,524,886,803,951,000 | 24.048387 | 73 | 0.67268 | false |
rustychris/stomel | src/equilateral_paver.py | 1 | 5386 | # Make a grid with all equilateral triangles
# Currently only supports a rectangular domain, constant density,
# and either vertical or horizontal orientation
import trigrid
import numpy as np
class EquilateralPaver(trigrid.TriGrid):
def __init__(self,L,W,dens,orientation='horizontal',**kwargs):
super(EquilateralPaver,self).__init__(**kwargs)
self.L = L # x dimension
self.W = W # y dimension
self.dens = dens
self.orientation = orientation
if self.orientation == 'vertical':
self.L,self.W = self.W,self.L
self.create_grid()
if self.orientation == 'vertical':
self.L,self.W = self.W,self.L
self.points = self.points[:,::-1]
self.cells = self.cells[:,::-1]
self.renumber()
def create_grid(self):
# first, how many rows - here we assume orientation is horizontal,
# so the left and right sides are ragged.
cos30 = np.cos(30*np.pi/180.)
n_rows = self.W / (cos30 * self.dens)
# to make sure that the first and last points line up, we need an
# even number of rows of cells:
n_rows = 2 * int( (n_rows+1.0)/ 2 )
self.n_rows = n_rows
# Let the length L be fudge-able - as in we prefer perfectly equilateral triangles
# over a perfectly L-length grid. the width W can still be exact.
dens = self.W / (n_rows * cos30)
print "That will make n_rows=%d and adjusted edge length %f"%(n_rows,dens)
# this is the number of cells...
n_cols = int(self.L / dens)
self.n_cols = n_cols
# Stack them up
for r in range(n_rows+1):
y = self.W * float(r)/n_rows
odd = r%2
x_off = odd * 0.5*dens
for c in range(n_cols+1):
x = x_off + dens*float(c)
n = self.add_node( np.array([x,y]) )
if c > 0:
if r==0:
self.add_edge(n-1,n,cright=-1,marker=1)
elif r==n_rows:
self.add_edge(n-1,n,cleft=-1,marker=1)
else:
self.add_edge(n,n-1)
# HERE: need to finish adding in the markers and closed boundary code.
if r>0:
cright=-2
cleft=-2
marker = 0
if odd:
if c==0:
cleft=-1
marker=1
elif c==n_cols:
cright=-1
marker=1
self.add_edge(n-(n_cols+1),n,marker=marker,cleft=cleft,cright=cright)
if c<n_cols:
self.add_edge(n,n-n_cols)
else:
if c==0:
cleft=-1
marker=1
elif c==n_cols:
cright=-1
marker=1
self.add_edge(n-(n_cols+1),n,cleft=cleft,cright=cright,marker=marker)
if c>0:
self.add_edge(n,n-(n_cols+1)-1)
class RotatedEquilateralPaver(EquilateralPaver):
""" Create a ragged-edged grid where the triangles are rotated the given
angle, in radians, CCW from parallel to the x-axis.
"""
def __init__(self,L,W,dens,angle=0,**kwargs):
self.final_L = L
self.final_W = W
# find the L and W needed to still be big enough after we've rotated -
# adding a bit of extra to avoid funny edge effects:
Lprime = L*np.cos(angle) + W*np.sin(angle) + 4*dens
Wprime = W*np.cos(angle) + L*np.sin(angle) + 4*dens
super(RotatedEquilateralPaver,self).__init__(L=Lprime, W=Wprime, dens=dens, **kwargs)
self.rotate_grid(angle)
self.trim_grid()
self.renumber()
def rotate_grid(self,angle):
""" rotates the oversized grid and translates to get the origin in the right place.
"""
# translate to get centered on the extra bit we asked for:
self.points[:] -= 2*self.dens
# rotate
self.points[:] = trigrid.rot(angle,self.points)
# and get our origin to a nice place
self.points[:,0] += self.final_L * np.sin(angle)**2
self.points[:,1] -= self.final_L * np.sin(angle)*np.cos(angle)
def trim_grid(self):
""" with the oversized grid created, and the origin correctly placed, remove points
and associated edges/cells that fall outside the actual footprint
"""
to_delete = (self.points[:,0] < 0) | (self.points[:,0]>self.final_L) | \
(self.points[:,1] < 0) | (self.points[:,1]>self.final_W)
for n in np.nonzero(to_delete)[0]:
self.delete_node(n,remove_edges=True)
if __name__ == '__main__':
#ep = EquilateralPaver(10000.,5000.,500.,orientation='horizontal')
#ep.plot()
    ep = RotatedEquilateralPaver(10000.,5000.,510.,angle=15*np.pi/180.)
    import matplotlib.pyplot as plt  # cla() comes from matplotlib; it is not imported at module level
    plt.cla()
    ep.plot()
| gpl-2.0 | -8,226,103,744,813,507,000 | 35.890411 | 93 | 0.492573 | false |
Arabidopsis-Information-Portal/PMR_API | services/metabolite_api/metabolite.py | 1 | 1745 | # PMR WebServices
# Copyright (C) 2016 Manhoi Hur, Belyaeva, Irina
# This file is part of PMR WebServices API.
#
# PMR API is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# PMR API is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with PMR API. If not, see <http://www.gnu.org/licenses/>.
"""
Metabolite Class
"""
# Class Constructor
class Metabolite(object):
"""Creates an instance of Metabolite class
by metabolite ID, metabolite name
:type mId: string
:param mId: metabolite ID
:type metaboliteName: string
:param metaboliteName: metabolite name
:rtype: Metabolite
:return: Returns Metabolite object
"""
def __init__(self, mId, metaboliteName):
self.mId = mId
self.metaboliteName = metaboliteName
# This function transform json object into an instance of Metabolite class
def object_hook_handler(parsed_dict):
"""Performs json string serialization into an Metabolite object
by metabolite ID, metabolite name
:type parsed_dict: dict
:param parsed_dict: json object as dictionary
:rtype: Metabolite
:return: Returns Metabolite object
"""
return Metabolite(mId=parsed_dict['mId'],
metaboliteName=parsed_dict['metaboliteName']
)
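# Example usage (illustrative only; the field values are hypothetical):
#   import json
#   payload = '{"mId": "M001", "metaboliteName": "sucrose"}'
#   m = json.loads(payload, object_hook=object_hook_handler)
#   print(m.metaboliteName)  # -> sucrose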
| gpl-2.0 | -512,958,847,505,018,560 | 31.924528 | 74 | 0.695702 | false |
jmvrbanac/barbican | functionaltests/api/v1/functional/test_secrets.py | 1 | 28571 | # Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import binascii
import json
import sys
import time
from testtools import testcase
from barbican.tests import utils
from functionaltests.api import base
from functionaltests.api.v1.behaviors import secret_behaviors
from functionaltests.api.v1.models import secret_models
# TODO(tdink) Move to a config file
secret_create_defaults_data = {
"name": "AES key",
"expiration": "2018-02-28T19:14:44.180394",
"algorithm": "aes",
"bit_length": 256,
"mode": "cbc",
"payload": "gF6+lLoF3ohA9aPRpt+6bQ==",
"payload_content_type": "application/octet-stream",
"payload_content_encoding": "base64",
}
secret_create_nones_data = {
"name": None,
"expiration": None,
"algorithm": None,
"bit_length": None,
"mode": None,
"payload": None,
"payload_content_type": None,
"payload_content_encoding": None,
}
secret_create_emptystrings_data = {
"name": '',
"expiration": '',
"algorithm": '',
"bit_length": '',
"mode": '',
"payload": '',
"payload_content_type": '',
"payload_content_encoding": '',
}
secret_create_two_phase_data = {
"name": "AES key",
"expiration": "2018-02-28T19:14:44.180394",
"algorithm": "aes",
"bit_length": 256,
"mode": "cbc",
}
@utils.parameterized_test_case
class SecretsTestCase(base.TestCase):
def setUp(self):
super(SecretsTestCase, self).setUp()
self.behaviors = secret_behaviors.SecretBehaviors(self.client)
def tearDown(self):
self.behaviors.delete_all_created_secrets()
super(SecretsTestCase, self).tearDown()
@testcase.attr('negative')
def test_secret_create_nones_content_type(self):
"""Create secret with valid content type but no payload."""
test_model = secret_models.SecretModel(**secret_create_nones_data)
overrides = {"payload_content_type": "application/octet-stream"}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 400)
@testcase.attr('positive')
def test_secret_create_defaults_check_content_types(self):
"""Check that set content-type attribute is retained in metadata."""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
resp = self.behaviors.get_secret_metadata(secret_ref)
content_types = resp.model.content_types
self.assertIsNotNone(content_types)
self.assertIn('default', content_types)
self.assertEqual(content_types['default'],
test_model.payload_content_type)
@testcase.attr('positive')
def test_secret_create_nones(self):
"""Covers case of a POST request with no JSON data."""
test_model = secret_models.SecretModel(**secret_create_nones_data)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
@testcase.attr('negative')
def test_secret_get_secret_doesnt_exist(self):
"""GET a non-existent secret.
Should return a 404.
"""
resp = self.behaviors.get_secret_metadata('not_a_uuid')
self.assertEqual(resp.status_code, 404)
@testcase.attr('negative')
def test_secret_delete_doesnt_exist(self):
"""DELETE a non-existent secret.
Should return a 404.
"""
resp = self.behaviors.delete_secret('not_a_uuid', expected_fail=True)
self.assertEqual(resp.status_code, 404)
@testcase.attr('negative')
def test_secret_get_invalid_mime_type(self):
"""Covers getting a secret with an invalid mime type."""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
resp, secret_ref = self.behaviors.create_secret(test_model)
resp = self.behaviors.get_secret(secret_ref,
payload_content_type="i/m")
self.assertEqual(resp.status_code, 406)
@testcase.attr('negative')
def test_secret_create_defaults_expiration_passed(self):
"""Create a secret with an expiration that has already passed.
Should return a 400.
"""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"expiration": '2000-01-10T14:58:52.546795'}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 400)
@testcase.attr('negative')
def test_secret_create_emptystrings(self):
"""Secret create with empty Strings for all attributes.
Should return a 400.
"""
test_model = secret_models.SecretModel(
**secret_create_emptystrings_data)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 400)
@testcase.attr('negative')
def test_secret_create_defaults_invalid_content_type(self):
"""Create secret with an invalid content type in HTTP header.
Should return a 415.
"""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
headers = {"Content-Type": "crypto/boom"}
resp, secret_ref = self.behaviors.create_secret(test_model, headers)
self.assertEqual(resp.status_code, 415)
@testcase.attr('negative')
def test_secret_create_defaults_oversized_payload(self):
"""Create a secret that is larger than the max payload size.
Should return a 413 if the secret size is greater than the
maximum allowed size.
"""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"payload": str(self.oversized_payload)}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 413)
@testcase.attr('negative')
def test_secret_put_doesnt_exist(self):
"""PUT secret to a non-existent secret.
Should return 404.
"""
resp = self.behaviors.update_secret_payload(
secret_ref='not_a_uuid',
payload_content_type='application/octet-stream',
payload_content_encoding='base64',
payload='testing putting to non-existent secret')
self.assertEqual(resp.status_code, 404)
@testcase.attr('negative')
def test_secret_put_defaults_data_already_exists(self):
"""PUT against a secret that already has encrypted data.
Should return 409.
"""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
resp = self.behaviors.update_secret_payload(
secret_ref=secret_ref,
payload_content_type='application/octet-stream',
payload_content_encoding='base64',
payload='testing putting data in secret that already has data')
self.assertEqual(resp.status_code, 409)
@testcase.attr('negative')
def test_secret_put_two_phase_empty_payload(self):
"""Covers case of putting empty String to a secret.
Should return 400.
"""
test_model = secret_models.SecretModel(**secret_create_two_phase_data)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
put_resp = self.behaviors.update_secret_payload(
secret_ref=secret_ref,
payload_content_type='application/octet-stream',
payload_content_encoding='base64',
payload='')
self.assertEqual(put_resp.status_code, 400)
@testcase.attr('negative')
def test_secret_put_two_phase_invalid_content_type(self):
"""PUT with an invalid content type. Should return 415.
Launchpad bug #1208601
- Updated in Barbican blueprint barbican-enforce-content-type
"""
test_model = secret_models.SecretModel(**secret_create_two_phase_data)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
put_resp = self.behaviors.update_secret_payload(
secret_ref=secret_ref,
payload_content_type='crypto/boom',
payload_content_encoding='base64',
payload='invalid content type')
self.assertEqual(put_resp.status_code, 415)
@testcase.attr('negative')
def test_secret_put_two_phase_no_payload(self):
"""Covers case of putting null String to a secret.
Should return 400.
"""
test_model = secret_models.SecretModel(**secret_create_two_phase_data)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
put_resp = self.behaviors.update_secret_payload(
secret_ref=secret_ref,
payload_content_type='application/octet-stream',
payload_content_encoding='base64',
payload=None)
self.assertEqual(put_resp.status_code, 400)
@testcase.attr('negative')
def test_secret_put_two_phase_w_oversized_binary_data_not_utf8(self):
"""PUT with an oversized binary string that isn't UTF-8.
Launchpad bug #1315498.
"""
oversized_payload = bytearray().zfill(self.max_payload_size + 1)
# put a value in the middle of the data that does not have a UTF-8
# code point. Using // to be python3-friendly.
oversized_payload[self.max_payload_size // 2] = b'\xb0'
test_model = secret_models.SecretModel(**secret_create_two_phase_data)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
put_resp = self.behaviors.update_secret_payload(
secret_ref=secret_ref,
payload_content_type='application/octet-stream',
payload_content_encoding='base64',
payload=str(oversized_payload))
self.assertEqual(put_resp.status_code, 413)
@testcase.attr('negative')
def test_secret_put_two_phase_oversized_payload(self):
"""PUT with oversized payload should return 413.
Covers the case of putting secret data that is larger than the maximum
secret size allowed by Barbican. Beyond that it should return 413.
"""
oversized_payload = self.oversized_payload
test_model = secret_models.SecretModel(**secret_create_two_phase_data)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
put_resp = self.behaviors.update_secret_payload(
secret_ref=secret_ref,
payload_content_type='application/octet-stream',
payload_content_encoding='base64',
payload=oversized_payload)
self.assertEqual(put_resp.status_code, 413)
@testcase.attr('positive')
def test_secret_put_two_phase_valid_binary_data_not_utf8(self):
"""A string with binary data that doesn't contain UTF-8 code points.
Launchpad bug #1315498.
"""
# put a value in the data that does not have a UTF-8 code point.
data = b'\xb0'
test_model = secret_models.SecretModel(**secret_create_two_phase_data)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
put_resp = self.behaviors.update_secret_payload(
secret_ref=secret_ref,
payload_content_type='application/octet-stream',
payload_content_encoding='base64',
payload=str(data))
self.assertEqual(put_resp.status_code, 204)
@testcase.attr('positive')
def test_secret_put_two_phase_high_range_unicode_character(self):
"""Tests a high-range unicode character on a two-step PUT.
Launchpad bug #1315498
"""
data = u'\U0001F37A'
data = data.encode('utf-8')
test_model = secret_models.SecretModel(**secret_create_two_phase_data)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
put_resp = self.behaviors.update_secret_payload(
secret_ref=secret_ref,
payload_content_type='application/octet-stream',
payload_content_encoding='base64',
payload=data)
self.assertEqual(put_resp.status_code, 204)
def test_secret_get_nones_payload_with_a_octet_stream(self):
"""Tests getting a secret with octet-stream."""
test_model = secret_models.SecretModel(**secret_create_two_phase_data)
overrides = {'payload_content_type': 'application/octet-stream',
'payload_content_encoding': 'base64',
'payload': base64.b64encode('abcdef')}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
get_resp = self.behaviors.get_secret(
secret_ref,
payload_content_type=test_model.payload_content_type,
payload_content_encoding=test_model.payload_content_encoding)
self.assertIn(test_model.payload,
binascii.b2a_base64(get_resp.content))
def test_secret_create_defaults_bad_content_type_check_message(self):
"""Verifying the returned error message matches the expected form."""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"payload_content_type": 'plain-text'}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
# first, ensure that the return code is 400
self.assertEqual(resp.status_code, 400)
resp_dict = json.loads(resp.content)
self.assertIn(
"Provided object does not match schema 'Secret': "
"payload_content_type is not one of ['text/plain', "
"'text/plain;charset=utf-8', 'text/plain; charset=utf-8', "
"'application/octet-stream'", resp_dict['description'])
self.assertIn("Bad Request", resp_dict['title'])
@testcase.attr('negative')
def test_secret_create_defaults_then_expire_then_check(self):
"""Covers case where you try to retrieve a secret that is expired.
This test creates a secret that will soon expire.
After it expires, check it and verify that it is no longer
a valid secret.
"""
# create a secret that expires in 5 seconds
timestamp = utils.create_timestamp_w_tz_and_offset(seconds=5)
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"expiration": timestamp}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
# now get the secret - will be still valid
get_resp = self.behaviors.get_secret_metadata(secret_ref)
self.assertEqual(get_resp.status_code, 200)
# now wait 10 seconds
time.sleep(10)
# now get the secret - should be invalid (expired)
resp = self.behaviors.get_secret_metadata(secret_ref)
self.assertEqual(resp.status_code, 404)
@utils.parameterized_dataset({
'alphanumeric': ['1f34ds'],
'punctuation': ['~!@#$%^&*()_+`-={}[]|:;<>,.?'],
'uuid': ['54262d9d-4bc7-4821-8df0-dc2ca8e112bb'],
'len_255': [base.TestCase.max_sized_field],
'empty': [''],
'null': [None]
})
@testcase.attr('positive')
def test_secret_create_defaults_valid_name(self, name):
"""Covers cases of creating secrets with valid names."""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"name": name}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
@utils.parameterized_dataset({
'int': [400]
})
@testcase.attr('negative')
def test_secret_create_defaults_invalid_name(self, name):
"""Create secrets with various invalid names.
Should return 400.
"""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"name": name}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 400)
@utils.parameterized_dataset({
'invalid': ['invalid']
})
@testcase.attr('positive')
def test_secret_create_defaults_valid_algorithms(self, algorithm):
"""Creates secrets with various valid algorithms."""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"algorithm": algorithm}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
@utils.parameterized_dataset({
'int': [400]
})
@testcase.attr('negative')
def test_secret_create_defaults_invalid_algorithms(self, algorithm):
"""Creates secrets with various invalid algorithms."""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"algorithm": algorithm}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 400)
@utils.parameterized_dataset({
'512': [512],
'sixteen': [16],
'fifteen': [15],
'eight': [8],
'seven': [7],
'one': [1],
'none': [None]
})
@testcase.attr('positive')
def test_secret_create_defaults_valid_bit_length(self, bit_length):
"""Covers cases of creating secrets with valid bit lengths."""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"bit_length": bit_length}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
@utils.parameterized_dataset({
'str_type': ['not-an-int'],
'empty': [''],
'blank': [' '],
'negative_maxint': [-sys.maxint],
'negative_one': [-1],
'zero': [0]
})
@testcase.attr('negative')
def test_secret_create_defaults_invalid_bit_length(self, bit_length):
"""Covers cases of creating secrets with invalid bit lengths."""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"bit_length": bit_length}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 400)
@utils.parameterized_dataset({
'cbc': ['cbc'],
'unknown_positive': ['unknown']
})
@testcase.attr('positive')
def test_secret_create_defaults_valid_mode(self, mode):
"""Covers cases of creating secrets with valid modes."""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"mode": mode}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
@utils.parameterized_dataset({
'zero': [0],
'oversized_string': [base.TestCase.oversized_field],
'int': [400]
})
@testcase.attr('negative')
def test_secret_create_defaults_invalid_mode(self, mode):
"""Covers cases of creating secrets with invalid modes."""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"mode": mode}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 400)
@utils.parameterized_dataset({
'text_content_type_none_encoding': {
'payload_content_type': 'text/plain',
'payload_content_encoding': None},
'utf8_text_content_type_none_encoding': {
'payload_content_type': 'text/plain; charset=utf-8',
'payload_content_encoding': None},
'no_space_utf8_text_content_type_none_encoding': {
'payload_content_type': 'text/plain;charset=utf-8',
'payload_content_encoding': None},
'octet_content_type_base64_encoding': {
'payload_content_type': 'application/octet-stream',
'payload_content_encoding': 'base64'}
})
@testcase.attr('positive')
def test_secret_create_defaults_valid_types_and_encoding(self, **kwargs):
"""Creates secrets with various content types and encodings."""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
test_model.override_values(**kwargs)
payload_content_encoding = test_model.payload_content_encoding
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
get_resp = self.behaviors.get_secret(
secret_ref,
payload_content_type=test_model.payload_content_type,
payload_content_encoding=payload_content_encoding)
if payload_content_encoding == 'base64':
self.assertIn(test_model.payload,
binascii.b2a_base64(get_resp.content))
else:
self.assertIn(test_model.payload, get_resp.content)
@utils.parameterized_dataset({
'empty_content_type_and_encoding': {
'payload_content_type': '',
'payload_content_encoding': ''},
'none_content_type_and_encoding': {
'payload_content_type': None,
'payload_content_encoding': None},
'large_string_content_type_and_encoding': {
'payload_content_type': base.TestCase.oversized_field,
'payload_content_encoding': base.TestCase.oversized_field},
'int_content_type_and_encoding': {
'payload_content_type': 123,
'payload_content_encoding': 123},
'none_content_type_base64_content_encoding': {
'payload_content_type': None,
'payload_content_encoding': 'base64'},
'text_content_type_none_content_encoding': {
'payload_content_type': 'text/plain',
'payload_content_encoding': ''},
'text_no_subtype_content_type_none_content_encoding': {
'payload_content_type': 'text',
'payload_content_encoding': None},
'text_slash_no_subtype_content_type_none_content_encoding': {
'payload_content_type': 'text/',
'payload_content_encoding': None},
'text_content_type_empty_content_encoding': {
'payload_content_type': 'text/plain',
'payload_content_encoding': ' '},
'text_content_type_spaces_content_encoding': {
'payload_content_type': 'text/plain',
'payload_content_encoding': ' '},
'text_content_type_base64_content_encoding': {
'payload_content_type': 'text/plain',
'payload_content_encoding': 'base64'},
'text_and_utf88_content_type_none_content_encoding': {
'payload_content_type': 'text/plain; charset=utf-88',
'payload_content_encoding': None},
'invalid_content_type_base64_content_encoding': {
'payload_content_type': 'invalid',
'payload_content_encoding': 'base64'},
'invalid_content_type_none_content_encoding': {
'payload_content_type': 'invalid',
'payload_content_encoding': None},
'octet_content_type_invalid_content_encoding': {
'payload_content_type': 'application/octet-stream',
'payload_content_encoding': 'invalid'},
'text_content_type_invalid_content_encoding': {
'payload_content_type': 'text/plain',
'payload_content_encoding': 'invalid'},
'none_content_type_invalid_content_encoding': {
'payload_content_type': None,
'payload_content_encoding': 'invalid'},
})
@testcase.attr('negative')
def test_secret_create_defaults_invalid_types_and_encoding(self, **kwargs):
"""Creating secrets with invalid payload types and encodings."""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
test_model.override_values(**kwargs)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 400)
@utils.parameterized_dataset({
'max_payload_string': [base.TestCase.max_sized_payload]
})
@testcase.attr('positive')
def test_secret_create_defaults_valid_payload(self, payload):
"""Create secrets with a various valid payloads."""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"payload": payload}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
@utils.parameterized_dataset({
'empty': [''],
'array': [['boom']],
'int': [123],
'none': [None]
})
@testcase.attr('negative')
def test_secret_create_defaults_invalid_payload(self, payload):
"""Covers creating secrets with various invalid payloads."""
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"payload_content_type": "application/octet-stream",
"payload_content_encoding": "base64",
"payload": payload}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 400)
@utils.parameterized_dataset({
'negative_five_long_expire': {
'timezone': '-05:00',
'days': 5},
'positive_five_long_expire': {
'timezone': '+05:00',
'days': 5},
'negative_one_short_expire': {
'timezone': '-01',
'days': 1},
'positive_one_short_expire': {
'timezone': '+01',
'days': 1}
})
@testcase.attr('positive')
def test_secret_create_defaults_valid_expiration(self, **kwargs):
"""Create secrets with a various valid expiration data."""
timestamp = utils.create_timestamp_w_tz_and_offset(**kwargs)
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"expiration": timestamp}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 201)
@utils.parameterized_dataset({
'malformed_timezone': {
'timezone': '-5:00',
'days': 0}
})
@testcase.attr('negative')
def test_secret_create_defaults_invalid_expiration(self, **kwargs):
"""Create secrets with various invalid expiration data."""
timestamp = utils.create_timestamp_w_tz_and_offset(**kwargs)
test_model = secret_models.SecretModel(**secret_create_defaults_data)
overrides = {"expiration": timestamp}
test_model.override_values(**overrides)
resp, secret_ref = self.behaviors.create_secret(test_model)
self.assertEqual(resp.status_code, 400)
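# Illustrative sketch, not part of the original test module: the expiration
# tests above rely on utils.create_timestamp_w_tz_and_offset(), which is
# assumed to return a future timestamp string suffixed with a timezone
# designator. A minimal stand-in with the same calling pattern could look like
# the helper below; the real Barbican test utility may differ in detail.
def _example_timestamp_w_tz_and_offset(timezone='+00:00', days=0, seconds=0):
    """Return an ISO-like timestamp offset into the future,
    e.g. '2015-06-01T12:00:00-05:00' for timezone='-05:00'."""
    import datetime
    future = datetime.datetime.utcnow() + datetime.timedelta(days=days,
                                                             seconds=seconds)
    return future.isoformat() + timezone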
| apache-2.0 | -3,509,866,125,628,700,000 | 36.892573 | 79 | 0.632249 | false |
hugobranquinho/ines | ines/middlewares/gzipper.py | 1 | 3689 | # -*- coding: utf-8 -*-
from io import BytesIO
from gzip import compress as gzip_compress
from pyramid.decorator import reify
from ines.middlewares import Middleware
class Gzip(Middleware):
name = 'gzip'
def __init__(self, config, application, **settings):
super(Gzip, self).__init__(config, application, **settings)
self.compress_level = int(settings.get('compress_level') or 9)
self.content_types = (
settings.get('content_types', '').split()
or ['text/', 'application/', 'image/svg'])
self.all_content_types = '*' in self.content_types
def __call__(self, environ, start_response):
return GzipMiddlewareSession(self)(environ, start_response)
class GzipMiddlewareSession(object):
def __init__(self, middleware):
self.middleware = middleware
self.compressible = False
self.status = None
self.headers = []
self.exc_info = None
def __call__(self, environ, start_response):
if 'gzip' not in environ.get('HTTP_ACCEPT_ENCODING', ''):
return self.middleware.application(environ, start_response)
self.start_response = start_response
app_iter = self.middleware.application(environ, self.gzip_start_response)
if app_iter is not None and self.compressible:
binary = gzip_compress(b''.join(app_iter), self.middleware.compress_level)
if hasattr(app_iter, 'close'):
app_iter.close()
self.remove_header('content-length')
self.headers.append(('content-encoding', 'gzip'))
self.set_header('content-length', len(binary))
start_response(self.status, self.headers, self.exc_info)
return [binary]
return app_iter
@reify
def buffer(self):
return BytesIO()
def remove_header(self, name):
i = len(self.headers)
name = name.lower()
for key, value in reversed(self.headers):
i -= 1
if key == name:
self.headers.pop(i)
def get_header(self, name):
name = name.lower()
for key, value in self.headers:
if key == name:
return value
def in_headers(self, name):
name = name.lower()
for key, value in self.headers:
if key == name:
return True
else:
return False
def set_header(self, name, new_value):
name = name.lower()
for i, (key, value) in enumerate(self.headers):
if key == name:
self.headers[i] = (name, str(new_value))
break
else:
self.headers.append((name, str(new_value)))
def gzip_start_response(self, status, headers, exc_info=None):
self.headers = [(key.lower(), value) for key, value in headers]
if not self.in_headers('content-encoding'):
content_type = self.get_header('content-type')
if content_type and 'zip' not in content_type:
content_type = content_type.split(';')[0]
if self.middleware.all_content_types:
self.compressible = True
else:
for start_content_type in self.middleware.content_types:
if content_type.startswith(start_content_type):
self.compressible = True
break
if self.compressible:
self.status = status
self.exc_info = exc_info
return self.buffer.write
return self.start_response(status, headers, exc_info)
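# Illustrative sketch, not part of the original module: when the client sends
# Accept-Encoding: gzip and the content type matches, __call__ joins the
# app_iter, compresses it with gzip_compress at the configured level, drops
# the old content-length, adds content-encoding: gzip and sets the compressed
# length. The round trip below uses only the standard library, so it can be
# run on its own.
if __name__ == '__main__':
    from gzip import decompress as gzip_decompress
    original = b'<p>hello gzip middleware</p>' * 100
    compressed = gzip_compress(original, 9)
    assert gzip_decompress(compressed) == original
    print('%d bytes compressed to %d bytes' % (len(original), len(compressed)))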
| mit | 4,223,264,974,435,183,600 | 32.234234 | 86 | 0.564381 | false |
marianotepper/csnmf | csnmf/third_party/mrnmf/nmf_process_algorithms.py | 1 | 4252 | """
Copyright (c) 2014, Austin R. Benson, David F. Gleich,
Purdue University, and Stanford University.
All rights reserved.
This file is part of MRNMF and is under the BSD 2-Clause License,
which can be found at http://opensource.org/licenses/BSD-2-Clause
Copyright (c) 2015, Mariano Tepper,
Duke University.
All rights reserved.
Mariano Tepper made the following changes to this file:
- modified names and line lengths to adhere more closely to PEP8
- changed docstrings
- some numpy operations are more numpy-ish now.
- small edits, refactoring, and cleanups
- removed some code
"""
import numpy as np
from scipy.optimize import nnls
def spa(data, r, colnorms):
"""
Successive projection algorithm (SPA) for NMF. This algorithm
computes the column indices.
:param data: The data matrix.
:type data: numpy.ndarray
:param r: The target separation rank.
:type r: int
:param colnorms: The column L1 norms.
:type colnorms: numpy.ndarray
:return: A list of r columns chosen by SPA.
:rtype: list of int
"""
idx = np.nonzero(colnorms)
x = np.copy(data)
x[:, idx] /= colnorms[idx]
cols = []
m, n = x.shape
for _ in xrange(r):
col_norms = np.linalg.norm(x, ord=2, axis=0)
col_norms[cols] = -1
col_ind = np.argmax(col_norms)
cols.append(col_ind)
col = np.atleast_2d(x[:, col_ind]) # col is a row vector
x = np.dot(np.eye(m) - np.dot(col.T, col) / col_norms[col_ind], x)
return cols
def xray(x, r):
"""
X-ray algorithm for NMF. This algorithm computes the column
indices.
:param x: The data matrix.
:type x: numpy.ndarray
:param r: The target separation rank.
:type r: int
:return: A list of r columns chosen by X-ray.
:rtype: list of int
"""
cols = []
R = np.copy(x)
while len(cols) < r:
# Loop until we choose a column that has not been selected.
while True:
p = np.random.random((1, x.shape[0]))
scores = np.linalg.norm(np.dot(R.T, x), ord=2, axis=0)
scores /= np.squeeze(np.dot(p, x))
scores[cols] = -1 # IMPORTANT
best_col = np.argmax(scores)
if best_col in cols:
# Re-try
continue
else:
cols.append(best_col)
H, rel_res = nnls_frob(x, cols)
R = x - np.dot(x[:, cols], H)
break
return cols
def nnls_frob(x, cols):
"""
Compute H, the coefficient matrix, by nonnegative least squares
to minimize the Frobenius norm. Given the data matrix X and the
columns cols, H is
    .. math:: \arg\min_{H \ge 0} \| X - X(:, cols) H \|_F.
    :param x: The data matrix.
    :type x: numpy.ndarray
:param cols: The column indices.
:type cols: list of int
:return: The matrix H and the relative residual.
"""
ncols = x.shape[1]
x_sel = x[:, cols]
H = np.zeros((len(cols), ncols))
for i in xrange(ncols):
sol, res = nnls(x_sel, x[:, i])
H[:, i] = sol
rel_res = np.linalg.norm(x - np.dot(x_sel, H), 'fro')
rel_res /= np.linalg.norm(x, 'fro')
return H, rel_res
def select_columns(data, alg, r, colnorms=None):
""" Compute an approximate separable NMF of the matrix data. By
compute, we mean choose r columns and a best fitting coefficient
matrix H. The r columns are selected by the 'alg' option, which
is one of 'SPA' or 'XRAY'. The coefficient matrix H is the
one that produces the smallest Frobenius norm error.
:param data: The data matrix.
:type data: numpy.ndarray
:param alg: Choice of algorithm for computing the columns. One of
'SPA' or 'XRAY'.
:type alg: string
:param r: The target separation rank.
:type r: int
:param colnorms: The column L1 norms, needed only by SPA.
:type colnorms: numpy.ndarray
    :return: A list of the r selected column indices.
    :rtype: list of int
"""
if alg == 'XRAY':
cols = xray(data, r)
elif alg == 'SPA':
cols = spa(data, r, colnorms)
else:
raise Exception('Unknown algorithm: {0}'.format(alg))
return cols
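# Illustrative usage sketch, not part of the original module: the sizes, seed
# and rank below are arbitrary. Build a small synthetic separable matrix
# X = W.dot(hstack([I, R])), whose last columns are nonnegative combinations
# of the first r, select those columns with SPA, then fit the coefficient
# matrix with nnls_frob.
if __name__ == '__main__':
    np.random.seed(0)
    rank = 3
    W = np.random.rand(20, rank)
    H_true = np.hstack((np.eye(rank), np.random.rand(rank, 7)))
    X = np.dot(W, H_true)
    colnorms = np.sum(np.abs(X), axis=0)  # column L1 norms, as SPA expects
    cols = select_columns(X, 'SPA', rank, colnorms=colnorms)
    H_fit, rel_res = nnls_frob(X, cols)
    print('selected columns: %s, relative residual: %.2e'
          % (sorted(cols), rel_res))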
| bsd-2-clause | 8,538,298,718,568,246,000 | 30.264706 | 74 | 0.601834 | false |
timeartist/flask_chutes | flask_chutes/__init__.py | 1 | 3171 | from flask_sockets import Sockets
from flask import Flask
from redis import StrictRedis
from json import loads, dumps
from multiprocessing import Process
from gevent import sleep, Greenlet
from geventwebsocket.exceptions import WebSocketError
processes = {}
def enable_chutes(app, endpoint='/chutes'):
'''
Factory method to add the chutes socket endpoint to your existing Flask app
Input:
app - Flask App Object to be extended
Returns:
None
'''
assert isinstance(app, Flask)
connection = app.config['REDIS_CONN']
r = StrictRedis(**connection)
sockets = Sockets(app)
@sockets.route(endpoint)
def _chutes(ws):
try:
i = 0
redis_key = None
channel = None
while True:
if i == 0:
msg = ws.receive()
print msg
sign_on = loads(msg)
channel = sign_on['channel']
if channel not in processes:
processes[channel] = []
redis_key = 'c:%s'%channel
i += 1
ps = r.pubsub()
ps.subscribe(redis_key)
process = Greenlet(socket_sentinel_publish, *(ws, ps))
process.start()
processes[channel].append(process)
process = Greenlet(socket_sentinel_client_listener, *(ws, r, redis_key))
process.start()
processes[channel].append(process)
resp = r.blpop(redis_key, 30)
print resp
if ws.closed:
print 'Websocket Connection Closed by Client'
break
if resp and isinstance(resp[-1], (str, unicode)):
print 'WS:', channel, '->', resp[-1]
ws.send(resp[-1])
else:
ws.send(dumps({'data':None}))
except WebSocketError, e:
_processes = processes[channel]
for process in _processes:
process.kill()
class Chute(object):
def __init__(self, channel, **kwargs):
self.r = StrictRedis(**kwargs)
self.channel = channel
self._r_key = 'c:%s'%channel
def send(self, data, timeout=90):
self.r.lpush(self._r_key, data)
self.r.expire(self._r_key, timeout)
def publish(self, data):
self.r.publish(self._r_key, data)
def listen(self):
ps = self.r.pubsub()
        ps.subscribe(self._r_key)
for item in ps.listen():
yield item
def socket_sentinel_publish(ws, ps):
for msg in ps.listen():
print msg
if msg:
ws.send(msg['data'])
def socket_sentinel_client_listener(ws, r, channel):
while True:
msg = ws.receive()
print msg
r.publish(channel, msg)
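# Illustrative usage sketch, not part of the original module: the Redis
# settings and channel name below are assumptions, and the app must be served
# by a gevent / gevent-websocket worker for the socket endpoint to be usable.
if __name__ == '__main__':
    app = Flask(__name__)
    app.config['REDIS_CONN'] = {'host': 'localhost', 'port': 6379, 'db': 0}
    enable_chutes(app, endpoint='/chutes')
    # From another process, push data to clients signed on to channel 'jobs':
    # Chute('jobs', host='localhost', port=6379, db=0).send('{"status": "done"}')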
| mit | -6,333,327,705,612,059,000 | 28.091743 | 92 | 0.48029 | false |
pokermania/pokerengine | tests/test_pokerchips.py | 1 | 10917 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2006 - 2010 Loic Dachary <[email protected]>
# Copyright (C) 2006 Mekensleep
#
# Mekensleep
# 26 rue des rosiers
# 75004 Paris
# [email protected]
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Authors:
# Pierre-Andre (05/2006)
# Loic Dachary <[email protected]>
#
import unittest, sys
from os import path
TESTS_PATH = path.dirname(path.realpath(__file__))
sys.path.insert(0, path.join(TESTS_PATH, ".."))
import os.path
import types
from pokerengine import pokerchips
class PokerChipsTestCase(unittest.TestCase):
# -----------------------------------------------------------------------------------------------------
def setUp(self):
pass
# -----------------------------------------------------------------------------------------------------
def tearDown(self):
pass
# -----------------------------------------------------------------------------------------------------
def testInit(self):
"""Test Poker Chips : Initialisation"""
values = [5, 10, 20]
pokerchip = pokerchips.PokerChips(values)
self.failUnlessEqual(pokerchip, pokerchips.PokerChips(values, 0))
self.failUnlessEqual(pokerchip.toint(), 0)
pokerchip1 = pokerchips.PokerChips(values, 153)
pokerchip2 = pokerchips.PokerChips(values, pokerchip1)
self.failUnlessEqual(pokerchip1, pokerchip2)
self.failUnlessEqual(pokerchip2.toint(), 153)
pokerchip1 = pokerchips.PokerChips(values, [10, 5, 1])
self.failUnlessEqual(pokerchip1.toint(), 120)
# -----------------------------------------------------------------------------------------------------
def testOperatorEqu(self):
"""Test Poker Chips : Equality"""
values = [5, 10, 20]
pokerchip1 = pokerchips.PokerChips(values, 153)
pokerchip2 = pokerchips.PokerChips(values, 153)
pokerchip3 = pokerchips.PokerChips(values, 154)
self.failUnlessEqual(pokerchip1, pokerchip2)
self.failIfEqual(pokerchip1, pokerchip3)
self.failUnlessEqual(pokerchip1 != pokerchip2, False)
self.failUnlessEqual(pokerchip1 != pokerchip3, True)
self.failUnlessEqual(pokerchip1 != 153, True)
# -----------------------------------------------------------------------------------------------------
def testToInt(self):
"""Test Poker Chips : To integer"""
values = [5, 10, 20]
pokerchip = pokerchips.PokerChips(values)
self.failUnlessEqual(pokerchip.toint(), 0)
pokerchip = pokerchips.PokerChips(values, 153)
self.failUnlessEqual(pokerchip.toint(), 153)
pokerchip = pokerchips.PokerChips(values, [1, 1, 1])
self.failUnlessEqual(pokerchip.toint(), 35)
# -----------------------------------------------------------------------------------------------------
def testAdd(self):
"""Test Poker Chips : Addition"""
values = [5, 10, 20]
pokerchip1 = pokerchips.PokerChips(values, 155)
pokerchip2 = pokerchips.PokerChips(values, 223)
pokerchip1.add(pokerchip2)
self.failUnlessEqual(pokerchip1.toint(), 155 + 223)
# -----------------------------------------------------------------------------------------------------
def testSub(self):
"""Test Poker Chips : Substraction"""
values = [5, 10, 20]
pokerchip1 = pokerchips.PokerChips(values, [3, 2, 1])
pokerchip2 = pokerchips.PokerChips(values, [1, 1, 1])
pokerchip1.subtract(pokerchip2)
self.failUnlessEqual(pokerchip1, pokerchips.PokerChips(values, [2, 1, 0]))
pokerchip1 = pokerchips.PokerChips(values, 155)
pokerchip1.subtract(130)
self.failUnlessEqual(pokerchip1.toint(), 25)
pokerchip1 = pokerchips.PokerChips(values, 155)
pokerchip1.subtract(160)
self.failUnlessEqual(pokerchip1.toint(), 0)
# -----------------------------------------------------------------------------------------------------
def testToString(self):
"""Test Poker Chips : String representation"""
self.failUnlessEqual(pokerchips.PokerChips.tostring(0), '0')
self.failUnlessEqual(pokerchips.PokerChips.tostring(1), '1')
# -----------------------------------------------------------------------------------------------------
def testInt2Chips(self):
"""Test Poker Chips : From integer"""
values = [5, 10, 20]
self.failUnlessEqual(pokerchips.PokerChips.int2chips([], pokerchips.INT2CHIPS_FACTOR, 15), ([], 15))
self.failUnlessEqual(pokerchips.PokerChips.int2chips(values, pokerchips.INT2CHIPS_FACTOR, 0), ([0, 0, 0], 0))
self.failUnlessEqual(pokerchips.PokerChips.int2chips(values, pokerchips.INT2CHIPS_FACTOR, 5), ([1, 0, 0], 0))
self.failUnlessEqual(pokerchips.PokerChips.int2chips(values, pokerchips.INT2CHIPS_FACTOR, 7), ([1, 0, 0], 2))
self.failUnlessEqual(pokerchips.PokerChips.int2chips(values, pokerchips.INT2CHIPS_FACTOR, 15), ([3, 0, 0], 0))
self.failUnlessEqual(pokerchips.PokerChips.int2chips(values, pokerchips.INT2CHIPS_FACTOR, 50), ([8, 1, 0], 0))
self.failUnlessEqual(pokerchips.PokerChips.int2chips(values, pokerchips.INT2CHIPS_FACTOR, 90), ([8, 3, 1], 0))
self.failUnlessEqual(pokerchips.PokerChips.int2chips(values, pokerchips.INT2CHIPS_FACTOR, 93), ([6, 4, 1], 3))
# -----------------------------------------------------------------------------------------------------
def testToList(self):
"""Test Poker Chips : To list"""
values = [5, 10, 20]
self.failUnlessEqual(pokerchips.PokerChips(values, 0).tolist(), [])
self.failUnlessEqual(pokerchips.PokerChips(values, 3).tolist(), [1,3])
self.failUnlessEqual(pokerchips.PokerChips(values, 5).tolist(), [5, 1])
self.failUnlessEqual(pokerchips.PokerChips(values, 7).tolist(), [1, 2 ,5, 1])
self.failUnlessEqual(pokerchips.PokerChips(values, 15).tolist(), [5, 3])
self.failUnlessEqual(pokerchips.PokerChips(values, 50).tolist(), [5, 8, 10, 1])
self.failUnlessEqual(pokerchips.PokerChips(values, 90).tolist(), [5, 8, 10, 3, 20, 1])
self.failUnlessEqual(pokerchips.PokerChips(values, 93).tolist(), [1, 3, 5, 6, 10, 4, 20, 1])
values = [1, 2, 4]
self.failUnlessEqual(pokerchips.PokerChips(values, 7).tolist(), [1, 7])
# -----------------------------------------------------------------------------------------------------
def testlimitChips(self):
"""Test Poker Chips : Limit chips"""
values = [5, 10, 20]
chips = pokerchips.PokerChips(values, [pokerchips.MAX_CHIPS_PER_STACK + 2, 0, 0])
self.failUnlessEqual(chips.tolist(), [5, pokerchips.MAX_CHIPS_PER_STACK, 10, 1])
chips = pokerchips.PokerChips(values, [pokerchips.MAX_CHIPS_PER_STACK + 3, 0, 0])
self.failUnlessEqual(chips.tolist(), [5, pokerchips.MAX_CHIPS_PER_STACK + 1, 10, 1])
chips = pokerchips.PokerChips(values, [pokerchips.MAX_CHIPS_PER_STACK + 4, pokerchips.MAX_CHIPS_PER_STACK, pokerchips.MAX_CHIPS_PER_STACK])
self.failUnlessEqual(chips.tolist(), [5, pokerchips.MAX_CHIPS_PER_STACK, 10, pokerchips.MAX_CHIPS_PER_STACK, 20, pokerchips.MAX_CHIPS_PER_STACK +1])
# -----------------------------------------------------------------------------------------------------
def testStrOperator(self):
"""Test Poker Chips : String representation"""
values = [5, 10, 20]
pokerchip = pokerchips.PokerChips(values, 93)
self.failUnlessEqual(str(pokerchip), 'PokerChips(%s) = %d (-%d)' %([6, 4, 1], 93, 3))
# -----------------------------------------------------------------------------------------------------
def testReprOperator(self):
"""Test Poker Chips : Representation"""
values = [5, 10, 20]
pokerchip = pokerchips.PokerChips(values, 93)
self.failUnlessEqual(repr(pokerchip), '%s(%s)' %('PokerChips', [6, 4, 1]))
# -----------------------------------------------------------------------------------------------------
def testCopy(self):
"""Test Poker Chips : Copy"""
values = [5, 10, 20]
pokerchip1 = pokerchips.PokerChips(values, 93)
pokerchip2 = pokerchip1.copy()
self.failUnlessEqual(pokerchip1, pokerchip2)
self.failUnlessEqual(pokerchip2.toint(), 93)
pokerchip1.add(7)
self.failIfEqual(pokerchip1, pokerchip2)
self.failUnlessEqual(pokerchip1.toint(), 100)
self.failUnlessEqual(pokerchip2.toint(), 93)
# -----------------------------------------------------------------------------------------------------
def GetTestSuite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(PokerChipsTestCase))
# Comment out above and use line below this when you wish to run just
# one test by itself (changing prefix as needed).
# suite.addTest(unittest.makeSuite(PokerChipsTestCase, prefix = "test2"))
return suite
# -----------------------------------------------------------------------------------------------------
def GetTestedModule():
return pokerchips
# -----------------------------------------------------------------------------------------------------
def run():
return unittest.TextTestRunner().run(GetTestSuite())
# -----------------------------------------------------------------------------------------------------
if __name__ == '__main__':
if run().wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
# Interpreted by emacs
# Local Variables:
# compile-command: "( cd .. ; ./config.status tests/test-pokerchips.py ) ; ( cd ../tests ; make COVERAGE_FILES='../pokerengine/pokerchips.py' TESTS='coverage-reset test-pokerchips.py coverage-report' check )"
# End:
| gpl-3.0 | 3,674,182,404,441,185,300 | 44.869748 | 208 | 0.525144 | false |