ext (stringclasses, 9 values) | sha (stringlengths, 40-40) | content (stringlengths, 3-1.04M) |
---|---|---|
py | 7df950cfcba59bd259bafb4536fc7f6b7d7f6cb4 | from typing import List
import pytest
from expression.system import (
CancellationToken,
CancellationTokenSource,
ObjectDisposedException,
)
from expression.system.disposable import Disposable
def test_token_none_works():
token = CancellationToken.none()
assert isinstance(token, CancellationToken)
assert not token.can_be_canceled
assert not token.is_cancellation_requested
token.throw_if_cancellation_requested()
def test_token_source_works():
source = CancellationTokenSource()
assert not source.is_cancellation_requested
with source as disp:
assert isinstance(disp, Disposable)
def test_token_cancelled_source_works():
source = CancellationTokenSource.cancelled_source()
assert isinstance(source, CancellationTokenSource)
assert source.is_cancellation_requested
with pytest.raises(ObjectDisposedException):
with source as disposable:
assert not disposable
def test_token_cancellation_works():
source = CancellationTokenSource()
with source:
token = source.token
token.throw_if_cancellation_requested()
assert token.can_be_canceled
assert not token.is_cancellation_requested
assert token.is_cancellation_requested
with pytest.raises(ObjectDisposedException):
token.throw_if_cancellation_requested()
def test_token_disposing_works():
source = CancellationTokenSource()
with source as disposable:
token = source.token
disposable.dispose()
assert token.is_cancellation_requested
with pytest.raises(ObjectDisposedException):
token.throw_if_cancellation_requested()
def test_token_cancellation_register_works():
called: List[bool] = []
source = CancellationTokenSource()
with source:
token = source.token
token.register(lambda: called.append(True))
assert not called
assert called
def test_token_cancellation_register_unregister_works():
called: List[bool] = []
source = CancellationTokenSource()
with source as _:
token = source.token
registration = token.register(lambda: called.append(True))
assert not called
registration.dispose()
assert not called
def test_token_cancelled_register_throws():
called: List[bool] = []
source = CancellationTokenSource.cancelled_source()
with pytest.raises(ObjectDisposedException):
with source:
token = source.token
token.register(lambda: called.append(True))
assert not called
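# Hedged usage sketch (not part of the test suite above): the pattern these
# tests exercise, using only the expression.system API imported at the top.
def _example_cancellation_pattern():
    calls: List[bool] = []
    source = CancellationTokenSource()
    with source:
        token = source.token
        registration = token.register(lambda: calls.append(True))
        if not token.is_cancellation_requested:
            pass  # do cancellable work here
        registration.dispose()  # unregister once the callback is no longer wanted
    # leaving the `with` block cancels and disposes the source, as asserted above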
|
py | 7df95118c18b89d98e77a991c01286a4f2d66051 | """
Script that trains graph-conv models on Tox21 dataset.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import numpy as np
import six
from deepchem.models.tensorgraph import TensorGraph
from deepchem.metrics import to_one_hot
from deepchem.feat.mol_graphs import ConvMol
from deepchem.models.tensorgraph.layers import Input, GraphConvLayer, BatchNormLayer, GraphPoolLayer, Dense, GraphGather, \
SoftMax, SoftMaxCrossEntropy, Concat, WeightedError, Label, Weights, Feature
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from tox21_datasets import load_tox21
model_dir = "/tmp/graph_conv"
def graph_conv_model(batch_size, tasks):
model = TensorGraph(
model_dir=model_dir, batch_size=batch_size, use_queue=False)
atom_features = Feature(shape=(None, 75))
degree_slice = Feature(shape=(None, 2), dtype=tf.int32)
membership = Feature(shape=(None,), dtype=tf.int32)
deg_adjs = []
for i in range(0, 10 + 1):
deg_adj = Feature(shape=(None, i + 1), dtype=tf.int32)
deg_adjs.append(deg_adj)
gc1 = GraphConvLayer(
64,
activation_fn=tf.nn.relu,
in_layers=[atom_features, degree_slice, membership] + deg_adjs)
batch_norm1 = BatchNormLayer(in_layers=[gc1])
gp1 = GraphPoolLayer(
in_layers=[batch_norm1, degree_slice, membership] + deg_adjs)
gc2 = GraphConvLayer(
64,
activation_fn=tf.nn.relu,
in_layers=[gp1, degree_slice, membership] + deg_adjs)
batch_norm2 = BatchNormLayer(in_layers=[gc2])
gp2 = GraphPoolLayer(
in_layers=[batch_norm2, degree_slice, membership] + deg_adjs)
dense = Dense(out_channels=128, activation_fn=None, in_layers=[gp2])
batch_norm3 = BatchNormLayer(in_layers=[dense])
gg1 = GraphGather(
batch_size=batch_size,
activation_fn=tf.nn.tanh,
in_layers=[batch_norm3, degree_slice, membership] + deg_adjs)
costs = []
labels = []
for task in tasks:
classification = Dense(out_channels=2, activation_fn=None, in_layers=[gg1])
softmax = SoftMax(in_layers=[classification])
model.add_output(softmax)
label = Label(shape=(None, 2))
labels.append(label)
cost = SoftMaxCrossEntropy(in_layers=[label, classification])
costs.append(cost)
entropy = Concat(in_layers=costs)
task_weights = Weights(shape=(None, len(tasks)))
loss = WeightedError(in_layers=[entropy, task_weights])
model.set_loss(loss)
def feed_dict_generator(dataset, batch_size, epochs=1):
for epoch in range(epochs):
for ind, (X_b, y_b, w_b, ids_b) in enumerate(
dataset.iterbatches(batch_size, pad_batches=True)):
d = {}
for index, label in enumerate(labels):
d[label] = to_one_hot(y_b[:, index])
d[task_weights] = w_b
multiConvMol = ConvMol.agglomerate_mols(X_b)
d[atom_features] = multiConvMol.get_atom_features()
d[degree_slice] = multiConvMol.deg_slice
d[membership] = multiConvMol.membership
for i in range(1, len(multiConvMol.get_deg_adjacency_lists())):
d[deg_adjs[i - 1]] = multiConvMol.get_deg_adjacency_lists()[i]
yield d
return model, feed_dict_generator, labels, task_weights
# Load Tox21 dataset
tox21_tasks, tox21_datasets, transformers = load_tox21(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = tox21_datasets
print(train_dataset.data_dir)
print(valid_dataset.data_dir)
# Fit models
metric = dc.metrics.Metric(
dc.metrics.roc_auc_score, np.mean, mode="classification")
# Batch size of models
batch_size = 50
model, generator, labels, task_weights = graph_conv_model(batch_size,
tox21_tasks)
model.fit_generator(generator(train_dataset, batch_size, epochs=10))
print("Evaluating model")
train_scores = model.evaluate_generator(
generator(train_dataset, batch_size), [metric],
transformers,
labels,
weights=[task_weights])
valid_scores = model.evaluate_generator(
generator(valid_dataset, batch_size), [metric],
transformers,
labels,
weights=[task_weights])
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
|
py | 7df95204a2665d799650b102826dd1b3008e2475 | from instance_selector.registry import registry
from instance_selector.selectors import ModelAdminInstanceSelector
from wagtail.contrib.modeladmin.mixins import ThumbnailMixin
from wagtail.contrib.modeladmin.options import ModelAdmin
from wagtail.contrib.modeladmin.views import CreateView
from crm.models import Company, City
class CreateCompanyView(CreateView):
def get_initial(self):
initial = super().get_initial()
initial['location'] = City.most_popular()
return initial
class CompanyAdmin(ThumbnailMixin, ModelAdmin):
model = Company
menu_label = 'Companies' # ditch this to use verbose_name_plural from model
menu_icon = 'fa-building' # change as required
menu_order = 200 # will put in 3rd place (000 being 1st, 100 2nd)
add_to_settings_menu = False # or True to add your model to the Settings sub-menu
exclude_from_explorer = False # or True to exclude pages of this type from Wagtail's explorer view
list_display = ('admin_thumb', 'name', 'location')
list_filter = ('location', )
search_fields = ('name',)
thumb_image_field_name = 'logo'
thumb_default = '/static/img/default_company.png'
create_view_class = CreateCompanyView
class CompanySelector(ModelAdminInstanceSelector):
model_admin = CompanyAdmin()
def get_instance_display_image_url(self, instance):
if instance and instance.logo:
return instance.logo.file.url
registry.register_instance_selector(Company, CompanySelector())
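# Hedged sketch of the consuming side: wagtail-instance-selector documents an
# InstanceSelectorPanel for edit forms. The import path and the page model below
# are assumptions for illustration; adjust them to the installed package layout.
#
# from instance_selector.edit_handlers import InstanceSelectorPanel
#
# class ProjectPage(Page):  # hypothetical model with a FK named "company"
#     content_panels = Page.content_panels + [
#         InstanceSelectorPanel("company"),
#     ]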
|
py | 7df9523d1c3785a598e9aa0c96f079dffb250c7c | """
python-social-auth application, allows OpenId or OAuth user
registration/authentication just adding a few configurations.
"""
version = (0, 2, 2)
extra = ''
__version__ = '.'.join(map(str, version)) + extra
|
py | 7df95240c4fd5d1332164315aff903e00b5e9296 | #
# Simplify a symbol
#
import pybamm
import autograd.numpy as np
import numbers
from scipy.sparse import issparse
def simplify_if_constant(symbol):
"""
Utility function to simplify an expression tree if it evaluates to a constant
scalar, vector or matrix
"""
if symbol.is_constant():
result = symbol.evaluate_ignoring_errors()
if result is not None:
if isinstance(result, numbers.Number) or (
isinstance(result, np.ndarray) and result.ndim == 0
):
return pybamm.Scalar(result)
elif isinstance(result, np.ndarray) or issparse(result):
if result.ndim == 1 or result.shape[1] == 1:
return pybamm.Vector(result)
else:
return pybamm.Matrix(result)
return symbol
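# Minimal illustration (a sketch, not part of pybamm itself): a fully constant
# tree collapses to a single Scalar, a non-constant tree is returned unchanged.
def _simplify_if_constant_demo():
    constant_tree = pybamm.Scalar(1) + pybamm.Scalar(2)
    assert isinstance(simplify_if_constant(constant_tree), pybamm.Scalar)  # Scalar(3)
    parametric_tree = pybamm.Scalar(2) * pybamm.Parameter("p")
    assert simplify_if_constant(parametric_tree) is parametric_tree  # left as-is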
def simplify_addition_subtraction(myclass, left, right):
"""
if children are associative (addition, subtraction, etc) then try to find groups of
constant children (that produce a value) and simplify them to a single term
The purpose of this function is to simplify expressions like (1 + (1 + p)), which
should be simplified to (2 + p). The former expression consists of an Addition, with
a left child of Scalar type, and a right child of another Addition containing a
Scalar and a Parameter. For this case, this function will first flatten the
expression to a list of the bottom level children (i.e. [Scalar(1), Scalar(1),
Parameter(p)]), and their operators (i.e. [None, Addition, Addition]), and then
combine all the constant children (i.e. Scalar(1) and Scalar(1)) to a single child
(i.e. Scalar(2))
Note that this function will flatten the expression tree until a symbol is found
that is not either an Addition or a Subtraction, so this function would simplify
(3 - (2 + a*b*c)) to (1 - a*b*c)
This function is useful if different child expressions contain non-constant terms
that prevent them from being simplified, so for example (1 + a) + (b - 2) - (6 + c)
will be simplified to (-7 + a + b - c)
Parameters
----------
myclass: class
the binary operator class (pybamm.Addition or pybamm.Subtraction) operating on
children left and right
left: derived from pybamm.Symbol
the left child of the binary operator
right: derived from pybamm.Symbol
the right child of the binary operator
"""
numerator = []
numerator_types = []
def flatten(this_class, left_child, right_child, in_subtraction):
"""
recursive function to flatten a term involving only additions or subtractions
outputs to lists `numerator` and `numerator_types`
Note that domains are all set to [] as we do not wish to consider domains once
simplifications are applied
e.g.
(1 + 2) + 3 -> [1, 2, 3] and [None, Addition, Addition]
1 + (2 - 3) -> [1, 2, 3] and [None, Addition, Subtraction]
1 - (2 + 3) -> [1, 2, 3] and [None, Subtraction, Subtraction]
(1 + 2) - (2 + 3) -> [1, 2, 2, 3] and [None, Addition, Subtraction, Subtraction]
"""
left_child.domain = []
right_child.domain = []
for side, child in [("left", left_child), ("right", right_child)]:
if isinstance(child, (pybamm.Addition, pybamm.Subtraction)):
left, right = child.orphans
flatten(child.__class__, left, right, in_subtraction)
else:
numerator.append(child)
if in_subtraction is None:
numerator_types.append(None)
elif in_subtraction:
numerator_types.append(pybamm.Subtraction)
else:
numerator_types.append(pybamm.Addition)
if side == "left":
if in_subtraction is None:
in_subtraction = this_class == pybamm.Subtraction
elif this_class == pybamm.Subtraction:
in_subtraction = not in_subtraction
flatten(myclass, left, right, None)
def partition_by_constant(source, types):
"""
function to partition a source list of symbols into those that return a constant
value, and those that do not
"""
constant = []
nonconstant = []
constant_types = []
nonconstant_types = []
for child, op_type in zip(source, types):
if child.is_constant() and child.evaluate_ignoring_errors() is not None:
constant.append(child)
constant_types.append(op_type)
else:
nonconstant.append(child)
nonconstant_types.append(op_type)
return constant, nonconstant, constant_types, nonconstant_types
def fold_add_subtract(array, types):
"""
performs a fold operation on the children nodes in `array`, using the operator
types given in `types`
e.g. if the input was:
array = [1, 2, 3, 4]
types = [None, +, -, +]
the result would be 1 + 2 - 3 + 4
"""
ret = None
if len(array) > 0:
if types[0] in [None, pybamm.Addition]:
ret = array[0]
elif types[0] == pybamm.Subtraction:
ret = -array[0]
for child, typ in zip(array[1:], types[1:]):
if typ == pybamm.Addition:
ret += child
else:
ret -= child
return ret
# simplify identical terms
i = 0
while i < len(numerator) - 1:
if isinstance(numerator[i], pybamm.Multiplication) and isinstance(
numerator[i].children[0], pybamm.Scalar
):
term_i = numerator[i].orphans[1]
term_i_count = numerator[i].children[0].evaluate()
else:
term_i = numerator[i]
term_i_count = 1
# loop through rest of numerator counting up and deleting identical terms
for j, (term_j, typ_j) in enumerate(
zip(numerator[i + 1 :], numerator_types[i + 1 :])
):
if isinstance(term_j, pybamm.Multiplication) and isinstance(
term_j.left, pybamm.Scalar
):
factor = term_j.left.evaluate()
term_j = term_j.right
else:
factor = 1
if term_i.id == term_j.id:
if typ_j == pybamm.Addition:
term_i_count += factor
elif typ_j == pybamm.Subtraction:
term_i_count -= factor
del numerator[j + i + 1]
del numerator_types[j + i + 1]
# replace this term by count * term if count > 1
if term_i_count != 1:
# simplify the result just in case
# (e.g. count == 0, or can fold constant into the term)
numerator[i] = (term_i_count * term_i).simplify()
i += 1
# can reorder the numerator
(constant, nonconstant, constant_types, nonconstant_types) = partition_by_constant(
numerator, numerator_types
)
constant_expr = fold_add_subtract(constant, constant_types)
nonconstant_expr = fold_add_subtract(nonconstant, nonconstant_types)
if constant_expr is not None and nonconstant_expr is None:
# might be no nonconstants
new_expression = pybamm.simplify_if_constant(constant_expr)
elif constant_expr is None and nonconstant_expr is not None:
# might be no constants
new_expression = nonconstant_expr
else:
# or mix of both
constant_expr = pybamm.simplify_if_constant(constant_expr)
new_expression = constant_expr + nonconstant_expr
return new_expression
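# Sketch of the docstring's example (illustrative, not called anywhere in this
# module): the constants of (1 + (1 + p)) are gathered into a single Scalar.
def _addition_simplify_demo():
    p = pybamm.Parameter("p")
    left, right = pybamm.Scalar(1), pybamm.Scalar(1) + p
    return simplify_addition_subtraction(pybamm.Addition, left, right)  # ~ (2 + p)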
def simplify_multiplication_division(myclass, left, right):
"""
if children are associative (multiply, division, etc) then try to find
groups of constant children (that produce a value) and simplify them
The purpose of this function is to simplify expressions of the type (1 * c / 2),
which should simplify to (0.5 * c). The former expression consists of a Division,
with a left child of a Multiplication containing a Scalar and a Parameter, and a
right child consisting of a Scalar. For this case, this function will first flatten
the expression to a list of the bottom level children on the numerator (i.e.
[Scalar(1), Parameter(c)]) and their operators (i.e. [None, Multiplication]), as
well as those children on the denominator (i.e. [Scalar(2)]). After this, all the
constant children on the numerator and denominator (i.e. Scalar(1) and Scalar(2))
will be combined appropriately, in this case to Scalar(0.5), and combined with the
nonconstant children (i.e. Parameter(c))
Note that this function will flatten the expression tree until a symbol is found
that is not either a Multiplication, Division or MatrixMultiplication, so this
function would simplify (3*(1 + d)*2) to (6 * (1 + d))
As well as Multiplication and Division, this function can handle
MatrixMultiplication. If any MatrixMultiplications are found on the
numerator/denominator, no reordering of children is done to find groups of constant
children. In this case only neighbouring constant children on the numerator are
simplified
Parameters
----------
myclass: class
the binary operator class (pybamm.Multiplication, pybamm.Division or
pybamm.MatrixMultiplication) operating on
children left and right
left: derived from pybamm.Symbol
the left child of the binary operator
right: derived from pybamm.Symbol
the right child of the binary operator
"""
numerator = []
denominator = []
numerator_types = []
denominator_types = []
# recursive function to flatten a term involving only multiplications or divisions
def flatten(
previous_class,
this_class,
left_child,
right_child,
in_numerator,
in_matrix_multiplication,
):
"""
recursive function to flatten a term involving only Multiplication, Division or
MatrixMultiplication. Keeps track of whether a term is on the numerator or
denominator. For those terms on the numerator, their operator type
(Multiplication or MatrixMultiplication) is stored
Note that multiplications *within* matrix multiplications, e.g. a@(b*c), are not
flattened into a@b*c, as this would be incorrect (see #253)
Note that the domains are all set to [] as we do not wish to consider domains
once simplifications are applied
outputs to lists `numerator`, `denominator` and `numerator_types`
e.g.
expression numerator denominator numerator_types
(1 * 2) / 3 -> [1, 2] [3] [None, Multiplication]
(1 @ 2) / 3 -> [1, 2] [3] [None, MatrixMultiplication]
1 / (c / 2) -> [1, 2] [c] [None, Multiplication]
"""
left_child.domain = []
right_child.domain = []
for side, child in [("left", left_child), ("right", right_child)]:
if side == "left":
other_child = right_child
else:
other_child = left_child
# flatten if all matrix multiplications
# flatten if one child is a matrix mult if the other term is a scalar or
# vector
if isinstance(child, pybamm.MatrixMultiplication) and (
in_matrix_multiplication
or isinstance(other_child, (pybamm.Scalar, pybamm.Vector))
):
left, right = child.orphans
if (
side == "left"
and this_class == pybamm.Multiplication
and isinstance(other_child, pybamm.Vector)
):
# change (m @ v1) * v2 -> v2 * (m @ v1) so we can simplify correctly
# (#341)
numerator.append(other_child)
numerator_types.append(previous_class)
flatten(
this_class, child.__class__, left, right, in_numerator, True
)
break
if side == "left":
flatten(
previous_class, child.__class__, left, right, in_numerator, True
)
else:
flatten(
this_class, child.__class__, left, right, in_numerator, True
)
# flatten if all multiplies and divides
elif (
isinstance(child, (pybamm.Multiplication, pybamm.Division))
and not in_matrix_multiplication
):
left, right = child.orphans
if side == "left":
flatten(
previous_class,
child.__class__,
left,
right,
in_numerator,
False,
)
else:
flatten(
this_class, child.__class__, left, right, in_numerator, False
)
# everything else don't flatten
else:
if in_numerator:
numerator.append(child)
if side == "left":
numerator_types.append(previous_class)
else:
numerator_types.append(this_class)
else:
denominator.append(child)
if side == "left":
denominator_types.append(previous_class)
else:
denominator_types.append(this_class)
if side == "left" and this_class == pybamm.Division:
in_numerator = not in_numerator
flatten(None, myclass, left, right, True, myclass == pybamm.MatrixMultiplication)
# check if there is a matrix multiply in the numerator (if so we can't reorder it)
numerator_has_mat_mul = any(
[typ == pybamm.MatrixMultiplication for typ in numerator_types + [myclass]]
)
denominator_has_mat_mul = any(
[typ == pybamm.MatrixMultiplication for typ in denominator_types]
)
def partition_by_constant(source, types=None):
"""
function to partition a source list of symbols into those that return a constant
value, and those that do not
"""
constant = []
nonconstant = []
for child in source:
if child.is_constant() and child.evaluate_ignoring_errors() is not None:
constant.append(child)
else:
nonconstant.append(child)
return constant, nonconstant
def fold_multiply(array, types=None):
"""
performs a fold operation on the children nodes in `array`, using the operator
types given in `types`
e.g. if the input was:
array = [1, 2, 3, 4]
types = [None, *, @, *]
the result would be 1 * 2 @ 3 * 4
"""
ret = None
if len(array) > 0:
if types is None:
ret = array[0]
for child in array[1:]:
ret *= child
else:
# work backwards through 'array' and 'types' so that multiplications
# and matrix multiplications are performed in the most efficient order
ret = array[-1]
for child, typ in zip(reversed(array[:-1]), reversed(types[1:])):
if typ == pybamm.MatrixMultiplication:
ret = child @ ret
else:
ret = child * ret
return ret
def simplify_with_mat_mul(nodes, types):
new_nodes = [nodes[0]]
new_types = [types[0]]
for child, typ in zip(nodes[1:], types[1:]):
if (
new_nodes[-1].is_constant()
and child.is_constant()
and new_nodes[-1].evaluate_ignoring_errors() is not None
and child.evaluate_ignoring_errors() is not None
):
if typ == pybamm.MatrixMultiplication:
new_nodes[-1] = new_nodes[-1] @ child
else:
new_nodes[-1] *= child
new_nodes[-1] = pybamm.simplify_if_constant(new_nodes[-1])
else:
new_nodes.append(child)
new_types.append(typ)
new_nodes = fold_multiply(new_nodes, new_types)
return new_nodes
if numerator_has_mat_mul and denominator_has_mat_mul:
new_numerator = simplify_with_mat_mul(numerator, numerator_types)
new_denominator = simplify_with_mat_mul(denominator, denominator_types)
if new_denominator is None:
result = new_numerator
else:
result = new_numerator / new_denominator
elif numerator_has_mat_mul and not denominator_has_mat_mul:
# can reorder the denominator since no matrix multiplies
denominator_constant, denominator_nonconst = partition_by_constant(denominator)
constant_denominator_expr = fold_multiply(denominator_constant)
nonconst_denominator_expr = fold_multiply(denominator_nonconst)
# fold constant denominator expr into numerator if possible
if constant_denominator_expr is not None:
for i, child in enumerate(numerator):
if child.is_constant() and child.evaluate_ignoring_errors() is not None:
numerator[i] = child / constant_denominator_expr
numerator[i] = pybamm.simplify_if_constant(numerator[i])
constant_denominator_expr = None
new_numerator = simplify_with_mat_mul(numerator, numerator_types)
# result = constant_numerator_expr * new_numerator / nonconst_denominator_expr
# need to take into account that terms can be None
if constant_denominator_expr is None:
if nonconst_denominator_expr is None:
result = new_numerator
else:
result = new_numerator / nonconst_denominator_expr
else:
# invert constant denominator terms for speed
constant_numerator_expr = pybamm.simplify_if_constant(
1 / constant_denominator_expr
)
if nonconst_denominator_expr is None:
result = constant_numerator_expr * new_numerator
else:
result = (
constant_numerator_expr * new_numerator / nonconst_denominator_expr
)
elif not numerator_has_mat_mul and denominator_has_mat_mul:
new_denominator = simplify_with_mat_mul(denominator, denominator_types)
# can reorder the numerator since no matrix multiplies
numerator_constant, numerator_nonconst = partition_by_constant(numerator)
constant_numerator_expr = fold_multiply(numerator_constant)
nonconst_numerator_expr = fold_multiply(numerator_nonconst)
# result = constant_numerator_expr * nonconst_numerator_expr / new_denominator
# need to take into account that terms can be None
if constant_numerator_expr is None:
result = nonconst_numerator_expr / new_denominator
else:
constant_numerator_expr = pybamm.simplify_if_constant(
constant_numerator_expr
)
if nonconst_numerator_expr is None:
result = constant_numerator_expr / new_denominator
else:
result = (
constant_numerator_expr * nonconst_numerator_expr / new_denominator
)
else:
# can reorder the numerator since no matrix multiplies
numerator_constant, numerator_nonconstant = partition_by_constant(numerator)
constant_numerator_expr = fold_multiply(numerator_constant)
nonconst_numerator_expr = fold_multiply(numerator_nonconstant)
# can reorder the denominator since no matrix multiplies
denominator_constant, denominator_nonconst = partition_by_constant(denominator)
constant_denominator_expr = fold_multiply(denominator_constant)
nonconst_denominator_expr = fold_multiply(denominator_nonconst)
if constant_numerator_expr is not None:
if constant_denominator_expr is not None:
constant_numerator_expr = pybamm.simplify_if_constant(
constant_numerator_expr / constant_denominator_expr
)
else:
constant_numerator_expr = pybamm.simplify_if_constant(
constant_numerator_expr
)
else:
if constant_denominator_expr is not None:
constant_numerator_expr = pybamm.simplify_if_constant(
1 / constant_denominator_expr
)
# result = constant_numerator_expr * nonconst_numerator_expr
# / nonconst_denominator_expr
# need to take into account that terms can be None
if constant_numerator_expr is None:
result = nonconst_numerator_expr
else:
if nonconst_numerator_expr is None:
result = constant_numerator_expr
else:
result = constant_numerator_expr * nonconst_numerator_expr
if nonconst_denominator_expr is not None:
result = result / nonconst_denominator_expr
return result
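# Sketch of the docstring's example (illustrative only): the constants of
# (1 * c / 2) on the numerator and denominator combine into a 0.5 factor.
def _multiplication_simplify_demo():
    c = pybamm.Parameter("c")
    numerator = pybamm.Scalar(1) * c
    return simplify_multiplication_division(
        pybamm.Division, numerator, pybamm.Scalar(2)
    )  # ~ (0.5 * c)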
class Simplification(object):
def __init__(self, simplified_symbols=None):
self._simplified_symbols = simplified_symbols or {}
def simplify(self, symbol):
"""
This function recurses down the tree, applying any simplifications defined in
classes derived from pybamm.Symbol. E.g. any expression multiplied by a
pybamm.Scalar(0) will be simplified to a pybamm.Scalar(0).
If a symbol has already been simplified, the stored value is returned.
Parameters
----------
symbol : :class:`pybamm.Symbol`
The symbol to simplify
Returns
-------
:class:`pybamm.Symbol`
Simplified symbol
"""
try:
return self._simplified_symbols[symbol.id]
except KeyError:
simplified_symbol = self._simplify(symbol)
self._simplified_symbols[symbol.id] = simplified_symbol
return simplified_symbol
def _simplify(self, symbol):
""" See :meth:`Simplification.simplify()`. """
symbol.domain = []
symbol.auxiliary_domains = {}
if isinstance(symbol, pybamm.BinaryOperator):
left, right = symbol.children
# process children
new_left = self.simplify(left)
new_right = self.simplify(right)
# _binary_simplify defined in derived classes for specific rules
new_symbol = symbol._binary_simplify(new_left, new_right)
elif isinstance(symbol, pybamm.UnaryOperator):
new_child = self.simplify(symbol.child)
new_symbol = symbol._unary_simplify(new_child)
elif isinstance(symbol, pybamm.Function):
simplified_children = [None] * len(symbol.children)
for i, child in enumerate(symbol.children):
simplified_children[i] = self.simplify(child)
new_symbol = symbol._function_simplify(simplified_children)
elif isinstance(symbol, pybamm.Concatenation):
new_children = [self.simplify(child) for child in symbol.children]
new_symbol = symbol._concatenation_simplify(new_children)
else:
# Backup option: return new copy of the object
try:
new_symbol = symbol.new_copy()
return new_symbol
except NotImplementedError:
raise NotImplementedError(
"Cannot simplify symbol of type '{}'".format(type(symbol))
)
return simplify_if_constant(new_symbol)
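# Typical entry point, as a hedged sketch: one Simplification instance caches
# results by symbol id, so shared sub-trees are only simplified once.
# simplifier = Simplification()
# simplified_symbol = simplifier.simplify(my_symbol)  # my_symbol: any pybamm.Symbol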
|
py | 7df9529603398d9a5fc4b6c1ee59c64d79171d21 | # SPDX-FileCopyrightText: 2022 Tim Hawes <[email protected]>
#
# SPDX-License-Identifier: MIT
from django.apps import AppConfig
class DiscorduserConfig(AppConfig):
name = "discorduser"
|
py | 7df952f7db4b8c3eecab4f83616f4bf7275cb71c | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Built-in imports
import argparse
# 3rd party imports
import yaml
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
# from scipy import optimize
from scipy.constants import elementary_charge, mu_0
from pyrfu.mms import rotate_tensor
from pyrfu.pyrf import cotrans, cross, norm, resample, trace
from pyrfu.plot import (make_labels, plot_line, plot_magnetosphere,
plot_spectr, span_tint)
# Local imports
from jfs.utils import find_feeps_clusters
from jfs.plot import show_tint, plot_tetrahedron
from jfs.load import (load_r_mmsx, load_eb_mmsx, load_fpi_def_omni_mmsx,
load_fpi_moments_mmsx, load_hpca_moments,
load_feeps_dpf_omni_mmsx)
__author__ = "Louis Richard"
__email__ = "[email protected]"
__copyright__ = "Copyright 2021"
__license__ = "Apache 2.0"
plt.style.use("scientific")
def residual(a, n_h, n_he):
return np.sum((n_h.data - a * n_he.data) ** 2) / len(n_h.data)
def main(args):
# Read time intervals
with open(args.config) as f:
cfg = yaml.load(f, Loader=yaml.FullLoader)
tint = cfg["tints"]["overview"]
# %%
# Load spacecraft location
r_gse_avg, r_gsm_avg, r_gsm_sep = load_r_mmsx(tint, cfg)
print(r_gsm_avg)
# %%
b_gse, e_gse = load_eb_mmsx(tint, cfg)
b_gsm, e_gsm = [cotrans(field, "gse>gsm") for field in [b_gse, e_gse]]
# %% Load ion and electron differential energy flux from FPI-DIS and
# FPI-DES
def_omni_fpi_i, def_omni_fpi_e = load_fpi_def_omni_mmsx(tint, cfg)
# %% Load ion and electron moments from FPI-DIS and FPI-DES
moments_i, moments_e = load_fpi_moments_mmsx(tint, cfg)
n_i, v_gse_i, t_gse_i, p_gse_i = moments_i
n_e, v_gse_e, t_gse_e, p_gse_e = moments_e
# Transform ion bulk velocity to GSM coordinates and compute scalar
# temperature from temperature tensor
v_gsm_i = cotrans(v_gse_i, "gse>gsm")
t_fac_i = rotate_tensor(t_gse_i, "fac", b_gse)
t_i = trace(t_fac_i) / 3
# Transform electron bulk velocity to GSM coordinates and compute scalar
# temperature from temperature tensor
t_fac_e = rotate_tensor(t_gse_e, "fac", b_gse)
t_e = trace(t_fac_e) / 3
p_e = 1e15 * elementary_charge * n_e.data * t_e # nPa
# %% Load H+ and He++ moments from HPCA
# moments_p, moments_a = load_hpca_moments_mmsx(tint, cfg)
moments_p, moments_a = load_hpca_moments(tint, 2, cfg)
n_p, v_gsm_p, t_p, p_p = moments_p
n_a, v_gsm_a, t_a, p_a = moments_a
# %% Compute proton to alpha density ratio
# p_opt = optimize.minimize(residual, 10, args=(n_p, n_a))
# r_ap = p_opt.x[0]
# %% Compute motional electric field
evxb_gsm_i = 1e3 * 1e3 * 1e-9 * cross(v_gsm_i, b_gsm)
evxb_gsm_p = 1e3 * 1e3 * 1e-9 * cross(v_gsm_p, b_gsm)
# %% Compute plasma beta
p_tot = 1e-9 * p_p + 1e-9 * resample(p_a, p_p) # Plasma pressure
p_mag = 1e-18 * norm(b_gsm) ** 2 / (2 * mu_0) # Magnetic pressure
beta_ = p_tot / resample(p_mag, p_p) # plasma beta
# %% Load high energy ion and electron differential particle flux from
# FEEPS
dpf_omni_feeps_i, dpf_omni_feeps_e = load_feeps_dpf_omni_mmsx(tint, cfg)
# %%
times, tints = find_feeps_clusters(dpf_omni_feeps_i)
t_idx = [503, 531, 608, 682, 797, 927]
n_tid = len(t_idx) + 1
# %%
fig = plt.figure(figsize=(12, 17.2))
gsp1 = fig.add_gridspec(20, 1, top=.95, bottom=.05, left=.1, right=.9,
hspace=0.1)
gsp10 = gsp1[:3].subgridspec(1, 3, hspace=0)
gsp11 = gsp1[4:].subgridspec(8, 1, hspace=0)
# Create axes in the grid spec
axs10 = [fig.add_subplot(gsp10[i]) for i in range(2)]
axs01 = fig.add_subplot(gsp10[2], projection="3d")
axs11 = [fig.add_subplot(gsp11[i]) for i in range(8)]
# Plot MMS tetrahedron configuration
axs01 = plot_tetrahedron(axs01, r_gsm_sep)
axs01.set_xlabel("$X_{GSM}$ [km]")
axs01.set_ylabel("$Y_{GSM}$ [km]")
axs01.set_zlabel("$Z_{GSM}$ [km]")
axs01.legend(loc="upper right", ncol=2, bbox_to_anchor=(0, 1.1, 1, .2),
bbox_transform=axs01.transAxes)
field_lines = [False, True]
for i, y_axis in zip(range(2), ["$Y_{GSM}$ [$R_E$]", "$Z_{GSM}$ [$R_E$]"]):
plot_magnetosphere(axs10[i], tint, field_lines=field_lines[i])
axs10[i].invert_xaxis()
axs10[i].plot(r_gsm_avg[0] / 6371, r_gsm_avg[i + 1] / 6371,
marker="^", color="tab:red", linestyle="", label="MMS")
axs10[i].set_xlim([-30, 15])
axs10[i].set_ylim([-20, 20])
axs10[i].set_aspect("equal")
axs10[i].set_xlabel("$X_{GSM}$ [$R_E$]")
axs10[i].set_ylabel(y_axis)
axs10[i].invert_xaxis()
# Plot magnetic field in GSM coordinates
plot_line(axs11[0], b_gsm, zorder=n_tid)
axs11[0].legend(["$B_x$", "$B_y$", "$B_z$", "|B|"], frameon=True, ncol=3,
loc="upper right")
axs11[0].set_ylabel("$B$" + "\n" + "[nT]")
axs11[0].set_ylim([-22, 15])
for t_ in times:
show_tint(axs11[0], t_, "tab:purple")
# Plot cross-tail electric field (measured and motional)
"""
plot_line(axs11[1], e_gsm[:, 1], color="tab:green", label="$E_y$",
zorder=n_tid + 0)
plot_line(axs11[1], -evxb_gsm_i[:, 1], color="tab:blue", zorder=n_tid + 1,
label="$(-V_i \\times B)_y$")
plot_line(axs11[1], -evxb_gsm_p[:, 1], color="tab:cyan", zorder=n_tid + 1,
label="$(-V_{H^+} \\times B)_y$")
axs11[1].set_ylabel("$E_y$" + "\n" + "[mV m$^{-1}$]")
axs11[1].set_ylim([-19, 19])
axs11[1].legend(loc="upper right", ncol=3, frameon=True)
"""
# Plot the FPI-DIS and HPCA H+ bulk velocity in GSM coordinates
comps_ = ["x", "y", "z"]
colors_i = ["tab:blue", "tab:green", "tab:red"]
for i, c_fpi in zip(range(3), colors_i):
plot_line(axs11[1], v_gsm_i[:, i], zorder=n_tid + i, color=c_fpi,
label=f"$V_{{i{comps_[i]}}}$")
axs11[1].legend(ncol=3, frameon=True, loc="upper right")
axs11[1].set_ylim([-800, 2200])
axs11[1].set_ylabel("$V_i$" + "\n" + "[km s$^{-1}$]")
# Plot FPI-DIS, HPCA H+ and scaled HPCA He++ number densities
plot_line(axs11[2], n_i, zorder=n_tid + 0, label="$n_i$")
# plot_line(axs11[3], n_p, color="tab:pink", zorder=n_tid + 2,
# label="$n_{H^+}$")
# plot_line(axs11[3], r_ap * n_a, zorder=n_tid + 2, color="tab:green",
# label=f"{r_ap:3.0f}$n_{{He^{{++}}}}$")
# axs11[3].legend(frameon=True, loc="upper right", ncol=3)
axs11[2].set_ylabel("$n_i$" + "\n" + "[cm$^{-3}$]")
# Plot FPI-DIS and HPCA H+ temperatures
"""
plot_line(axs11[4], 1e-3 * t_i, zorder=n_tid + 0, label="$T_i$")
# plot_line(axs11[4], 1e-3 * t_p, zorder=n_tid + 3, label="$T_{H^+}$",
# color="tab:pink")
axs11[4].set_ylabel("$T_{p}$" + "\n" + "[keV]")
axs11[4].set_yscale("log")
axs11[4].set_ylim([1.2, 90])
axs11[4].legend(loc="upper right", frameon=True, ncol=3)
"""
# Plot plasma beta
plot_line(axs11[3], beta_, color="tab:blue")
axs11[3].set_yscale("log")
axs11[3].set_ylim([2e-2, 1.3e3])
axs11[3].set_ylabel("$\\beta_i$" + "\n" + " ")
axs11[3].axhspan(.02, .1, color="black", alpha=.2)
axs11[3].axhspan(.1, .7, color="tab:red", alpha=.2)
axs11[3].axhspan(.7, 1.3e3, color="tab:green", alpha=.2)
axs11[3].text(.93, .85, "CPS", color="tab:green",
transform=axs11[3].transAxes)
axs11[3].text(.93, .2, "PSBL", color="tab:red",
transform=axs11[3].transAxes)
axs11[3].text(.93, .03, "Lobe", color="k", transform=axs11[3].transAxes)
axs11[4], caxs4 = plot_spectr(axs11[4], dpf_omni_feeps_i[:, 2:],
yscale="log", cscale="log",
clim=[2e-1, 2e2], cmap="Spectral_r")
axs11[4].set_ylabel("$K_i$" + "\n" + "[keV]")
caxs4.set_ylabel("Diff. Flux" + "\n" + "[(cm$^{2}$ s sr keV)$^{-1}$]")
axs11[5], caxs5 = plot_spectr(axs11[5], def_omni_fpi_i[:, 13:],
yscale="log", cscale="log", clim=[1e3, 1e6],
cmap="Spectral_r")
plot_line(axs11[5], t_i, zorder=n_tid + 0, label="$T_i$")
axs11[5].set_ylabel("$K_i$" + "\n" + "[eV]")
axs11[5].legend(loc="lower right", frameon=True, ncol=3)
axs11[5].grid(visible=False, which="major")
caxs5.set_ylabel("DEF" + "\n" + "[(cm$^{2}$ s sr)$^{-1}$]")
axs11[6], caxs6 = plot_spectr(axs11[6], dpf_omni_feeps_e[:, 1:11],
yscale="log", cscale="log", clim=[2e0, 2e3],
cmap="Spectral_r")
axs11[6].set_ylabel("$K_e$" + "\n" + "[keV]")
caxs6.set_ylabel("Diff. Flux" + "\n" + "[(cm$^{2}$ s sr keV)$^{-1}$]")
axs11[7], caxs7 = plot_spectr(axs11[7], def_omni_fpi_e[:, 9:],
yscale="log", cscale="log",
clim=[42.8e3, 42.8e6], cmap="Spectral_r")
# axs11[4].axhline(def_omni_fpi_e.energy.data[7])
axs11[7].set_ylabel("$K_e$" + "\n" + "[eV]")
caxs7.set_ylabel("DEF" + "\n" + "[(cm$^{2}$ s sr)$^{-1}$]")
fpi_time = def_omni_fpi_i.time.data
for i, t_ in enumerate(t_idx):
for ax in axs11[:4]:
ax.axvline(fpi_time[t_], linestyle=":", color="k", zorder=i + 1,
linewidth=1.2)
for ax in axs11[4:]:
ax.axvline(fpi_time[t_], linestyle=":", color="k", linewidth=1.2)
axs11[-1].get_shared_x_axes().join(*axs11)
fig.align_ylabels(axs11)
for ax in axs11[:-1]:
ax.xaxis.set_ticklabels([])
axs11[-1].set_xlim(mdates.datestr2num(tint))
make_labels(axs10, [.028, .9], pad=0)
make_labels([axs01], [.028, .9], pad=2)
make_labels(axs11, [.008, .86], pad=3)
for t_ in tints:
span_tint(axs11, t_, ec="k", fc="tab:purple", alpha=.2)
plt.savefig("./figures/figure_1.pdf")
plt.savefig("./figures/figure_1.png", dpi=200)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--config",
help="Path to the configuration file (.yml)",
required=True, type=str)
main(parser.parse_args())
|
py | 7df95306c92b78f3214f616a82a7ca00992302f7 | ##############################################################################
# Written by: Brian G. Merrell <[email protected]>
# Date: 01/26/2009
# Application wrapper for splitter.py
# Used by the splitter-*.py tests
##############################################################################
'Application wrapper for splitter'
from strongwind import *
from os.path import exists
from sys import path
def launchSplitter(exe=None):
'Launch splitter with accessibility enabled and return a splitter object. Log an error and return None if something goes wrong'
if exe is None:
# make sure we can find the sample application
harness_dir = path[0]
i = harness_dir.rfind("/")
j = harness_dir[:i].rfind("/")
uiaqa_path = harness_dir[:j]
exe = '%s/samples/winforms/splitter_horizontal.py' % uiaqa_path
if not exists(exe):
raise IOError("Could not find file %s" % exe)
args = [exe]
(app, subproc) = cache.launchApplication(args=args, name='ipy', wait=config.LONG_DELAY)
splitter = Splitter(app, subproc)
cache.addApplication(splitter)
splitter.splitterFrame.app = splitter
return splitter
# class to represent the application
class Splitter(accessibles.Application):
def __init__(self, accessible, subproc=None):
'Get a reference to the splitter window'
super(Splitter, self).__init__(accessible, subproc)
self.findFrame('Horizontal Splitter', logName='Splitter')
|
py | 7df95329d895ae54cfc2ccc27fae645b8b9f2d92 | """
Module: 'uasyncio.core' on esp8266 v1.10
"""
# MCU: (sysname='esp8266', nodename='esp8266', release='2.2.0-dev(9422289)', version='v1.10-8-g8b7039d7d on 2019-01-26', machine='ESP module with ESP8266')
# Stubber: 1.1.0
class CancelledError:
''
DEBUG = 0
class EventLoop:
''
def call_at_():
pass
def call_later():
pass
def call_later_ms():
pass
def call_soon():
pass
def close():
pass
def create_task():
pass
def run_forever():
pass
def run_until_complete():
pass
def stop():
pass
def time():
pass
def wait():
pass
class IORead:
''
def handle():
pass
class IOReadDone:
''
def handle():
pass
class IOWrite:
''
def handle():
pass
class IOWriteDone:
''
def handle():
pass
class PollEventLoop:
''
def add_reader():
pass
def add_writer():
pass
def call_at_():
pass
def call_later():
pass
def call_later_ms():
pass
def call_soon():
pass
def close():
pass
def create_task():
pass
def remove_reader():
pass
def remove_writer():
pass
def run_forever():
pass
def run_until_complete():
pass
def stop():
pass
def time():
pass
def wait():
pass
class SleepMs:
''
def handle():
pass
class StopLoop:
''
def handle():
pass
class StreamReader:
''
aclose = None
read = None
readexactly = None
readline = None
class StreamWriter:
''
aclose = None
awrite = None
awriteiter = None
def get_extra_info():
pass
class SysCall:
''
def handle():
pass
class SysCall1:
''
def handle():
pass
def Task():
pass
class TimeoutError:
''
class TimeoutObj:
''
_socket = None
def cancel():
pass
core = None
def coroutine():
pass
def ensure_future():
pass
def get_event_loop():
pass
log = None
open_connection = None
select = None
def set_debug():
pass
sleep = None
sleep_ms = None
start_server = None
time = None
class type_gen:
''
def close():
pass
def pend_throw():
pass
def send():
pass
def throw():
pass
uasyncio = None
ucollections = None
uerrno = None
utimeq = None
def wait_for():
pass
wait_for_ms = None
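# Usage sketch on a real board (this stub only mirrors the API surface, so the
# lines below are illustrative rather than executable against this module):
# loop = get_event_loop()
# loop.create_task(my_coroutine())  # my_coroutine: a user-supplied coroutine
# loop.run_forever()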
|
py | 7df954d71c0bf60e2ce71be13bb9763497eac5a1 | import numpy as np
import tensorflow as tf
from tensorflow.python.framework import ops
class FlipGradientBuilder:
def __init__(self):
self.num_calls = 0
def __call__(self, x, lw=1.0):
grad_name = "FlipGradient%d" % self.num_calls
@ops.RegisterGradient(grad_name)
def _flip_gradients(op, grad):
return [tf.negative(grad) * lw]
g = tf.get_default_graph()
with g.gradient_override_map({"Identity": grad_name}):
y = tf.identity(x)
self.num_calls += 1
return y
flip_gradient = FlipGradientBuilder()
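# Hedged usage sketch (TF1 graph mode, matching the builder above): the forward
# pass is an identity, while the backward pass multiplies incoming gradients by
# -lw, the gradient-reversal trick used in domain-adversarial training.
def _flip_gradient_demo():
    features = tf.placeholder(tf.float32, shape=[None, 8], name="demo_features")
    return flip_gradient(features, lw=0.5)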
def naive_birnn(inputs, seq_len, num_units, reuse=tf.AUTO_REUSE, name="naive_birnn"):
with tf.variable_scope(name, reuse=reuse, dtype=tf.float32):
cell_fw = tf.nn.rnn_cell.LSTMCell(num_units)
cell_bw = tf.nn.rnn_cell.LSTMCell(num_units)
(o_fw, o_bw), _ = tf.nn.bidirectional_dynamic_rnn(cell_fw, cell_bw, inputs, seq_len, dtype=tf.float32)
return o_fw, o_bw
def char_meta_birnn(inputs, seq_len, start_index, end_index, num_units, num_layers, dim, drop_rate=0.0, training=False,
activation=tf.tanh, reuse=tf.AUTO_REUSE, name="char_meta_birnn"):
with tf.variable_scope(name, reuse=reuse, dtype=tf.float32):
outputs = inputs
o_fw, o_bw = None, None
for layer in range(num_layers):
o_fw, o_bw = naive_birnn(outputs, seq_len, num_units, reuse=reuse, name="naive_birnn_%d" % layer)
outputs = tf.layers.dropout(tf.concat([o_fw, o_bw], axis=-1), rate=drop_rate, training=training)
fw_start, fw_end = tf.gather_nd(o_fw, start_index), tf.gather_nd(o_fw, end_index)
bw_start, bw_end = tf.gather_nd(o_bw, start_index), tf.gather_nd(o_bw, end_index)
outputs = tf.concat([fw_start, fw_end, bw_start, bw_end], axis=-1)
outputs = tf.layers.dense(outputs, units=dim, use_bias=True, activation=activation, reuse=reuse, name="dense")
outputs = tf.layers.dropout(outputs, rate=drop_rate, training=training)
return outputs
def char_cnn_hw(inputs, kernel_sizes, filters, dim, hw_layers, padding="VALID", activation=tf.nn.relu, use_bias=True,
hw_activation=tf.tanh, reuse=tf.AUTO_REUSE, name="char_cnn_hw"):
with tf.variable_scope(name, reuse=reuse, dtype=tf.float32):
outputs = []
for i, (kernel_size, filter_size) in enumerate(zip(kernel_sizes, filters)):
weight = tf.get_variable("filter_%d" % i, shape=[1, kernel_size, dim, filter_size], dtype=tf.float32)
bias = tf.get_variable("bias_%d" % i, shape=[filter_size], dtype=tf.float32)
conv = tf.nn.conv2d(inputs, weight, strides=[1, 1, 1, 1], padding=padding, name="conv_%d" % i)
conv = tf.nn.bias_add(conv, bias=bias)
pool = tf.reduce_max(activation(conv), axis=2)
outputs.append(pool)
outputs = tf.concat(values=outputs, axis=-1)
for i in range(hw_layers):
outputs = highway_layer(outputs, num_unit=sum(filters), activation=hw_activation, use_bias=use_bias,
reuse=reuse, name="highway_%d" % i)
return outputs
def bi_rnn(inputs, seq_len, training, num_units, drop_rate=0.0, activation=tf.tanh, concat=True, use_peepholes=False,
reuse=tf.AUTO_REUSE, name="bi_rnn"):
with tf.variable_scope(name, reuse=reuse, dtype=tf.float32):
cell_fw = tf.nn.rnn_cell.LSTMCell(num_units, use_peepholes=use_peepholes, name="forward_lstm_cell")
cell_bw = tf.nn.rnn_cell.LSTMCell(num_units, use_peepholes=use_peepholes, name="backward_lstm_cell")
outputs, _ = tf.nn.bidirectional_dynamic_rnn(cell_fw, cell_bw, inputs, seq_len, dtype=tf.float32)
if concat:
outputs = tf.concat(outputs, axis=-1)
outputs = tf.layers.dropout(outputs, rate=drop_rate, training=training)
outputs = tf.layers.dense(outputs, units=2 * num_units, use_bias=True, activation=activation, name="dense")
else:
output1 = tf.layers.dense(outputs[0], units=num_units, use_bias=True, reuse=reuse, name="forward_dense")
output1 = tf.layers.dropout(output1, rate=drop_rate, training=training)
output2 = tf.layers.dense(outputs[1], units=num_units, use_bias=True, reuse=reuse, name="backward_dense")
output2 = tf.layers.dropout(output2, rate=drop_rate, training=training)
bias = tf.get_variable(name="bias", shape=[num_units], dtype=tf.float32, trainable=True)
outputs = activation(tf.nn.bias_add(output1 + output2, bias=bias))
return outputs
def highway_layer(inputs, num_unit, activation, use_bias=True, reuse=tf.AUTO_REUSE, name="highway"):
with tf.variable_scope(name, reuse=reuse, dtype=tf.float32):
trans_gate = tf.layers.dense(inputs, units=num_unit, use_bias=use_bias, activation=tf.sigmoid,
name="trans_gate")
hidden = tf.layers.dense(inputs, units=num_unit, use_bias=use_bias, activation=activation, name="hidden")
carry_gate = tf.subtract(1.0, trans_gate, name="carry_gate")
output = tf.add(tf.multiply(hidden, trans_gate), tf.multiply(inputs, carry_gate), name="output")
return output
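# Restating the highway layer above as an equation: with transform gate
# T(x) = sigmoid(W_T x + b_T) and candidate H(x) = f(W_H x + b_H),
#     output = H(x) * T(x) + x * (1 - T(x)),
# i.e. the layer interpolates between transforming its input and carrying it through.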
def gate_add(inputs1, inputs2, use_bias=True, reuse=tf.AUTO_REUSE, name="gate_add"):
with tf.variable_scope(name, reuse=reuse, dtype=tf.float32):
num_units = inputs2.get_shape().as_list()[-1]
trans_gate = tf.layers.dense(inputs2, units=num_units, use_bias=use_bias, activation=tf.sigmoid, name="trans")
carry_gate = tf.subtract(1.0, trans_gate, name="carry")
output = tf.add(tf.multiply(inputs1, trans_gate), tf.multiply(inputs2, carry_gate), name="output")
return output
def crf_layer(inputs, labels, seq_len, num_units, reuse=tf.AUTO_REUSE, name="crf"):
with tf.variable_scope(name, reuse=reuse, dtype=tf.float32):
transition = tf.get_variable(name="transition", shape=[num_units, num_units], dtype=tf.float32)
crf_loss, transition = tf.contrib.crf.crf_log_likelihood(inputs, labels, seq_len, transition)
return transition, tf.reduce_mean(-crf_loss)
def embedding_lookup(tokens, token_size, token_dim, token2vec=None, token_weight=None, tune_emb=True, norm_emb=True,
project=False, new_dim=None, adversarial_training=False, reuse=tf.AUTO_REUSE, name="lookup_table"):
with tf.variable_scope(name, reuse=reuse):
if token2vec is not None:
table = tf.Variable(initial_value=token2vec, name="table", dtype=tf.float32, trainable=tune_emb)
unk = tf.get_variable(name="unk", shape=[1, token_dim], trainable=True, dtype=tf.float32)
table = tf.concat([unk, table], axis=0)
else:
table = tf.get_variable(name="table", shape=[token_size - 1, token_dim], dtype=tf.float32, trainable=True)
if adversarial_training and norm_emb and token_weight is not None:
weights = tf.constant(np.load(token_weight)["embeddings"], dtype=tf.float32, name="weight",
shape=[token_size - 1, 1])
table = emb_normalize(table, weights)
table = tf.concat([tf.zeros([1, token_dim], dtype=tf.float32), table], axis=0)
token_emb = tf.nn.embedding_lookup(table, tokens)
if project:
new_dim = token_dim if new_dim is None else new_dim
token_emb = tf.layers.dense(token_emb, units=new_dim, use_bias=True, activation=None, reuse=tf.AUTO_REUSE,
name="token_project")
return token_emb
def emb_normalize(emb, weights):
mean = tf.reduce_sum(weights * emb, axis=0, keepdims=True)
var = tf.reduce_sum(weights * tf.pow(emb - mean, 2.0), axis=0, keepdims=True)
stddev = tf.sqrt(1e-6 + var)
return (emb - mean) / stddev
def add_perturbation(emb, loss, epsilon=5.0):
"""Adds gradient to embedding and recomputes classification loss."""
grad, = tf.gradients(loss, emb, aggregation_method=tf.AggregationMethod.EXPERIMENTAL_ACCUMULATE_N)
grad = tf.stop_gradient(grad)
alpha = tf.reduce_max(tf.abs(grad), axis=(1, 2), keepdims=True) + 1e-12 # l2 scale
l2_norm = alpha * tf.sqrt(tf.reduce_sum(tf.pow(grad / alpha, 2), axis=(1, 2), keepdims=True) + 1e-6)
norm_grad = grad / l2_norm
perturb = epsilon * norm_grad
return emb + perturb
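# Sketch of how this is typically wired into adversarial training (an assumed
# training loop, not taken from this file; compute_loss is hypothetical):
# clean_loss = compute_loss(emb)
# adv_loss = compute_loss(add_perturbation(emb, clean_loss, epsilon=5.0))
# total_loss = clean_loss + adv_loss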
def self_attention(inputs, return_alphas=False, project=True, reuse=tf.AUTO_REUSE, name="self_attention"):
with tf.variable_scope(name, reuse=reuse, dtype=tf.float32):
hidden_size = inputs.shape[-1].value
if project:
x = tf.layers.dense(inputs, units=hidden_size, use_bias=False, activation=tf.nn.tanh)
else:
x = inputs
weight = tf.get_variable(name="weight", shape=[hidden_size, 1], dtype=tf.float32,
initializer=tf.random_normal_initializer(stddev=0.01, seed=1227))
x = tf.tensordot(x, weight, axes=1)
alphas = tf.nn.softmax(x, axis=-2)
output = tf.matmul(tf.transpose(inputs, perm=[0, 2, 1]), alphas)
output = tf.squeeze(output, axis=-1)
if return_alphas:
return output, alphas
else:
return output
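# Restating the attention above: with scores s_t = w^T tanh(W h_t) per timestep,
# weights a = softmax(s) over the time axis, the output is the weighted sum
# sum_t a_t * h_t, collapsing [batch, time, hidden] to [batch, hidden].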
def focal_loss(logits, labels, seq_len=None, weights=None, alpha=0.25, gamma=2):
label_shape = logits.shape[-1].value
if label_shape == 2:
logits = tf.nn.softmax(logits, axis=1) # logits = tf.nn.sigmoid(logits)
else:
logits = tf.nn.softmax(logits, axis=1)
if labels.get_shape().ndims < logits.get_shape().ndims:
labels = tf.one_hot(labels, depth=logits.shape[-1].value, axis=-1)
labels = tf.cast(labels, dtype=tf.float32)
zeros = tf.zeros_like(logits, dtype=logits.dtype)
pos_logits_prob = tf.where(labels > zeros, labels - logits, zeros)
neg_logits_prob = tf.where(labels > zeros, zeros, logits)
if label_shape == 2:
cross_entropy = - alpha * (pos_logits_prob ** gamma) * tf.log(tf.clip_by_value(logits, 1e-8, 1.0)) \
- (1 - alpha) * (neg_logits_prob ** gamma) * tf.log(tf.clip_by_value(1.0 - logits, 1e-8, 1.0))
else:
cross_entropy = - (pos_logits_prob ** gamma) * tf.log(tf.clip_by_value(logits, 1e-8, 1.0)) \
- (neg_logits_prob ** gamma) * tf.log(tf.clip_by_value(1.0 - logits, 1e-8, 1.0))
if weights is not None:
if weights.get_shape().ndims < logits.get_shape().ndims:
weights = tf.expand_dims(weights, axis=-1)
cross_entropy = cross_entropy * weights
if seq_len is not None:
mask = tf.sequence_mask(seq_len, maxlen=tf.reduce_max(seq_len), dtype=tf.float32)
cross_entropy = tf.reduce_sum(cross_entropy, axis=-1)
cross_entropy = tf.reduce_sum(cross_entropy * mask) / tf.reduce_sum(mask)
else:
cross_entropy = tf.reduce_mean(cross_entropy)
return cross_entropy
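# For reference, the alpha-balanced focal loss this computes in the binary case:
#     FL(p_t) = -alpha * (1 - p_t)**gamma * log(p_t)
# where p_t is the predicted probability of the true class; the (1 - p_t)**gamma
# factor down-weights examples that are already classified confidently.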
def discriminator(features, labels, num_class, grad_rev_rate=0.7, alpha=0.25, gamma=2, mode=0, reuse=tf.AUTO_REUSE,
name="discriminator"):
if mode not in [0, 1, 2]:
raise ValueError("Unknown mode!!!!")
with tf.variable_scope(name, reuse=reuse, dtype=tf.float32):
if mode == 0:
return None
else:
feat = flip_gradient(features, lw=grad_rev_rate)
outputs = self_attention(feat, project=True, reuse=reuse, name="self_attention")
logits = tf.layers.dense(outputs, units=num_class, use_bias=True, reuse=reuse, name="discriminator_dense")
if mode == 1: # normal discriminator
loss = tf.nn.softmax_cross_entropy_with_logits_v2(logits=logits, labels=labels)
loss = tf.reduce_mean(loss)
else: # GRAD
loss = focal_loss(logits, labels, alpha=alpha, gamma=gamma)
return loss
def random_mask(prob, mask_shape):
rand = tf.random_uniform(mask_shape, dtype=tf.float32)
ones = tf.ones(mask_shape, dtype=tf.float32)
zeros = tf.zeros(mask_shape, dtype=tf.float32)
prob = ones * prob
return tf.where(rand < prob, ones, zeros)
def create_optimizer(cost, lr, decay_step=10, lr_decay=0.99994, opt_name="adam", grad_clip=5.0, name="optimizer"):
with tf.variable_scope(name):
global_step = tf.Variable(0, trainable=False, name='global_step')
learning_rate = tf.train.exponential_decay(lr, global_step, decay_step, lr_decay)
if opt_name.lower() == 'adagrad':
optimizer = tf.train.AdagradOptimizer(learning_rate=learning_rate)
elif opt_name.lower() == 'sgd':
optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
elif opt_name.lower() == 'rmsprop':
optimizer = tf.train.RMSPropOptimizer(learning_rate=learning_rate)
elif opt_name.lower() == 'adadelta':
optimizer = tf.train.AdadeltaOptimizer(learning_rate=learning_rate)
elif opt_name.lower() == "lazyadam":
optimizer = tf.contrib.opt.LazyAdamOptimizer(learning_rate=learning_rate)
else: # default adam optimizer
if opt_name.lower() != 'adam':
print('Unsupported optimizing method {}. Using default adam optimizer.'.format(opt_name.lower()))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
if grad_clip is not None and grad_clip > 0:
grads, vs = zip(*optimizer.compute_gradients(cost))
grads, _ = tf.clip_by_global_norm(grads, grad_clip)
train_op = optimizer.apply_gradients(zip(grads, vs), global_step=global_step)
else:
train_op = optimizer.minimize(cost, global_step=global_step)
return train_op
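# Hedged usage sketch (TF1 session style, matching this file; batch_feed is a
# user-built feed dict):
# train_op = create_optimizer(loss, lr=1e-3, opt_name="adam", grad_clip=5.0)
# with tf.Session() as sess:
#     sess.run(tf.global_variables_initializer())
#     sess.run(train_op, feed_dict=batch_feed)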
def viterbi_decode(logits, trans_params, seq_len):
viterbi_sequences = []
for logit, lens in zip(logits, seq_len):
logit = logit[:lens] # keep only the valid steps
viterbi_seq, viterbi_score = tf.contrib.crf.viterbi_decode(logit, trans_params)
viterbi_sequences += [viterbi_seq]
return viterbi_sequences
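# Usage sketch: fetch the logits and the learned transition matrix first, then
# decode (unary_scores, transition and batch_* are assumed names here):
# logits_np, trans_np = sess.run([unary_scores, transition], feed_dict=batch_feed)
# best_paths = viterbi_decode(logits_np, trans_np, batch_seq_len)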
|
py | 7df954d925f9c34a2585ecb11d629d350d4fbd7a | import sys
from os.path import isdir, join, isfile, abspath, dirname
from os import listdir
from os import makedirs
from human_body_prior.body_model.body_model import BodyModel
assert len(sys.argv) == 2
amass_path = sys.argv[1]
assert isdir(amass_path), amass_path
# ============================
# step 1: unpack the folders
# ============================
folders = [f for f in listdir(amass_path) if isdir(join(amass_path, f))]
if len(folders) == 0:
print("UNZIP")
# unzip all files
zipfiles = [f for f in listdir(amass_path) if f.endswith(".tar.bz2")]
for f in tqdm(zipfiles):
fname = join(amass_path, f)
command = ["tar", "-xf", fname, "-C", amass_path]
result = run(command, stdout=PIPE, stderr=PIPE, universal_newlines=True)
print(result.returncode, result.stdout, result.stderr)
|
py | 7df955284efb6c9872716668b3ee8e035bf3f671 | from lib import game as g
import pygame
pygame.init()
audio = pygame.mixer.Sound('assets/song.mp3')
audio.play()
g.play()
|
py | 7df9555c1d62f823591924551d15a63c3a68fa0b | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nitro.resource.base.base_resource import base_resource
from nitro.resource.base.base_resource import base_response
from nitro.service.options import options
from nitro.exception.nitro_exception import nitro_exception
from nitro.util.nitro_util import nitro_util
class authenticationsamlaction(base_resource) :
"""Configuration for AAA Saml action resource."""
def __init__(self) :
self._name = ""
self._samlidpcertname = ""
self._samlsigningcertname = ""
self._samlredirecturl = ""
self._samlacsindex = 0
self._samluserfield = ""
self._samlrejectunsignedassertion = ""
self._samlissuername = ""
self._samltwofactor = ""
self._defaultauthenticationgroup = ""
self._attribute1 = ""
self._attribute2 = ""
self._attribute3 = ""
self._attribute4 = ""
self._attribute5 = ""
self._attribute6 = ""
self._attribute7 = ""
self._attribute8 = ""
self._attribute9 = ""
self._attribute10 = ""
self._attribute11 = ""
self._attribute12 = ""
self._attribute13 = ""
self._attribute14 = ""
self._attribute15 = ""
self._attribute16 = ""
self._signaturealg = ""
self._digestmethod = ""
self._requestedauthncontext = ""
self._authnctxclassref = []
self._samlbinding = ""
self._attributeconsumingserviceindex = 0
self._sendthumbprint = ""
self._enforceusername = ""
self._logouturl = ""
self._artifactresolutionserviceurl = ""
self._skewtime = 0
self.___count = 0
@property
def name(self) :
"""Name for the SAML server profile (action).
Must begin with a letter, number, or the underscore character (_), and must contain only letters, numbers, and the hyphen (-), period (.) pound (#), space ( ), at (@), equals (=), colon (:), and underscore characters. Cannot be changed after SAML profile is created.
The following requirement applies only to the NetScaler CLI:
If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my authentication action" or 'my authentication action').<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
"""Name for the SAML server profile (action).
Must begin with a letter, number, or the underscore character (_), and must contain only letters, numbers, and the hyphen (-), period (.) pound (#), space ( ), at (@), equals (=), colon (:), and underscore characters. Cannot be changed after SAML profile is created.
The following requirement applies only to the NetScaler CLI:
If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my authentication action" or 'my authentication action').<br/>Minimum length = 1
:param name:
"""
try :
self._name = name
except Exception as e:
raise e
@property
def samlidpcertname(self) :
"""Name of the SAML server as given in that server's SSL certificate.<br/>Minimum length = 1."""
try :
return self._samlidpcertname
except Exception as e:
raise e
@samlidpcertname.setter
def samlidpcertname(self, samlidpcertname) :
"""Name of the SAML server as given in that server's SSL certificate.<br/>Minimum length = 1
:param samlidpcertname:
"""
try :
self._samlidpcertname = samlidpcertname
except Exception as e:
raise e
@property
def samlsigningcertname(self) :
"""Name of the signing authority as given in the SAML server's SSL certificate.<br/>Minimum length = 1."""
try :
return self._samlsigningcertname
except Exception as e:
raise e
@samlsigningcertname.setter
def samlsigningcertname(self, samlsigningcertname) :
"""Name of the signing authority as given in the SAML server's SSL certificate.<br/>Minimum length = 1
:param samlsigningcertname:
"""
try :
self._samlsigningcertname = samlsigningcertname
except Exception as e:
raise e
@property
def samlredirecturl(self) :
"""URL to which users are redirected for authentication.<br/>Minimum length = 1."""
try :
return self._samlredirecturl
except Exception as e:
raise e
@samlredirecturl.setter
def samlredirecturl(self, samlredirecturl) :
"""URL to which users are redirected for authentication.<br/>Minimum length = 1
:param samlredirecturl:
"""
try :
self._samlredirecturl = samlredirecturl
except Exception as e:
raise e
@property
def samlacsindex(self) :
"""Index/ID of the metadata entry corresponding to this configuration.<br/>Default value: 255<br/>Maximum length = 255."""
try :
return self._samlacsindex
except Exception as e:
raise e
@samlacsindex.setter
def samlacsindex(self, samlacsindex) :
"""Index/ID of the metadata entry corresponding to this configuration.<br/>Default value: 255<br/>Maximum length = 255
:param samlacsindex:
"""
try :
self._samlacsindex = samlacsindex
except Exception as e:
raise e
@property
def samluserfield(self) :
"""SAML user ID, as given in the SAML assertion.<br/>Minimum length = 1."""
try :
return self._samluserfield
except Exception as e:
raise e
@samluserfield.setter
def samluserfield(self, samluserfield) :
"""SAML user ID, as given in the SAML assertion.<br/>Minimum length = 1
:param samluserfield:
"""
try :
self._samluserfield = samluserfield
except Exception as e:
raise e
@property
def samlrejectunsignedassertion(self) :
"""Reject unsigned SAML assertions.<br/>Default value: ON<br/>Possible values = ON, OFF, STRICT."""
try :
return self._samlrejectunsignedassertion
except Exception as e:
raise e
@samlrejectunsignedassertion.setter
def samlrejectunsignedassertion(self, samlrejectunsignedassertion) :
"""Reject unsigned SAML assertions.<br/>Default value: ON<br/>Possible values = ON, OFF, STRICT
:param samlrejectunsignedassertion:
"""
try :
self._samlrejectunsignedassertion = samlrejectunsignedassertion
except Exception as e:
raise e
@property
def samlissuername(self) :
"""The name to be used in requests sent from Netscaler to IdP to uniquely identify Netscaler.<br/>Minimum length = 1."""
try :
return self._samlissuername
except Exception as e:
raise e
@samlissuername.setter
def samlissuername(self, samlissuername) :
"""The name to be used in requests sent from Netscaler to IdP to uniquely identify Netscaler.<br/>Minimum length = 1
:param samlissuername:
"""
try :
self._samlissuername = samlissuername
except Exception as e:
raise e
@property
def samltwofactor(self) :
"""Option to enable second factor after SAML.<br/>Default value: OFF<br/>Possible values = ON, OFF."""
try :
return self._samltwofactor
except Exception as e:
raise e
@samltwofactor.setter
def samltwofactor(self, samltwofactor) :
"""Option to enable second factor after SAML.<br/>Default value: OFF<br/>Possible values = ON, OFF
:param samltwofactor:
"""
try :
self._samltwofactor = samltwofactor
except Exception as e:
raise e
@property
def defaultauthenticationgroup(self) :
"""This is the default group that is chosen when the authentication succeeds in addition to extracted groups."""
try :
return self._defaultauthenticationgroup
except Exception as e:
raise e
@defaultauthenticationgroup.setter
def defaultauthenticationgroup(self, defaultauthenticationgroup) :
"""This is the default group that is chosen when the authentication succeeds in addition to extracted groups.
:param defaultauthenticationgroup:
"""
try :
self._defaultauthenticationgroup = defaultauthenticationgroup
except Exception as e:
raise e
@property
def attribute1(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute1."""
try :
return self._attribute1
except Exception as e:
raise e
@attribute1.setter
def attribute1(self, attribute1) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute1.
:param attribute1:
"""
try :
self._attribute1 = attribute1
except Exception as e:
raise e
@property
def attribute2(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute2."""
try :
return self._attribute2
except Exception as e:
raise e
@attribute2.setter
def attribute2(self, attribute2) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute2.
:param attribute2:
"""
try :
self._attribute2 = attribute2
except Exception as e:
raise e
@property
def attribute3(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute3."""
try :
return self._attribute3
except Exception as e:
raise e
@attribute3.setter
def attribute3(self, attribute3) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute3.
:param attribute3:
"""
try :
self._attribute3 = attribute3
except Exception as e:
raise e
@property
def attribute4(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute4."""
try :
return self._attribute4
except Exception as e:
raise e
@attribute4.setter
def attribute4(self, attribute4) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute4.
:param attribute4:
"""
try :
self._attribute4 = attribute4
except Exception as e:
raise e
@property
def attribute5(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute5."""
try :
return self._attribute5
except Exception as e:
raise e
@attribute5.setter
def attribute5(self, attribute5) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute5.
:param attribute5:
"""
try :
self._attribute5 = attribute5
except Exception as e:
raise e
@property
def attribute6(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute6."""
try :
return self._attribute6
except Exception as e:
raise e
@attribute6.setter
def attribute6(self, attribute6) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute6.
:param attribute6:
"""
try :
self._attribute6 = attribute6
except Exception as e:
raise e
@property
def attribute7(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute7."""
try :
return self._attribute7
except Exception as e:
raise e
@attribute7.setter
def attribute7(self, attribute7) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute7.
:param attribute7:
"""
try :
self._attribute7 = attribute7
except Exception as e:
raise e
@property
def attribute8(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute8."""
try :
return self._attribute8
except Exception as e:
raise e
@attribute8.setter
def attribute8(self, attribute8) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute8.
:param attribute8:
"""
try :
self._attribute8 = attribute8
except Exception as e:
raise e
@property
def attribute9(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute9."""
try :
return self._attribute9
except Exception as e:
raise e
@attribute9.setter
def attribute9(self, attribute9) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute9.
:param attribute9:
"""
try :
self._attribute9 = attribute9
except Exception as e:
raise e
@property
def attribute10(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute10."""
try :
return self._attribute10
except Exception as e:
raise e
@attribute10.setter
def attribute10(self, attribute10) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute10.
:param attribute10:
"""
try :
self._attribute10 = attribute10
except Exception as e:
raise e
@property
def attribute11(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute11."""
try :
return self._attribute11
except Exception as e:
raise e
@attribute11.setter
def attribute11(self, attribute11) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute11.
:param attribute11:
"""
try :
self._attribute11 = attribute11
except Exception as e:
raise e
@property
def attribute12(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute12."""
try :
return self._attribute12
except Exception as e:
raise e
@attribute12.setter
def attribute12(self, attribute12) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute12.
:param attribute12:
"""
try :
self._attribute12 = attribute12
except Exception as e:
raise e
@property
def attribute13(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute13."""
try :
return self._attribute13
except Exception as e:
raise e
@attribute13.setter
def attribute13(self, attribute13) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute13.
:param attribute13:
"""
try :
self._attribute13 = attribute13
except Exception as e:
raise e
@property
def attribute14(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute14."""
try :
return self._attribute14
except Exception as e:
raise e
@attribute14.setter
def attribute14(self, attribute14) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute14.
:param attribute14:
"""
try :
self._attribute14 = attribute14
except Exception as e:
raise e
@property
def attribute15(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute15."""
try :
return self._attribute15
except Exception as e:
raise e
@attribute15.setter
def attribute15(self, attribute15) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute15.
:param attribute15:
"""
try :
self._attribute15 = attribute15
except Exception as e:
raise e
@property
def attribute16(self) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute16."""
try :
return self._attribute16
except Exception as e:
raise e
@attribute16.setter
def attribute16(self, attribute16) :
"""Name of the attribute in SAML Assertion whose value needs to be extracted and stored as attribute16.
:param attribute16:
"""
try :
self._attribute16 = attribute16
except Exception as e:
raise e
@property
def signaturealg(self) :
"""Algorithm to be used to sign/verify SAML transactions.<br/>Default value: RSA-SHA1<br/>Possible values = RSA-SHA1, RSA-SHA256."""
try :
return self._signaturealg
except Exception as e:
raise e
@signaturealg.setter
def signaturealg(self, signaturealg) :
"""Algorithm to be used to sign/verify SAML transactions.<br/>Default value: RSA-SHA1<br/>Possible values = RSA-SHA1, RSA-SHA256
:param signaturealg:
"""
try :
self._signaturealg = signaturealg
except Exception as e:
raise e
@property
def digestmethod(self) :
"""Algorithm to be used to compute/verify digest for SAML transactions.<br/>Default value: SHA1<br/>Possible values = SHA1, SHA256."""
try :
return self._digestmethod
except Exception as e:
raise e
@digestmethod.setter
def digestmethod(self, digestmethod) :
"""Algorithm to be used to compute/verify digest for SAML transactions.<br/>Default value: SHA1<br/>Possible values = SHA1, SHA256
:param digestmethod:
"""
try :
self._digestmethod = digestmethod
except Exception as e:
raise e
@property
def requestedauthncontext(self) :
"""This element specifies the authentication context requirements of authentication statements returned in the response.<br/>Default value: exact<br/>Possible values = exact, minimum, maximum, better."""
try :
return self._requestedauthncontext
except Exception as e:
raise e
@requestedauthncontext.setter
def requestedauthncontext(self, requestedauthncontext) :
"""This element specifies the authentication context requirements of authentication statements returned in the response.<br/>Default value: exact<br/>Possible values = exact, minimum, maximum, better
:param requestedauthncontext:
"""
try :
self._requestedauthncontext = requestedauthncontext
except Exception as e:
raise e
@property
def authnctxclassref(self) :
"""This element specifies the authentication class types that are requested from IdP (IdentityProvider).
InternetProtocol: This is applicable when a principal is authenticated through the use of a provided IP address.
InternetProtocolPassword: This is applicable when a principal is authenticated through the use of a provided IP address, in addition to a username/password.
Kerberos: This is applicable when the principal has authenticated using a password to a local authentication authority, in order to acquire a Kerberos ticket.
MobileOneFactorUnregistered: This indicates authentication of the mobile device without requiring explicit end-user interaction.
MobileTwoFactorUnregistered: This indicates two-factor based authentication during mobile customer registration process, such as secure device and user PIN.
MobileOneFactorContract: Reflects mobile contract customer registration procedures and a single factor authentication.
MobileTwoFactorContract: Reflects mobile contract customer registration procedures and a two-factor based authentication.
Password: This class is applicable when a principal authenticates using password over unprotected http session.
PasswordProtectedTransport: This class is applicable when a principal authenticates to an authentication authority through the presentation of a password over a protected session.
PreviousSession: This class is applicable when a principal had authenticated to an authentication authority at some point in the past using any authentication context.
X509: This indicates that the principal authenticated by means of a digital signature where the key was validated as part of an X.509 Public Key Infrastructure.
PGP: This indicates that the principal authenticated by means of a digital signature where the key was validated as part of a PGP Public Key Infrastructure.
SPKI: This indicates that the principal authenticated by means of a digital signature where the key was validated via an SPKI Infrastructure.
XMLDSig: This indicates that the principal authenticated by means of a digital signature according to the processing rules specified in the XML Digital Signature specification.
Smartcard: This indicates that the principal has authenticated using smartcard.
SmartcardPKI: This class is applicable when a principal authenticates to an authentication authority through a two-factor authentication mechanism using a smartcard with enclosed private key and a PIN.
SoftwarePKI: This class is applicable when a principal uses an X.509 certificate stored in software to authenticate to the authentication authority.
Telephony: This class is used to indicate that the principal authenticated via the provision of a fixed-line telephone number, transported via a telephony protocol such as ADSL.
NomadTelephony: Indicates that the principal is "roaming" and authenticates via the means of the line number, a user suffix, and a password element.
PersonalTelephony: This class is used to indicate that the principal authenticated via the provision of a fixed-line telephone.
AuthenticatedTelephony: Indicates that the principal authenticated via the means of the line number, a user suffix, and a password element.
SecureRemotePassword: This class is applicable when the authentication was performed by means of Secure Remote Password.
TLSClient: This class indicates that the principal authenticated by means of a client certificate, secured with the SSL/TLS transport.
TimeSyncToken: This is applicable when a principal authenticates through a time synchronization token.
Unspecified: This indicates that the authentication was performed by unspecified means.
Windows: This indicates that Windows integrated authentication is utilized for authentication.<br/>Possible values = InternetProtocol, InternetProtocolPassword, Kerberos, MobileOneFactorUnregistered, MobileTwoFactorUnregistered, MobileOneFactorContract, MobileTwoFactorContract, Password, PasswordProtectedTransport, PreviousSession, X509, PGP, SPKI, XMLDSig, Smartcard, SmartcardPKI, SoftwarePKI, Telephony, NomadTelephony, PersonalTelephony, AuthenticatedTelephony, SecureRemotePassword, TLSClient, TimeSyncToken, Unspecified, Windows.
"""
try :
return self._authnctxclassref
except Exception as e:
raise e
@authnctxclassref.setter
def authnctxclassref(self, authnctxclassref) :
"""This element specifies the authentication class types that are requested from IdP (IdentityProvider).
InternetProtocol: This is applicable when a principal is authenticated through the use of a provided IP address.
InternetProtocolPassword: This is applicable when a principal is authenticated through the use of a provided IP address, in addition to a username/password.
Kerberos: This is applicable when the principal has authenticated using a password to a local authentication authority, in order to acquire a Kerberos ticket.
MobileOneFactorUnregistered: This indicates authentication of the mobile device without requiring explicit end-user interaction.
MobileTwoFactorUnregistered: This indicates two-factor based authentication during mobile customer registration process, such as secure device and user PIN.
MobileOneFactorContract: Reflects mobile contract customer registration procedures and a single factor authentication.
MobileTwoFactorContract: Reflects mobile contract customer registration procedures and a two-factor based authentication.
Password: This class is applicable when a principal authenticates using password over unprotected http session.
PasswordProtectedTransport: This class is applicable when a principal authenticates to an authentication authority through the presentation of a password over a protected session.
PreviousSession: This class is applicable when a principal had authenticated to an authentication authority at some point in the past using any authentication context.
X509: This indicates that the principal authenticated by means of a digital signature where the key was validated as part of an X.509 Public Key Infrastructure.
PGP: This indicates that the principal authenticated by means of a digital signature where the key was validated as part of a PGP Public Key Infrastructure.
SPKI: This indicates that the principal authenticated by means of a digital signature where the key was validated via an SPKI Infrastructure.
XMLDSig: This indicates that the principal authenticated by means of a digital signature according to the processing rules specified in the XML Digital Signature specification.
Smartcard: This indicates that the principal has authenticated using smartcard.
SmartcardPKI: This class is applicable when a principal authenticates to an authentication authority through a two-factor authentication mechanism using a smartcard with enclosed private key and a PIN.
SoftwarePKI: This class is applicable when a principal uses an X.509 certificate stored in software to authenticate to the authentication authority.
Telephony: This class is used to indicate that the principal authenticated via the provision of a fixed-line telephone number, transported via a telephony protocol such as ADSL.
NomadTelephony: Indicates that the principal is "roaming" and authenticates via the means of the line number, a user suffix, and a password element.
PersonalTelephony: This class is used to indicate that the principal authenticated via the provision of a fixed-line telephone.
AuthenticatedTelephony: Indicates that the principal authenticated via the means of the line number, a user suffix, and a password element.
SecureRemotePassword: This class is applicable when the authentication was performed by means of Secure Remote Password.
TLSClient: This class indicates that the principal authenticated by means of a client certificate, secured with the SSL/TLS transport.
TimeSyncToken: This is applicable when a principal authenticates through a time synchronization token.
Unspecified: This indicates that the authentication was performed by unspecified means.
Windows: This indicates that Windows integrated authentication is utilized for authentication.<br/>Possible values = InternetProtocol, InternetProtocolPassword, Kerberos, MobileOneFactorUnregistered, MobileTwoFactorUnregistered, MobileOneFactorContract, MobileTwoFactorContract, Password, PasswordProtectedTransport, PreviousSession, X509, PGP, SPKI, XMLDSig, Smartcard, SmartcardPKI, SoftwarePKI, Telephony, NomadTelephony, PersonalTelephony, AuthenticatedTelephony, SecureRemotePassword, TLSClient, TimeSyncToken, Unspecified, Windows
:param authnctxclassref:
"""
try :
self._authnctxclassref = authnctxclassref
except Exception as e:
raise e
@property
def samlbinding(self) :
"""This element specifies the transport mechanism of saml messages.<br/>Default value: POST<br/>Possible values = REDIRECT, POST, ARTIFACT."""
try :
return self._samlbinding
except Exception as e:
raise e
@samlbinding.setter
def samlbinding(self, samlbinding) :
"""This element specifies the transport mechanism of saml messages.<br/>Default value: POST<br/>Possible values = REDIRECT, POST, ARTIFACT
:param samlbinding:
"""
try :
self._samlbinding = samlbinding
except Exception as e:
raise e
@property
def attributeconsumingserviceindex(self) :
"""Index/ID of the attribute specification at Identity Provider (IdP). IdP will locate attributes requested by SP using this index and send those attributes in Assertion.<br/>Default value: 255<br/>Maximum length = 255."""
try :
return self._attributeconsumingserviceindex
except Exception as e:
raise e
@attributeconsumingserviceindex.setter
def attributeconsumingserviceindex(self, attributeconsumingserviceindex) :
"""Index/ID of the attribute specification at Identity Provider (IdP). IdP will locate attributes requested by SP using this index and send those attributes in Assertion.<br/>Default value: 255<br/>Maximum length = 255
:param attributeconsumingserviceindex:
"""
try :
self._attributeconsumingserviceindex = attributeconsumingserviceindex
except Exception as e:
raise e
@property
def sendthumbprint(self) :
"""Option to send thumbprint instead of x509 certificate in SAML request.<br/>Default value: OFF<br/>Possible values = ON, OFF."""
try :
return self._sendthumbprint
except Exception as e:
raise e
@sendthumbprint.setter
def sendthumbprint(self, sendthumbprint) :
"""Option to send thumbprint instead of x509 certificate in SAML request.<br/>Default value: OFF<br/>Possible values = ON, OFF
:param sendthumbprint:
"""
try :
self._sendthumbprint = sendthumbprint
except Exception as e:
raise e
@property
def enforceusername(self) :
"""Option to choose whether the username that is extracted from SAML assertion can be edited in login page while doing second factor.<br/>Default value: ON<br/>Possible values = ON, OFF."""
try :
return self._enforceusername
except Exception as e:
raise e
@enforceusername.setter
def enforceusername(self, enforceusername) :
"""Option to choose whether the username that is extracted from SAML assertion can be edited in login page while doing second factor.<br/>Default value: ON<br/>Possible values = ON, OFF
:param enforceusername:
"""
try :
self._enforceusername = enforceusername
except Exception as e:
raise e
@property
def logouturl(self) :
"""SingleLogout URL on IdP to which logoutRequest will be sent on Netscaler session cleanup."""
try :
return self._logouturl
except Exception as e:
raise e
@logouturl.setter
def logouturl(self, logouturl) :
"""SingleLogout URL on IdP to which logoutRequest will be sent on Netscaler session cleanup.
:param logouturl:
"""
try :
self._logouturl = logouturl
except Exception as e:
raise e
@property
def artifactresolutionserviceurl(self) :
"""URL of the Artifact Resolution Service on IdP to which Netscaler will post artifact to get actual SAML token."""
try :
return self._artifactresolutionserviceurl
except Exception as e:
raise e
@artifactresolutionserviceurl.setter
def artifactresolutionserviceurl(self, artifactresolutionserviceurl) :
"""URL of the Artifact Resolution Service on IdP to which Netscaler will post artifact to get actual SAML token.
:param artifactresolutionserviceurl:
"""
try :
self._artifactresolutionserviceurl = artifactresolutionserviceurl
except Exception as e:
raise e
@property
def skewtime(self) :
"""This option specifies the allowed clock skew in number of minutes that Netscaler ServiceProvider allows on an incoming assertion. For example, if skewTime is 10, then assertion would be valid from (current time - 10) min to (current time + 10) min, ie 20min in all.<br/>Default value: 5."""
try :
return self._skewtime
except Exception as e:
raise e
@skewtime.setter
def skewtime(self, skewtime) :
"""This option specifies the allowed clock skew in number of minutes that Netscaler ServiceProvider allows on an incoming assertion. For example, if skewTime is 10, then assertion would be valid from (current time - 10) min to (current time + 10) min, ie 20min in all.<br/>Default value: 5
:param skewtime:
"""
try :
self._skewtime = skewtime
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
"""converts nitro response into object and returns the object array in case of get request.
:param service:
:param response:
"""
try :
result = service.payload_formatter.string_to_resource(authenticationsamlaction_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.authenticationsamlaction
except Exception as e :
raise e
def _get_object_name(self) :
"""Returns the value of object identifier argument"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
"""Use this API to add authenticationsamlaction.
:param client:
:param resource:
"""
try :
if type(resource) is not list :
addresource = authenticationsamlaction()
addresource.name = resource.name
addresource.samlidpcertname = resource.samlidpcertname
addresource.samlsigningcertname = resource.samlsigningcertname
addresource.samlredirecturl = resource.samlredirecturl
addresource.samlacsindex = resource.samlacsindex
addresource.samluserfield = resource.samluserfield
addresource.samlrejectunsignedassertion = resource.samlrejectunsignedassertion
addresource.samlissuername = resource.samlissuername
addresource.samltwofactor = resource.samltwofactor
addresource.defaultauthenticationgroup = resource.defaultauthenticationgroup
addresource.attribute1 = resource.attribute1
addresource.attribute2 = resource.attribute2
addresource.attribute3 = resource.attribute3
addresource.attribute4 = resource.attribute4
addresource.attribute5 = resource.attribute5
addresource.attribute6 = resource.attribute6
addresource.attribute7 = resource.attribute7
addresource.attribute8 = resource.attribute8
addresource.attribute9 = resource.attribute9
addresource.attribute10 = resource.attribute10
addresource.attribute11 = resource.attribute11
addresource.attribute12 = resource.attribute12
addresource.attribute13 = resource.attribute13
addresource.attribute14 = resource.attribute14
addresource.attribute15 = resource.attribute15
addresource.attribute16 = resource.attribute16
addresource.signaturealg = resource.signaturealg
addresource.digestmethod = resource.digestmethod
addresource.requestedauthncontext = resource.requestedauthncontext
addresource.authnctxclassref = resource.authnctxclassref
addresource.samlbinding = resource.samlbinding
addresource.attributeconsumingserviceindex = resource.attributeconsumingserviceindex
addresource.sendthumbprint = resource.sendthumbprint
addresource.enforceusername = resource.enforceusername
addresource.logouturl = resource.logouturl
addresource.artifactresolutionserviceurl = resource.artifactresolutionserviceurl
addresource.skewtime = resource.skewtime
return addresource.add_resource(client)
else :
if (resource and len(resource) > 0) :
addresources = [ authenticationsamlaction() for _ in range(len(resource))]
for i in range(len(resource)) :
addresources[i].name = resource[i].name
addresources[i].samlidpcertname = resource[i].samlidpcertname
addresources[i].samlsigningcertname = resource[i].samlsigningcertname
addresources[i].samlredirecturl = resource[i].samlredirecturl
addresources[i].samlacsindex = resource[i].samlacsindex
addresources[i].samluserfield = resource[i].samluserfield
addresources[i].samlrejectunsignedassertion = resource[i].samlrejectunsignedassertion
addresources[i].samlissuername = resource[i].samlissuername
addresources[i].samltwofactor = resource[i].samltwofactor
addresources[i].defaultauthenticationgroup = resource[i].defaultauthenticationgroup
addresources[i].attribute1 = resource[i].attribute1
addresources[i].attribute2 = resource[i].attribute2
addresources[i].attribute3 = resource[i].attribute3
addresources[i].attribute4 = resource[i].attribute4
addresources[i].attribute5 = resource[i].attribute5
addresources[i].attribute6 = resource[i].attribute6
addresources[i].attribute7 = resource[i].attribute7
addresources[i].attribute8 = resource[i].attribute8
addresources[i].attribute9 = resource[i].attribute9
addresources[i].attribute10 = resource[i].attribute10
addresources[i].attribute11 = resource[i].attribute11
addresources[i].attribute12 = resource[i].attribute12
addresources[i].attribute13 = resource[i].attribute13
addresources[i].attribute14 = resource[i].attribute14
addresources[i].attribute15 = resource[i].attribute15
addresources[i].attribute16 = resource[i].attribute16
addresources[i].signaturealg = resource[i].signaturealg
addresources[i].digestmethod = resource[i].digestmethod
addresources[i].requestedauthncontext = resource[i].requestedauthncontext
addresources[i].authnctxclassref = resource[i].authnctxclassref
addresources[i].samlbinding = resource[i].samlbinding
addresources[i].attributeconsumingserviceindex = resource[i].attributeconsumingserviceindex
addresources[i].sendthumbprint = resource[i].sendthumbprint
addresources[i].enforceusername = resource[i].enforceusername
addresources[i].logouturl = resource[i].logouturl
addresources[i].artifactresolutionserviceurl = resource[i].artifactresolutionserviceurl
addresources[i].skewtime = resource[i].skewtime
result = cls.add_bulk_request(client, addresources)
return result
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
"""Use this API to delete authenticationsamlaction.
:param client:
:param resource:
"""
try :
if type(resource) is not list :
deleteresource = authenticationsamlaction()
if type(resource) != type(deleteresource):
deleteresource.name = resource
else :
deleteresource.name = resource.name
return deleteresource.delete_resource(client)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
deleteresources = [ authenticationsamlaction() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i]
else :
if (resource and len(resource) > 0) :
deleteresources = [ authenticationsamlaction() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i].name
result = cls.delete_bulk_request(client, deleteresources)
return result
except Exception as e :
raise e
@classmethod
def update(cls, client, resource) :
"""Use this API to update authenticationsamlaction.
:param client:
:param resource:
"""
try :
if type(resource) is not list :
updateresource = authenticationsamlaction()
updateresource.name = resource.name
updateresource.samlidpcertname = resource.samlidpcertname
updateresource.samlsigningcertname = resource.samlsigningcertname
updateresource.samlredirecturl = resource.samlredirecturl
updateresource.samlacsindex = resource.samlacsindex
updateresource.samluserfield = resource.samluserfield
updateresource.samlrejectunsignedassertion = resource.samlrejectunsignedassertion
updateresource.samlissuername = resource.samlissuername
updateresource.samltwofactor = resource.samltwofactor
updateresource.defaultauthenticationgroup = resource.defaultauthenticationgroup
updateresource.attribute1 = resource.attribute1
updateresource.attribute2 = resource.attribute2
updateresource.attribute3 = resource.attribute3
updateresource.attribute4 = resource.attribute4
updateresource.attribute5 = resource.attribute5
updateresource.attribute6 = resource.attribute6
updateresource.attribute7 = resource.attribute7
updateresource.attribute8 = resource.attribute8
updateresource.attribute9 = resource.attribute9
updateresource.attribute10 = resource.attribute10
updateresource.attribute11 = resource.attribute11
updateresource.attribute12 = resource.attribute12
updateresource.attribute13 = resource.attribute13
updateresource.attribute14 = resource.attribute14
updateresource.attribute15 = resource.attribute15
updateresource.attribute16 = resource.attribute16
updateresource.signaturealg = resource.signaturealg
updateresource.digestmethod = resource.digestmethod
updateresource.requestedauthncontext = resource.requestedauthncontext
updateresource.authnctxclassref = resource.authnctxclassref
updateresource.samlbinding = resource.samlbinding
updateresource.attributeconsumingserviceindex = resource.attributeconsumingserviceindex
updateresource.sendthumbprint = resource.sendthumbprint
updateresource.enforceusername = resource.enforceusername
updateresource.logouturl = resource.logouturl
updateresource.artifactresolutionserviceurl = resource.artifactresolutionserviceurl
updateresource.skewtime = resource.skewtime
return updateresource.update_resource(client)
else :
if (resource and len(resource) > 0) :
updateresources = [ authenticationsamlaction() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].name = resource[i].name
updateresources[i].samlidpcertname = resource[i].samlidpcertname
updateresources[i].samlsigningcertname = resource[i].samlsigningcertname
updateresources[i].samlredirecturl = resource[i].samlredirecturl
updateresources[i].samlacsindex = resource[i].samlacsindex
updateresources[i].samluserfield = resource[i].samluserfield
updateresources[i].samlrejectunsignedassertion = resource[i].samlrejectunsignedassertion
updateresources[i].samlissuername = resource[i].samlissuername
updateresources[i].samltwofactor = resource[i].samltwofactor
updateresources[i].defaultauthenticationgroup = resource[i].defaultauthenticationgroup
updateresources[i].attribute1 = resource[i].attribute1
updateresources[i].attribute2 = resource[i].attribute2
updateresources[i].attribute3 = resource[i].attribute3
updateresources[i].attribute4 = resource[i].attribute4
updateresources[i].attribute5 = resource[i].attribute5
updateresources[i].attribute6 = resource[i].attribute6
updateresources[i].attribute7 = resource[i].attribute7
updateresources[i].attribute8 = resource[i].attribute8
updateresources[i].attribute9 = resource[i].attribute9
updateresources[i].attribute10 = resource[i].attribute10
updateresources[i].attribute11 = resource[i].attribute11
updateresources[i].attribute12 = resource[i].attribute12
updateresources[i].attribute13 = resource[i].attribute13
updateresources[i].attribute14 = resource[i].attribute14
updateresources[i].attribute15 = resource[i].attribute15
updateresources[i].attribute16 = resource[i].attribute16
updateresources[i].signaturealg = resource[i].signaturealg
updateresources[i].digestmethod = resource[i].digestmethod
updateresources[i].requestedauthncontext = resource[i].requestedauthncontext
updateresources[i].authnctxclassref = resource[i].authnctxclassref
updateresources[i].samlbinding = resource[i].samlbinding
updateresources[i].attributeconsumingserviceindex = resource[i].attributeconsumingserviceindex
updateresources[i].sendthumbprint = resource[i].sendthumbprint
updateresources[i].enforceusername = resource[i].enforceusername
updateresources[i].logouturl = resource[i].logouturl
updateresources[i].artifactresolutionserviceurl = resource[i].artifactresolutionserviceurl
updateresources[i].skewtime = resource[i].skewtime
result = cls.update_bulk_request(client, updateresources)
return result
except Exception as e :
raise e
@classmethod
def unset(cls, client, resource, args) :
"""Use this API to unset the properties of authenticationsamlaction resource.
Properties that need to be unset are specified in args array.
:param client:
:param resource:
:param args:
"""
try :
if type(resource) is not list :
unsetresource = authenticationsamlaction()
if type(resource) != type(unsetresource):
unsetresource.name = resource
else :
unsetresource.name = resource.name
return unsetresource.unset_resource(client, args)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
unsetresources = [ authenticationsamlaction() for _ in range(len(resource))]
for i in range(len(resource)) :
unsetresources[i].name = resource[i]
else :
if (resource and len(resource) > 0) :
unsetresources = [ authenticationsamlaction() for _ in range(len(resource))]
for i in range(len(resource)) :
unsetresources[i].name = resource[i].name
result = cls.unset_bulk_request(client, unsetresources, args)
return result
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
"""Use this API to fetch all the authenticationsamlaction resources that are configured on netscaler.
:param client:
:param name: (Default value = "")
:param option_: (Default value = "")
"""
try :
if not name :
obj = authenticationsamlaction()
response = obj.get_resources(client, option_)
else :
if type(name) != cls :
if type(name) is not list :
obj = authenticationsamlaction()
obj.name = name
response = obj.get_resource(client, option_)
else :
if name and len(name) > 0 :
response = [authenticationsamlaction() for _ in range(len(name))]
obj = [authenticationsamlaction() for _ in range(len(name))]
for i in range(len(name)) :
obj[i] = authenticationsamlaction()
obj[i].name = name[i]
response[i] = obj[i].get_resource(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_filtered(cls, client, filter_) :
"""Use this API to fetch filtered set of authenticationsamlaction resources.
        Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
:param client:
:param filter_:
"""
try :
obj = authenticationsamlaction()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(client, option_)
return response
except Exception as e :
raise e
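    # Example (illustrative filter; the attribute/value pair is assumed, not taken
    # from the SDK docs):
    #   actions = authenticationsamlaction.get_filtered(client, "signaturealg:RSA-SHA256")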
@classmethod
def count(cls, client) :
"""Use this API to count the authenticationsamlaction resources configured on NetScaler.
:param client:
"""
try :
obj = authenticationsamlaction()
option_ = options()
option_.count = True
response = obj.get_resources(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
@classmethod
def count_filtered(cls, client, filter_) :
"""Use this API to count filtered the set of authenticationsamlaction resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
:param client:
:param filter_:
"""
try :
obj = authenticationsamlaction()
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
class Signaturealg:
""" """
RSA_SHA1 = "RSA-SHA1"
RSA_SHA256 = "RSA-SHA256"
class Samltwofactor:
""" """
ON = "ON"
OFF = "OFF"
class Samlbinding:
""" """
REDIRECT = "REDIRECT"
POST = "POST"
ARTIFACT = "ARTIFACT"
class Samlrejectunsignedassertion:
""" """
ON = "ON"
OFF = "OFF"
STRICT = "STRICT"
class Digestmethod:
""" """
SHA1 = "SHA1"
SHA256 = "SHA256"
class Authnctxclassref:
""" """
InternetProtocol = "InternetProtocol"
InternetProtocolPassword = "InternetProtocolPassword"
Kerberos = "Kerberos"
MobileOneFactorUnregistered = "MobileOneFactorUnregistered"
MobileTwoFactorUnregistered = "MobileTwoFactorUnregistered"
MobileOneFactorContract = "MobileOneFactorContract"
MobileTwoFactorContract = "MobileTwoFactorContract"
Password = "Password"
PasswordProtectedTransport = "PasswordProtectedTransport"
PreviousSession = "PreviousSession"
X509 = "X509"
PGP = "PGP"
SPKI = "SPKI"
XMLDSig = "XMLDSig"
Smartcard = "Smartcard"
SmartcardPKI = "SmartcardPKI"
SoftwarePKI = "SoftwarePKI"
Telephony = "Telephony"
NomadTelephony = "NomadTelephony"
PersonalTelephony = "PersonalTelephony"
AuthenticatedTelephony = "AuthenticatedTelephony"
SecureRemotePassword = "SecureRemotePassword"
TLSClient = "TLSClient"
TimeSyncToken = "TimeSyncToken"
Unspecified = "Unspecified"
Windows = "Windows"
class Requestedauthncontext:
""" """
exact = "exact"
minimum = "minimum"
maximum = "maximum"
better = "better"
class Sendthumbprint:
""" """
ON = "ON"
OFF = "OFF"
class Enforceusername:
""" """
ON = "ON"
OFF = "OFF"
class authenticationsamlaction_response(base_response) :
""" """
def __init__(self, length=1) :
self.authenticationsamlaction = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.authenticationsamlaction = [authenticationsamlaction() for _ in range(length)]
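# A minimal usage sketch (not part of the generated SDK): assumes a reachable
# NetScaler at the given address and valid credentials; nitro_service ships
# with the same NITRO package this module belongs to.
#
#   from nssrc.com.citrix.netscaler.nitro.service.nitro_service import nitro_service
#
#   client = nitro_service("10.0.0.1", "http")
#   client.login("nsroot", "nsroot")
#   action = authenticationsamlaction()
#   action.name = "saml_act_1"
#   action.samlidpcertname = "idp_cert"
#   action.samlredirecturl = "https://idp.example.com/saml/login"
#   authenticationsamlaction.add(client, action)
#   client.logout()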
|
py | 7df9578718d2e882d35ff2d32620b9a2f8f398ef | import pytest
from app import app
@pytest.fixture
def client():
app.config['TESTING'] = True
test_client = app.test_client()
    yield test_client
def test_index(client):
result = client.get('/')
assert b'Hello World' == result.data
def test_good(client):
result = client.get('/good')
assert b'Good' == result.data |
gyp | 7df957adf1c340e09c5be61eba6cec4a3131e521 | {
'targets': [
{
'target_name': 'virgolib',
'type': 'static_library',
'conditions': [
['OS!="win"', {
'sources': [
'virgo_detach.c',
],
}],
['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
'cflags': [ '--std=c89' ],
'defines': [ '_GNU_SOURCE' ]
}],
['OS=="linux"', {
'dependencies': [
'../deps/breakpad/breakpad.gyp:*'
],
'sources': [
'virgo_crash_reporter.cc',
],
'include_dirs': [
'../deps/breakpad/src',
],
}],
['OS=="win"', {
'sources': [
'virgo_win32_service.c',
],
}],
],
'dependencies': [
'../deps/luvit/deps/zlib/zlib.gyp:zlib',
'../deps/luvit/luvit.gyp:luvit',
'../deps/luvit/luvit.gyp:libluvit',
'../deps/sigar.gyp:sigar',
'../deps/sigar.gyp:lua_sigar',
],
'export_dependent_settings': [
'../deps/luvit/luvit.gyp:libluvit',
],
'defines': [
'VIRGO_OS="<(OS)"',
'VIRGO_PLATFORM="<!(python ../tools/virgo_platform.py)"',
'VIRGO_VERSION="<!(git --git-dir ../.git rev-parse HEAD)"',
'VERSION_FULL="<!(python tools/version.py)"',
],
'sources': [
'virgo_agent_conf.c',
'virgo_conf.c',
'virgo_error.c',
'virgo_exec.c',
'virgo_init.c',
'virgo_lua.c',
'virgo_lua_loader.c',
'virgo_lua_logging.c',
'virgo_lua_debugger.c',
'virgo_lua_paths.c',
'virgo_lua_vfs.c',
'virgo_logging.c',
'virgo_paths.c',
'virgo_portable.c',
'virgo_time.c',
'virgo_util.c',
'virgo_versions.c',
],
'include_dirs': [
'.',
'../include/private',
'../include',
],
'direct_dependent_settings': {
'include_dirs': [
'../include'
],
},
}
],
}
|
py | 7df9580666858a834506a9a0beac742e548266f5 | # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid as fluid
from ppdet.modeling.tests.decorator_helper import prog_scope
from ppdet.core.workspace import load_config, merge_config, create
from ppdet.modeling.model_input import create_feed
class TestFasterRCNN(unittest.TestCase):
def setUp(self):
self.set_config()
self.cfg = load_config(self.cfg_file)
self.detector_type = self.cfg['architecture']
def set_config(self):
self.cfg_file = 'configs/faster_rcnn_r50_1x.yml'
@prog_scope()
def test_train(self):
train_feed = create(self.cfg['train_feed'])
model = create(self.detector_type)
_, feed_vars = create_feed(train_feed)
train_fetches = model.train(feed_vars)
@prog_scope()
def test_test(self):
test_feed = create(self.cfg['eval_feed'])
model = create(self.detector_type)
_, feed_vars = create_feed(test_feed)
test_fetches = model.eval(feed_vars)
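# Each subclass below reuses TestFasterRCNN's train/eval checks and only
# overrides set_config to point at a different architecture's YAML file.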
class TestMaskRCNN(TestFasterRCNN):
def set_config(self):
self.cfg_file = 'configs/mask_rcnn_r50_1x.yml'
class TestCascadeRCNN(TestFasterRCNN):
def set_config(self):
self.cfg_file = 'configs/cascade_rcnn_r50_fpn_1x.yml'
class TestYolov3(TestFasterRCNN):
def set_config(self):
self.cfg_file = 'configs/yolov3_darknet.yml'
class TestRetinaNet(TestFasterRCNN):
def set_config(self):
self.cfg_file = 'configs/retinanet_r50_fpn_1x.yml'
class TestSSD(TestFasterRCNN):
def set_config(self):
self.cfg_file = 'configs/ssd/ssd_mobilenet_v1_voc.yml'
if __name__ == '__main__':
unittest.main()
|
py | 7df958914d4d399e44084317be61a3e5be497102 | l = float(input('Enter the width in m: '))
h = float(input('Enter the height in m: '))
print('Your wall has the following dimensions:\n{:.2f} x {:.2f} meters'.format(l, h))
a = l * h
print('Therefore the area is: {:.2f}m²'.format(a))
# assuming 1 liter of paint covers 2 m² of wall
print('To paint the wall you will need: {:.2f}l of paint'.format(a/2)) |
py | 7df9593c7b89bef316e5568dd3edc290bae1aa12 | from django.contrib.sites.models import Site
from django.contrib.sites.models import RequestSite
from user_registration.models import UserRegistration
class DefaultRegistrationBackend(object):
"""
    The backend defines how the registration and activation processes work
    @register: What to do after valid registration form data is received
@activate: Activation process for a user based on registration data
@is_registration_open: defines if registration is open
"""
def register(self, request, **kwargs):
"""
Registration process is defined in this method. This should do the following:
1. Store the appropriate data based on your logic
2. Send an email / SMS or any other action for registration process
        'kwargs' should contain all the required parameters to create a user;
        we can confirm that by using the REQUIRED_FIELDS list + USERNAME_FIELD in the User model
"""
# create the user and registration data for this user
new_user, reg_data = UserRegistration.objects.register_user(**kwargs)
# Send an email
if Site._meta.installed:
site = Site.objects.get_current()
else:
site = RequestSite(request)
reg_data.send_activation_email(site)
return new_user
def activate(self, request, activation_key):
"""
        The activation process should be defined here. By default, it only checks
        the activation key when the user accesses this URL.
        You could also check against a secret code that the user must provide in
        addition to the key. This code can be sent to the user during the
        registration process by email, SMS, etc.
"""
activated = UserRegistration.objects.activate_user(activation_key)
return activated
def is_registration_open(self):
"""
Override this method to add logic for deciding when registration is allowed
"""
return True
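# A minimal usage sketch (assumed view code; the keyword arguments must match
# your User model's USERNAME_FIELD and REQUIRED_FIELDS, so the field names
# below are illustrative only):
#
#   backend = DefaultRegistrationBackend()
#   if backend.is_registration_open():
#       new_user = backend.register(request, username="alice", email="alice@example.com")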
|
py | 7df95948e8b3dfa05ee02dcf060d6094bbeedd14 | # Wrap every line from pre-input.txt in double quotes and write the result to input-file.txt
# open the input file for reading and the output file for writing
preInputFile = open("pre-input.txt", "rt") # read text
inputFile = open("input-file.txt", "wt") # write text
# create a list with all the lines from file
textFile = preInputFile.readlines()
# ad "" to every line
for line in textFile:
if line[-1] == "\n":
newFormat = "\"" + line[:-1] + "\"" + "\n"
inputFile.write(newFormat)
else:
newFormat = "\"" + line + "\""
inputFile.write(newFormat)
# close all the opened files
preInputFile.close()
inputFile.close()
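# The same program with context managers, so both files are closed automatically
# even on error (equivalent behavior, same file names assumed):
#
#   with open("pre-input.txt", "rt") as src, open("input-file.txt", "wt") as dst:
#       for line in src:
#           if line.endswith("\n"):
#               dst.write('"' + line[:-1] + '"' + "\n")
#           else:
#               dst.write('"' + line + '"')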
|
py | 7df959eb1a5aa79dda2528ed7716b5a95db3d648 | """
owtf.api.base
~~~~~~~~~~~~~
"""
import os
import json
import stat
import time
import hashlib
import datetime
import mimetypes
import email.utils
import subprocess
import tornado.web
import tornado.template
from tornado.escape import url_escape
from owtf.dependency_management.dependency_resolver import BaseComponent, ServiceLocator
class APIRequestHandler(tornado.web.RequestHandler, BaseComponent):
def write(self, chunk):
if isinstance(chunk, list):
super(APIRequestHandler, self).write(json.dumps(chunk))
self.set_header("Content-Type", "application/json")
else:
super(APIRequestHandler, self).write(chunk)
class UIRequestHandler(tornado.web.RequestHandler, BaseComponent):
def reverse_url(self, name, *args):
url = super(UIRequestHandler, self).reverse_url(name, *args)
url = url.replace('?', '')
return url.split('None')[0]
class FileRedirectHandler(tornado.web.RequestHandler):
SUPPORTED_METHODS = ['GET']
def get(self, file_url):
config = ServiceLocator.get_component("config")
ui_port = config.get("UI_SERVER_PORT")
fileserver_port = config.get("FILE_SERVER_PORT")
output_files_server = "%s://%s/" % (self.request.protocol, self.request.host.replace(ui_port, fileserver_port))
redirect_file_url = output_files_server + url_escape(file_url, plus=False)
self.redirect(redirect_file_url, permanent=True)
class StaticFileHandler(tornado.web.StaticFileHandler):
def set_default_headers(self):
self.add_header("Access-Control-Allow-Origin", "*")
self.add_header("Access-Control-Allow-Methods", "GET, POST, DELETE")
def get(self, path, include_body=True):
"""
        This is an edited version of the original method, so that we can show
        a directory listing and set the correct Content-Type
"""
path = self.parse_url_path(path)
abspath = os.path.abspath(os.path.join(self.root, path))
self.absolute_path = abspath
if not os.path.exists(abspath):
raise tornado.web.HTTPError(404)
# Check if a directory if so provide listing
if os.path.isdir(abspath):
# need to look at the request.path here for when path is empty
# but there is some prefix to the path that was already
# trimmed by the routing
# Just loop once to get dirnames and filenames :P
for abspath, dirnames, filenames in os.walk(abspath):
break
directory_listing_template = tornado.template.Template("""
<html>
<head>
<title>Directory Listing</title>
</head>
<body>
<h1>Index of</h1>
<hr>
<ul>
<li><a href="../">../</a></li>
{% if len(dirnames) > 0 %}
<h2>Directories</h2>
{% for item in dirnames %}
<li><a href="{{ url_escape(item, plus=False) }}/">{{ item }}/</a></li>
{% end %}
{% end %}
{% if len(filenames) > 0 %}
<h2>Files</h2>
{% for item in filenames %}
<li><a href="{{ url_escape(item, plus=False) }}">{{ item }}</a></li>
{% end %}
{% end %}
</ul>
</body>
</html>
""")
self.write(directory_listing_template.generate(dirnames=dirnames, filenames=filenames))
return
if os.path.isfile(abspath): # So file
stat_result = os.stat(abspath)
modified = datetime.datetime.fromtimestamp(stat_result[stat.ST_MTIME])
self.set_header("Last-Modified", modified)
mime_type, encoding = mimetypes.guess_type(abspath)
if mime_type:
self.set_header("Content-Type", mime_type)
cache_time = self.get_cache_time(path, modified, mime_type)
if cache_time > 0:
self.set_header("Expires", datetime.datetime.utcnow() + datetime.timedelta(seconds=cache_time))
self.set_header("Cache-Control", "max-age=%s" % str(cache_time))
else:
self.set_header("Cache-Control", "public")
self.set_extra_headers(path)
# Check the If-Modified-Since, and don't send the result if the
# content has not been modified
ims_value = self.request.headers.get("If-Modified-Since")
if ims_value is not None:
date_tuple = email.utils.parsedate(ims_value)
if_since = datetime.datetime.fromtimestamp(time.mktime(date_tuple))
if if_since >= modified:
self.set_status(304)
return
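            # Optional "lines" query parameter: when supplied, only the last N
            # lines of the file are returned, via the external `tail` command.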
no_of_lines = self.get_argument("lines", default="-1")
if no_of_lines != "-1":
data = subprocess.check_output(["tail", "-" + no_of_lines, abspath])
else:
with open(abspath, "rb") as file:
data = file.read()
hasher = hashlib.sha1()
hasher.update(data)
self.set_header("Etag", '"%s"' % hasher.hexdigest())
if include_body:
self.write(data)
else:
assert self.request.method == "HEAD"
self.set_header("Content-Length", len(data))
|
py | 7df95b64fcfd965c096c53502204b3d6e564e49b | import tensorflow as tf
import numpy as np
import json
import time
ISOTIMEFORMAT="%Y-%m-%d %X"
tf.flags.DEFINE_string("data_dir", "./data", "The data dir.")
tf.flags.DEFINE_string("sub_dir", "WikiPeople", "The sub data dir.")
tf.flags.DEFINE_string("old_dir", "avs", "The dir of the original data representation.")
FLAGS = tf.flags.FLAGS
FLAGS._parse_flags()
print("\nParameters:")
for attr, value in sorted(FLAGS.__flags.items()):
print("{}={}".format(attr.upper(), value))
def write_json(t_t):
g = open(FLAGS.data_dir+"/"+FLAGS.sub_dir+"/n-ary_"+t_t+".json", "w")
with open(FLAGS.data_dir+"/"+FLAGS.sub_dir+"/"+FLAGS.old_dir+"/n-ary_"+t_t+".json", "r") as f:
for line in f:
aline = ()
tmp_dict = eval(line)
xx_dict = {}
for k in tmp_dict:
if k.endswith("_h"):
xx_dict["H"] = tmp_dict[k]
xx_dict["R"] = k[0:-2]
elif k.endswith("_t"):
xx_dict["T"] = tmp_dict[k]
elif k != "N":
xx_dict[k] = tmp_dict[k]
xx_dict["N"] = tmp_dict["N"]
json.dump(xx_dict, g)
g.write("\n")
g.close()
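# A worked example of the key rewrite above (the input line is assumed/illustrative):
#   {"birthPlace_h": "Q1", "birthPlace_t": "Q2", "date": "1990", "N": 3}
# becomes
#   {"H": "Q1", "R": "birthPlace", "T": "Q2", "date": "1990", "N": 3}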
if __name__ == "__main__":
print(time.strftime(ISOTIMEFORMAT, time.localtime()))
arr = ["train", "valid", "test"]
for i in arr:
write_json(i)
print(time.strftime(ISOTIMEFORMAT, time.localtime()))
|
py | 7df95c3d0ac85b241609487347bf7a773b256a40 | import time
import math
import numpy as np
import torch
from torch.autograd import Variable
from torch.nn import Parameter
from torch.utils.data.sampler import SubsetRandomSampler
from data_loader import libsvm_dataset
from thrift_ps.ps_service import ParameterServer
from thrift_ps.client import ps_client
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from utils.constants import Prefix, MLModel, Optimization, Synchronization
from storage.s3.s3_type import S3Storage
from model import linear_models
def handler(event, context):
start_time = time.time()
# dataset setting
file = event['file']
data_bucket = event['data_bucket']
dataset_type = event['dataset_type']
assert dataset_type == "sparse_libsvm"
n_features = event['n_features']
n_classes = event['n_classes']
n_workers = event['n_workers']
worker_index = event['worker_index']
# ps setting
host = event['host']
port = event['port']
# training setting
model_name = event['model']
optim = event['optim']
sync_mode = event['sync_mode']
assert model_name.lower() in MLModel.Sparse_Linear_Models
assert optim.lower() == Optimization.Grad_Avg
assert sync_mode.lower() == Synchronization.Reduce
# hyper-parameter
learning_rate = event['lr']
batch_size = event['batch_size']
n_epochs = event['n_epochs']
valid_ratio = event['valid_ratio']
print('bucket = {}'.format(data_bucket))
print("file = {}".format(file))
print('number of workers = {}'.format(n_workers))
print('worker index = {}'.format(worker_index))
print('model = {}'.format(model_name))
print('host = {}'.format(host))
print('port = {}'.format(port))
# Set thrift connection
# Make socket
transport = TSocket.TSocket(host, port)
# Buffering is critical. Raw sockets are very slow
transport = TTransport.TBufferedTransport(transport)
# Wrap in a protocol
protocol = TBinaryProtocol.TBinaryProtocol(transport)
# Create a client to use the protocol encoder
t_client = ParameterServer.Client(protocol)
# Connect!
transport.open()
# test thrift connection
ps_client.ping(t_client)
print("create and ping thrift server >>> HOST = {}, PORT = {}".format(host, port))
# Read file from s3
read_start = time.time()
storage = S3Storage()
lines = storage.load(file, data_bucket).read().decode('utf-8').split("\n")
print("read data cost {} s".format(time.time() - read_start))
parse_start = time.time()
dataset = libsvm_dataset.from_lines(lines, n_features, dataset_type)
print("parse data cost {} s".format(time.time() - parse_start))
preprocess_start = time.time()
# Creating data indices for training and validation splits:
dataset_size = len(dataset)
indices = list(range(dataset_size))
split = int(np.floor(valid_ratio * dataset_size))
shuffle_dataset = True
random_seed = 100
if shuffle_dataset:
np.random.seed(random_seed)
np.random.shuffle(indices)
train_indices, val_indices = indices[split:], indices[:split]
# split train set and test set
train_set = [dataset[i] for i in train_indices]
n_train_batch = math.floor(len(train_set) / batch_size)
val_set = [dataset[i] for i in val_indices]
print("preprocess data cost {} s, dataset size = {}"
.format(time.time() - preprocess_start, dataset_size))
model = linear_models.get_sparse_model(model_name, train_set, val_set, n_features,
n_epochs, learning_rate, batch_size)
# register model
model_name = "w.b"
weight_length = n_features
bias_length = 1
model_length = weight_length + bias_length
ps_client.register_model(t_client, worker_index, model_name, model_length, n_workers)
ps_client.exist_model(t_client, model_name)
print("register and check model >>> name = {}, length = {}".format(model_name, model_length))
# Training the Model
train_start = time.time()
iter_counter = 0
for epoch in range(n_epochs):
epoch_start = time.time()
epoch_cal_time = 0
epoch_comm_time = 0
epoch_loss = 0.
for batch_idx in range(n_train_batch):
batch_start = time.time()
batch_comm_time = 0
# pull latest model
ps_client.can_pull(t_client, model_name, iter_counter, worker_index)
latest_model = ps_client.pull_model(t_client, model_name, iter_counter, worker_index)
model.weight = torch.from_numpy(np.asarray(latest_model[:weight_length]).astype(np.float32)
.reshape(n_features, 1))
model.bias = float(latest_model[-1])
batch_comm_time += time.time() - batch_start
batch_loss, batch_acc = model.one_batch()
epoch_loss += batch_loss.average
w_b = np.concatenate((model.weight.double().numpy().flatten(), np.array([model.bias]).astype(np.double)))
w_b_update = np.subtract(w_b, latest_model)
batch_cal_time = time.time() - batch_start
# push gradient to PS
batch_comm_start = time.time()
ps_client.can_push(t_client, model_name, iter_counter, worker_index)
ps_client.push_grad(t_client, model_name, w_b_update, 1.0 / n_workers, iter_counter, worker_index)
ps_client.can_pull(t_client, model_name, iter_counter + 1, worker_index) # sync all workers
batch_comm_time += time.time() - batch_comm_start
epoch_cal_time += batch_cal_time
epoch_comm_time += batch_comm_time
if batch_idx % 10 == 0:
print('Epoch: [%d/%d], Batch: [%d/%d], Time: %.4f, Loss: %.4f, Accuracy: %.4f,'
                      ' batch cost %.4f s: cal cost %.4f s and communication cost %.4f s'
% (epoch + 1, n_epochs, batch_idx + 1, n_train_batch,
time.time() - train_start, batch_loss.average, batch_acc.accuracy,
time.time() - batch_start, batch_cal_time, batch_comm_time))
iter_counter += 1
# Test the Model
test_start = time.time()
test_loss, test_acc = model.evaluate()
test_time = time.time() - test_start
print("Epoch: [{}/{}] finishes, Batch: [{}/{}], Time: {:.4f}, Loss: {:.4f}, epoch cost {:.4f} s, "
"calculation cost = {:.4f} s, synchronization cost {:.4f} s, test cost {:.4f} s, "
"accuracy of the model on the {} test samples: {}, loss = {}"
.format(epoch + 1, n_epochs, batch_idx + 1, n_train_batch,
time.time() - train_start, epoch_loss, time.time() - epoch_start,
epoch_cal_time, epoch_comm_time, test_time,
len(val_set), test_acc.accuracy, test_loss.average))
end_time = time.time()
print("Elapsed time = {} s".format(end_time - start_time))
|
py | 7df95c7ce41edc1eee0e51193dfc3bd2291c0b72 | # web_app/routes/book_routes.py
from flask import Blueprint, jsonify, request, render_template, redirect #, flash
from web_app.models import Book, parse_records, db
book_routes = Blueprint("book_routes", __name__)
@book_routes.route("/books.json")
def list_books():
# books = [
# {"id": 1, "title": "Book 1"},
# {"id": 2, "title": "Book 2"},
# {"id": 3, "title": "Book 3"},
# ]
book_records = Book.query.all()
print(book_records)
books = parse_records(book_records)
return jsonify(books)
@book_routes.route("/books")
def list_books_for_humans():
# books = [
# {"id": 1, "title": "Book 1"},
# {"id": 2, "title": "Book 2"},
# {"id": 3, "title": "Book 3"},
# ]
book_records = Book.query.all()
print(book_records)
return render_template("books.html", message="Here's some books", books=book_records)
@book_routes.route("/books/new")
def new_book():
return render_template("new_book.html")
@book_routes.route("/books/create", methods=["POST"])
def create_book():
print("FORM DATA:", dict(request.form))
    # use the title and author inputs from the request form to initialize a
    # new book and add it to the database
new_book = Book(title=request.form["title"], author_id=request.form["author_name"])
db.session.add(new_book)
db.session.commit()
# return jsonify({
# "message": "BOOK CREATED OK (TODO)",
# "book": dict(request.form)
# })
#flash(f"Book '{new_book.title}' created successfully!", "success")
return redirect(f"/books")
|
py | 7df95ca0589aeec951c2f337dcfbd05c790530a7 | import os
import pytest
from datetime import datetime
from medium_api import Medium
from medium_api._publication import Publication
from medium_api._user import User
from medium_api._article import Article
medium = Medium(os.getenv('RAPIDAPI_KEY'))
article_id = '562c5821b5f0'
article = medium.article(article_id=article_id)
def test_article_instance():
assert isinstance(article, Article)
assert isinstance(article._id, str)
assert article._id == article_id
def test_article_info():
article.save_info()
assert isinstance(article.title, str)
assert isinstance(article.subtitle, str)
assert isinstance(article.claps, int)
assert isinstance(article.author, User)
assert isinstance(article.url, str)
assert isinstance(article.published_at, datetime)
assert isinstance(article.publication_id, str)
assert isinstance(article.tags, list)
assert isinstance(article.topics, list)
assert isinstance(article.last_modified_at, datetime)
assert isinstance(article.reading_time, float)
assert isinstance(article.word_count, int)
assert isinstance(article.voters, int)
assert isinstance(article.image_url, str)
assert isinstance(article.info, dict)
def test_article_content():
article.save_content()
assert isinstance(article.content, str)
assert len(article.content) > 0
def test_article_json():
article.save_info()
article.save_content()
article_json = article.json
assert isinstance(article_json, dict)
assert 'content' in article_json.keys()
assert 'title' in article_json.keys()
def test_article_publication():
is_self_published = article.is_self_published
assert isinstance(is_self_published, bool)
if not is_self_published:
assert isinstance(article.publication, Publication)
else:
assert article.publication is None
|
py | 7df95f560763cf014cf7ccc828911ff3e8a9f5ee | #!/usr/bin/env python
#
# Copyright (c) Microsoft Corporation. All rights reserved.
# Copyright 2016 Confluent Inc.
# Licensed under the MIT License.
# Licensed under the Apache License, Version 2.0
#
# Original Confluent sample modified for use with Azure Event Hubs for Apache Kafka Ecosystems
from confluent_kafka import Producer
import sys
if __name__ == '__main__':
if len(sys.argv) != 2:
sys.stderr.write('Usage: %s <topic>\n' % sys.argv[0])
sys.exit(1)
topic = sys.argv[1]
# Producer configuration
# See https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
# See https://github.com/edenhill/librdkafka/wiki/Using-SSL-with-librdkafka#prerequisites for SSL issues
conf = {
'bootstrap.servers': 'mynamespace.servicebus.windows.net:9093', #replace
'security.protocol': 'SASL_SSL',
'ssl.ca.location': '/path/to/ca-certificate.crt',
'sasl.mechanism': 'PLAIN',
'sasl.username': '$ConnectionString',
'sasl.password': '{YOUR.EVENTHUBS.CONNECTION.STRING}', #replace
'client.id': 'python-example-producer'
}
# Create Producer instance
p = Producer(**conf)
def delivery_callback(err, msg):
if err:
sys.stderr.write('%% Message failed delivery: %s\n' % err)
else:
            sys.stderr.write('%% Message delivered to %s [%d] @ %d\n' % (msg.topic(), msg.partition(), msg.offset()))
    # Write 0-99 to topic
for i in range(0, 100):
try:
p.produce(topic, str(i), callback=delivery_callback)
except BufferError as e:
sys.stderr.write('%% Local producer queue is full (%d messages awaiting delivery): try again\n' % len(p))
p.poll(0)
# Wait until all messages have been delivered
sys.stderr.write('%% Waiting for %d deliveries\n' % len(p))
p.flush()
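
    # Example invocation (topic name is illustrative):
    #   python producer.py my-event-hub
    # delivery_callback prints one report per message once the broker has
    # acknowledged (or rejected) it.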
|
py | 7df95f8dc86d5a98c0002a7b0c22e48a4ded5066 | """
New, fast version of the CloudPickler.
This new CloudPickler class can now extend the fast C Pickler instead of the
previous Python implementation of the Pickler class. Because this functionality
is only available for Python versions 3.8+, a lot of backward-compatibility
code is also removed.
Note that the C Pickler subclassing API is CPython-specific. Therefore, some
guards present in cloudpickle.py that were written to handle PyPy specificities
are not present in cloudpickle_fast.py
"""
import abc
import copyreg
import io
import itertools
import logging
import _pickle
import pickle
import sys
import types
import weakref
import typing
from _pickle import Pickler
from .cloudpickle import (
_is_dynamic, _extract_code_globals, _BUILTIN_TYPE_NAMES, DEFAULT_PROTOCOL,
_find_imported_submodules, _get_cell_contents, _is_importable_by_name, _builtin_type,
Enum, _get_or_create_tracker_id, _make_skeleton_class, _make_skeleton_enum,
_extract_class_dict, dynamic_subimport, subimport, _typevar_reduce, _get_bases,
)
load, loads = _pickle.load, _pickle.loads
# Shorthands similar to pickle.dump/pickle.dumps
def dump(obj, file, protocol=None, buffer_callback=None):
"""Serialize obj as bytes streamed into file
protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to
pickle.HIGHEST_PROTOCOL. This setting favors maximum communication speed
between processes running the same Python version.
Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure
compatibility with older versions of Python.
"""
CloudPickler(file, protocol=protocol, buffer_callback=buffer_callback).dump(obj)
def dumps(obj, protocol=None, buffer_callback=None):
"""Serialize obj as a string of bytes allocated in memory
protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to
pickle.HIGHEST_PROTOCOL. This setting favors maximum communication speed
between processes running the same Python version.
Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure
compatibility with older versions of Python.
"""
with io.BytesIO() as file:
cp = CloudPickler(file, protocol=protocol, buffer_callback=buffer_callback)
cp.dump(obj)
return file.getvalue()
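
# Illustrative round-trip sketch (names are hypothetical, not part of this
# module): dumps/loads can serialize a function defined inside another
# function, which the standard pickle module cannot.
#
#   def make_adder(n):
#       def add(x):
#           return x + n
#       return add
#
#   payload = dumps(make_adder(2))
#   assert loads(payload)(40) == 42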
# COLLECTION OF OBJECTS __getnewargs__-LIKE METHODS
# -------------------------------------------------
def _class_getnewargs(obj):
type_kwargs = {}
if "__slots__" in obj.__dict__:
type_kwargs["__slots__"] = obj.__slots__
__dict__ = obj.__dict__.get('__dict__', None)
if isinstance(__dict__, property):
type_kwargs['__dict__'] = __dict__
return (type(obj), obj.__name__, _get_bases(obj), type_kwargs,
_get_or_create_tracker_id(obj), None)
def _enum_getnewargs(obj):
members = dict((e.name, e.value) for e in obj)
return (obj.__bases__, obj.__name__, obj.__qualname__, members,
obj.__module__, _get_or_create_tracker_id(obj), None)
# COLLECTION OF OBJECTS RECONSTRUCTORS
# ------------------------------------
def _file_reconstructor(retval):
return retval
# COLLECTION OF OBJECTS STATE GETTERS
# -----------------------------------
def _function_getstate(func):
# - Put func's dynamic attributes (stored in func.__dict__) in state. These
# attributes will be restored at unpickling time using
# f.__dict__.update(state)
# - Put func's members into slotstate. Such attributes will be restored at
# unpickling time by iterating over slotstate and calling setattr(func,
# slotname, slotvalue)
slotstate = {
"__name__": func.__name__,
"__qualname__": func.__qualname__,
"__annotations__": func.__annotations__,
"__kwdefaults__": func.__kwdefaults__,
"__defaults__": func.__defaults__,
"__module__": func.__module__,
"__doc__": func.__doc__,
"__closure__": func.__closure__,
}
f_globals_ref = _extract_code_globals(func.__code__)
f_globals = {k: func.__globals__[k] for k in f_globals_ref if k in
func.__globals__}
closure_values = (
list(map(_get_cell_contents, func.__closure__))
if func.__closure__ is not None else ()
)
    # Extract currently-imported submodules used by func. Storing these modules
    # in a dummy _cloudpickle_submodules attribute of the object's state will
# trigger the side effect of importing these modules at unpickling time
# (which is necessary for func to work correctly once depickled)
slotstate["_cloudpickle_submodules"] = _find_imported_submodules(
func.__code__, itertools.chain(f_globals.values(), closure_values))
slotstate["__globals__"] = f_globals
state = func.__dict__
return state, slotstate
def _class_getstate(obj):
clsdict = _extract_class_dict(obj)
clsdict.pop('__weakref__', None)
if issubclass(type(obj), abc.ABCMeta):
        # If obj is an instance of an ABCMeta subclass, don't pickle the
# cache/negative caches populated during isinstance/issubclass
# checks, but pickle the list of registered subclasses of obj.
clsdict.pop('_abc_impl', None)
(registry, _, _, _) = abc._get_dump(obj)
clsdict["_abc_impl"] = [subclass_weakref()
for subclass_weakref in registry]
if "__slots__" in clsdict:
# pickle string length optimization: member descriptors of obj are
# created automatically from obj's __slots__ attribute, no need to
# save them in obj's state
if isinstance(obj.__slots__, str):
clsdict.pop(obj.__slots__)
else:
for k in obj.__slots__:
clsdict.pop(k, None)
clsdict.pop('__dict__', None) # unpicklable property object
return (clsdict, {})
def _enum_getstate(obj):
clsdict, slotstate = _class_getstate(obj)
members = dict((e.name, e.value) for e in obj)
# Cleanup the clsdict that will be passed to _rehydrate_skeleton_class:
# Those attributes are already handled by the metaclass.
for attrname in ["_generate_next_value_", "_member_names_",
"_member_map_", "_member_type_",
"_value2member_map_"]:
clsdict.pop(attrname, None)
for member in members:
clsdict.pop(member)
# Special handling of Enum subclasses
return clsdict, slotstate
# COLLECTIONS OF OBJECTS REDUCERS
# -------------------------------
# A reducer is a function taking a single argument (obj), and that returns a
# tuple with all the necessary data to re-construct obj. Apart from a few
# exceptions (list, dict, bytes, int, etc.), a reducer is necessary to
# correctly pickle an object.
# While many built-in objects (Exceptions objects, instances of the "object"
# class, etc), are shipped with their own built-in reducer (invoked using
# obj.__reduce__), some do not. The following methods were created to "fill
# these holes".
def _code_reduce(obj):
"""codeobject reducer"""
args = (
obj.co_argcount, obj.co_posonlyargcount,
obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
obj.co_varnames, obj.co_filename, obj.co_name,
obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
obj.co_cellvars
)
return types.CodeType, args
def _cell_reduce(obj):
"""Cell (containing values of a function's free variables) reducer"""
try:
obj.cell_contents
except ValueError: # cell is empty
return types.CellType, ()
else:
return types.CellType, (obj.cell_contents,)
def _classmethod_reduce(obj):
orig_func = obj.__func__
return type(obj), (orig_func,)
def _file_reduce(obj):
"""Save a file"""
import io
if not hasattr(obj, "name") or not hasattr(obj, "mode"):
raise pickle.PicklingError(
"Cannot pickle files that do not map to an actual file"
)
if obj is sys.stdout:
return getattr, (sys, "stdout")
if obj is sys.stderr:
return getattr, (sys, "stderr")
if obj is sys.stdin:
raise pickle.PicklingError("Cannot pickle standard input")
if obj.closed:
raise pickle.PicklingError("Cannot pickle closed files")
if hasattr(obj, "isatty") and obj.isatty():
raise pickle.PicklingError(
"Cannot pickle files that map to tty objects"
)
if "r" not in obj.mode and "+" not in obj.mode:
raise pickle.PicklingError(
"Cannot pickle files that are not opened for reading: %s"
% obj.mode
)
name = obj.name
retval = io.StringIO()
try:
# Read the whole file
curloc = obj.tell()
obj.seek(0)
contents = obj.read()
obj.seek(curloc)
except IOError:
raise pickle.PicklingError(
"Cannot pickle file %s as it cannot be read" % name
)
retval.write(contents)
retval.seek(curloc)
retval.name = name
return _file_reconstructor, (retval,)
def _getset_descriptor_reduce(obj):
return getattr, (obj.__objclass__, obj.__name__)
def _mappingproxy_reduce(obj):
return types.MappingProxyType, (dict(obj),)
def _memoryview_reduce(obj):
return bytes, (obj.tobytes(),)
def _module_reduce(obj):
if _is_dynamic(obj):
obj.__dict__.pop('__builtins__', None)
return dynamic_subimport, (obj.__name__, vars(obj))
else:
return subimport, (obj.__name__,)
def _method_reduce(obj):
return (types.MethodType, (obj.__func__, obj.__self__))
def _logger_reduce(obj):
return logging.getLogger, (obj.name,)
def _root_logger_reduce(obj):
return logging.getLogger, ()
def _property_reduce(obj):
return property, (obj.fget, obj.fset, obj.fdel, obj.__doc__)
def _weakset_reduce(obj):
return weakref.WeakSet, (list(obj),)
def _dynamic_class_reduce(obj):
"""
Save a class that can't be stored as module global.
This method is used to serialize classes that are defined inside
functions, or that otherwise can't be serialized as attribute lookups
from global modules.
"""
if Enum is not None and issubclass(obj, Enum):
return (
_make_skeleton_enum, _enum_getnewargs(obj), _enum_getstate(obj),
None, None, _class_setstate
)
else:
return (
_make_skeleton_class, _class_getnewargs(obj), _class_getstate(obj),
None, None, _class_setstate
)
def _class_reduce(obj):
"""Select the reducer depending on the dynamic nature of the class obj"""
if obj is type(None): # noqa
return type, (None,)
elif obj is type(Ellipsis):
return type, (Ellipsis,)
elif obj is type(NotImplemented):
return type, (NotImplemented,)
elif obj in _BUILTIN_TYPE_NAMES:
return _builtin_type, (_BUILTIN_TYPE_NAMES[obj],)
elif not _is_importable_by_name(obj):
return _dynamic_class_reduce(obj)
return NotImplemented
# COLLECTIONS OF OBJECTS STATE SETTERS
# ------------------------------------
# state setters are called at unpickling time, once the object is created and
# it has to be updated to how it was at unpickling time.
def _function_setstate(obj, state):
"""Update the state of a dynaamic function.
As __closure__ and __globals__ are readonly attributes of a function, we
cannot rely on the native setstate routine of pickle.load_build, that calls
setattr on items of the slotstate. Instead, we have to modify them inplace.
"""
state, slotstate = state
obj.__dict__.update(state)
obj_globals = slotstate.pop("__globals__")
obj_closure = slotstate.pop("__closure__")
# _cloudpickle_subimports is a set of submodules that must be loaded for
# the pickled function to work correctly at unpickling time. Now that these
# submodules are depickled (hence imported), they can be removed from the
# object's state (the object state only served as a reference holder to
# these submodules)
slotstate.pop("_cloudpickle_submodules")
obj.__globals__.update(obj_globals)
obj.__globals__["__builtins__"] = __builtins__
if obj_closure is not None:
for i, cell in enumerate(obj_closure):
try:
value = cell.cell_contents
except ValueError: # cell is empty
continue
obj.__closure__[i].cell_contents = value
for k, v in slotstate.items():
setattr(obj, k, v)
def _class_setstate(obj, state):
state, slotstate = state
registry = None
for attrname, attr in state.items():
if attrname == "_abc_impl":
registry = attr
else:
setattr(obj, attrname, attr)
if registry is not None:
for subclass in registry:
obj.register(subclass)
return obj
class CloudPickler(Pickler):
"""Fast C Pickler extension with additional reducing routines.
    CloudPickler's extensions exist in two forms:
* its dispatch_table containing reducers that are called only if ALL
built-in saving functions were previously discarded.
* a special callback named "reducer_override", invoked before standard
function/class builtin-saving method (save_global), to serialize dynamic
functions
"""
# cloudpickle's own dispatch_table, containing the additional set of
    # objects (compared to the standard library pickle) that cloudpickle can
# serialize.
dispatch = {}
dispatch[classmethod] = _classmethod_reduce
dispatch[io.TextIOWrapper] = _file_reduce
dispatch[logging.Logger] = _logger_reduce
dispatch[logging.RootLogger] = _root_logger_reduce
dispatch[memoryview] = _memoryview_reduce
dispatch[property] = _property_reduce
dispatch[staticmethod] = _classmethod_reduce
dispatch[types.CellType] = _cell_reduce
dispatch[types.CodeType] = _code_reduce
dispatch[types.GetSetDescriptorType] = _getset_descriptor_reduce
dispatch[types.ModuleType] = _module_reduce
dispatch[types.MethodType] = _method_reduce
dispatch[types.MappingProxyType] = _mappingproxy_reduce
dispatch[weakref.WeakSet] = _weakset_reduce
dispatch[typing.TypeVar] = _typevar_reduce
def __init__(self, file, protocol=None, buffer_callback=None):
if protocol is None:
protocol = DEFAULT_PROTOCOL
Pickler.__init__(self, file, protocol=protocol, buffer_callback=buffer_callback)
# map functions __globals__ attribute ids, to ensure that functions
# sharing the same global namespace at pickling time also share their
# global namespace at unpickling time.
self.globals_ref = {}
# Take into account potential custom reducers registered by external
# modules
self.dispatch_table = copyreg.dispatch_table.copy()
self.dispatch_table.update(self.dispatch)
self.proto = int(protocol)
def reducer_override(self, obj):
"""Type-agnostic reducing callback for function and classes.
For performance reasons, subclasses of the C _pickle.Pickler class
cannot register custom reducers for functions and classes in the
        dispatch_table. Reducers for such types must instead be implemented in the
special reducer_override method.
        Note that this method will be called for any object except a few
        built-in types (int, lists, dicts, etc.), which differs from reducers
        in the Pickler's dispatch_table, each of which is invoked only for
        objects of a specific type.
This property comes in handy for classes: although most classes are
instances of the ``type`` metaclass, some of them can be instances of
other custom metaclasses (such as enum.EnumMeta for example). In
particular, the metaclass will likely not be known in advance, and thus
cannot be special-cased using an entry in the dispatch_table.
reducer_override, among other things, allows us to register a reducer
that will be called for any class, independently of its type.
Notes:
        * reducer_override has priority over dispatch_table-registered
reducers.
* reducer_override can be used to fix other limitations of cloudpickle
for other types that suffered from type-specific reducers, such as
Exceptions. See https://github.com/cloudpipe/cloudpickle/issues/248
"""
t = type(obj)
try:
is_anyclass = issubclass(t, type)
except TypeError: # t is not a class (old Boost; see SF #502085)
is_anyclass = False
if is_anyclass:
return _class_reduce(obj)
elif isinstance(obj, types.FunctionType):
return self._function_reduce(obj)
else:
            # fallback to save_global, including the Pickler's dispatch_table
return NotImplemented
# function reducers are defined as instance methods of CloudPickler
# objects, as they rely on a CloudPickler attribute (globals_ref)
def _dynamic_function_reduce(self, func):
"""Reduce a function that is not pickleable via attribute lookup."""
newargs = self._function_getnewargs(func)
state = _function_getstate(func)
return (types.FunctionType, newargs, state, None, None,
_function_setstate)
def _function_reduce(self, obj):
"""Reducer for function objects.
If obj is a top-level attribute of a file-backed module, this
        reducer returns NotImplemented, making the CloudPickler fall back to
traditional _pickle.Pickler routines to save obj. Otherwise, it reduces
obj using a custom cloudpickle reducer designed specifically to handle
dynamic functions.
        As opposed to cloudpickle.py, there is no special handling for built-in
        PyPy functions, because cloudpickle_fast is CPython-specific.
"""
if _is_importable_by_name(obj):
return NotImplemented
else:
return self._dynamic_function_reduce(obj)
def _function_getnewargs(self, func):
code = func.__code__
# base_globals represents the future global namespace of func at
# unpickling time. Looking it up and storing it in
        # CloudPickler.globals_ref allows functions sharing the same globals
        # at pickling time to also share them once unpickled, on one condition:
# since globals_ref is an attribute of a CloudPickler instance, and
# that a new CloudPickler is created each time pickle.dump or
# pickle.dumps is called, functions also need to be saved within the
# same invocation of cloudpickle.dump/cloudpickle.dumps (for example:
# cloudpickle.dumps([f1, f2])). There is no such limitation when using
# CloudPickler.dump, as long as the multiple invocations are bound to
# the same CloudPickler.
base_globals = self.globals_ref.setdefault(id(func.__globals__), {})
if base_globals == {}:
# Add module attributes used to resolve relative imports
# instructions inside func.
for k in ["__package__", "__name__", "__path__", "__file__"]:
if k in func.__globals__:
base_globals[k] = func.__globals__[k]
# Do not bind the free variables before the function is created to
# avoid infinite recursion.
if func.__closure__ is None:
closure = None
else:
closure = tuple(
types.CellType() for _ in range(len(code.co_freevars)))
return code, base_globals, None, None, closure
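
    # Illustrative sketch (function names are hypothetical) of the
    # globals-sharing behaviour described above: functions pickled in the
    # same dumps() call keep sharing one global namespace once unpickled.
    #
    #   payload = dumps([f1, f2])   # f1.__globals__ is f2.__globals__
    #   g1, g2 = loads(payload)
    #   assert g1.__globals__ is g2.__globals__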
def dump(self, obj):
try:
return Pickler.dump(self, obj)
except RuntimeError as e:
if "recursion" in e.args[0]:
msg = (
"Could not pickle object as excessively deep recursion "
"required."
)
raise pickle.PicklingError(msg)
else:
raise
|
py | 7df95fc7a0f9451c54acb69ded4bcb4433ab1c8c | # ************************** Desafio 075 ************************** #
# Anรกlise de dados em uma Tupla #
# Desenvolva um programa que leia quatro valores pelo teclado e #
# guarde-os em uma tupla. No final, mostre: #
# A) Quantas vezes apareceu o valor 9. #
# B) Em que posiรงรฃo foi digitado o primeiro valor 3. #
# C) Quais foram os nรบmeros pares. #
# ***************************************************************** #
titulo = ' \033[1;34mAnรกlise de dados em uma Tupla\033[m '
print(f'\n{titulo:=^60}\n')
for cont in range(0, 4):
n = int(input(' Digite um nรบmero: '))
if cont == 0:
t1 = n
elif cont == 1:
t2 = n
elif cont == 2:
t3 = n
else:
t4 = n
cont += 1
t = (t1, t2, t3, t4)
print('*'*60)
print(f'Vocรช digitou os valores: {t}')
print('*'*60)
n1 = t.count(9)
if n1 == 0:
n1 = ' Nรฃo foi digitado o valor 9'
else:
    n1 = f' O valor 9 foi digitado {t.count(9)} vez(es)'
n2 = t.count(3)
if n2 != 0:
n2 = f'O primeiro valor 3 foi digitado na {t.index(3)+1}ยช posiรงรฃo'
else:
n2 = 'Nรฃo foi digitado o valor 3'
print(' Os nรบmeros pares digitados foram: ', end='')
for n in range(0, len(t)):
if t[n] % 2 == 0:
print(t[n], end=' ')
print(f'\n{n1}\n {n2}\n')
# Soluรงรฃo do Gustavo Guanabara:
print('\n')
num = (int(input('Digite um nรบmero: ')), int(input('Digite outro nรบmero: ')),
int(input('Digite mais um nรบmero: ')), int(input('Digite o รบltimo nรบmero: ')))
print('-='*30)
print(f'Vocรช digitou os valores: {num}')
|
py | 7df95fed6f8c53792b1f30d07ed5be4228cc518f | from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('spaweb.urls')),
path('tinymce/', include('tinymce.urls')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
import debug_toolbar
urlpatterns = [
path(r'__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
|
py | 7df9603f365157e60e5f842a3807dda79c58f3b2 | import mock
from dvc.repo.reproduce import _get_active_graph
def test_get_active_graph(tmp_dir, dvc):
(pre_foo_stage,) = tmp_dir.dvc_gen({"pre-foo": "pre-foo"})
foo_stage = dvc.run(deps=["pre-foo"], outs=["foo"], cmd="echo foo > foo")
bar_stage = dvc.run(deps=["foo"], outs=["bar"], cmd="echo bar > bar")
baz_stage = dvc.run(deps=["foo"], outs=["baz"], cmd="echo baz > baz")
dvc.lock_stage("bar.dvc")
graph = dvc.graph
active_graph = _get_active_graph(graph)
assert active_graph.nodes == graph.nodes
assert set(active_graph.edges) == {
(foo_stage, pre_foo_stage),
(baz_stage, foo_stage),
}
dvc.lock_stage("baz.dvc")
graph = dvc.graph
active_graph = _get_active_graph(graph)
assert set(active_graph.nodes) == {bar_stage, baz_stage}
assert not active_graph.edges
@mock.patch("dvc.repo.reproduce._reproduce_stage", returns=[])
def test_number_reproduces(reproduce_stage_mock, tmp_dir, dvc):
tmp_dir.dvc_gen({"pre-foo": "pre-foo"})
dvc.run(deps=["pre-foo"], outs=["foo"], cmd="echo foo > foo")
dvc.run(deps=["foo"], outs=["bar"], cmd="echo bar > bar")
dvc.run(deps=["foo"], outs=["baz"], cmd="echo baz > baz")
dvc.run(deps=["bar"], outs=["boop"], cmd="echo boop > boop")
reproduce_stage_mock.reset_mock()
dvc.reproduce(all_pipelines=True)
assert reproduce_stage_mock.call_count == 5
|
py | 7df96041ccded998bc19ea1df9f0463651e2131c | from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from compas.utilities import color_to_colordict
from compas.utilities import i_to_blue
from compas.utilities import i_to_red
from compas.geometry import add_vectors
from compas.geometry import scale_vector
from compas.geometry import length_vector
from compas.geometry import sum_vectors
import compas_rhino
from compas_rhino.artists import NetworkArtist
from .elementartist import ElementArtist
__all__ = ['AssemblyArtist']
class AssemblyArtist(NetworkArtist):
"""An artist for visualisation of assemblies in Rhino.
Parameters
----------
assembly : compas_assembly.datastructures.Assembly
The assembly data structure.
layer : str, optional
The base layer for drawing.
Default is ``None``, which means drawing in the current layer.
Examples
--------
>>>
"""
def __init__(self, assembly, layer=None):
super(AssemblyArtist, self).__init__(assembly, layer=layer)
self.settings.update({
'color.vertex': (0, 0, 0),
'color.vertex:is_support': (0, 0, 0),
'color.edge': (0, 0, 0),
'color.interface': (255, 255, 255),
'color.force:compression': (0, 0, 255),
'color.force:tension': (255, 0, 0),
'color.selfweight': (0, 255, 0),
'scale.force': 0.1,
'scale.selfweight': 0.1,
'eps.selfweight': 1e-3,
'eps.force': 1e-3,
})
@property
def assembly(self):
"""Assembly : the assembly data structure."""
return self.network
@assembly.setter
def assembly(self, assembly):
self.network = assembly
# def _clear(self, name):
# name = "{}.{}.*".format(self.assembly.name, name)
# guids = compas_rhino.get_objects(name=name)
# compas_rhino.delete_objects(guids)
# def clear_blocks(self):
# """Delete all previously drawn blocks."""
# self._clear('block')
# def clear_interfaces(self):
# """Delete all previously drawn interfaces."""
# self._clear('interface')
# def clear_selfweight(self):
# """Delete all previously drawn self-weight vectors."""
# self._clear('selfweight')
# def clear_forces(self):
# """Delete all previously drawn force vectors."""
# self._clear('force')
# def clear_resultants(self):
# """Delete all previously drawn resultant vectors."""
# self._clear('resultant')
def draw_blocks(self, keys=None, show_faces=False, show_vertices=False, show_edges=True):
"""Draw the blocks of the assembly.
Parameters
----------
show_faces : bool, optional
Draw the faces of the blocks.
Default is ``False``.
show_vertices : bool, optional
Draw the vertices of the blocks.
Default is ``False``.
show_edges : bool, optional
Draw the edges of the blocks.
Default is ``True``.
Notes
-----
* By default, blocks are drawn as wireframes.
* By default, blocks are drawn on a sublayer of the base layer, if a base layer was specified.
* Block names have the following pattern: ``"{assembly_name}.block.{block_id}"``
* Faces and vertices can be drawn using the corresponding flags.
* Block components have the following pattern:
* face: ``"{assembly_name}.block.{block_id}.face.{face_id}"``
* edge: ``"{assembly_name}.block.{block_id}.edge.{edge_id}"``
* vertex: ``"{assembly_name}.block.{block_id}.vertex.{vertex_id}"``
Examples
--------
>>>
"""
keys = keys or list(self.assembly.nodes())
layer = "{}::Blocks".format(self.layer) if self.layer else None
artist = BlockArtist(None, layer=layer)
for key in keys:
block = self.assembly.blocks[key]
block.name = "{}.block.{}".format(self.assembly.name, key)
artist.block = block
if show_edges:
artist.draw_edges()
if show_faces:
artist.draw_faces()
if show_vertices:
artist.draw_vertices()
artist.redraw()
def draw_interfaces(self, keys=None, color=None):
"""Draw the interfaces between the blocks.
Parameters
----------
keys : list
A list of interface identifiers (i.e. assembly edge (u, v) tuples).
Default is ``None``, in which case all interfaces are drawn.
color : str, tuple, dict
            The color specification for the interfaces.
Colors should be specified in the form of a string (hex colors) or
as a tuple of RGB components.
To apply the same color to all interfaces, provide a single color
specification. Individual colors can be assigned using a dictionary
of key-color pairs. Missing keys will be assigned the default interface
color (``self.settings['color.interface']``).
The default is ``None``, in which case all interfaces are assigned the
default interface color.
Notes
-----
* Interfaces are drawn as mesh faces.
* Interfaces are drawn on a sub-layer *Interfaces* of the base layer, if a base layer was provided.
* Interface names have the following pattern: ``"{assembly_name}.interface.{from block_id}-{to block_id}"``
* Interfaces have a direction, as suggested by the naming convention.
"""
layer = "{}::Interfaces".format(self.layer) if self.layer else None
faces = []
keys = keys or list(self.assembly.edges())
colordict = color_to_colordict(
color,
keys,
default=self.settings.get('color.interface'),
colorformat='rgb',
normalize=False
)
for (u, v), attr in self.assembly.edges(True):
faces.append({
'points': attr['interface_points'],
'name': "{}.interface.{}-{}".format(self.assembly.name, u, v),
'color': colordict[(u, v)]
})
compas_rhino.draw_faces(faces, layer=layer, clear=False, redraw=False)
def draw_interface_frames(self):
"""Draw the frames of the interfaces.
"""
layer = "{}::Interfaces::Frames".format(self.layer) if self.layer else None
lines = []
for (a, b), attr in self.assembly.edges(True):
o = attr['interface_origin']
u, v, w = attr['interface_uvw']
lines.append({
'start': o,
'end': add_vectors(o, u),
'name': "{}.iframe.{}-{}.u".format(self.assembly.name, a, b),
'color': (255, 0, 0),
'arrow': 'end'
})
lines.append({
'start': o,
'end': add_vectors(o, v),
'name': "{}.iframe.{}-{}.v".format(self.assembly.name, a, b),
'color': (0, 255, 0),
'arrow': 'end'
})
lines.append({
'start': o,
'end': add_vectors(o, w),
'name': "{}.iframe.{}-{}.w".format(self.assembly.name, a, b),
'color': (0, 0, 255),
'arrow': 'end'
})
self.draw_lines(lines, layer=layer, clear=True, redraw=True)
def draw_selfweight(self, scale=None, eps=None):
"""Draw vectors indicating the magnitude of the selfweight of the blocks.
Parameters
----------
scale : float, optional
The scale at which the selfweight vectors should be drawn.
Default is `0.1`.
eps : float, optional
A tolerance for drawing small vectors.
Selfweight vectors with a scaled length smaller than this tolerance are not drawn.
Default is `1e-3`.
Notes
-----
* Selfweight vectors are drawn as Rhino lines with arrow heads.
* The default color is *green*: `'#00ff00'` or `(0, 255, 0)`.
* Selfweight vectors are drawn in a sub-layer *Selfweight* of the base layer, if a base layer was specified.
* Selfweight vectors are named according to the following pattern: `"{assembly name}.selfweight.{block id}"`.
"""
layer = "{}::Selfweight".format(self.layer) if self.layer else None
scale = scale or self.settings['scale.selfweight']
eps = eps or self.settings['eps.selfweight']
color = self.settings['color.selfweight']
lines = []
for key, attr in self.assembly.vertices(True):
block = self.assembly.blocks[key]
volume = block.volume()
if volume * scale < eps:
continue
vector = [0.0, 0.0, -1.0 * volume * scale]
sp = block.centroid()
ep = sp[:]
ep[2] += vector[2]
lines.append({
'start': sp,
'end': ep,
'name': "{}.selfweight.{}".format(self.assembly.name, key),
'color': color,
'arrow': 'end'
})
compas_rhino.draw_lines(lines, layer=layer, clear=False, redraw=False)
def draw_forces(self, scale=None, eps=None, mode=0):
"""Draw the contact forces at the interfaces.
Parameters
----------
scale : float, optional
The scale at which the forces should be drawn.
Default is `0.1`.
eps : float, optional
A tolerance for drawing small force vectors.
Force vectors with a scaled length smaller than this tolerance are not drawn.
Default is `1e-3`.
mode : int, optional
Display mode: 0 normal, 1 resultant forces
Default is 0
Notes
-----
* Forces are drawn as lines with arrow heads.
* Forces are drawn on a sub-layer *Forces* of the base layer, if a base layer was specified.
* At every interface point there can be a *compression* force (blue) and a *tension* force (red).
* Forces are named according to the following pattern:
``"{assembly_name}.force.{from block}-{to block}.{interface point}"``
"""
layer = "{}::Forces".format(self.layer) if self.layer else None
scale = scale or self.settings['scale.force']
eps = eps or self.settings['eps.force']
color_compression = self.settings['color.force:compression']
color_tension = self.settings['color.force:tension']
lines = []
for (a, b), attr in self.assembly.edges(True):
if attr['interface_forces'] is None:
continue
w = attr['interface_uvw'][2]
for i in range(len(attr['interface_points'])):
sp = attr['interface_points'][i]
c = attr['interface_forces'][i]['c_np']
t = attr['interface_forces'][i]['c_nn']
f = c - t
if f > 0.0:
if scale * f < eps:
continue
color = color_compression
elif f < 0.0:
if -scale * f < eps:
continue
color = color_tension
else:
continue
lines.append({
'start': sp,
'end': [sp[axis] + scale * f * w[axis] for axis in range(3)],
'color': color,
'name': "{0}.force.{1}-{2}.{3}".format(self.assembly.name, a, b, i),
'arrow': 'end'
})
compas_rhino.draw_lines(lines, layer=layer, clear=False, redraw=False)
def draw_resultants(self, scale=1.0, eps=1e-3):
"""
"""
layer = "{}::Resultants".format(self.layer) if self.layer else None
scale = scale or self.settings['scale.force']
eps = eps or self.settings['eps.force']
color_compression = self.settings['color.force:compression']
color_tension = self.settings['color.force:tension']
eps2 = eps**2
lines = []
points = []
for key in self.assembly.edges():
u, v = key
corners = self.assembly.edge_attribute(key, 'interface_points')
forces = self.assembly.edge_attribute(key, 'interface_forces')
if not forces:
continue
n = self.assembly.edge_attribute(key, 'interface_uvw')[2]
cx, cy, cz = 0, 0, 0
p = len(corners)
R = 0
for point, force in zip(corners, forces):
c = force['c_np']
t = force['c_nn']
f = c - t
cx += point[0] * f
cy += point[1] * f
cz += point[2] * f
R += f
if R**2 < eps2:
continue
cx = cx / R
cy = cy / R
cz = cz / R
c = [cx, cy, cz]
sp = add_vectors(c, scale_vector(n, R * scale))
ep = add_vectors(c, scale_vector(n, -R * scale))
if R < 0:
color = color_tension
else:
color = color_compression
lines.append({'start': sp, 'end': ep, 'color': color, 'name': "{0}.resultant.{1}-{2}".format(self.assembly.name, u, v)})
points.append({'pos': c, 'color': color, 'name': "{0}.resultant.{1}-{2}".format(self.assembly.name, u, v)})
compas_rhino.draw_lines(lines, layer=layer, clear=False, redraw=False)
compas_rhino.draw_points(points, layer=layer, clear=False, redraw=False)
def color_interfaces(self, mode=0):
""""""
if mode == 0:
return
if mode == 1:
color_compression = self.settings['color.force:compression']
color_tension = self.settings['color.force:tension']
resultants = []
for key in self.assembly.edges():
forces = self.assembly.edge_attribute(key, 'interface_forces')
if not forces:
resultants.append(0)
continue
R = 0
for force in forces:
c = force['c_np']
t = force['c_nn']
f = c - t
R += f
resultants.append(R)
Rmax = max(resultants)
Rmin = min(resultants)
print(Rmax)
print(Rmin)
for index, key in enumerate(self.assembly.edges()):
u, v = key
name = "{}.interface.{}-{}".format(self.assembly.name, u, v)
guids = compas_rhino.get_objects(name=name)
if not guids:
continue
guid = guids[0]
R = resultants[index]
color = i_to_blue((R - Rmin) / (Rmax - Rmin))
compas_rhino.rs.ObjectColor(guid, color)
# ==============================================================================
# Debugging
# ==============================================================================
if __name__ == "__main__":
pass
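
    # Illustrative sketch (assembly object and layer name are assumptions;
    # requires Rhino):
    #
    #   artist = AssemblyArtist(assembly, layer="Assembly")
    #   artist.draw_blocks(show_faces=True)
    #   artist.draw_interfaces()
    #   artist.draw_resultants(scale=0.05)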
|
py | 7df96081b23acafe2d99d83208d4fd179ef3e53b | import cv2
from detect import find_face
image_path = 'data/images'
label = ''
idx = 0
video_camera = cv2.VideoCapture(0)
while video_camera.isOpened():
if (cv2.waitKey(1) & 0xFF) == ord('q'):
break
ret, frame = video_camera.read()
cropped, bb = find_face(frame, 299)
if bb:
cv2.imwrite(image_path + '/' + label + '/' + label + '_' + str(idx) + '.jpg', cropped)
idx += 1
        print(idx)
    cv2.imshow('Camera', frame)

video_camera.release()
cv2.destroyAllWindows()
|
py | 7df960bb2f0ea14cfff571b0ef41dfbbf942af15 | from typing import Any, Optional
from django.db.models import Model
from graphene_django.filter import DjangoFilterConnectionField
from graphql import ResolveInfo
from graphene_permissions.permissions import AllowAny
class AuthNode:
"""
Permission mixin for queries (nodes).
Allows for simple configuration of access to nodes via class system.
"""
permission_classes = (AllowAny,)
@classmethod
def get_node(cls, info: ResolveInfo, id: str) -> Optional[Model]:
if all((perm.has_node_permission(info, id) for perm in cls.permission_classes)):
try:
object_instance = cls._meta.model.objects.get(pk=id) # type: ignore
except cls._meta.model.DoesNotExist: # type: ignore
object_instance = None
return object_instance
else:
return None
class AuthMutation:
"""
Permission mixin for ClientIdMutation.
"""
permission_classes = (AllowAny,)
@classmethod
def has_permission(cls, root: Any, info: ResolveInfo, input: dict) -> bool:
return all(
(
perm.has_mutation_permission(root, info, input)
for perm in cls.permission_classes
)
)
class AuthFilter(DjangoFilterConnectionField):
"""
Custom ConnectionField for permission system.
"""
permission_classes = (AllowAny,)
@classmethod
def has_permission(cls, info: ResolveInfo) -> bool:
return all(
(perm.has_filter_permission(info) for perm in cls.permission_classes)
)
@classmethod
def connection_resolver(
cls, resolver, connection, default_manager,
max_limit, enforce_first_or_last, filterset_class,
filtering_args, root, info, **args
):
filter_kwargs = {k: v for k, v in args.items() if k in filtering_args}
qs = filterset_class(
data=filter_kwargs,
queryset=default_manager.get_queryset()
).qs
if not cls.has_permission(info):
return super(DjangoFilterConnectionField, cls).connection_resolver(
resolver, connection, qs.none(), max_limit, enforce_first_or_last,
root, info, **args,
)
return super(DjangoFilterConnectionField, cls).connection_resolver(
resolver, connection, qs, max_limit, enforce_first_or_last,
            root, info, **args,
)
|
py | 7df9630eb9600f03d74393aebddb8d1d936e74ef | """
.. module: cloudaux.tests.openstack.mock_decorators
:platform: Unix
:copyright: Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. See AUTHORS for more
:license: Apache, see LICENSE for more details.
.. moduleauthor:: Michael Stair <[email protected]>
"""
from functools import wraps
def mock_get_regions(cloud_name, yaml_file):
return [ {'name':'RegionOne'} ]
def mock_openstack_conn():
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
kwargs['conn'] = None
return f(*args, **kwargs)
return decorated_function
return decorator
def mock_iter_account_region(account_regions):
def decorator(func):
@wraps(func)
def decorated_function(*args, **kwargs):
results = []
kwargs['account_name'] = 'TEST_ACCOUNT'
kwargs['cloud_name'] = 'foo'
kwargs['yaml_file'] = 'bar'
kwargs['region'] = 'RegionOne'
results.append(func(*args, **kwargs))
return results
return decorated_function
return decorator
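
# Illustrative sketch (test function and kwarg names are assumptions) of
# stacking these mocks the way the production decorators are stacked:
#
#   @mock_iter_account_region(None)
#   @mock_openstack_conn()
#   def list_regions(conn=None, **kwargs):
#       return kwargs['region']
#
#   assert list_regions() == ['RegionOne']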
|
py | 7df9646dc3f4ea057e687b5ff091212fde864dc6 | #!/usr/bin/env python
# pylint: disable=W0613, C0116
# type: ignore[union-attr]
# This program is dedicated to the public domain under the CC0 license.
"""
Simple Bot to reply to Telegram messages.
First, a few handler functions are defined. Then, those functions are passed to
the Dispatcher and registered at their respective places.
Then, the bot is started and runs until we press Ctrl-C on the command line.
Usage:
Basic Echobot example, repeats messages.
Press Ctrl-C on the command line or send a signal to the process to stop the
bot.
"""
import logging
import os
from telegram import Update
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters, CallbackContext
# Enable logging
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO
)
logger = logging.getLogger(__name__)
# Define a few command handlers. These usually take the two arguments update and
# context. Error handlers also receive the raised TelegramError object in error.
def start(update: Update, context: CallbackContext) -> None:
"""Send a message when the command /start is issued."""
update.message.reply_text('Hi! I\'m pyTeleBot!')
def help_command(update: Update, context: CallbackContext) -> None:
"""Send a message when the command /help is issued."""
update.message.reply_text('Help! I\'ve fallen and I can\'t get up!')
def weather_command(update: Update, context: CallbackContext) -> None:
"""Send a message when the command /weather issued."""
update.message.reply_text('Your weather report for your Location')
def echo(update: Update, context: CallbackContext) -> None:
"""Echo the user message."""
#update.message.reply_text(update.message.text)
pass
def main():
"""Start the bot."""
# Create the Updater and pass it your bot's token.
# Make sure to set use_context=True to use the new context based callbacks
# Post version 12 this will no longer be necessary
Bot_API_Key = os.getenv('BOT_FATHER_KEY')
updater = Updater(Bot_API_Key, use_context=True)
# Get the dispatcher to register handlers
dispatcher = updater.dispatcher
# on different commands - answer in Telegram
dispatcher.add_handler(CommandHandler("start", start))
dispatcher.add_handler(CommandHandler("help", help_command))
dispatcher.add_handler(CommandHandler("weather", weather_command))
# on noncommand i.e message - echo the message on Telegram
# dispatcher.add_handler(MessageHandler(Filters.text & ~Filters.command, echo))
# Start the Bot
updater.start_polling()
# Run the bot until you press Ctrl-C or the process receives SIGINT,
# SIGTERM or SIGABRT. This should be used most of the time, since
# start_polling() is non-blocking and will stop the bot gracefully.
updater.idle()
if __name__ == '__main__':
main()
|
py | 7df96473bc29e9a0aaa53dbd1cb68dc9caf8adf0 | from django.apps import AppConfig
class CleanerConfig(AppConfig):
name = 'Cleaner'
|
py | 7df964fb63af1fe81eaf729be1dc59de81ea45f4 | """Hive API: Internal supporting methods"""
import logging
log = logging.getLogger(__name__)
async def get_community_id(db, name):
"""Get community id from db."""
assert name, 'community name cannot be blank'
return await db.query_one("SELECT find_community_id( (:name)::VARCHAR, True )", name=name)
async def get_account_id(db, name):
"""Get account id from account name."""
return await db.query_one("SELECT find_account_id( (:name)::VARCHAR, True )", name=name)
|
py | 7df965106b9223bccd6760680346666c67f3dd92 | import torch
from torch import nn
from torch.autograd import Function
from torch.autograd.function import once_differentiable
from torch.nn.modules.utils import _pair
from models.ops import _C
from apex import amp
class _ROIAlign(Function):
@staticmethod
def forward(ctx, input, roi, output_size, spatial_scale, sampling_ratio, aligned):
ctx.save_for_backward(roi)
ctx.output_size = _pair(output_size)
ctx.spatial_scale = spatial_scale
ctx.sampling_ratio = sampling_ratio
ctx.input_shape = input.size()
ctx.aligned = aligned
output = _C.roi_align_forward(
input, roi, spatial_scale, output_size[0], output_size[1], sampling_ratio, aligned
)
return output
@staticmethod
@once_differentiable
def backward(ctx, grad_output):
rois, = ctx.saved_tensors
output_size = ctx.output_size
spatial_scale = ctx.spatial_scale
sampling_ratio = ctx.sampling_ratio
bs, ch, h, w = ctx.input_shape
grad_input = _C.roi_align_backward(
grad_output, rois, spatial_scale, output_size[0], output_size[1], bs, ch, h, w, sampling_ratio, ctx.aligned
)
return grad_input, None, None, None, None, None
roi_align = _ROIAlign.apply
class ROIAlign(nn.Module):
def __init__(self, output_size, spatial_scale, sampling_ratio, aligned):
super(ROIAlign, self).__init__()
self.output_size = output_size
self.spatial_scale = spatial_scale
self.sampling_ratio = sampling_ratio
self.aligned = aligned
@amp.float_function
def forward(self, input, rois):
return roi_align(
input, rois, self.output_size, self.spatial_scale, self.sampling_ratio, self.aligned
)
def __repr__(self):
tmpstr = self.__class__.__name__ + "("
tmpstr += "output_size=" + str(self.output_size)
tmpstr += ", spatial_scale=" + str(self.spatial_scale)
tmpstr += ", sampling_ratio=" + str(self.sampling_ratio)
tmpstr += ")"
return tmpstr
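
# Illustrative usage sketch (shapes and values are assumptions; requires the
# compiled models.ops._C extension and apex to be importable):
#
#   pooler = ROIAlign(output_size=(7, 7), spatial_scale=1.0 / 16,
#                     sampling_ratio=2, aligned=True)
#   features = torch.randn(2, 256, 64, 64)
#   # Each ROI row is (batch_index, x1, y1, x2, y2) in input coordinates.
#   rois = torch.tensor([[0.0, 16.0, 16.0, 320.0, 320.0]])
#   pooled = pooler(features, rois)  # -> shape (1, 256, 7, 7)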
|
py | 7df96529f762d3412a442f1f26933d5d21d0f857 | from typing import Dict
import requests
import responses
from magma.configuration_controller.response_processor.response_db_processor import (
ResponseDBProcessor,
)
from magma.configuration_controller.response_processor.strategies.strategies_mapping import (
processor_strategies,
)
from magma.db_service.db_initialize import DBInitializer
from magma.db_service.models import (
DBCbsd,
DBCbsdState,
DBChannel,
DBGrant,
DBRequest,
DBRequestState,
DBRequestType,
DBResponse,
)
from magma.db_service.session_manager import SessionManager
from magma.db_service.tests.local_db_test_case import LocalDBTestCase
from magma.fixtures.fake_requests.deregistration_requests import (
deregistration_requests,
)
from magma.fixtures.fake_requests.grant_requests import grant_requests
from magma.fixtures.fake_requests.heartbeat_requests import heartbeat_requests
from magma.fixtures.fake_requests.registration_requests import (
registration_requests,
)
from magma.fixtures.fake_requests.relinquishment_requests import (
relinquishment_requests,
)
from magma.fixtures.fake_requests.spectrum_inquiry_requests import (
spectrum_inquiry_requests,
)
from magma.fixtures.fake_responses.spectrum_inquiry_responses import (
single_channel_for_one_cbsd,
two_channels_for_one_cbsd,
zero_channels_for_one_cbsd,
)
from magma.mappings.request_response_mapping import request_response
from magma.mappings.types import (
CbsdStates,
GrantStates,
RequestStates,
RequestTypes,
ResponseCodes,
)
from parameterized import parameterized
CBSD_SERIAL_NR = "cbsdSerialNumber"
FCC_ID = "fccId"
USER_ID = "userId"
CBSD_ID = "cbsdId"
GRANT_ID = "grantId"
REGISTRATION_REQ = RequestTypes.REGISTRATION.value
DEREGISTRATION_REQ = RequestTypes.DEREGISTRATION.value
RELINQUISHMENT_REQ = RequestTypes.RELINQUISHMENT.value
HEARTBEAT_REQ = RequestTypes.HEARTBEAT.value
GRANT_REQ = RequestTypes.GRANT.value
SPECTRUM_INQ_REQ = RequestTypes.SPECTRUM_INQUIRY.value
INITIAL_GRANT_ATTEMPTS = 1
class DefaultResponseDBProcessorTestCase(LocalDBTestCase):
def setUp(self):
super().setUp()
DBInitializer(SessionManager(self.engine)).initialize()
@parameterized.expand([
(REGISTRATION_REQ, registration_requests),
(SPECTRUM_INQ_REQ, spectrum_inquiry_requests),
(GRANT_REQ, grant_requests),
(HEARTBEAT_REQ, heartbeat_requests),
(RELINQUISHMENT_REQ, relinquishment_requests),
(DEREGISTRATION_REQ, deregistration_requests),
])
@responses.activate
def test_processor_splits_sas_response_into_separate_db_objects_and_links_them_with_requests(
self, request_type_name, requests_fixtures,
):
# Given
db_requests = self._create_db_requests(
request_type_name, requests_fixtures,
)
response = self._prepare_response_from_db_requests(db_requests)
# When
self._process_response(
request_type_name=request_type_name, response=response, db_requests=db_requests,
)
nr_of_requests = len(db_requests)
# Then
self._verify_requests_number_and_state(db_requests, nr_of_requests)
self.assertEqual(2, self.session.query(DBRequestState).count())
self.assertEqual(
1, self.session.query(DBRequestType).filter(
DBRequestType.name == request_type_name,
).count(),
)
@parameterized.expand([
(
GRANT_REQ, grant_requests, ResponseCodes.SUCCESS.value,
GrantStates.GRANTED.value,
),
(
GRANT_REQ, grant_requests,
ResponseCodes.INTERFERENCE.value, GrantStates.IDLE.value,
),
(
GRANT_REQ, grant_requests,
ResponseCodes.GRANT_CONFLICT.value, GrantStates.IDLE.value,
),
(
GRANT_REQ, grant_requests,
ResponseCodes.TERMINATED_GRANT.value, GrantStates.IDLE.value,
),
(
HEARTBEAT_REQ, heartbeat_requests,
ResponseCodes.SUCCESS.value, GrantStates.AUTHORIZED.value,
),
(
HEARTBEAT_REQ, heartbeat_requests,
ResponseCodes.TERMINATED_GRANT.value, GrantStates.IDLE.value,
),
(
HEARTBEAT_REQ, heartbeat_requests,
ResponseCodes.SUSPENDED_GRANT.value, GrantStates.GRANTED.value,
),
(
HEARTBEAT_REQ, heartbeat_requests,
ResponseCodes.UNSYNC_OP_PARAM.value, GrantStates.UNSYNC.value,
),
(
RELINQUISHMENT_REQ, relinquishment_requests,
ResponseCodes.SUCCESS.value, GrantStates.IDLE.value,
),
])
@responses.activate
def test_grant_state_after_response(
self, request_type_name, requests_fixtures, response_code, expected_grant_state_name,
):
# Given
db_requests = self._create_db_requests(
request_type_name, requests_fixtures,
)
response = self._prepare_response_from_db_requests(
db_requests, response_code=response_code,
)
# When
self._process_response(
request_type_name=request_type_name, response=response, db_requests=db_requests,
)
nr_of_requests = len(db_requests)
# Then
self._verify_requests_number_and_state(db_requests, nr_of_requests)
self.assertListEqual(
[expected_grant_state_name] * nr_of_requests,
[g.state.name for g in self.session.query(DBGrant).all()],
)
@parameterized.expand([
(0, GRANT_REQ, INITIAL_GRANT_ATTEMPTS),
(400, GRANT_REQ, INITIAL_GRANT_ATTEMPTS + 1),
(401, GRANT_REQ, INITIAL_GRANT_ATTEMPTS + 1),
(0, RELINQUISHMENT_REQ, INITIAL_GRANT_ATTEMPTS),
(0, DEREGISTRATION_REQ, 0),
(0, SPECTRUM_INQ_REQ, 0),
])
@responses.activate
def test_grant_attempts_after_response(self, code, message_type, expected):
cbsd = DBCbsd(
cbsd_id=CBSD_ID,
user_id=USER_ID,
fcc_id=FCC_ID,
cbsd_serial_number=CBSD_SERIAL_NR,
grant_attempts=INITIAL_GRANT_ATTEMPTS,
state=self._get_db_enum(DBCbsdState, CbsdStates.REGISTERED.value),
)
request = DBRequest(
type=self._get_db_enum(DBRequestType, message_type),
state=self._get_db_enum(
DBRequestState, RequestStates.PENDING.value,
),
cbsd=cbsd,
payload={'cbsdId': CBSD_ID},
)
resp_json = {'response': {}, 'cbsd_id': CBSD_ID}
response = self._prepare_response_from_payload(
req_type=message_type,
response_payload={request_response[message_type]: [resp_json]},
response_code=code,
)
self.session.add(request)
self.session.commit()
self._process_response(
request_type_name=message_type,
response=response,
db_requests=[request],
)
self.assertEqual(expected, cbsd.grant_attempts)
@parameterized.expand([
(0, CbsdStates.REGISTERED),
(300, CbsdStates.UNREGISTERED),
(400, CbsdStates.UNREGISTERED),
(105, CbsdStates.UNREGISTERED),
(104, CbsdStates.UNREGISTERED),
(401, CbsdStates.UNREGISTERED),
(500, CbsdStates.UNREGISTERED),
(501, CbsdStates.UNREGISTERED),
])
@responses.activate
def test_cbsd_state_after_registration_response(self, sas_response_code, expected_cbsd_state):
# Given
db_requests = self._create_db_requests(
REGISTRATION_REQ, registration_requests,
)
response = self._prepare_response_from_db_requests(
db_requests, sas_response_code,
)
# When
self._process_response(
request_type_name=REGISTRATION_REQ, response=response, db_requests=db_requests,
)
states = [req.cbsd.state for req in db_requests]
# Then
[
self.assertTrue(state.name == expected_cbsd_state.value)
for state in states
]
@parameterized.expand([
(0, CbsdStates.UNREGISTERED),
(400, CbsdStates.UNREGISTERED),
(500, CbsdStates.UNREGISTERED),
])
@responses.activate
def test_cbsd_state_after_deregistration_response(self, sas_response_code, expected_cbsd_state):
# Given
db_requests = self._create_db_requests(
DEREGISTRATION_REQ, deregistration_requests,
)
self._set_cbsds_to_state(CbsdStates.REGISTERED.value)
response = self._prepare_response_from_db_requests(
db_requests, sas_response_code,
)
# When
self._process_response(
request_type_name=DEREGISTRATION_REQ, response=response, db_requests=db_requests,
)
states = [req.cbsd.state for req in db_requests]
# Then
[
self.assertTrue(state.name == expected_cbsd_state.value)
for state in states
]
@parameterized.expand([
(zero_channels_for_one_cbsd, 0),
(single_channel_for_one_cbsd, 1),
(two_channels_for_one_cbsd, 2),
])
@responses.activate
def test_channels_created_after_spectrum_inquiry_response(self, response_fixture_payload, expected_channels_count):
# Given
db_requests = self._create_db_requests(
SPECTRUM_INQ_REQ, spectrum_inquiry_requests,
)
response = self._prepare_response_from_payload(
SPECTRUM_INQ_REQ, response_fixture_payload,
)
# When
self._process_response(
request_type_name=SPECTRUM_INQ_REQ, response=response, db_requests=db_requests,
)
# Then
cbsd = self.session.query(DBCbsd).filter(
DBCbsd.cbsd_id == "foo",
).first()
self.assertEqual(expected_channels_count, len(cbsd.channels))
@responses.activate
def test_old_channels_deleted_after_spectrum_inquiry_response(self):
# Given
db_requests = self._create_db_requests(
SPECTRUM_INQ_REQ, spectrum_inquiry_requests,
)
cbsd = self.session.query(DBCbsd).filter(
DBCbsd.cbsd_id == "foo",
).first()
self._create_channel(cbsd, 1, 2)
self.assertEqual(1, len(cbsd.channels))
response = self._prepare_response_from_payload(
SPECTRUM_INQ_REQ, zero_channels_for_one_cbsd,
)
# When
self._process_response(SPECTRUM_INQ_REQ, response, db_requests)
# Then
self.assertEqual(0, len(cbsd.channels))
@responses.activate
def test_channel_params_set_on_grant_response(self):
# Given
cbsd_id = "foo"
low_frequency = 1
high_frequency = 2
max_eirp = 3
fixture = self._build_grant_request(
cbsd_id, low_frequency, high_frequency, max_eirp,
)
db_requests = self._create_db_requests(GRANT_REQ, [fixture])
response = self._prepare_response_from_db_requests(db_requests)
# When
self._process_response(
request_type_name=GRANT_REQ,
db_requests=db_requests, response=response,
)
# Then
grant = self.session.query(DBGrant).first()
self.assertEqual(low_frequency, grant.low_frequency)
self.assertEqual(high_frequency, grant.high_frequency)
self.assertEqual(max_eirp, grant.max_eirp)
def _process_response(self, request_type_name, response, db_requests):
processor = self._get_response_processor(request_type_name)
processor.process_response(db_requests, response, self.session)
self.session.commit()
@staticmethod
def _get_response_processor(req_type):
return ResponseDBProcessor(
request_response[req_type],
process_responses_func=processor_strategies[req_type]["process_responses"],
)
def _verify_requests_number_and_state(self, db_requests, nr_of_requests, desired_state="processed"):
self.assertEqual(nr_of_requests, self.session.query(DBRequest).count())
self.assertListEqual(
[r.id for r in db_requests], [
_id for (
_id,
) in self.session.query(DBResponse.id).all()
],
)
self.assertListEqual(
[desired_state] * nr_of_requests,
[r.state.name for r in self.session.query(DBRequest).all()],
)
def _set_cbsds_to_state(self, state_name):
registered_state = self._get_db_enum(DBCbsdState, state_name)
self.session.query(DBCbsd).update(
{DBCbsd.state_id: registered_state.id},
)
self.session.commit()
def _create_db_requests(
self,
request_type_name,
requests_fixtures,
cbsd_state=CbsdStates.UNREGISTERED.value,
request_state=RequestStates.PENDING.value,
):
db_requests = self._create_db_requests_from_fixture(
request_state=request_state,
request_type=request_type_name,
fixture=requests_fixtures,
cbsd_state=cbsd_state,
)
self.session.add_all(db_requests)
self.session.commit()
return db_requests
def _get_db_enum(self, data_type, name):
return self.session.query(data_type).filter(data_type.name == name).first()
def _prepare_response_from_db_requests(self, db_requests, response_code=None):
req_type = db_requests[0].type.name
response_payload = self._create_response_payload_from_db_requests(
response_type_name=request_response[req_type],
db_requests=db_requests,
)
return self._prepare_response_from_payload(req_type, response_payload, response_code)
@staticmethod
def _prepare_response_from_payload(req_type, response_payload, response_code=None):
if response_code is not None:
for response_json in response_payload[request_response[req_type]]:
response_json["response"]["responseCode"] = response_code
any_url = 'https://foo.com/foobar'
responses.add(
responses.GET, any_url,
json=response_payload, status=200,
)
        # url and method don't matter; this just uses the responses mock to craft a fully-formed requests.Response
return requests.get(any_url)
def _generate_cbsd_from_request_json(self, request_payload: Dict, cbsd_state: DBCbsdState):
cbsd_id = request_payload.get(CBSD_ID)
fcc_id = request_payload.get(FCC_ID)
user_id = request_payload.get(USER_ID)
serial_number = request_payload.get(CBSD_SERIAL_NR)
cbsd = DBCbsd(
cbsd_id=cbsd_id,
fcc_id=fcc_id,
cbsd_serial_number=serial_number,
user_id=user_id,
state=cbsd_state,
)
self.session.add(cbsd)
self.session.commit()
return cbsd
@staticmethod
def _build_grant_request(cbsd_id: str, low_frequency: int, high_frequency: int, max_eirp: int) -> Dict:
return {
GRANT_REQ: [
{
"cbsdId": cbsd_id,
"operationParam": {
"maxEirp": max_eirp,
"operationFrequencyRange": {
"lowFrequency": low_frequency,
"highFrequency": high_frequency,
},
},
},
],
}
def _create_channel(
self,
cbsd: DBCbsd,
low_frequency: int,
high_frequency: int,
) -> DBChannel:
channel = DBChannel(
cbsd=cbsd,
low_frequency=low_frequency,
high_frequency=high_frequency,
channel_type="some_type",
rule_applied="some_rule",
)
self.session.add(channel)
self.session.commit()
return channel
def _create_grant(self, grant_id, channel, cbsd, state):
grant = DBGrant(
channel=channel,
cbsd=cbsd,
state=state,
grant_id=grant_id,
)
self.session.add(grant)
self.session.commit()
return grant
def _create_db_requests_from_fixture(self, request_state, request_type, fixture, cbsd_state):
db_requests = []
for reqs in fixture:
for req in reqs[request_type]:
db_requests.append(
DBRequest(
cbsd=self._generate_cbsd_from_request_json(
req, self._get_db_enum(DBCbsdState, cbsd_state),
),
state=self._get_db_enum(DBRequestState, request_state),
type=self._get_db_enum(DBRequestType, request_type),
payload=req,
),
)
return db_requests
@staticmethod
def _create_response_payload_from_db_requests(response_type_name, db_requests, sas_response_code=0):
response_payload = {response_type_name: []}
for i, db_request in enumerate(db_requests):
cbsd_id = db_request.cbsd.cbsd_id or str(i)
response_json = {
"response": {
"responseCode": sas_response_code,
}, "cbsdId": cbsd_id,
}
if db_request.payload.get(GRANT_ID, ""):
response_json[GRANT_ID] = db_request.payload.get(GRANT_ID)
elif response_type_name == request_response[GRANT_REQ]:
response_json[GRANT_ID] = f'test_grant_id_for_{db_request.cbsd_id}'
response_payload[response_type_name].append(response_json)
return response_payload
|
py | 7df96625a3855660bc365f96435c69d51c1b0afd | # Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
## @package onnx
# Module caffe2.python.onnx.tests.c2_ref_test
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import os
import unittest
from caffe2.python import core
from caffe2.proto import caffe2_pb2
import onnx
from onnx.helper import make_node, make_graph, make_tensor, make_tensor_value_info, make_model
from caffe2.python.onnx.helper import c2_native_run_net, c2_native_run_op
from onnx import defs, mapping
import caffe2.python.onnx.frontend as c2_onnx
import caffe2.python.onnx.backend as c2
import numpy as np
from caffe2.python.models.download import downloadFromURLToFile, getURLFromName, deleteDirectory
from caffe2.python.onnx.helper import dummy_name
from caffe2.python.onnx.tests.test_utils import TestCase
class TestCaffe2Basic(TestCase):
def test_dummy_name(self):
n1 = dummy_name()
n2 = dummy_name()
assert n1 != n2, "Got same names in different calls: {}".format(n1)
def test_relu_node_inplace(self):
X = np.random.randn(3, 2).astype(np.float32)
Y_ref = np.clip(X, 0, np.inf)
node_def = make_node(
"Relu", ["X"], ["Y"], consumed_inputs=[1])
output = c2.run_node(
node_def, {"X": X})
np.testing.assert_almost_equal(output.X, Y_ref)
node_def = make_node(
"Relu", ["X"], ["Y"], consumed_inputs=[1])
graph_def = make_graph(
[node_def],
name="test",
inputs=[make_tensor_value_info("X", onnx.TensorProto.FLOAT, [3, 2])],
outputs=[make_tensor_value_info("X", onnx.TensorProto.FLOAT, [3, 2])])
c2_rep = c2.prepare(make_model(graph_def, producer_name='caffe2-ref-test'))
output = c2_rep.run({"X": X})
np.testing.assert_almost_equal(output.X, Y_ref)
def test_relu_graph(self):
X = np.random.randn(3, 2).astype(np.float32)
Y_ref = np.clip(X, 0, np.inf)
node_def = make_node(
"Relu", ["X"], ["Y"])
output = c2.run_node(
node_def, {"X": X})
np.testing.assert_almost_equal(output.Y, Y_ref)
graph_def = make_graph(
[node_def],
name="test",
inputs=[make_tensor_value_info("X", onnx.TensorProto.FLOAT, [3, 2])],
outputs=[make_tensor_value_info("Y", onnx.TensorProto.FLOAT, [3, 2])])
c2_rep = c2.prepare(make_model(graph_def, producer_name='caffe2-ref-test'))
output = c2_rep.run(X)
np.testing.assert_almost_equal(output.Y, Y_ref)
def test_initializer(self):
X = np.array([[1, 2], [3, 4]]).astype(np.float32)
Y = np.array([[1, 2], [3, 4]]).astype(np.float32)
weight = np.array([[1, 0], [0, 1]])
graph_def = make_graph(
[make_node("Add", ["X", "Y"], ["Z0"]),
make_node("Cast", ["Z0"], ["Z"], to="float"),
make_node("Mul", ["Z", "weight"], ["W0"]),
make_node("Tanh", ["W0"], ["W1"]),
make_node("Sigmoid", ["W1"], ["W2"]),
make_node("Scale", ["W2"], ["W3"], scale=-1.0)],
name="test_initializer",
inputs=[
make_tensor_value_info("X", onnx.TensorProto.FLOAT, (2, 2)),
make_tensor_value_info("Y", onnx.TensorProto.FLOAT, (2, 2)),
make_tensor_value_info("weight", onnx.TensorProto.FLOAT, (2, 2)),
],
outputs=[
make_tensor_value_info("W3", onnx.TensorProto.FLOAT, (2, 2))
],
initializer=[make_tensor("weight",
onnx.TensorProto.FLOAT,
[2, 2],
weight.flatten().astype(float))]
)
def sigmoid(x):
return 1 / (1 + np.exp(-x))
W_ref = -sigmoid(np.tanh((X + Y) * weight))
c2_rep = c2.prepare(make_model(graph_def, producer_name='caffe2-ref-test'))
output = c2_rep.run({"X": X, "Y": Y})
np.testing.assert_almost_equal(output["W3"], W_ref)
def test_gemm(self):
# simple
A = np.random.randn(3, 2).astype(np.float32)
B = np.random.randn(2, 4).astype(np.float32)
C = np.random.randn(3, 4).astype(np.float32)
node_def = make_node(
'Gemm',
['A', 'B', 'C'],
["Y"])
output = c2.run_node(node_def, [A, B, C])
np.testing.assert_almost_equal(output["Y"], np.dot(A, B) + C)
# transA
A = np.transpose(A)
node_def = make_node(
'Gemm',
['A', 'B', 'C'],
["Y"],
transA=True)
output = c2.run_node(node_def, [A, B, C])
np.testing.assert_almost_equal(
output["Y"],
np.dot(np.transpose(A), B) + C)
# revert A
A = np.transpose(A)
# transB
B = np.transpose(B)
node_def = make_node(
'Gemm',
['A', 'B', 'C'],
["Y"],
transB=True)
output = c2.run_node(node_def, [A, B, C])
np.testing.assert_almost_equal(
output["Y"],
np.dot(A, np.transpose(B)) + C)
# revert A
B = np.transpose(B)
# scale
alpha = np.random.random()
beta = np.random.random()
node_def = make_node(
'Gemm',
['A', 'B', 'C'],
["Y"],
alpha=alpha,
beta=beta)
output = c2.run_node(node_def, [A, B, C])
np.testing.assert_almost_equal(
output["Y"],
alpha * np.dot(A, B) + beta * C)
# broadcast
C = np.random.randn(4).astype(np.float32)
node_def = make_node(
'Gemm',
['A', 'B', 'C'],
["Y"],
alpha=alpha,
beta=beta,
broadcast=1)
output = c2.run_node(node_def, [A, B, C])
np.testing.assert_almost_equal(
output["Y"],
alpha * np.dot(A, B) + beta * C)
def test_tensor_filling_ops(self):
for dtype in [
onnx.TensorProto.FLOAT,
onnx.TensorProto.DOUBLE,
onnx.TensorProto.BOOL,
onnx.TensorProto.INT8,
onnx.TensorProto.INT16,
onnx.TensorProto.INT32,
onnx.TensorProto.INT64,
onnx.TensorProto.UINT8,
onnx.TensorProto.UINT16,
onnx.TensorProto.UINT32,
]:
shape = (1, 2, 3)
vals = np.random.randn(*shape)
if dtype != onnx.TensorProto.BOOL:
vals *= 5
vals = vals.astype(
mapping.TENSOR_TYPE_TO_NP_TYPE[dtype])
tensor = make_tensor(
name='test-tensor-{}'.format(dtype),
data_type=dtype,
dims=[1, 2, 3],
vals=vals.flatten().tolist(),
)
op = c2.Caffe2Backend._create_tensor_filling_op(tensor)
self.assertEqual(len(op.input), 0)
self.assertEqual(op.output, [tensor.name])
ws, output = c2_native_run_op(op, inputs=[])
self.assertEqual(len(output), 1)
np.testing.assert_almost_equal(output[0], vals)
np.testing.assert_almost_equal(ws.FetchBlob(op.output[0]), vals)
def test_slice(self):
X = np.random.randn(1, 2, 3).astype(np.float32)
starts = np.array([0, 1, 0], dtype=np.int32)
ends = np.array([-1, 2, 3], dtype=np.int32)
predict_net = caffe2_pb2.NetDef()
predict_net.name = 'test-slice-net'
predict_net.external_input[:] = ['X']
predict_net.external_output[:] = ['Y']
predict_net.op.extend([
core.CreateOperator(
'Slice',
inputs=['X'],
outputs=['Y'],
starts=starts,
ends=ends,
),
])
ws, (Y,) = c2_native_run_net(
init_net=None,
predict_net=predict_net,
inputs=[X])
onnx_model = c2_onnx.caffe2_net_to_onnx_model(
predict_net=predict_net,
value_info={
'X': (onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[X.dtype], X.shape)
})
Y, = c2.run_model(onnx_model, inputs=[X])
np.testing.assert_almost_equal(Y, X[:, 1:2, :])
class TestCaffe2End2End(TestCase):
def _model_dir(self, model):
caffe2_home = os.path.expanduser(os.getenv('ONNX_HOME', '~/.caffe2'))
models_dir = os.getenv('ONNX_MODELS', os.path.join(caffe2_home, 'models'))
return os.path.join(models_dir, model)
def _test_net(self,
net_name,
input_blob_dims=(1, 3, 224, 224),
decimal=7):
np.random.seed(seed=0)
model_dir = self._model_dir(net_name)
if not os.path.exists(model_dir):
self._download(net_name)
c2_predict_pb = os.path.join(model_dir, 'predict_net.pb')
c2_predict_net = caffe2_pb2.NetDef()
with open(c2_predict_pb, 'rb') as f:
c2_predict_net.ParseFromString(f.read())
c2_predict_net.name = net_name
c2_init_pb = os.path.join(model_dir, 'init_net.pb')
c2_init_net = caffe2_pb2.NetDef()
with open(c2_init_pb, 'rb') as f:
c2_init_net.ParseFromString(f.read())
c2_init_net.name = net_name + '_init'
n, c, h, w = input_blob_dims
data = np.random.randn(n, c, h, w).astype(np.float32)
inputs = [data]
_, c2_outputs = c2_native_run_net(c2_init_net, c2_predict_net, inputs)
del _
        with open(os.path.join(model_dir, 'value_info.json')) as value_info_file:
            value_info = json.load(value_info_file)
        model = c2_onnx.caffe2_net_to_onnx_model(
            predict_net=c2_predict_net,
            init_net=c2_init_net,
            value_info=value_info)
c2_ir = c2.prepare(model)
onnx_outputs = c2_ir.run(inputs)
self.assertSameOutputs(c2_outputs, onnx_outputs, decimal=decimal)
def _download(self, model):
model_dir = self._model_dir(model)
assert not os.path.exists(model_dir)
os.makedirs(model_dir)
for f in ['predict_net.pb', 'init_net.pb', 'value_info.json']:
url = getURLFromName(model, f)
dest = os.path.join(model_dir, f)
try:
try:
downloadFromURLToFile(url, dest,
show_progress=False)
except TypeError:
# show_progress not supported prior to
# Caffe2 78c014e752a374d905ecfb465d44fa16e02a28f1
# (Sep 17, 2017)
downloadFromURLToFile(url, dest)
except Exception as e:
print("Abort: {reason}".format(reason=e))
print("Cleaning up...")
deleteDirectory(model_dir)
exit(1)
def test_alexnet(self):
self._test_net('bvlc_alexnet', decimal=4)
def test_resnet50(self):
self._test_net('resnet50')
@unittest.skipIf(
os.environ.get('JENKINS_URL'),
'Taking too long to download!')
def test_vgg16(self):
self._test_net('vgg16')
@unittest.skipIf(
os.environ.get('JENKINS_URL'),
'Running vgg19 on Travis with Python 2 keeps getting OOM!')
def test_vgg19(self):
self._test_net('vgg19')
def test_inception_v1(self):
self._test_net('inception_v1', decimal=2)
def test_inception_v2(self):
self._test_net('inception_v2')
@unittest.skip('Need to add support for ConstantFill operator')
def test_squeezenet(self):
self._test_net('squeezenet')
def test_shufflenet(self):
self._test_net('shufflenet')
def test_densenet121(self):
self._test_net('densenet121')
def test_bvlc_googlenet(self):
self._test_net('bvlc_googlenet')
def test_bvlc_reference_caffenet(self):
self._test_net('bvlc_reference_caffenet')
def test_bvlc_reference_rcnn_ilsvrc13(self):
self._test_net('bvlc_reference_rcnn_ilsvrc13')
if __name__ == '__main__':
unittest.main()
|
py | 7df9668f01ca0f22a7ad8ff065946ca00367ab2e | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Opencascade(CMakePackage):
"""Open CASCADE Technology is a software development kit (SDK)
intended for development of applications dealing with 3D CAD data,
freely available in open source. It includes a set of C++ class
libraries providing services for 3D surface and solid modeling,
visualization, data exchange and rapid application development."""
homepage = "https://www.opencascade.com"
url = "http://git.dev.opencascade.org/gitweb/?p=occt.git;a=snapshot;h=refs/tags/V7_4_0;sf=tgz"
version('7.4.0p1', extension='tar.gz',
sha256='e00fedc221560fda31653c23a8f3d0eda78095c87519f338d4f4088e2ee9a9c0')
version('7.4.0', extension='tar.gz',
sha256='655da7717dac3460a22a6a7ee68860c1da56da2fec9c380d8ac0ac0349d67676')
variant('tbb', default=False,
description='Build with Intel Threading Building Blocks')
variant('vtk', default=False,
description='Enable VTK support')
variant('freeimage', default=False,
description='Build with FreeImage')
variant('rapidjson', default=False,
description='Build with rapidjson')
depends_on('intel-tbb', when='+tbb')
depends_on('vtk', when='+vtk')
depends_on('freeimage', when='+freeimage')
depends_on('rapidjson', when='+rapidjson')
depends_on('freetype')
depends_on('tcl')
depends_on('tk')
depends_on('gl')
def url_for_version(self, version):
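        # version.underscored turns e.g. '7.4.0' into '7_4_0', so the formatted
        # URL ends in ...h=refs/tags/V7_4_0;sf=tgz (matching the `url` above)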
url = "http://git.dev.opencascade.org/gitweb/?p=occt.git;a=snapshot;h=refs/tags/V{0};sf=tgz"
return url.format(version.underscored)
def cmake_args(self):
args = []
if '+tbb' in self.spec:
args.append('-DUSE_TBB=ON')
            args.append('-D3RDPARTY_TBB_DIR=%s' %
                        self.spec['intel-tbb'].prefix)
else:
args.append('-DUSE_TBB=OFF')
if '+vtk' in self.spec:
args.append('-DUSE_VTK=ON')
args.append('-D3RDPARTY_VTK_DIR=%s' %
self.spec['vtk'].prefix)
else:
args.append('-DUSE_VTK=OFF')
if '+freeimage' in self.spec:
args.append('-DUSE_FREEIMAGE=ON')
args.append('-D3RDPARTY_FREEIMAGE_DIR=%s' %
self.spec['freeimage'].prefix)
else:
args.append('-DUSE_FREEIMAGE=OFF')
if '+rapidjson' in self.spec:
args.append('-DUSE_RAPIDJSON=ON')
args.append('-D3RDPARTY_RAPIDJSON_DIR=%s' %
self.spec['rapidjson'].prefix)
else:
args.append('-DUSE_RAPIDJSON=OFF')
return args
|
py | 7df96938b7461c30a8303dd1b4b7b8b8572cfeee | from collections import OrderedDict, defaultdict
try:
import itertools.izip as zip
except ImportError:
pass
import numpy as np
from .interface import Interface, DataError
from ..dimension import Dimension
from ..element import Element
from ..dimension import OrderedDict as cyODict
from ..ndmapping import NdMapping, item_check
from .. import util
class DictInterface(Interface):
"""
Interface for simple dictionary-based dataset format. The dictionary
    keys correspond to the column (i.e. dimension) names and the values
are collections representing the values in that column.
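
    For example, a simple two-column dataset could be supplied as
    ``{'x': np.arange(3), 'y': ['a', 'b', 'c']}`` (illustrative values).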
"""
types = (dict, OrderedDict, cyODict)
datatype = 'dictionary'
@classmethod
def dimension_type(cls, dataset, dim):
name = dataset.get_dimension(dim, strict=True).name
values = dataset.data[name]
return type(values) if np.isscalar(values) else values.dtype.type
@classmethod
def init(cls, eltype, data, kdims, vdims):
odict_types = (OrderedDict, cyODict)
if kdims is None:
kdims = eltype.kdims
if vdims is None:
vdims = eltype.vdims
dimensions = [d.name if isinstance(d, Dimension) else
d for d in kdims + vdims]
if isinstance(data, tuple):
data = {d: v for d, v in zip(dimensions, data)}
elif util.is_dataframe(data) and all(d in data for d in dimensions):
data = {d: data[d] for d in dimensions}
elif isinstance(data, np.ndarray):
if data.ndim == 1:
if eltype._auto_indexable_1d and len(kdims)+len(vdims)>1:
data = np.column_stack([np.arange(len(data)), data])
else:
data = np.atleast_2d(data).T
data = {k: data[:,i] for i,k in enumerate(dimensions)}
elif isinstance(data, list) and data == []:
data = OrderedDict([(d, []) for d in dimensions])
elif isinstance(data, list) and np.isscalar(data[0]):
data = {dimensions[0]: np.arange(len(data)), dimensions[1]: data}
elif (isinstance(data, list) and isinstance(data[0], tuple) and len(data[0]) == 2
and any(isinstance(v, tuple) for v in data[0])):
dict_data = zip(*((util.wrap_tuple(k)+util.wrap_tuple(v))
for k, v in data))
data = {k: np.array(v) for k, v in zip(dimensions, dict_data)}
# Ensure that interface does not consume data of other types
# with an iterator interface
elif not any(isinstance(data, tuple(t for t in interface.types if t is not None))
for interface in cls.interfaces.values()):
data = {k: v for k, v in zip(dimensions, zip(*data))}
elif (isinstance(data, dict) and not any(isinstance(v, np.ndarray) for v in data.values()) and not
any(d in data or any(d in k for k in data if isinstance(k, tuple)) for d in dimensions)):
# For data where both keys and values are dimension values
# e.g. {('A', 'B'): (1, 2)} (should consider deprecating)
dict_data = sorted(data.items())
k, v = dict_data[0]
if len(util.wrap_tuple(k)) != len(kdims) or len(util.wrap_tuple(v)) != len(vdims):
raise ValueError("Dictionary data not understood, should contain a column "
"per dimension or a mapping between key and value dimension "
"values.")
dict_data = zip(*((util.wrap_tuple(k)+util.wrap_tuple(v))
for k, v in dict_data))
data = {k: np.array(v) for k, v in zip(dimensions, dict_data)}
if not isinstance(data, cls.types):
raise ValueError("DictInterface interface couldn't convert data.""")
elif isinstance(data, dict):
unpacked = []
for d, vals in data.items():
if isinstance(d, tuple):
vals = np.asarray(vals)
if vals.shape == (0,):
for sd in d:
unpacked.append((sd, np.array([], dtype=vals.dtype)))
                    elif vals.ndim != 2 or vals.shape[1] != len(d):
                        raise ValueError("Values for %s dimensions did not have "
                                         "the expected shape." % (d,))
else:
for i, sd in enumerate(d):
unpacked.append((sd, vals[:, i]))
else:
vals = vals if np.isscalar(vals) else np.asarray(vals)
if not np.isscalar(vals) and not vals.ndim == 1:
raise ValueError('DictInterface expects data for each column to be flat.')
unpacked.append((d, vals))
if not cls.expanded([d[1] for d in unpacked if not np.isscalar(d[1])]):
raise ValueError('DictInterface expects data to be of uniform shape.')
if isinstance(data, odict_types):
data.update(unpacked)
else:
data = OrderedDict(unpacked)
return data, {'kdims':kdims, 'vdims':vdims}, {}
@classmethod
def validate(cls, dataset, vdims=True):
dim_types = 'all' if vdims else 'key'
dimensions = dataset.dimensions(dim_types, label='name')
not_found = [d for d in dimensions if d not in dataset.data]
if not_found:
raise DataError('Following columns specified as dimensions '
'but not found in data: %s' % not_found, cls)
lengths = [(dim, 1 if np.isscalar(dataset.data[dim]) else len(dataset.data[dim]))
for dim in dimensions]
if len({l for d, l in lengths if l > 1}) > 1:
lengths = ', '.join(['%s: %d' % l for l in sorted(lengths)])
raise DataError('Length of columns must be equal or scalar, '
'columns have lengths: %s' % lengths, cls)
@classmethod
def unpack_scalar(cls, dataset, data):
"""
Given a dataset object and data in the appropriate format for
the interface, return a simple scalar.
"""
if len(data) != 1:
return data
key = list(data.keys())[0]
if len(data[key]) == 1 and key in dataset.vdims:
scalar = data[key][0]
return scalar.compute() if hasattr(scalar, 'compute') else scalar
return data
@classmethod
def isscalar(cls, dataset, dim):
name = dataset.get_dimension(dim, strict=True).name
values = dataset.data[name]
return np.isscalar(values) or len(np.unique(values)) == 1
@classmethod
def shape(cls, dataset):
return cls.length(dataset), len(dataset.data),
@classmethod
def length(cls, dataset):
lengths = [len(vals) for vals in dataset.data.values() if not np.isscalar(vals)]
return max(lengths) if lengths else 1
@classmethod
def array(cls, dataset, dimensions):
if not dimensions:
dimensions = dataset.dimensions(label='name')
else:
            dimensions = [dataset.get_dimension(d).name for d in dimensions]
        arrays = [dataset.data[dim] for dim in dimensions]
return np.column_stack([np.full(len(dataset), arr) if np.isscalar(arr) else arr
for arr in arrays])
@classmethod
def add_dimension(cls, dataset, dimension, dim_pos, values, vdim):
dim = dimension.name if isinstance(dimension, Dimension) else dimension
data = list(dataset.data.items())
data.insert(dim_pos, (dim, values))
return OrderedDict(data)
@classmethod
def redim(cls, dataset, dimensions):
all_dims = dataset.dimensions()
renamed = []
for k, v in dataset.data.items():
if k in dimensions:
k = dimensions[k].name
elif k in all_dims:
k = dataset.get_dimension(k).name
renamed.append((k, v))
return OrderedDict(renamed)
@classmethod
def concat(cls, datasets, dimensions, vdims):
columns = defaultdict(list)
for key, ds in datasets:
for k, vals in ds.data.items():
columns[k].append(vals)
for d, k in zip(dimensions, key):
columns[d.name].append(np.full(len(ds), k))
template = datasets[0][1]
dims = dimensions+template.dimensions()
return OrderedDict([(d.name, np.concatenate(columns[d.name])) for d in dims])
@classmethod
def sort(cls, dataset, by=[], reverse=False):
by = [dataset.get_dimension(d).name for d in by]
if len(by) == 1:
sorting = cls.values(dataset, by[0]).argsort()
else:
arrays = [dataset.dimension_values(d) for d in by]
sorting = util.arglexsort(arrays)
return OrderedDict([(d, v if np.isscalar(v) else (v[sorting][::-1] if reverse else v[sorting]))
for d, v in dataset.data.items()])
@classmethod
def values(cls, dataset, dim, expanded=True, flat=True):
dim = dataset.get_dimension(dim).name
values = dataset.data.get(dim)
if np.isscalar(values):
if not expanded:
return np.array([values])
values = np.full(len(dataset), values, dtype=np.array(values).dtype)
else:
if not expanded:
return util.unique_array(values)
values = np.asarray(values)
return values
@classmethod
def reindex(cls, dataset, kdims, vdims):
dimensions = [dataset.get_dimension(d).name for d in kdims+vdims]
return OrderedDict([(d, dataset.dimension_values(d))
for d in dimensions])
@classmethod
def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs):
# Get dimensions information
dimensions = [dataset.get_dimension(d) for d in dimensions]
kdims = [kdim for kdim in dataset.kdims if kdim not in dimensions]
vdims = dataset.vdims
# Update the kwargs appropriately for Element group types
group_kwargs = {}
group_type = dict if group_type == 'raw' else group_type
if issubclass(group_type, Element):
group_kwargs.update(util.get_param_values(dataset))
group_kwargs['kdims'] = kdims
group_kwargs.update(kwargs)
# Find all the keys along supplied dimensions
keys = (tuple(dataset.data[d.name] if np.isscalar(dataset.data[d.name])
else dataset.data[d.name][i] for d in dimensions)
for i in range(len(dataset)))
# Iterate over the unique entries applying selection masks
grouped_data = []
for unique_key in util.unique_iterator(keys):
mask = cls.select_mask(dataset, dict(zip(dimensions, unique_key)))
group_data = OrderedDict(((d.name, dataset.data[d.name] if np.isscalar(dataset.data[d.name])
else dataset.data[d.name][mask])
for d in kdims+vdims))
group_data = group_type(group_data, **group_kwargs)
grouped_data.append((unique_key, group_data))
if issubclass(container_type, NdMapping):
with item_check(False):
return container_type(grouped_data, kdims=dimensions)
else:
return container_type(grouped_data)
@classmethod
def select(cls, dataset, selection_mask=None, **selection):
if selection_mask is None:
selection_mask = cls.select_mask(dataset, selection)
indexed = cls.indexed(dataset, selection)
data = OrderedDict((k, v if np.isscalar(v) else v[selection_mask])
for k, v in dataset.data.items())
if indexed and len(list(data.values())[0]) == 1 and len(dataset.vdims) == 1:
value = data[dataset.vdims[0].name]
return value if np.isscalar(value) else value[0]
return data
@classmethod
def sample(cls, dataset, samples=[]):
mask = False
for sample in samples:
sample_mask = True
if np.isscalar(sample): sample = [sample]
for i, v in enumerate(sample):
name = dataset.get_dimension(i).name
sample_mask &= (dataset.data[name]==v)
mask |= sample_mask
return {k: col if np.isscalar(col) else np.array(col)[mask]
for k, col in dataset.data.items()}
@classmethod
def aggregate(cls, dataset, kdims, function, **kwargs):
kdims = [dataset.get_dimension(d, strict=True).name for d in kdims]
vdims = dataset.dimensions('value', label='name')
groups = cls.groupby(dataset, kdims, list, OrderedDict)
aggregated = OrderedDict([(k, []) for k in kdims+vdims])
for key, group in groups:
key = key if isinstance(key, tuple) else (key,)
for kdim, val in zip(kdims, key):
aggregated[kdim].append(val)
for vdim, arr in group.items():
if vdim in dataset.vdims:
if np.isscalar(arr):
reduced = arr
elif isinstance(function, np.ufunc):
reduced = function.reduce(arr, **kwargs)
else:
reduced = function(arr, **kwargs)
aggregated[vdim].append(reduced)
return aggregated
@classmethod
def iloc(cls, dataset, index):
rows, cols = index
scalar = False
if np.isscalar(cols):
scalar = np.isscalar(rows)
cols = [dataset.get_dimension(cols, strict=True)]
elif isinstance(cols, slice):
cols = dataset.dimensions()[cols]
else:
cols = [dataset.get_dimension(d, strict=True) for d in cols]
if np.isscalar(rows):
rows = [rows]
new_data = OrderedDict()
for d, values in dataset.data.items():
if d in cols:
if np.isscalar(values):
new_data[d] = values
else:
new_data[d] = values[rows]
if scalar:
arr = new_data[cols[0].name]
return arr if np.isscalar(arr) else arr[0]
return new_data
Interface.register(DictInterface)
|
py | 7df96974a9db1c362c188ac6ed69846be221350e | """Shelly Configuration Schemas."""
# pylint: disable=dangerous-default-value
from homeassistant.const import (
CONF_DEVICES, CONF_DISCOVERY, CONF_ID, CONF_NAME, CONF_PASSWORD,
CONF_SCAN_INTERVAL, CONF_USERNAME, EVENT_HOMEASSISTANT_STOP)
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from .const import *
ALL_SENSORS_W_EXTRA = list(ALL_SENSORS.keys()) + list(EXTRA_SENSORS.keys())
SENSOR_SCHEMA = vol.Schema({
vol.Optional(CONF_NAME): cv.string,
})
SETTING_SCHEMA = vol.Schema({
vol.Optional(CONF_DECIMALS): cv.positive_int,
vol.Optional(CONF_DIV): cv.positive_int,
vol.Optional(CONF_UNIT): cv.string
})
SETTINGS_SCHEMA = vol.Schema({
vol.Optional('temperature'): SETTING_SCHEMA,
vol.Optional('humidity'): SETTING_SCHEMA,
vol.Optional('illuminance'): SETTING_SCHEMA,
vol.Optional('current'): SETTING_SCHEMA,
vol.Optional('total_consumption'): SETTING_SCHEMA,
vol.Optional('total_returned'): SETTING_SCHEMA,
vol.Optional('current_consumption'): SETTING_SCHEMA,
vol.Optional('device_temp'): SETTING_SCHEMA,
vol.Optional('voltage'): SETTING_SCHEMA,
vol.Optional('power_factor'): SETTING_SCHEMA,
vol.Optional('uptime'): SETTING_SCHEMA,
vol.Optional('rssi'): SETTING_SCHEMA
})
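# Illustrative value accepted by SETTINGS_SCHEMA, assuming CONF_DECIMALS,
# CONF_DIV and CONF_UNIT map to the lowercase keys shown (hypothetical):
#
#     {"temperature": {"decimals": 1, "div": 10, "unit": "C"}}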
DEVICE_SCHEMA = vol.Schema({
vol.Required(CONF_ID): cv.string,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_LIGHT_SWITCH, default=False): cv.boolean,
vol.Optional(CONF_SENSORS):
vol.All(cv.ensure_list, [vol.In(ALL_SENSORS_W_EXTRA)]),
vol.Optional(CONF_UPGRADE_SWITCH): cv.boolean,
    vol.Optional(CONF_UNAVALABLE_AFTER_SEC): cv.positive_int,
vol.Optional(CONF_ENTITY_ID): cv.string,
vol.Optional(CONF_POWER_DECIMALS): cv.positive_int, #deprecated
vol.Optional(CONF_SETTINGS, default={}): SETTINGS_SCHEMA
})
STEP_SCHEMA = vol.Schema({
vol.Optional(CONF_OBJECT_ID_PREFIX,
default=DEFAULT_OBJECT_ID_PREFIX): str,
})
CONFIG_SCHEMA_ROOT = vol.Schema({
vol.Optional(CONF_IGMPFIX,
default=DEFAULT_IGMPFIX): cv.boolean,
vol.Optional(CONF_SHOW_ID_IN_NAME,
default=DEFAULT_SHOW_ID_IN_NAME): cv.boolean,
vol.Optional(CONF_DISCOVERY,
default=DEFAULT_DISCOVERY): cv.boolean,
vol.Optional(CONF_OBJECT_ID_PREFIX,
default=DEFAULT_OBJECT_ID_PREFIX): cv.string,
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_DEVICES,
default=[]): vol.All(cv.ensure_list, [DEVICE_SCHEMA]),
vol.Optional(CONF_VERSION,
default=False): cv.boolean,
vol.Optional(CONF_WIFI_SENSOR): cv.boolean, #deprecated
vol.Optional(CONF_UPTIME_SENSOR): cv.boolean, #deprecated
vol.Optional(CONF_UPGRADE_SWITCH, default=True): cv.boolean,
    vol.Optional(CONF_UNAVALABLE_AFTER_SEC, default=90): cv.positive_int,
vol.Optional(CONF_SENSORS, default=DEFAULT_SENSORS):
vol.All(cv.ensure_list, [vol.In(ALL_SENSORS_W_EXTRA)]),
vol.Optional(CONF_ATTRIBUTES, default=list(DEFAULT_ATTRIBUTES)):
vol.All(cv.ensure_list,
[vol.In(ALL_ATTRIBUTES | EXTRA_ATTRIBUTES)]),
vol.Optional(CONF_ADDITIONAL_INFO,
default=True): cv.boolean,
vol.Optional(CONF_SCAN_INTERVAL,
default=DEFAULT_SCAN_INTERVAL): cv.positive_int,
vol.Optional(CONF_POWER_DECIMALS): cv.positive_int, #deprecated
vol.Optional(CONF_LOCAL_PY_SHELLY,
default=False): cv.boolean,
    vol.Optional(CONF_ONLY_DEVICE_ID): cv.string,
    vol.Optional(CONF_CLOUD_AUTH_KEY): cv.string,
    vol.Optional(CONF_CLOUD_SERVER): cv.string,
    vol.Optional(CONF_TMPL_NAME): cv.string,
vol.Optional(CONF_DISCOVER_BY_IP, default=[]):
vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_MDNS, default=DEFAULT_MDNS): cv.boolean,
    vol.Optional(CONF_HOST_IP, default=''): cv.string,
vol.Optional(CONF_SETTINGS, default={}): SETTINGS_SCHEMA
})
CONFIG_SCHEMA = vol.Schema({
DOMAIN: CONFIG_SCHEMA_ROOT
}, extra=vol.ALLOW_EXTRA)
|
py | 7df96b6b683e4b42468548f3f6074b6bb75f65a6 | from app.models import role
from app.models import user
from app.models import post
# from app.models import thought
from app.models import comment
|
py | 7df96bdbb7776a6dfa5c75d13d75360587ec2c8a |
from queue import Queue, Empty
from threading import Thread
from .afl import AFL
import json
import os
import re
import subprocess
import shutil
import time
import stat
import glob
import logging
import urllib.request
l = logging.getLogger("phuzzer.phuzzers.wafl")
l.setLevel(logging.INFO)
class WitcherAFL(AFL):
""" WitcherAFL launches the web fuzzer building on the AFL object """
def __init__(
self, target, seeds=None, dictionary=None, create_dictionary=None,
work_dir=None, resume=False,
afl_count=1, memory="8G", timeout=None,
target_opts=None, extra_opts=None,
crash_mode=False, use_qemu=True,
run_timeout=None, login_json_fn="",
server_cmd=None, server_env_vars=None
):
"""
:param target: path to the script to fuzz (from AFL)
:param seeds: list of inputs to seed fuzzing with (from AFL)
:param dictionary: a list of bytes objects to seed the dictionary with (from AFL)
:param create_dictionary: create a dictionary from the string references in the binary (from AFL)
:param work_dir: the work directory which contains fuzzing jobs, our job directory will go here (from AFL)
:param resume: resume the prior run, if possible (from AFL)
        :param memory: AFL child process memory limit (default: "8G")
        :param afl_count: number of AFL jobs total to spin up for the binary
        :param timeout: timeout for individual runs within AFL
        :param target_opts: extra options to pass to the target
        :param extra_opts: extra options to pass to AFL when starting up
        :param crash_mode: if set to True AFL is set to crash explorer mode, and seed will be expected to be a crashing input
        :param use_qemu: utilize QEMU for instrumentation of the binary
        :param run_timeout: amount of time for AFL to wait for a single execution to finish
        :param login_json_fn: login configuration file path for automatically creating a login session and performing other initial tasks
"""
super().__init__(
target=target, work_dir=work_dir, seeds=seeds, afl_count=afl_count,
create_dictionary=create_dictionary, timeout=timeout,
memory=memory, dictionary=dictionary, use_qemu=use_qemu,
target_opts=target_opts, resume=resume, crash_mode=crash_mode, extra_opts=extra_opts,
run_timeout=run_timeout
)
self.login_json_fn = login_json_fn
self.used_sessions = set()
self.session_name = ""
self.bearer = ""
self.server_cmd = server_cmd
self.server_env_vars = server_env_vars
self.server_procs = []
if "AFL_PATH" in os.environ:
afl_fuzz_bin = os.path.join(os.environ['AFL_PATH'], "afl-fuzz")
if os.path.exists(afl_fuzz_bin):
self.afl_path = afl_fuzz_bin
else:
raise ValueError(
f"error, have AFL_PATH but cannot find afl-fuzz at {os.environ['AFL_PATH']} with {afl_fuzz_bin}")
def _start_afl_instance(self, instance_cnt=0):
args, fuzzer_id = self.build_args()
my_env = os.environ.copy()
final_args = []
for op in args:
target_var = op.replace("~~", "--").replace("@@PORT@@", my_env.get("PORT", "80"))
increasing_port = int(my_env.get("PORT", 14000)) + instance_cnt
if "@@PORT_INCREMENT@@" in target_var:
target_var = target_var.replace("@@PORT_INCREMENT@@", str(increasing_port))
my_env["PORT"] = str(increasing_port)
final_args.append(target_var)
print(f"TARGET OPTS::::: {final_args}")
self._get_login(my_env)
my_env["AFL_BASE"] = os.path.join(self.work_dir, fuzzer_id)
my_env["STRICT"] = "true"
if "METHOD" not in my_env:
my_env["METHOD"] = "POST"
# print(f"[WC] my word dir {self.work_dir} AFL_BASE={my_env['AFL_BASE']}")
self.log_command(final_args, fuzzer_id, my_env)
logpath = os.path.join(self.work_dir, fuzzer_id + ".log")
l.debug("execing: %s > %s", ' '.join(final_args), logpath)
# set core affinity if environment variable is set
if "AFL_SET_AFFINITY" in my_env:
tempint = int(my_env["AFL_SET_AFFINITY"])
tempint += instance_cnt
my_env["AFL_SET_AFFINITY"] = str(tempint)
scr_fn = f"/tmp/fuzz-{instance_cnt}.sh"
with open(scr_fn, "w") as scr:
scr.write("#! /bin/bash \n")
for key, val in my_env.items():
scr.write(f'export {key}="{val}"\n')
scr.write(" ".join(final_args) + "\n")
#scr.write(f"{final_args[0].replace('afl-fuzz','afl-showmap')} -o /tmp/outmap ")
l.info(f"Fuzz command written out to {scr_fn}")
os.chmod(scr_fn, mode=0o774)
with open(logpath, "w") as fp:
return subprocess.Popen([scr_fn], stdout=fp, stderr=fp, close_fds=True)
# with open(logpath, "w") as fp:
# return subprocess.Popen(final_args, stdout=fp, stderr=fp, close_fds=True, env=my_env)
def _check_for_authorized_response(self, body, headers, loginconfig):
return WitcherAFL._check_body(body, loginconfig) and WitcherAFL._check_headers(headers, loginconfig)
@staticmethod
    def _check_body(body, loginconfig):
if "positiveBody" in loginconfig and len(loginconfig["positiveBody"]) > 1:
pattern = re.compile(loginconfig["positiveBody"])
            return pattern.search(body) is not None
return True
@staticmethod
    def _check_headers(headers, loginconfig):
        if "positiveHeaders" in loginconfig:
posHeaders = loginconfig["positiveHeaders"]
for ph in posHeaders:
                for posname, posvalue in ph.items():
found = False
for headername, headervalue in headers:
if posname == headername and posvalue == headervalue:
found = True
if not found:
return False
return True
def _save_session(self, session_cookie, loginconfig):
session_cookie_locations = ["/tmp","/var/lib/php/sessions"]
if "loginSessionCookie" in loginconfig:
session_name = loginconfig["loginSessionCookie"]
else:
session_name = r".*"
if "cookieLocations" in loginconfig:
for cl in loginconfig["cookeLocations"]:
session_cookie_locations.append(cl)
sessidrex = re.compile(rf"{session_name}=(?P<sessid>[a-z0-9]{{24,40}})")
        sess_match = sessidrex.match(session_cookie)
        if not sess_match:
            return False
        sessid = sess_match.group("sessid")
# print("[WC] sessidrex " + sessid)
actual_sess_fn = ""
for f in session_cookie_locations:
sfile = f"*{sessid}"
sesmask = os.path.join(f,sfile)
for sfn in glob.glob(sesmask):
if os.path.isfile(sfn):
actual_sess_fn = sfn
break
if len(actual_sess_fn) > 0:
break
if len(actual_sess_fn) == 0:
return False
saved_sess_fn = f"/tmp/save_{sessid}"
if os.path.isfile(actual_sess_fn):
shutil.copyfile(actual_sess_fn, saved_sess_fn)
os.chmod(saved_sess_fn, stat.S_IRWXO | stat.S_IRWXG | stat.S_IRWXU)
self.used_sessions.add(saved_sess_fn)
return True
return False
def _extract_authdata(self, headers, loginconfig):
authdata = []
for headername, headervalue in headers:
if headername.upper() == "SET-COOKIE":
# Uses special authdata header so that the value prepends all other cookie values and
# random data from AFL does not interfere
if self._save_session(headervalue, loginconfig):
authdata.append(("LOGIN_COOKIE", headervalue))
if headername.upper() == "AUTHORIZATION":
self.bearer = [(headername, headervalue)]
authdata.append((headername, headervalue))
return authdata
def _do_local_cgi_req_login(self, loginconfig):
login_cmd = [loginconfig["cgiBinary"]]
# print("[WC] \033[34m starting with command " + str(login_cmd) + "\033[0m")
myenv = os.environ.copy()
if "AFL_BASE" in myenv:
del myenv["AFL_BASE"]
myenv["METHOD"] = loginconfig["method"]
myenv["STRICT"] = "1"
myenv["SCRIPT_FILENAME"] = loginconfig["url"]
if "afl_preload" in loginconfig:
myenv["LD_PRELOAD"] = loginconfig["afl_preload"]
if "ld_library_path" in loginconfig:
myenv["LD_LIBRARY_PATH"] = loginconfig["ld_library_path"]
cookieData = loginconfig["cookieData"] if "cookieData" in loginconfig else ""
getData = loginconfig["getData"] if "getData" in loginconfig else ""
postData = loginconfig["postData"] if "postData" in loginconfig else ""
httpdata = f'{cookieData}\x00{getData}\x00{postData}\x00'
open("/tmp/login_req.dat", "wb").write(httpdata.encode())
login_req_file = open("/tmp/login_req.dat", "r")
p = subprocess.Popen(login_cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE, stdin=login_req_file,
env=myenv)
nbsr = NonBlockingStreamReader(p.stdout)
strout = ""
while not nbsr.is_finished:
line = nbsr.readline(0.1)
if line is not None:
inp = line.decode('latin-1')
strout += inp
# print("\033[32m", end="")
# print(inp, end="")
# print("\033[0m", end="")
p.wait()
headers = []
body = ""
inbody = False
for respline in strout.splitlines():
if len(respline) == 0:
inbody = True
continue
if inbody:
body += respline + "\n"
else:
header = respline.split(":")
                if len(header) > 1:
headername = header[0].strip()
headerval = ":".join(header[1:])
headerval = headerval.lstrip()
headers.append((headername, headerval))
if not self._check_for_authorized_response(body, headers, loginconfig):
return []
return self._extract_authdata(headers, loginconfig)
def _do_http_req_login(self, loginconfig):
url = loginconfig["url"]
if "getData" in loginconfig:
url += f"?{loginconfig['getData']}"
post_data = loginconfig["postData"] if "postData" in loginconfig else ""
post_data = post_data.encode('ascii')
req_headers = loginconfig["headers"] if "headers" in loginconfig else {}
opener = urllib.request.build_opener(NoRedirection)
urllib.request.install_opener(opener)
req = urllib.request.Request(url, post_data, req_headers)
response = urllib.request.urlopen(req)
headers = response.getheaders()
body = response.read()
if not self._check_for_authorized_response(body, headers, loginconfig):
return []
return self._extract_authdata(headers, loginconfig)
@staticmethod
    def _do_authorized_requests(loginconfig, authdata):
        extra_requests = loginconfig["extra_authorized_requests"] if "extra_authorized_requests" in loginconfig else []
for auth_request in extra_requests:
url = auth_request["url"]
if "getData" in auth_request:
url += f"?{auth_request['getData']}"
post_data = auth_request["postData"] if "postData" in auth_request else ""
post_data = post_data.encode('ascii')
req_headers = auth_request["headers"] if "headers" in auth_request else {}
for adname, advalue in authdata:
adname = adname.replace("LOGIN_COOKIE","Cookie")
req_headers[adname] = advalue
req = urllib.request.Request(url, post_data, req_headers)
urllib.request.urlopen(req)
def _get_login(self, my_env):
if self.login_json_fn == "":
return
if len(self.bearer) > 0:
for bname, bvalue in self.bearer:
my_env[bname] = bvalue
return
with open(self.login_json_fn, "r") as jfile:
jdata = json.load(jfile)
if jdata["direct"]["url"] == "NO_LOGIN":
return
loginconfig = jdata["direct"]
saved_session_id = self._get_saved_session()
if len(saved_session_id) > 0:
saved_session_name = loginconfig["loginSessionCookie"]
my_env["LOGIN_COOKIE"] = f"{saved_session_name}:{saved_session_id}"
return
authdata = None
for _ in range(0, 10):
if loginconfig["url"].startswith("http"):
authdata = self._do_http_req_login(loginconfig)
WitcherAFL._do_authorized_requests(loginconfig, authdata)
else:
authdata = self._do_local_cgi_req_login(loginconfig)
            if authdata:
break
time.sleep(5)
        if not authdata:
raise ValueError("Login failed to return authenticated cookie/bearer value")
for authname, authvalue in authdata:
my_env[authname] = authvalue
def _get_saved_session(self):
# if we have an unused session file, we are done for this worker.
for saved_sess_fn in glob.iglob("/tmp/save_????????????????????*"):
if saved_sess_fn not in self.used_sessions:
sess_fn = saved_sess_fn.replace("save", "sess")
# print("sess_fn=" + sess_fn)
self.used_sessions.add(saved_sess_fn)
shutil.copyfile(saved_sess_fn, sess_fn)
saved_session_id = saved_sess_fn.split("_")[1]
return saved_session_id
return ""
class NoRedirection(urllib.request.HTTPErrorProcessor):
def http_response(self, request, response):
return response
https_response = http_response
class NonBlockingStreamReader:
def __init__(self, stream):
'''
stream: the stream to read from.
Usually a process' stdout or stderr.
'''
self._s = stream
self._q = Queue()
self._finished = False
def _populateQueue(stream, queue):
'''
            Collect lines from 'stream' and put them in 'queue'.
'''
while True:
line = stream.readline()
if line:
queue.put(line)
else:
self._finished = True
#raise UnexpectedEndOfStream
self._t = Thread(target = _populateQueue,
args = (self._s, self._q))
self._t.daemon = True
self._t.start() #start collecting lines from the stream
@property
def is_finished(self):
return self._finished
def readline(self, timeout = None):
try:
if self._finished:
return None
return self._q.get(block = timeout is not None,
timeout = timeout)
except Empty:
return None
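# Usage sketch, mirroring how WitcherAFL consumes the CGI child's stdout above
# (cmd and handle() are illustrative):
#
#     p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
#     nbsr = NonBlockingStreamReader(p.stdout)
#     while not nbsr.is_finished:
#         line = nbsr.readline(0.1)   # returns None if no line arrives in 0.1s
#         if line is not None:
#             handle(line)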
class UnexpectedEndOfStream(Exception):
pass
|
py | 7df96c33bb7ca88bf3649b6099354802df0f28ea | # Generated by Django 4.0.2 on 2022-02-17 00:50
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='QuestionModel',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question_text', models.CharField(max_length=280)),
],
),
]
|
py | 7df96cd6cf2dcd4edff4bf9d64ef4f8f0512c9a4 | import datetime as dt
import json
from django import forms
from django.conf import settings
from django.utils.translation import gettext as _
from django_scopes.forms import SafeModelChoiceField, SafeModelMultipleChoiceField
from pretalx.common.forms.fields import ImageField
from pretalx.common.mixins.forms import ReadOnlyFlag, RequestRequire
from pretalx.submission.models import Submission, SubmissionStates
class SubmissionForm(ReadOnlyFlag, RequestRequire, forms.ModelForm):
def __init__(self, event, anonymise=False, **kwargs):
self.event = event
initial_slot = {}
instance = kwargs.get("instance")
if instance and instance.pk:
slot = (
instance.slots.filter(schedule__version__isnull=True)
.select_related("room")
.order_by("start")
.first()
)
if slot:
initial_slot = {
"room": slot.room,
"start": slot.start.astimezone(self.event.tz).isoformat()
if slot.start
else "",
"end": slot.real_end.astimezone(self.event.tz).isoformat()
if slot.real_end
else "",
}
if anonymise:
kwargs.pop("initial", None)
initial = {}
instance = kwargs.pop("instance", None)
previous_data = instance.anonymised
for key in self._meta.fields:
initial[key] = (
previous_data.get(key) or getattr(instance, key, None) or ""
)
if hasattr(initial[key], "all"): # Tags, for the moment
initial[key] = initial[key].all()
kwargs["initial"] = initial
kwargs["initial"].update(initial_slot)
super().__init__(**kwargs)
if not self.event.tags.all().exists():
self.fields.pop("tags", None)
elif "tags" in self.fields:
self.fields["tags"].queryset = self.event.tags.all()
self.fields["tags"].required = False
self.is_creating = False
if not self.instance.pk:
self.is_creating = True
self.fields["speaker"] = forms.EmailField(
label=_("Speaker email"),
help_text=_(
"The email address of the speaker holding the session. They will be invited to create an account."
),
required=False,
)
self.fields["speaker_name"] = forms.CharField(
label=_("Speaker name"),
help_text=_(
"The name of the speaker that should be displayed publicly."
),
required=False,
)
if not anonymise:
self.fields["state"] = forms.ChoiceField(
label=_("Proposal state"),
choices=SubmissionStates.get_choices(),
initial=SubmissionStates.SUBMITTED,
)
if not self.instance.pk or self.instance.state in (
SubmissionStates.ACCEPTED,
SubmissionStates.CONFIRMED,
):
self.fields["room"] = forms.ModelChoiceField(
required=False,
queryset=event.rooms.all(),
label=_("Room"),
initial=initial_slot.get("room"),
)
self.fields["start"] = forms.DateTimeField(
required=False,
label=_("Start"),
widget=forms.DateInput(
attrs={
"class": "datetimepickerfield",
"data-date-start-date": event.date_from.isoformat(),
"data-date-end-date": (
event.date_to + dt.timedelta(days=1)
).isoformat(),
"data-date-before": "#id_end",
}
),
initial=initial_slot.get("start"),
)
self.fields["end"] = forms.DateTimeField(
required=False,
label=_("End"),
widget=forms.DateInput(
attrs={
"class": "datetimepickerfield",
"data-date-start-date": event.date_from.isoformat(),
"data-date-end-date": (
event.date_to + dt.timedelta(days=1)
).isoformat(),
"data-date-after": "#id_start",
}
),
initial=initial_slot.get("end"),
)
if "abstract" in self.fields:
self.fields["abstract"].widget.attrs["rows"] = 2
if not event.settings.present_multiple_times:
self.fields.pop("slot_count", None)
if not event.settings.use_tracks:
self.fields.pop("track", None)
elif "track" in self.fields:
self.fields["track"].queryset = event.tracks.all()
if "content_locale" in self.fields:
if len(event.locales) == 1:
self.initial["content_locale"] = event.locales[0]
self.fields["content_locale"].widget = forms.HiddenInput()
else:
locale_names = dict(settings.LANGUAGES)
self.fields["content_locale"].choices = [
(a, locale_names[a]) for a in event.locales
]
def save(self, *args, **kwargs):
if "content_locale" not in self.fields:
self.instance.content_locale = self.event.locale
instance = super().save(*args, **kwargs)
if self.is_creating:
instance._set_state(self.cleaned_data["state"], force=True)
else:
if instance.pk and "duration" in self.changed_data:
instance.update_duration()
if instance.pk and "track" in self.changed_data:
instance.update_review_scores()
if "slot_count" in self.changed_data and "slot_count" in self.initial:
instance.update_talk_slots()
if (
instance.state
in (
SubmissionStates.ACCEPTED,
SubmissionStates.CONFIRMED,
)
and self.cleaned_data.get("room")
and self.cleaned_data.get("start")
and any(field in self.changed_data for field in ("room", "start", "end"))
):
slot = (
instance.slots.filter(schedule=instance.event.wip_schedule)
.order_by("start")
.first()
)
slot.room = self.cleaned_data.get("room")
slot.start = self.cleaned_data.get("start")
slot.end = self.cleaned_data.get("end")
slot.save()
return instance
class Meta:
model = Submission
fields = [
"title",
"track",
"tags",
"abstract",
"description",
"notes",
"internal_notes",
"content_locale",
"do_not_record",
"duration",
"slot_count",
"image",
"is_featured",
]
widgets = {
"tags": forms.SelectMultiple(attrs={"class": "select2"}),
"track": forms.Select(attrs={"class": "select2"}),
}
field_classes = {
"tags": SafeModelMultipleChoiceField,
"track": SafeModelChoiceField,
"image": ImageField,
}
request_require = {
"title",
"abstract",
"description",
"notes",
"image",
"do_not_record",
"content_locale",
}
class AnonymiseForm(SubmissionForm):
def __init__(self, *args, **kwargs):
instance = kwargs.get("instance")
if not instance or not instance.pk:
raise Exception("Cannot anonymise unsaved submission.")
kwargs["event"] = instance.event
kwargs["anonymise"] = True
super().__init__(*args, **kwargs)
self._instance = instance
to_be_removed = []
for key, field in self.fields.items():
try:
field.plaintext = getattr(self._instance, key)
field.required = False
except AttributeError:
to_be_removed.append(key)
for key in to_be_removed:
self.fields.pop(key)
def save(self):
anonymised_data = {"_anonymised": True}
for key, value in self.cleaned_data.items():
if value != getattr(self._instance, key, ""):
anonymised_data[key] = value
self._instance.anonymised_data = json.dumps(anonymised_data)
self._instance.save(update_fields=["anonymised_data"])
class Meta:
model = Submission
fields = [
"title",
"abstract",
"description",
"notes",
]
request_require = fields
|
py | 7df96cf323429b3bba5422dc1d00fe4c09b6e50e | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.20
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1beta1PodDisruptionBudgetStatus(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'current_healthy': 'int',
'desired_healthy': 'int',
'disrupted_pods': 'dict(str, datetime)',
'disruptions_allowed': 'int',
'expected_pods': 'int',
'observed_generation': 'int'
}
attribute_map = {
'current_healthy': 'currentHealthy',
'desired_healthy': 'desiredHealthy',
'disrupted_pods': 'disruptedPods',
'disruptions_allowed': 'disruptionsAllowed',
'expected_pods': 'expectedPods',
'observed_generation': 'observedGeneration'
}
def __init__(self, current_healthy=None, desired_healthy=None, disrupted_pods=None, disruptions_allowed=None, expected_pods=None, observed_generation=None, local_vars_configuration=None): # noqa: E501
"""V1beta1PodDisruptionBudgetStatus - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._current_healthy = None
self._desired_healthy = None
self._disrupted_pods = None
self._disruptions_allowed = None
self._expected_pods = None
self._observed_generation = None
self.discriminator = None
self.current_healthy = current_healthy
self.desired_healthy = desired_healthy
if disrupted_pods is not None:
self.disrupted_pods = disrupted_pods
self.disruptions_allowed = disruptions_allowed
self.expected_pods = expected_pods
if observed_generation is not None:
self.observed_generation = observed_generation
@property
def current_healthy(self):
"""Gets the current_healthy of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
current number of healthy pods # noqa: E501
:return: The current_healthy of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
:rtype: int
"""
return self._current_healthy
@current_healthy.setter
def current_healthy(self, current_healthy):
"""Sets the current_healthy of this V1beta1PodDisruptionBudgetStatus.
current number of healthy pods # noqa: E501
:param current_healthy: The current_healthy of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and current_healthy is None: # noqa: E501
raise ValueError("Invalid value for `current_healthy`, must not be `None`") # noqa: E501
self._current_healthy = current_healthy
@property
def desired_healthy(self):
"""Gets the desired_healthy of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
minimum desired number of healthy pods # noqa: E501
:return: The desired_healthy of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
:rtype: int
"""
return self._desired_healthy
@desired_healthy.setter
def desired_healthy(self, desired_healthy):
"""Sets the desired_healthy of this V1beta1PodDisruptionBudgetStatus.
minimum desired number of healthy pods # noqa: E501
:param desired_healthy: The desired_healthy of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and desired_healthy is None: # noqa: E501
raise ValueError("Invalid value for `desired_healthy`, must not be `None`") # noqa: E501
self._desired_healthy = desired_healthy
@property
def disrupted_pods(self):
"""Gets the disrupted_pods of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
DisruptedPods contains information about pods whose eviction was processed by the API server eviction subresource handler but has not yet been observed by the PodDisruptionBudget controller. A pod will be in this map from the time when the API server processed the eviction request to the time when the pod is seen by PDB controller as having been marked for deletion (or after a timeout). The key in the map is the name of the pod and the value is the time when the API server processed the eviction request. If the deletion didn't occur and a pod is still there it will be removed from the list automatically by PodDisruptionBudget controller after some time. If everything goes smooth this map should be empty for the most of the time. Large number of entries in the map may indicate problems with pod deletions. # noqa: E501
:return: The disrupted_pods of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
:rtype: dict(str, datetime)
"""
return self._disrupted_pods
@disrupted_pods.setter
def disrupted_pods(self, disrupted_pods):
"""Sets the disrupted_pods of this V1beta1PodDisruptionBudgetStatus.
        DisruptedPods contains information about pods whose eviction was processed by the API server eviction subresource handler but has not yet been observed by the PodDisruptionBudget controller. A pod will be in this map from the time when the API server processed the eviction request to the time when the pod is seen by PDB controller as having been marked for deletion (or after a timeout). The key in the map is the name of the pod and the value is the time when the API server processed the eviction request. If the deletion didn't occur and a pod is still there, it will be removed from the list automatically by the PodDisruptionBudget controller after some time. If everything goes smoothly, this map should be empty most of the time. A large number of entries in the map may indicate problems with pod deletions. # noqa: E501
:param disrupted_pods: The disrupted_pods of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
:type: dict(str, datetime)
"""
self._disrupted_pods = disrupted_pods
@property
def disruptions_allowed(self):
"""Gets the disruptions_allowed of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
Number of pod disruptions that are currently allowed. # noqa: E501
:return: The disruptions_allowed of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
:rtype: int
"""
return self._disruptions_allowed
@disruptions_allowed.setter
def disruptions_allowed(self, disruptions_allowed):
"""Sets the disruptions_allowed of this V1beta1PodDisruptionBudgetStatus.
Number of pod disruptions that are currently allowed. # noqa: E501
:param disruptions_allowed: The disruptions_allowed of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and disruptions_allowed is None: # noqa: E501
raise ValueError("Invalid value for `disruptions_allowed`, must not be `None`") # noqa: E501
self._disruptions_allowed = disruptions_allowed
@property
def expected_pods(self):
"""Gets the expected_pods of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
total number of pods counted by this disruption budget # noqa: E501
:return: The expected_pods of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
:rtype: int
"""
return self._expected_pods
@expected_pods.setter
def expected_pods(self, expected_pods):
"""Sets the expected_pods of this V1beta1PodDisruptionBudgetStatus.
total number of pods counted by this disruption budget # noqa: E501
:param expected_pods: The expected_pods of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and expected_pods is None: # noqa: E501
raise ValueError("Invalid value for `expected_pods`, must not be `None`") # noqa: E501
self._expected_pods = expected_pods
@property
def observed_generation(self):
"""Gets the observed_generation of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
Most recent generation observed when updating this PDB status. DisruptionsAllowed and other status information is valid only if observedGeneration equals to PDB's object generation. # noqa: E501
:return: The observed_generation of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
:rtype: int
"""
return self._observed_generation
@observed_generation.setter
def observed_generation(self, observed_generation):
"""Sets the observed_generation of this V1beta1PodDisruptionBudgetStatus.
Most recent generation observed when updating this PDB status. DisruptionsAllowed and other status information is valid only if observedGeneration equals to PDB's object generation. # noqa: E501
:param observed_generation: The observed_generation of this V1beta1PodDisruptionBudgetStatus. # noqa: E501
:type: int
"""
self._observed_generation = observed_generation
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1beta1PodDisruptionBudgetStatus):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1beta1PodDisruptionBudgetStatus):
return True
return self.to_dict() != other.to_dict()
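# A minimal usage sketch (all values are illustrative, not from any real
# cluster): build a status object and serialize it back to a plain dict.
#
#   status = V1beta1PodDisruptionBudgetStatus(
#       current_healthy=3, desired_healthy=3,
#       disruptions_allowed=1, expected_pods=3)
#   assert status.to_dict()['current_healthy'] == 3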
|
py | 7df96d6fc6288a81464c1187b952a934acf47724 | # -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import os
import setuptools
name = 'google-cloud-kms'
description = 'Cloud Key Management Service (KMS) API client library'
version = '1.0.0'
release_status = 'Development Status :: 5 - Production/Stable'
dependencies = [
'google-api-core[grpc] >= 1.6.0, < 2.0.0dev',
'grpc-google-iam-v1 >= 0.11.4, < 0.12dev',
'enum34; python_version < "3.4"',
]
package_root = os.path.abspath(os.path.dirname(__file__))
readme_filename = os.path.join(package_root, 'README.rst')
with io.open(readme_filename, encoding='utf-8') as readme_file:
readme = readme_file.read()
packages = [
package for package in setuptools.find_packages()
if package.startswith('google')
]
namespaces = ['google']
if 'google.cloud' in packages:
namespaces.append('google.cloud')
setuptools.setup(
name=name,
version=version,
description=description,
long_description=readme,
author='Google LLC',
author_email='[email protected]',
license='Apache 2.0',
url='https://github.com/GoogleCloudPlatform/google-cloud-python',
classifiers=[
release_status,
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Operating System :: OS Independent',
'Topic :: Internet',
],
platforms='Posix; MacOS X; Windows',
packages=packages,
namespace_packages=namespaces,
install_requires=dependencies,
python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*',
include_package_data=True,
zip_safe=False,
)
|
py | 7df96d93a11ea5f10ff173ccc5678fd33f45faf4 | #
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2018, Ilya Etingof <[email protected]>
# License: http://snmplabs.com/pysnmp/license.html
#
import sys
from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from pysnmp.carrier.twisted.base import AbstractTwistedTransport
from pysnmp.carrier import error
from pysnmp import debug
class DgramTwistedTransport(DatagramProtocol, AbstractTwistedTransport):
"""Base Twisted datagram Transport, to be used with TwistedDispatcher"""
# Twisted Datagram API
def datagramReceived(self, datagram, transportAddress):
if self._cbFun is None:
raise error.CarrierError('Unable to call cbFun')
else:
            # The callback function is invoked through callLater() in an
            # attempt to make Twisted timed calls work under high load.
reactor.callLater(0, self._cbFun, self, transportAddress, datagram)
def startProtocol(self):
debug.logger & debug.flagIO and debug.logger('startProtocol: invoked')
while self._writeQ:
outgoingMessage, transportAddress = self._writeQ.pop(0)
debug.logger & debug.flagIO and debug.logger('startProtocol: transportAddress %r outgoingMessage %s' % (transportAddress, debug.hexdump(outgoingMessage)))
try:
self.transport.write(outgoingMessage, transportAddress)
except Exception:
raise error.CarrierError('Twisted exception: %s' % (sys.exc_info()[1],))
def stopProtocol(self):
debug.logger & debug.flagIO and debug.logger('stopProtocol: invoked')
def sendMessage(self, outgoingMessage, transportAddress):
        debug.logger & debug.flagIO and debug.logger('sendMessage: %s transportAddress %r outgoingMessage %s' % ((self.transport is None and "queuing" or "sending"), transportAddress, debug.hexdump(outgoingMessage)))
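        # Until Twisted invokes startProtocol(), self.transport is not yet
        # ready for writing, so outgoing messages are queued here and
        # flushed by startProtocol() once the transport comes up.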
if self.transport is None:
self._writeQ.append((outgoingMessage, transportAddress))
else:
try:
self.transport.write(outgoingMessage, transportAddress)
except Exception:
raise error.CarrierError('Twisted exception: %s' % (sys.exc_info()[1],))
|
py | 7df96e648455c59ce9c6073ea15baaa21798ceb8 | """
:class:`NetSuiteClient`: client proxy class which uses the python library
Zeep to connect to a NetSuite account and make requests.
"""
import base64
import hashlib
import hmac
import logging
import os.path
import random
import time
from zeep import Client
from zeep.cache import SqliteCache
from zeep.transports import Transport
from zeep.exceptions import Fault
from zeep.exceptions import LookupError as ZeepLookupError
from .constants import *
from .exceptions import *
from .netsuite_types import *
class NetSuiteClient:
"""The Netsuite client class providing access to the Netsuite
SOAP/WSDL web service"""
WSDL_URL_TEMPLATE = 'https://{account}.suitetalk.api.netsuite.com/wsdl/v2019_1_0/netsuite.wsdl'
DATACENTER_URL_TEMPLATE = 'https://{account}.suitetalk.api.netsuite.com/services/NetSuitePort_2019_1'
_search_preferences = None
_passport = None
_account = None
# Used by TBA
_consumer_key = None
_consumer_secret = None
_token_key = None
_token_secret = None
_app_id = None
def __init__(self, account=None, caching=True, caching_timeout=2592000, caching_path=None, search_body_fields_only=True,
page_size: int = 100):
"""
Initialize the Zeep SOAP client, parse the xsd specifications
of Netsuite and store the complex types as attributes of this
instance.
        :param str account: Account ID to connect to
        :param bool caching: If True, set up Sqlite-based caching of requests
        :param int caching_timeout: Timeout in seconds for caching.
            Defaults to 2592000 (30 days)
        :param str caching_path: Sqlite cache file base path. Defaults to the python library path.
        :param bool search_body_fields_only: If True, searches return body fields only
        :param int page_size: Number of records per search result page
"""
self.logger = logging.getLogger(self.__class__.__name__)
assert account, 'Invalid account'
        assert '-' not in account, 'Account cannot contain hyphens; use underscores instead'
self._account = account
self._wsdl_url = self.WSDL_URL_TEMPLATE.format(account=account.replace('_', '-'))
self._datacenter_url = self.DATACENTER_URL_TEMPLATE.format(account=account.replace('_', '-'))
if caching:
base_path = os.path.dirname(os.path.abspath(__file__)) if not caching_path else caching_path
path = os.path.join(base_path, 'cache.db')
timeout = caching_timeout
cache = SqliteCache(path=path, timeout=timeout)
transport = Transport(cache=cache, operation_timeout=60)
transport.session.headers.update({"Connection": "close"})
else:
transport = None
# Initialize the Zeep Client
self._client = Client(self._wsdl_url, transport=transport)
# default service points to wrong data center. need to create a new service proxy and replace the default one
self._service_proxy = self._client.create_service(
'{urn:platform_2019_1.webservices.netsuite.com}NetSuiteBinding', self._datacenter_url)
# Parse all complex types specified in :const:`~netsuitesdk.netsuite_types.COMPLEX_TYPES`
# and store them as attributes of this instance. Same for simple types.
self._namespaces = {}
self._init_complex_types()
self._init_simple_types()
self._app_info = None
self._is_authenticated = False
self.set_search_preferences(page_size=page_size, search_body_fields_only=search_body_fields_only)
def set_search_preferences(self, page_size: int = 5, search_body_fields_only: bool = True,
return_search_columns: bool = True):
self._search_preferences = self.SearchPreferences(
bodyFieldsOnly=search_body_fields_only,
pageSize=page_size,
returnSearchColumns=return_search_columns
)
def _init_complex_types(self):
self._complex_types = {}
for namespace, complex_types in COMPLEX_TYPES.items():
            if namespace not in self._namespaces:
self._namespaces[namespace] = []
for type_name in complex_types:
try:
verbose_type_name = '{namespace}:{type_name}'.format(
namespace=namespace,
type_name=type_name
)
complex_type = self._client.get_type(verbose_type_name)
except ZeepLookupError:
self.logger.warning('LookupError: Did not find complex type {}'.format(type_name))
else:
setattr(self, type_name, complex_type)
self._complex_types[type_name] = complex_type
self._namespaces[namespace].append(complex_type)
def _init_simple_types(self):
self._simple_types = {}
for namespace, simple_types in SIMPLE_TYPES.items():
            if namespace not in self._namespaces:
self._namespaces[namespace] = []
for type_name in simple_types:
try:
verbose_type_name = '{namespace}:{type_name}'.format(
namespace=namespace,
type_name=type_name
)
simple_type = self._client.get_type(verbose_type_name)
except ZeepLookupError:
self.logger.warning('LookupError: Did not find simple type {}'.format(type_name))
else:
setattr(self, type_name, simple_type)
self._simple_types[type_name] = simple_type
self._namespaces[namespace].append(simple_type)
def get_complex_type(self, type_name):
# if ':' in type_name:
# namespace, type_name = type_name.split(':')
# namespace_index = namespace[2:]
return self._complex_types[type_name]
def get_simple_type(self, type_name):
return self._simple_types[type_name]
def get_complex_type_attributes(self, complex_type):
if isinstance(complex_type, str):
complex_type = self.get_complex_type(complex_type)
try:
return [(attribute.name, attribute.type.name) for attribute in complex_type._attributes]
except AttributeError:
return []
def get_complex_type_elements(self, complex_type):
if isinstance(complex_type, str):
complex_type = self.get_complex_type(complex_type)
try:
return [(attr_name, element.type.name) for attr_name, element in complex_type.elements]
except AttributeError:
return []
def get_complex_type_info(self, complex_type):
if isinstance(complex_type, str):
complex_type = self.get_complex_type(complex_type)
label = complex_type
else:
if hasattr(complex_type, 'name'):
label = complex_type.name
else:
label = str(complex_type)
attributes = self.get_complex_type_attributes(complex_type)
elements = self.get_complex_type_elements(complex_type)
yield 'complexType {}:'.format(label)
if attributes:
yield 'Attributes:'
for name, type_name in attributes:
yield '\t{}: {}'.format(name, type_name)
else:
yield 'No attributes'
if elements:
yield 'Elements:'
for name, type_name in elements:
yield '\t{}: {}'.format(name, type_name)
else:
yield 'No elements'
def login(self, email, password, role, application_id):
"""
        Authenticate and log in a user for a NetSuite session. A Passport
        object of type Passport(email, password, role, account) holding the
        credentials is built internally from the given arguments.
        :param str email: email address of the NetSuite user
        :param str password: password of the NetSuite user
        :param int role: internal id of the role to log in with
        :param str application_id: All requests done in this session will be
            identified with this application id.
:return: the login response which contains the response status and user roles
:rtype: LoginResponse
:raises :class:`~netsuitesdk.exceptions.NetSuiteLoginError`: if login was not successful. Possible codes
are: InsufficientPermissionFault, InvalidAccountFault, InvalidSessionFault,
InvalidCredentialsFault and UnexpectedErrorFault
"""
role = self.RecordRef(internalId=role)
self._passport = self.Passport(email=email, password=password, role=role, account=self._account)
if self._is_authenticated:
self.logout()
try:
self._app_info = self.ApplicationInfo(applicationId=application_id)
response = self._service_proxy.login(
self._passport,
_soapheaders={'applicationInfo': self._app_info}
)
if response.status.isSuccess:
self._is_authenticated = True
return response
else:
statusDetail = response.status['statusDetail'][0]
exc = self._request_error('login',
detail=statusDetail,
error_cls=NetSuiteLoginError)
raise exc
except Fault as fault:
exc = NetSuiteLoginError(str(fault), code=fault.code)
raise exc from None
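    # A hedged usage sketch (credentials are placeholders, not real values):
    # password-based login with a role internal id and an application id.
    #
    #   client = NetSuiteClient(account='123456')
    #   client.login(email='user@example.com', password='...',
    #                role='3', application_id='YOUR-APP-ID')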
def _generate_token_passport(self):
def compute_nonce(length=20):
"""pseudo-random generated numeric string"""
return ''.join([str(random.randint(0, 9)) for i in range(length)])
nonce = compute_nonce(length=20)
timestamp = int(time.time())
key = '{}&{}'.format(self._consumer_secret, self._token_secret)
base_string = '&'.join([self._account, self._consumer_key, self._token_key, nonce, str(timestamp)])
key_bytes = key.encode(encoding='ascii')
message_bytes = base_string.encode(encoding='ascii')
# compute the signature
if self._signature_algorithm == 'HMAC-SHA256':
# hash
hashed_value = hmac.new(key_bytes, msg=message_bytes, digestmod=hashlib.sha256)
elif self._signature_algorithm == 'HMAC-SHA1':
hashed_value = hmac.new(key_bytes, msg=message_bytes, digestmod=hashlib.sha1)
else:
raise NetSuiteError("signature_algorithm needs to be one of 'HMAC-SHA256', 'HMAC-SHA1'")
dig = hashed_value.digest()
# convert dig (a byte sequence) to a base 64 string
value = base64.b64encode(dig).decode()
signature = self.TokenPassportSignature(value, algorithm=self._signature_algorithm)
return self.TokenPassport(account=self._account, consumerKey=self._consumer_key, token=self._token_key,
nonce=nonce, timestamp=timestamp, signature=signature)
def connect_tba(self, consumer_key, consumer_secret, token_key, token_secret, signature_algorithm='HMAC-SHA256'):
"""
        Store credentials for token based authentication (TBA). A
        TokenPassport object is generated from them and attached to the
        SOAP header of each request.
:param str consumer_key: the consumer key for the integration record
:param str consumer_secret: the consumer secret
:param str token_key: a string identifier of a token representing a
unique combination of a user, a role and an integration record
:param str token_secret: the token secret
:param str signature_algorithm: algorithm to compute the signature value (a hashed value),
choices are 'HMAC-SHA256' or 'HMAC-SHA1'
"""
self._consumer_key = consumer_key
self._consumer_secret = consumer_secret
self._token_key = token_key
self._token_secret = token_secret
self._signature_algorithm = signature_algorithm
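    # A hedged usage sketch (all token values are placeholders): with TBA
    # there is no login() call; credentials are sent in the SOAP header of
    # every request instead.
    #
    #   client = NetSuiteClient(account='123456_SB1')
    #   client.connect_tba(
    #       consumer_key='...', consumer_secret='...',
    #       token_key='...', token_secret='...')
    #   vendors = client.getAll('vendor')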
@property
def logged_in(self):
return self._is_authenticated
def logout(self):
if not self._is_authenticated:
return
response = self._service_proxy.logout()
self._is_authenticated = False
self._consumer_key = None
self._consumer_secret = None
self._token_key = None
self._token_secret = None
return response.status
def _request_error(self, service_name, detail, error_cls=None):
if error_cls is None:
error_cls = NetSuiteRequestError
exc = error_cls(
"An error occured in a {service_name} request: {msg}".format(
service_name=service_name,
msg=detail['message']),
code=detail['code']
)
# self.logger.error(str(exc))
return exc
def _build_soap_headers(self, include_search_preferences: bool = False):
"""
Generate soap headers dictionary to send with a request
:param Passport passport: holds the authentication credentials
:param TokenPassport tokenPassport: holds the token based authentication details
:param ApplicationInfo applicationInfo: contains the application Id
:return: the dictionary representing the headers
:rtype: dict
:raises :class:`~netsuitesdk.exceptions.NetSuiteError`: if user is neither logged in nor a passport or tokenPassport was passed
"""
soapheaders = {}
if self._is_authenticated:
# User is already logged in, so there is no
# need to pass authentication details in the header
pass
elif self._consumer_key is not None:
soapheaders['tokenPassport'] = self._generate_token_passport()
elif self._passport is not None:
soapheaders['passport'] = self._passport
else:
            raise NetSuiteError('Must either log in first or pass a passport or tokenPassport for the request header.')
if include_search_preferences:
soapheaders['searchPreferences'] = self._search_preferences
return soapheaders
def request(self, name, *args, **kwargs):
"""
Make a NetSuite web service request
:param str name: the name of the request service ('get', 'search', ...)
:return: the request response object
:rtype: the exact type depends on the request
"""
method = getattr(self._service_proxy, name)
# call the service:
include_search_preferences = (name == 'search')
response = method(*args,
_soapheaders=self._build_soap_headers(include_search_preferences=include_search_preferences)
, **kwargs)
return response
def get(self, recordType, internalId=None, externalId=None):
"""
Make a get request to retrieve an object of type recordType
specified by either internalId or externalId
:param str recordType: the complex type (e.g. 'vendor')
:param int internalId: id specifying the record to be retrieved
:param str externalId: str specifying the record to be retrieved
:return: the matching record in case of success
:rtype: Record
:raises ValueError: if neither internalId nor externalId was passed
"""
recordType = recordType[0].lower() + recordType[1:]
if internalId is not None:
record_ref = self.RecordRef(type=recordType, internalId=internalId)
elif externalId is not None:
record_ref = self.RecordRef(type=recordType, externalId=externalId)
else:
raise ValueError('Either internalId or externalId is necessary to make a get request.')
response = self.request('get', baseRef=record_ref)
response = response.body.readResponse
status = response.status
if status.isSuccess:
record = response['record']
return record
else:
exc = self._request_error('get', detail=status['statusDetail'][0])
raise exc
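    # Example sketch (ids are illustrative): fetch a single record either by
    # internal id or by external id.
    #
    #   vendor = client.get('vendor', internalId=42)
    #   vendor = client.get('vendor', externalId='VENDOR-42')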
def getAll(self, recordType):
"""
Make a getAll request to retrieve all objects of type recordType.
        All NetSuite types available for a getAll request
        are listed under :const:`constants.GET_ALL_RECORD_TYPES`.
        :param str recordType: the complex type (e.g. 'vendor')
        :return: the list of all records of that type in case of success
        :rtype: list[Record]
"""
recordType = recordType[0].lower() + recordType[1:]
record = self.GetAllRecord(recordType=recordType)
response = self.request('getAll', record=record)
response = response.body.getAllResult
status = response.status
if status.isSuccess:
records = response['recordList']['record']
return records
else:
exc = self._request_error('getAll', detail=status['statusDetail'][0])
raise exc
def search_factory(self, type_name, **kwargs):
_type_name = type_name[0].lower() + type_name[1:]
        if _type_name not in SEARCH_RECORD_TYPES:
raise NetSuiteTypeError('{} is not a searchable NetSuite type!'.format(type_name))
search_cls_name = '{}Search'.format(type_name)
search_cls = self.get_complex_type(search_cls_name)
search_record = search_cls(**kwargs)
return search_record
def basic_search_factory(self, type_name, **kwargs):
_type_name = type_name[0].lower() + type_name[1:]
        if _type_name not in SEARCH_RECORD_TYPES:
raise NetSuiteTypeError('{} is not a searchable NetSuite type!'.format(type_name))
basic_search_cls_name = '{}SearchBasic'.format(type_name)
basic_search_cls = self.get_complex_type(basic_search_cls_name)
basic_search = basic_search_cls()
for key, value in kwargs.items():
setattr(basic_search, key, value)
return basic_search
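    # A hedged sketch (type and field names are illustrative): combine both
    # factories to build and run a search.
    #
    #   basic = client.basic_search_factory(
    #       'Vendor',
    #       entityId=client.SearchStringField(searchValue='ACME',
    #                                         operator='contains'))
    #   search_record = client.search_factory('Vendor', basic=basic)
    #   result = client.search(search_record)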
def search(self, searchRecord):
"""
        Make a search request to retrieve all records matching the given
        search record. All NetSuite types available for a search
are listed under :const:`constants.SEARCH_RECORD_TYPES`.
:param Record searchRecord: data object holding all parameters for the search.
The utility function `search_factory` can be used to create one.
:return: result records and meta data about search result
        :rtype: SearchResult, with the fields:
int totalRecords: total number of records
int pageSize: number of records per page
int totalPages: number of pages
int pageIndex: index of actual returned result page
str searchId: identifier for the search
list records: the actual records found
"""
response = self.request('search',
searchRecord=searchRecord)
result = response.body.searchResult
status = result.status
success = status.isSuccess
if success:
if hasattr(result.recordList, 'record'):
result.records = result.recordList.record
return result
else:
# Did not find anything
result.records = None
return result
else:
exc = self._request_error('search', detail=status['statusDetail'][0])
raise exc
def searchMoreWithId(self, searchId, pageIndex):
response = self.request('searchMoreWithId',
searchId=searchId,
pageIndex=pageIndex)
result = response.body.searchResult
status = result.status
success = status.isSuccess
if success:
result.records = result.recordList.record
return result
else:
exc = self._request_error('searchMoreWithId', detail=status['statusDetail'][0])
raise exc
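    # A hedged pagination sketch: page through a search result using the
    # searchId returned by the initial search.
    #
    #   result = client.search(search_record)
    #   records = list(result.records or [])
    #   for page in range(2, result.totalPages + 1):
    #       more = client.searchMoreWithId(result.searchId, page)
    #       records.extend(more.records)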
def upsert(self, record):
"""
        Add an object of type recordType with the given externalId.
If a record of specified type with matching externalId already
exists, it is updated.
Usage example:
customer = self.Customer()
customer.externalId = 'customer_id'
customer.companyName = 'Test Inc.'
customer.email = '[email protected]'
self.upsert(record=customer)
        :param Record record: the record to be created or updated; its
            externalId determines whether an existing record is updated
:return: a reference to the newly created or updated record (in case of success)
:rtype: RecordRef
"""
response = self.request('upsert', record=record)
response = response.body.writeResponse
status = response.status
if status.isSuccess:
record_ref = response['baseRef']
self.logger.debug(
'Successfully updated record of internalId: {internalId}, externalId: {externalId}, response: {recordRef}'.format(
internalId=record_ref['internalId'], externalId=record_ref['externalId'], recordRef=record_ref))
return record_ref
else:
exc = self._request_error('upsert', detail=status['statusDetail'][0])
raise exc
def basic_stringfield_search(self, type_name, attribute, value, operator=None):
"""
Searches for an object of type `type_name` whose name contains `value`
:param str type_name: the name of the NetSuite type to be searched in
:param str attribute: the attribute of the type to be used for the search
:param str value: the value to be used for the search
:param str operator: mode used to search for value, possible:
'is', 'contains', 'doesNotContain',
'doesNotStartWith', 'empty', 'hasKeywords',
'isNot', 'notEmpty', 'startsWith'
See for example: http://www.netsuite.com/help/helpcenter/en_US/srbrowser/Browser2017_2/schema/search/locationsearchbasic.html?mode=package
In general, one can find the possible search attributes for a basic search
in the type {type_name}SearchBasic
"""
search_cls_name = '{type_name}SearchBasic'.format(type_name=type_name)
search_cls = getattr(self, search_cls_name)
if not operator:
operator = 'is'
string_field = self.SearchStringField(
searchValue=value,
operator=operator)
basic_search = search_cls()
setattr(basic_search, attribute, string_field)
result = self.search(basic_search)
if result.records:
return result.records
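    # Example sketch (values are illustrative):
    #
    #   vendors = client.basic_stringfield_search(
    #       'Vendor', 'entityId', 'ACME', operator='contains')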
# def upsertList(self, records):
# """
# Add objects of type recordType with given externalId..
# If a record of specified type with matching externalId already
# exists, it is updated.
# Usage example:
# customer1 = self.Customer(externalId='customer', email='[email protected]')
# customer2 = self.Customer(externalId='another_customer', email='[email protected]')
# self.upsertList(records=[customer1, customer2])
# :param list[CompoundValue] records: the records to be created or updated
# :return: a reference to the newly created or updated records
# :rtype: list[CompoundValue]
# """
# response = self.request('upsertList', record=records)
# responses = response.body.writeResponse
# record_refs = []
# for response in responses:
# status = response.status
# if status.isSuccess:
# record_ref = response['baseRef']
# self.logger.debug('Successfully updated record of type {type}, internalId: {internalId}, externalId: {externalId}'.format(
# type=record_ref['type'], internalId=record_ref['internalId'], externalId=record_ref['externalId']))
# record_refs.append(record_ref)
# else:
# exc = self._request_error('upsertList', detail=status['statusDetail'][0])
# has_failures = True
# raise exc
# return record_refs
|
py | 7df96f4518a830d8e42b1a63ce9bd4e3bd193d3e | # read version from installed package
from importlib.metadata import version
__version__ = version("pywordcounts") |
py | 7df96fa042b68707a4fb55bb329688ebb0fcd63a | import datetime
import os
import re
import unittest
from unittest import mock
from urllib.parse import parse_qsl, urljoin, urlparse
try:
import zoneinfo
except ImportError:
from backports import zoneinfo
try:
import pytz
except ImportError:
pytz = None
from django.contrib import admin
from django.contrib.admin import AdminSite, ModelAdmin
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.models import ADDITION, DELETION, LogEntry
from django.contrib.admin.options import TO_FIELD_VAR
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.admin.utils import quote
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core import mail
from django.core.checks import Error
from django.core.files import temp as tempfile
from django.forms.utils import ErrorList
from django.template.response import TemplateResponse
from django.test import (
TestCase,
modify_settings,
override_settings,
skipUnlessDBFeature,
)
from django.test.utils import override_script_prefix
from django.urls import NoReverseMatch, resolve, reverse
from django.utils import formats, translation
from django.utils.cache import get_max_age
from django.utils.encoding import iri_to_uri
from django.utils.html import escape
from django.utils.http import urlencode
from . import customadmin
from .admin import CityAdmin, site, site2
from .models import (
Actor,
AdminOrderedAdminMethod,
AdminOrderedCallable,
AdminOrderedField,
AdminOrderedModelMethod,
Album,
Answer,
Answer2,
Article,
BarAccount,
Book,
Bookmark,
Box,
Category,
Chapter,
ChapterXtra1,
ChapterXtra2,
Character,
Child,
Choice,
City,
Collector,
Color,
ComplexSortedPerson,
CoverLetter,
CustomArticle,
CyclicOne,
CyclicTwo,
DooHickey,
Employee,
EmptyModel,
Fabric,
FancyDoodad,
FieldOverridePost,
FilteredManager,
FooAccount,
FoodDelivery,
FunkyTag,
Gallery,
Grommet,
Inquisition,
Language,
Link,
MainPrepopulated,
Media,
ModelWithStringPrimaryKey,
OtherStory,
Paper,
Parent,
ParentWithDependentChildren,
ParentWithUUIDPK,
Person,
Persona,
Picture,
Pizza,
Plot,
PlotDetails,
PluggableSearchPerson,
Podcast,
Post,
PrePopulatedPost,
Promo,
Question,
ReadablePizza,
ReadOnlyPizza,
ReadOnlyRelatedField,
Recommendation,
Recommender,
RelatedPrepopulated,
RelatedWithUUIDPKModel,
Report,
Restaurant,
RowLevelChangePermissionModel,
SecretHideout,
Section,
ShortMessage,
Simple,
Song,
State,
Story,
SuperSecretHideout,
SuperVillain,
Telegram,
TitleTranslation,
Topping,
UnchangeableObject,
UndeletableObject,
UnorderedObject,
UserProxy,
Villain,
Vodcast,
Whatsit,
Widget,
Worker,
WorkHour,
)
ERROR_MESSAGE = "Please enter the correct username and password \
for a staff account. Note that both fields may be case-sensitive."
MULTIPART_ENCTYPE = 'enctype="multipart/form-data"'
def make_aware_datetimes(dt, iana_key):
"""Makes one aware datetime for each supported time zone provider."""
yield dt.replace(tzinfo=zoneinfo.ZoneInfo(iana_key))
if pytz is not None:
yield pytz.timezone(iana_key).localize(dt, is_dst=None)
class AdminFieldExtractionMixin:
"""
Helper methods for extracting data from AdminForm.
"""
def get_admin_form_fields(self, response):
"""
Return a list of AdminFields for the AdminForm in the response.
"""
fields = []
for fieldset in response.context["adminform"]:
for field_line in fieldset:
fields.extend(field_line)
return fields
def get_admin_readonly_fields(self, response):
"""
Return the readonly fields for the response's AdminForm.
"""
return [f for f in self.get_admin_form_fields(response) if f.is_readonly]
def get_admin_readonly_field(self, response, field_name):
"""
Return the readonly field for the given field_name.
"""
admin_readonly_fields = self.get_admin_readonly_fields(response)
for field in admin_readonly_fields:
if field.field["name"] == field_name:
return field
@override_settings(ROOT_URLCONF="admin_views.urls", USE_I18N=True, LANGUAGE_CODE="en")
class AdminViewBasicTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
title="Article 1",
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
title="Article 2",
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.color1 = Color.objects.create(value="Red", warm=True)
cls.color2 = Color.objects.create(value="Orange", warm=True)
cls.color3 = Color.objects.create(value="Blue", warm=False)
cls.color4 = Color.objects.create(value="Green", warm=False)
cls.fab1 = Fabric.objects.create(surface="x")
cls.fab2 = Fabric.objects.create(surface="y")
cls.fab3 = Fabric.objects.create(surface="plain")
cls.b1 = Book.objects.create(name="Book 1")
cls.b2 = Book.objects.create(name="Book 2")
cls.pro1 = Promo.objects.create(name="Promo 1", book=cls.b1)
cls.pro1 = Promo.objects.create(name="Promo 2", book=cls.b2)
cls.chap1 = Chapter.objects.create(
title="Chapter 1", content="[ insert contents here ]", book=cls.b1
)
cls.chap2 = Chapter.objects.create(
title="Chapter 2", content="[ insert contents here ]", book=cls.b1
)
cls.chap3 = Chapter.objects.create(
title="Chapter 1", content="[ insert contents here ]", book=cls.b2
)
cls.chap4 = Chapter.objects.create(
title="Chapter 2", content="[ insert contents here ]", book=cls.b2
)
cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra="ChapterXtra1 1")
cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra="ChapterXtra1 2")
Actor.objects.create(name="Palin", age=27)
# Post data for edit inline
cls.inline_post_data = {
"name": "Test section",
# inline data
"article_set-TOTAL_FORMS": "6",
"article_set-INITIAL_FORMS": "3",
"article_set-MAX_NUM_FORMS": "0",
"article_set-0-id": cls.a1.pk,
# there is no title in database, give one here or formset will fail.
"article_set-0-title": "Norske bostaver รฆรธรฅ skaper problemer",
"article_set-0-content": "<p>Middle content</p>",
"article_set-0-date_0": "2008-03-18",
"article_set-0-date_1": "11:54:58",
"article_set-0-section": cls.s1.pk,
"article_set-1-id": cls.a2.pk,
"article_set-1-title": "Need a title.",
"article_set-1-content": "<p>Oldest content</p>",
"article_set-1-date_0": "2000-03-18",
"article_set-1-date_1": "11:54:58",
"article_set-2-id": cls.a3.pk,
"article_set-2-title": "Need a title.",
"article_set-2-content": "<p>Newest content</p>",
"article_set-2-date_0": "2009-03-18",
"article_set-2-date_1": "11:54:58",
"article_set-3-id": "",
"article_set-3-title": "",
"article_set-3-content": "",
"article_set-3-date_0": "",
"article_set-3-date_1": "",
"article_set-4-id": "",
"article_set-4-title": "",
"article_set-4-content": "",
"article_set-4-date_0": "",
"article_set-4-date_1": "",
"article_set-5-id": "",
"article_set-5-title": "",
"article_set-5-content": "",
"article_set-5-date_0": "",
"article_set-5-date_1": "",
}
def setUp(self):
self.client.force_login(self.superuser)
def assertContentBefore(self, response, text1, text2, failing_msg=None):
"""
Testing utility asserting that text1 appears before text2 in response
content.
"""
self.assertEqual(response.status_code, 200)
self.assertLess(
response.content.index(text1.encode()),
response.content.index(text2.encode()),
(failing_msg or "")
+ "\nResponse:\n"
+ response.content.decode(response.charset),
)
class AdminViewBasicTest(AdminViewBasicTestCase):
def test_trailing_slash_required(self):
"""
If you leave off the trailing slash, app should redirect and add it.
"""
add_url = reverse("admin:admin_views_article_add")
response = self.client.get(add_url[:-1])
self.assertRedirects(response, add_url, status_code=301)
def test_basic_add_GET(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_add_with_GET_args(self):
response = self.client.get(
reverse("admin:admin_views_section_add"), {"name": "My Section"}
)
self.assertContains(
response,
'value="My Section"',
msg_prefix="Couldn't find an input with the right value in the response",
)
def test_basic_edit_GET(self):
"""
A smoke test to ensure GET on the change_view works.
"""
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_basic_edit_GET_string_PK(self):
"""
GET on the change_view (when passing a string as the PK argument for a
model with an integer PK field) redirects to the index page with a
message saying the object doesn't exist.
"""
response = self.client.get(
reverse("admin:admin_views_section_change", args=(quote("abc/<b>"),)),
follow=True,
)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["section with ID โabc/<b>โ doesnโt exist. Perhaps it was deleted?"],
)
def test_basic_edit_GET_old_url_redirect(self):
"""
The change URL changed in Django 1.9, but the old one still redirects.
"""
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)).replace(
"change/", ""
)
)
self.assertRedirects(
response, reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
def test_basic_inheritance_GET_string_PK(self):
"""
GET on the change_view (for inherited models) redirects to the index
page with a message saying the object doesn't exist.
"""
response = self.client.get(
reverse("admin:admin_views_supervillain_change", args=("abc",)), follow=True
)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["super villain with ID โabcโ doesnโt exist. Perhaps it was deleted?"],
)
def test_basic_add_POST(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": "Another Section",
# inline data
"article_set-TOTAL_FORMS": "3",
"article_set-INITIAL_FORMS": "0",
"article_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse("admin:admin_views_section_add"), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_popup_add_POST(self):
"""HTTP response from a popup is properly escaped."""
post_data = {
IS_POPUP_VAR: "1",
"title": "title with a new\nline",
"content": "some content",
"date_0": "2010-09-10",
"date_1": "14:55:39",
}
response = self.client.post(reverse("admin:admin_views_article_add"), post_data)
self.assertContains(response, "title with a new\\nline")
def test_basic_edit_POST(self):
"""
A smoke test to ensure POST on edit_view works.
"""
url = reverse("admin:admin_views_section_change", args=(self.s1.pk,))
response = self.client.post(url, self.inline_post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as(self):
"""
Test "save as".
"""
post_data = self.inline_post_data.copy()
post_data.update(
{
"_saveasnew": "Save+as+new",
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-3-section": "1",
"article_set-4-section": "1",
"article_set-5-section": "1",
}
)
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), post_data
)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as_delete_inline(self):
"""
Should be able to "Save as new" while also deleting an inline.
"""
post_data = self.inline_post_data.copy()
post_data.update(
{
"_saveasnew": "Save+as+new",
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-2-DELETE": "1",
"article_set-3-section": "1",
}
)
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), post_data
)
self.assertEqual(response.status_code, 302)
# started with 3 articles, one was deleted.
self.assertEqual(Section.objects.latest("id").article_set.count(), 2)
def test_change_list_column_field_classes(self):
response = self.client.get(reverse("admin:admin_views_article_changelist"))
# callables display the callable name.
self.assertContains(response, "column-callable_year")
self.assertContains(response, "field-callable_year")
# lambdas display as "lambda" + index that they appear in list_display.
self.assertContains(response, "column-lambda8")
self.assertContains(response, "field-lambda8")
def test_change_list_sorting_callable(self):
"""
Ensure we can sort on a list_display field that is a callable
(column 2 is callable_year in ArticleAdmin)
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": 2}
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on callable are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on callable are out of order.",
)
def test_change_list_sorting_property(self):
"""
Sort on a list_display field that is a property (column 10 is
a property in Article model).
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": 10}
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on property are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on property are out of order.",
)
def test_change_list_sorting_callable_query_expression(self):
"""Query expressions may be used for admin_order_field."""
tests = [
("order_by_expression", 9),
("order_by_f_expression", 12),
("order_by_orderby_expression", 13),
]
for admin_order_field, index in tests:
with self.subTest(admin_order_field):
response = self.client.get(
reverse("admin:admin_views_article_changelist"),
{"o": index},
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on callable are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on callable are out of order.",
)
def test_change_list_sorting_callable_query_expression_reverse(self):
tests = [
("order_by_expression", -9),
("order_by_f_expression", -12),
("order_by_orderby_expression", -13),
]
for admin_order_field, index in tests:
with self.subTest(admin_order_field):
response = self.client.get(
reverse("admin:admin_views_article_changelist"),
{"o": index},
)
self.assertContentBefore(
response,
"Middle content",
"Oldest content",
"Results of sorting on callable are out of order.",
)
self.assertContentBefore(
response,
"Newest content",
"Middle content",
"Results of sorting on callable are out of order.",
)
def test_change_list_sorting_model(self):
"""
Ensure we can sort on a list_display field that is a Model method
(column 3 is 'model_year' in ArticleAdmin)
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "-3"}
)
self.assertContentBefore(
response,
"Newest content",
"Middle content",
"Results of sorting on Model method are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Oldest content",
"Results of sorting on Model method are out of order.",
)
def test_change_list_sorting_model_admin(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin method
(column 4 is 'modeladmin_year' in ArticleAdmin)
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "4"}
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on ModelAdmin method are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on ModelAdmin method are out of order.",
)
def test_change_list_sorting_model_admin_reverse(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin
method in reverse order (i.e. admin_order_field uses the '-' prefix)
(column 6 is 'model_year_reverse' in ArticleAdmin)
"""
td = '<td class="field-model_property_year">%s</td>'
td_2000, td_2008, td_2009 = td % 2000, td % 2008, td % 2009
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "6"}
)
self.assertContentBefore(
response,
td_2009,
td_2008,
"Results of sorting on ModelAdmin method are out of order.",
)
self.assertContentBefore(
response,
td_2008,
td_2000,
"Results of sorting on ModelAdmin method are out of order.",
)
# Let's make sure the ordering is right and that we don't get a
# FieldError when we change to descending order
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "-6"}
)
self.assertContentBefore(
response,
td_2000,
td_2008,
"Results of sorting on ModelAdmin method are out of order.",
)
self.assertContentBefore(
response,
td_2008,
td_2009,
"Results of sorting on ModelAdmin method are out of order.",
)
def test_change_list_sorting_multiple(self):
p1 = Person.objects.create(name="Chris", gender=1, alive=True)
p2 = Person.objects.create(name="Chris", gender=2, alive=True)
p3 = Person.objects.create(name="Bob", gender=1, alive=True)
link1 = reverse("admin:admin_views_person_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_person_change", args=(p2.pk,))
link3 = reverse("admin:admin_views_person_change", args=(p3.pk,))
# Sort by name, gender
response = self.client.get(
reverse("admin:admin_views_person_changelist"), {"o": "1.2"}
)
self.assertContentBefore(response, link3, link1)
self.assertContentBefore(response, link1, link2)
# Sort by gender descending, name
response = self.client.get(
reverse("admin:admin_views_person_changelist"), {"o": "-2.1"}
)
self.assertContentBefore(response, link2, link3)
self.assertContentBefore(response, link3, link1)
def test_change_list_sorting_preserve_queryset_ordering(self):
"""
If no ordering is defined in `ModelAdmin.ordering` or in the query
string, then the underlying order of the queryset should not be
changed, even if it is defined in `Modeladmin.get_queryset()`.
Refs #11868, #7309.
"""
p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80)
p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70)
p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60)
link1 = reverse("admin:admin_views_person_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_person_change", args=(p2.pk,))
link3 = reverse("admin:admin_views_person_change", args=(p3.pk,))
response = self.client.get(reverse("admin:admin_views_person_changelist"), {})
self.assertContentBefore(response, link3, link2)
self.assertContentBefore(response, link2, link1)
def test_change_list_sorting_model_meta(self):
# Test ordering on Model Meta is respected
l1 = Language.objects.create(iso="ur", name="Urdu")
l2 = Language.objects.create(iso="ar", name="Arabic")
link1 = reverse("admin:admin_views_language_change", args=(quote(l1.pk),))
link2 = reverse("admin:admin_views_language_change", args=(quote(l2.pk),))
response = self.client.get(reverse("admin:admin_views_language_changelist"), {})
self.assertContentBefore(response, link2, link1)
# Test we can override with query string
response = self.client.get(
reverse("admin:admin_views_language_changelist"), {"o": "-1"}
)
self.assertContentBefore(response, link1, link2)
def test_change_list_sorting_override_model_admin(self):
# Test ordering on Model Admin is respected, and overrides Model Meta
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse("admin:admin_views_podcast_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_podcast_change", args=(p2.pk,))
response = self.client.get(reverse("admin:admin_views_podcast_changelist"), {})
self.assertContentBefore(response, link1, link2)
def test_multiple_sort_same_field(self):
# The changelist displays the correct columns if two columns correspond
# to the same ordering field.
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse("admin:admin_views_podcast_change", args=(quote(p1.pk),))
link2 = reverse("admin:admin_views_podcast_change", args=(quote(p2.pk),))
response = self.client.get(reverse("admin:admin_views_podcast_changelist"), {})
self.assertContentBefore(response, link1, link2)
p1 = ComplexSortedPerson.objects.create(name="Bob", age=10)
p2 = ComplexSortedPerson.objects.create(name="Amy", age=20)
link1 = reverse("admin:admin_views_complexsortedperson_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_complexsortedperson_change", args=(p2.pk,))
response = self.client.get(
reverse("admin:admin_views_complexsortedperson_changelist"), {}
)
# Should have 5 columns (including action checkbox col)
self.assertContains(response, '<th scope="col"', count=5)
self.assertContains(response, "Name")
self.assertContains(response, "Colored name")
# Check order
self.assertContentBefore(response, "Name", "Colored name")
# Check sorting - should be by name
self.assertContentBefore(response, link2, link1)
def test_sort_indicators_admin_order(self):
"""
The admin shows default sort indicators for all kinds of 'ordering'
fields: field names, method on the model admin and model itself, and
other callables. See #17252.
"""
models = [
(AdminOrderedField, "adminorderedfield"),
(AdminOrderedModelMethod, "adminorderedmodelmethod"),
(AdminOrderedAdminMethod, "adminorderedadminmethod"),
(AdminOrderedCallable, "adminorderedcallable"),
]
for model, url in models:
model.objects.create(stuff="The Last Item", order=3)
model.objects.create(stuff="The First Item", order=1)
model.objects.create(stuff="The Middle Item", order=2)
response = self.client.get(
reverse("admin:admin_views_%s_changelist" % url), {}
)
# Should have 3 columns including action checkbox col.
self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url)
# Check if the correct column was selected. 2 is the index of the
# 'order' column in the model admin's 'list_display' with 0 being
# the implicit 'action_checkbox' and 1 being the column 'stuff'.
self.assertEqual(
response.context["cl"].get_ordering_field_columns(), {2: "asc"}
)
# Check order of records.
self.assertContentBefore(response, "The First Item", "The Middle Item")
self.assertContentBefore(response, "The Middle Item", "The Last Item")
def test_has_related_field_in_list_display_fk(self):
"""Joins shouldn't be performed for <FK>_id fields in list display."""
state = State.objects.create(name="Karnataka")
City.objects.create(state=state, name="Bangalore")
response = self.client.get(reverse("admin:admin_views_city_changelist"), {})
response.context["cl"].list_display = ["id", "name", "state"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), True)
response.context["cl"].list_display = ["id", "name", "state_id"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), False)
def test_has_related_field_in_list_display_o2o(self):
"""Joins shouldn't be performed for <O2O>_id fields in list display."""
media = Media.objects.create(name="Foo")
Vodcast.objects.create(media=media)
response = self.client.get(reverse("admin:admin_views_vodcast_changelist"), {})
response.context["cl"].list_display = ["media"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), True)
response.context["cl"].list_display = ["media_id"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), False)
def test_limited_filter(self):
"""Ensure admin changelist filters do not contain objects excluded via limit_choices_to.
This also tests relation-spanning filters (e.g. 'color__value').
"""
response = self.client.get(reverse("admin:admin_views_thing_changelist"))
self.assertContains(
response,
'<div id="changelist-filter">',
msg_prefix="Expected filter not found in changelist view",
)
self.assertNotContains(
response,
'<a href="?color__id__exact=3">Blue</a>',
msg_prefix="Changelist filter not correctly limited by limit_choices_to",
)
def test_relation_spanning_filters(self):
changelist_url = reverse("admin:admin_views_chapterxtra1_changelist")
response = self.client.get(changelist_url)
self.assertContains(response, '<div id="changelist-filter">')
filters = {
"chap__id__exact": {
"values": [c.id for c in Chapter.objects.all()],
"test": lambda obj, value: obj.chap.id == value,
},
"chap__title": {
"values": [c.title for c in Chapter.objects.all()],
"test": lambda obj, value: obj.chap.title == value,
},
"chap__book__id__exact": {
"values": [b.id for b in Book.objects.all()],
"test": lambda obj, value: obj.chap.book.id == value,
},
"chap__book__name": {
"values": [b.name for b in Book.objects.all()],
"test": lambda obj, value: obj.chap.book.name == value,
},
"chap__book__promo__id__exact": {
"values": [p.id for p in Promo.objects.all()],
"test": lambda obj, value: obj.chap.book.promo_set.filter(
id=value
).exists(),
},
"chap__book__promo__name": {
"values": [p.name for p in Promo.objects.all()],
"test": lambda obj, value: obj.chap.book.promo_set.filter(
name=value
).exists(),
},
# A forward relation (book) after a reverse relation (promo).
"guest_author__promo__book__id__exact": {
"values": [p.id for p in Book.objects.all()],
"test": lambda obj, value: obj.guest_author.promo_set.filter(
book=value
).exists(),
},
}
for filter_path, params in filters.items():
for value in params["values"]:
query_string = urlencode({filter_path: value})
# ensure filter link exists
self.assertContains(response, '<a href="?%s"' % query_string)
# ensure link works
filtered_response = self.client.get(
"%s?%s" % (changelist_url, query_string)
)
self.assertEqual(filtered_response.status_code, 200)
# ensure changelist contains only valid objects
for obj in filtered_response.context["cl"].queryset.all():
self.assertTrue(params["test"](obj, value))
def test_incorrect_lookup_parameters(self):
"""Ensure incorrect lookup parameters are handled gracefully."""
changelist_url = reverse("admin:admin_views_thing_changelist")
response = self.client.get(changelist_url, {"notarealfield": "5"})
self.assertRedirects(response, "%s?e=1" % changelist_url)
# Spanning relationships through a nonexistent related object (Refs #16716)
response = self.client.get(changelist_url, {"notarealfield__whatever": "5"})
self.assertRedirects(response, "%s?e=1" % changelist_url)
response = self.client.get(
changelist_url, {"color__id__exact": "StringNotInteger!"}
)
self.assertRedirects(response, "%s?e=1" % changelist_url)
# Regression test for #18530
response = self.client.get(changelist_url, {"pub_date__gte": "foo"})
self.assertRedirects(response, "%s?e=1" % changelist_url)
def test_isnull_lookups(self):
"""Ensure is_null is handled correctly."""
Article.objects.create(
title="I Could Go Anywhere",
content="Versatile",
date=datetime.datetime.now(),
)
changelist_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(changelist_url)
self.assertContains(response, "4 articles")
response = self.client.get(changelist_url, {"section__isnull": "false"})
self.assertContains(response, "3 articles")
response = self.client.get(changelist_url, {"section__isnull": "0"})
self.assertContains(response, "3 articles")
response = self.client.get(changelist_url, {"section__isnull": "true"})
self.assertContains(response, "1 article")
response = self.client.get(changelist_url, {"section__isnull": "1"})
self.assertContains(response, "1 article")
def test_logout_and_password_change_URLs(self):
response = self.client.get(reverse("admin:admin_views_article_changelist"))
self.assertContains(response, '<a href="%s">' % reverse("admin:logout"))
self.assertContains(
response, '<a href="%s">' % reverse("admin:password_change")
)
def test_named_group_field_choices_change_list(self):
"""
Ensures the admin changelist shows correct values in the relevant column
for rows corresponding to instances of a model in which a named group
has been used in the choices option of a field.
"""
link1 = reverse("admin:admin_views_fabric_change", args=(self.fab1.pk,))
link2 = reverse("admin:admin_views_fabric_change", args=(self.fab2.pk,))
response = self.client.get(reverse("admin:admin_views_fabric_changelist"))
fail_msg = (
"Changelist table isn't showing the right human-readable values "
"set by a model field 'choices' option named group."
)
self.assertContains(
response,
'<a href="%s">Horizontal</a>' % link1,
msg_prefix=fail_msg,
html=True,
)
self.assertContains(
response,
'<a href="%s">Vertical</a>' % link2,
msg_prefix=fail_msg,
html=True,
)
def test_named_group_field_choices_filter(self):
"""
Ensures the filter UI shows correctly when at least one named group has
been used in the choices option of a model field.
"""
response = self.client.get(reverse("admin:admin_views_fabric_changelist"))
fail_msg = (
"Changelist filter isn't showing options contained inside a model "
"field 'choices' option named group."
)
self.assertContains(response, '<div id="changelist-filter">')
self.assertContains(
response,
'<a href="?surface__exact=x">Horizontal</a>',
msg_prefix=fail_msg,
html=True,
)
self.assertContains(
response,
'<a href="?surface__exact=y">Vertical</a>',
msg_prefix=fail_msg,
html=True,
)
def test_change_list_null_boolean_display(self):
Post.objects.create(public=None)
response = self.client.get(reverse("admin:admin_views_post_changelist"))
self.assertContains(response, "icon-unknown.svg")
def test_display_decorator_with_boolean_and_empty_value(self):
msg = (
"The boolean and empty_value arguments to the @display decorator "
"are mutually exclusive."
)
with self.assertRaisesMessage(ValueError, msg):
class BookAdmin(admin.ModelAdmin):
@admin.display(boolean=True, empty_value="(Missing)")
def is_published(self, obj):
return obj.publish_date is not None
def test_i18n_language_non_english_default(self):
"""
Check if the JavaScript i18n view returns an empty language catalog
if the default language is non-English but the selected language
is English. See #13388 and #3594 for more details.
"""
with self.settings(LANGUAGE_CODE="fr"), translation.override("en-us"):
response = self.client.get(reverse("admin:jsi18n"))
self.assertNotContains(response, "Choisir une heure")
def test_i18n_language_non_english_fallback(self):
"""
Makes sure that the fallback language is still working properly
in cases where the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE="fr"), translation.override("none"):
response = self.client.get(reverse("admin:jsi18n"))
self.assertContains(response, "Choisir une heure")
def test_jsi18n_with_context(self):
response = self.client.get(reverse("admin-extra-context:jsi18n"))
self.assertEqual(response.status_code, 200)
def test_jsi18n_format_fallback(self):
"""
The JavaScript i18n view doesn't return localized date/time formats
when the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE="ru"), translation.override("none"):
response = self.client.get(reverse("admin:jsi18n"))
self.assertNotContains(response, "%d.%m.%Y %H:%M:%S")
self.assertContains(response, "%Y-%m-%d %H:%M:%S")
def test_disallowed_filtering(self):
with self.assertLogs("django.security.DisallowedModelAdminLookup", "ERROR"):
response = self.client.get(
"%s?owner__email__startswith=fuzzy"
% reverse("admin:admin_views_album_changelist")
)
self.assertEqual(response.status_code, 400)
# Filters are allowed if explicitly included in list_filter
response = self.client.get(
"%s?color__value__startswith=red"
% reverse("admin:admin_views_thing_changelist")
)
self.assertEqual(response.status_code, 200)
response = self.client.get(
"%s?color__value=red" % reverse("admin:admin_views_thing_changelist")
)
self.assertEqual(response.status_code, 200)
# Filters should be allowed if they involve a local field without the
# need to allow them in list_filter or date_hierarchy.
response = self.client.get(
"%s?age__gt=30" % reverse("admin:admin_views_person_changelist")
)
self.assertEqual(response.status_code, 200)
e1 = Employee.objects.create(
name="Anonymous", gender=1, age=22, alive=True, code="123"
)
e2 = Employee.objects.create(
name="Visitor", gender=2, age=19, alive=True, code="124"
)
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1)
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2)
response = self.client.get(reverse("admin:admin_views_workhour_changelist"))
self.assertContains(response, "employee__person_ptr__exact")
response = self.client.get(
"%s?employee__person_ptr__exact=%d"
% (reverse("admin:admin_views_workhour_changelist"), e1.pk)
)
self.assertEqual(response.status_code, 200)
def test_disallowed_to_field(self):
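        # TO_FIELD_VAR is the "_to_field" GET parameter that raw-id and popup
        # widgets use to pick which field value the related widget returns.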
url = reverse("admin:admin_views_section_changelist")
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.get(url, {TO_FIELD_VAR: "missing_field"})
self.assertEqual(response.status_code, 400)
        # Specifying a field that is not referred to by any other model
        # registered to this admin site should raise an exception.
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.get(
reverse("admin:admin_views_section_changelist"), {TO_FIELD_VAR: "name"}
)
self.assertEqual(response.status_code, 400)
# Primary key should always be allowed, even if the referenced model
# isn't registered.
response = self.client.get(
reverse("admin:admin_views_notreferenced_changelist"), {TO_FIELD_VAR: "id"}
)
self.assertEqual(response.status_code, 200)
        # Specifying a field referenced by another model through an m2m should
        # be allowed.
response = self.client.get(
reverse("admin:admin_views_recipe_changelist"), {TO_FIELD_VAR: "rname"}
)
self.assertEqual(response.status_code, 200)
# Specifying a field referenced through a reverse m2m relationship
# should be allowed.
response = self.client.get(
reverse("admin:admin_views_ingredient_changelist"), {TO_FIELD_VAR: "iname"}
)
self.assertEqual(response.status_code, 200)
        # Specifying a field that is not referred to by any other model
        # directly registered to this admin site but is registered through
        # inheritance should be allowed.
response = self.client.get(
reverse("admin:admin_views_referencedbyparent_changelist"),
{TO_FIELD_VAR: "name"},
)
self.assertEqual(response.status_code, 200)
        # Specifying a field that is only referred to by an inline of a
        # registered model should be allowed.
response = self.client.get(
reverse("admin:admin_views_referencedbyinline_changelist"),
{TO_FIELD_VAR: "name"},
)
self.assertEqual(response.status_code, 200)
        # #25622 - Specifying a field of a model referred to only by a generic
        # relation should raise DisallowedModelAdminToField.
url = reverse("admin:admin_views_referencedbygenrel_changelist")
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.get(url, {TO_FIELD_VAR: "object_id"})
self.assertEqual(response.status_code, 400)
# We also want to prevent the add, change, and delete views from
# leaking a disallowed field value.
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.post(
reverse("admin:admin_views_section_add"), {TO_FIELD_VAR: "name"}
)
self.assertEqual(response.status_code, 400)
section = Section.objects.create()
url = reverse("admin:admin_views_section_change", args=(section.pk,))
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.post(url, {TO_FIELD_VAR: "name"})
self.assertEqual(response.status_code, 400)
url = reverse("admin:admin_views_section_delete", args=(section.pk,))
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.post(url, {TO_FIELD_VAR: "name"})
self.assertEqual(response.status_code, 400)
def test_allowed_filtering_15103(self):
"""
        Regression test for ticket 15103 - filtering on fields defined in a
        ForeignKey's 'limit_choices_to' should be allowed; otherwise
        raw_id_fields can break.
"""
# Filters should be allowed if they are defined on a ForeignKey
# pointing to this model.
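        # A minimal sketch (hypothetical field, for illustration only) of the
        # kind of declaration this exercises:
        #
        #     inquisition = models.ForeignKey(
        #         Inquisition,
        #         models.CASCADE,
        #         limit_choices_to={"leader__name": "Palin", "leader__age": 27},
        #     )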
url = "%s?leader__name=Palin&leader__age=27" % reverse(
"admin:admin_views_inquisition_changelist"
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_popup_dismiss_related(self):
"""
Regression test for ticket 20664 - ensure the pk is properly quoted.
"""
actor = Actor.objects.create(name="Palin", age=27)
response = self.client.get(
"%s?%s" % (reverse("admin:admin_views_actor_changelist"), IS_POPUP_VAR)
)
self.assertContains(response, 'data-popup-opener="%s"' % actor.pk)
def test_hide_change_password(self):
"""
Tests if the "change password" link in the admin is hidden if the User
does not have a usable password set.
(against 9bea85795705d015cdadc82c68b99196a8554f5c)
"""
user = User.objects.get(username="super")
user.set_unusable_password()
user.save()
self.client.force_login(user)
response = self.client.get(reverse("admin:index"))
self.assertNotContains(
response,
reverse("admin:password_change"),
msg_prefix=(
'The "change password" link should not be displayed if a user does not '
"have a usable password."
),
)
def test_change_view_with_show_delete_extra_context(self):
"""
The 'show_delete' context variable in the admin's change view controls
the display of the delete button.
"""
instance = UndeletableObject.objects.create(name="foo")
response = self.client.get(
reverse("admin:admin_views_undeletableobject_change", args=(instance.pk,))
)
self.assertNotContains(response, "deletelink")
def test_change_view_logs_m2m_field_changes(self):
"""Changes to ManyToManyFields are included in the object's history."""
pizza = ReadablePizza.objects.create(name="Cheese")
cheese = Topping.objects.create(name="cheese")
post_data = {"name": pizza.name, "toppings": [cheese.pk]}
response = self.client.post(
reverse("admin:admin_views_readablepizza_change", args=(pizza.pk,)),
post_data,
)
self.assertRedirects(
response, reverse("admin:admin_views_readablepizza_changelist")
)
pizza_ctype = ContentType.objects.get_for_model(
ReadablePizza, for_concrete_model=False
)
log = LogEntry.objects.filter(
content_type=pizza_ctype, object_id=pizza.pk
).first()
self.assertEqual(log.get_change_message(), "Changed Toppings.")
def test_allows_attributeerror_to_bubble_up(self):
"""
AttributeErrors are allowed to bubble when raised inside a change list
view. Requires a model to be created so there's something to display.
Refs: #16655, #18593, and #18747
"""
Simple.objects.create()
with self.assertRaises(AttributeError):
self.client.get(reverse("admin:admin_views_simple_changelist"))
def test_changelist_with_no_change_url(self):
"""
ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url
for change_view is removed from get_urls (#20934).
"""
o = UnchangeableObject.objects.create()
response = self.client.get(
reverse("admin:admin_views_unchangeableobject_changelist")
)
# Check the format of the shown object -- shouldn't contain a change link
self.assertContains(
response, '<th class="field-__str__">%s</th>' % o, html=True
)
def test_invalid_appindex_url(self):
"""
#21056 -- URL reversing shouldn't work for nonexistent apps.
"""
good_url = "/test_admin/admin/admin_views/"
confirm_good_url = reverse(
"admin:app_list", kwargs={"app_label": "admin_views"}
)
self.assertEqual(good_url, confirm_good_url)
with self.assertRaises(NoReverseMatch):
reverse("admin:app_list", kwargs={"app_label": "this_should_fail"})
with self.assertRaises(NoReverseMatch):
reverse("admin:app_list", args=("admin_views2",))
def test_resolve_admin_views(self):
index_match = resolve("/test_admin/admin4/")
list_match = resolve("/test_admin/admin4/auth/user/")
self.assertIs(index_match.func.admin_site, customadmin.simple_site)
self.assertIsInstance(
list_match.func.model_admin, customadmin.CustomPwdTemplateUserAdmin
)
def test_adminsite_display_site_url(self):
"""
#13749 - Admin should display link to front-end site 'View site'
"""
url = reverse("admin:index")
response = self.client.get(url)
self.assertEqual(response.context["site_url"], "/my-site-url/")
self.assertContains(response, '<a href="/my-site-url/">View site</a>')
def test_date_hierarchy_empty_queryset(self):
self.assertIs(Question.objects.exists(), False)
response = self.client.get(reverse("admin:admin_views_answer2_changelist"))
self.assertEqual(response.status_code, 200)
@override_settings(TIME_ZONE="America/Sao_Paulo", USE_TZ=True)
def test_date_hierarchy_timezone_dst(self):
# This datetime doesn't exist in this timezone due to DST.
for date in make_aware_datetimes(
datetime.datetime(2016, 10, 16, 15), "America/Sao_Paulo"
):
with self.subTest(repr(date.tzinfo)):
q = Question.objects.create(question="Why?", expires=date)
Answer2.objects.create(question=q, answer="Because.")
response = self.client.get(
reverse("admin:admin_views_answer2_changelist")
)
self.assertContains(response, "question__expires__day=16")
self.assertContains(response, "question__expires__month=10")
self.assertContains(response, "question__expires__year=2016")
@override_settings(TIME_ZONE="America/Los_Angeles", USE_TZ=True)
def test_date_hierarchy_local_date_differ_from_utc(self):
# This datetime is 2017-01-01 in UTC.
for date in make_aware_datetimes(
datetime.datetime(2016, 12, 31, 16), "America/Los_Angeles"
):
with self.subTest(repr(date.tzinfo)):
q = Question.objects.create(question="Why?", expires=date)
Answer2.objects.create(question=q, answer="Because.")
response = self.client.get(
reverse("admin:admin_views_answer2_changelist")
)
self.assertContains(response, "question__expires__day=31")
self.assertContains(response, "question__expires__month=12")
self.assertContains(response, "question__expires__year=2016")
def test_sortable_by_columns_subset(self):
expected_sortable_fields = ("date", "callable_year")
expected_not_sortable_fields = (
"content",
"model_year",
"modeladmin_year",
"model_year_reversed",
"section",
)
response = self.client.get(reverse("admin6:admin_views_article_changelist"))
for field_name in expected_sortable_fields:
self.assertContains(
response, '<th scope="col" class="sortable column-%s">' % field_name
)
for field_name in expected_not_sortable_fields:
self.assertContains(
response, '<th scope="col" class="column-%s">' % field_name
)
def test_get_sortable_by_columns_subset(self):
response = self.client.get(reverse("admin6:admin_views_actor_changelist"))
self.assertContains(response, '<th scope="col" class="sortable column-age">')
self.assertContains(response, '<th scope="col" class="column-name">')
def test_sortable_by_no_column(self):
expected_not_sortable_fields = ("title", "book")
response = self.client.get(reverse("admin6:admin_views_chapter_changelist"))
for field_name in expected_not_sortable_fields:
self.assertContains(
response, '<th scope="col" class="column-%s">' % field_name
)
self.assertNotContains(response, '<th scope="col" class="sortable column')
def test_get_sortable_by_no_column(self):
response = self.client.get(reverse("admin6:admin_views_color_changelist"))
self.assertContains(response, '<th scope="col" class="column-value">')
self.assertNotContains(response, '<th scope="col" class="sortable column')
def test_app_index_context(self):
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertContains(
response,
"<title>Admin_Views administration | Django site admin</title>",
)
self.assertEqual(response.context["title"], "Admin_Views administration")
self.assertEqual(response.context["app_label"], "admin_views")
def test_change_view_subtitle_per_object(self):
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a1.pk,)),
)
self.assertContains(
response,
"<title>Article 1 | Change article | Django site admin</title>",
)
self.assertContains(response, "<h1>Change article</h1>")
self.assertContains(response, "<h2>Article 1</h2>")
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a2.pk,)),
)
self.assertContains(
response,
"<title>Article 2 | Change article | Django site admin</title>",
)
self.assertContains(response, "<h1>Change article</h1>")
self.assertContains(response, "<h2>Article 2</h2>")
def test_view_subtitle_per_object(self):
viewuser = User.objects.create_user(
username="viewuser",
password="secret",
is_staff=True,
)
viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("view", Article._meta)),
)
self.client.force_login(viewuser)
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a1.pk,)),
)
self.assertContains(
response,
"<title>Article 1 | View article | Django site admin</title>",
)
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(response, "<h2>Article 1</h2>")
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a2.pk,)),
)
self.assertContains(
response,
"<title>Article 2 | View article | Django site admin</title>",
)
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(response, "<h2>Article 2</h2>")
def test_formset_kwargs_can_be_overridden(self):
response = self.client.get(reverse("admin:admin_views_city_add"))
self.assertContains(response, "overridden_name")
def test_render_views_no_subtitle(self):
tests = [
reverse("admin:index"),
reverse("admin:password_change"),
reverse("admin:app_list", args=("admin_views",)),
reverse("admin:admin_views_article_delete", args=(self.a1.pk,)),
reverse("admin:admin_views_article_history", args=(self.a1.pk,)),
# Login must be after logout.
reverse("admin:logout"),
reverse("admin:login"),
]
for url in tests:
with self.subTest(url=url):
with self.assertNoLogs("django.template", "DEBUG"):
self.client.get(url)
def test_render_delete_selected_confirmation_no_subtitle(self):
post_data = {
"action": "delete_selected",
"selected_across": "0",
"index": "0",
"_selected_action": self.a1.pk,
}
with self.assertNoLogs("django.template", "DEBUG"):
self.client.post(reverse("admin:admin_views_article_changelist"), post_data)
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
# Put this app's and the shared tests templates dirs in DIRS to
# take precedence over the admin's templates dir.
"DIRS": [
os.path.join(os.path.dirname(__file__), "templates"),
os.path.join(os.path.dirname(os.path.dirname(__file__)), "templates"),
],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
]
)
class AdminCustomTemplateTests(AdminViewBasicTestCase):
def test_custom_model_admin_templates(self):
# Test custom change list template with custom extra context
response = self.client.get(
reverse("admin:admin_views_customarticle_changelist")
)
self.assertContains(response, "var hello = 'Hello!';")
self.assertTemplateUsed(response, "custom_admin/change_list.html")
# Test custom add form template
response = self.client.get(reverse("admin:admin_views_customarticle_add"))
self.assertTemplateUsed(response, "custom_admin/add_form.html")
# Add an article so we can test delete, change, and history views
post = self.client.post(
reverse("admin:admin_views_customarticle_add"),
{
"content": "<p>great article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
},
)
self.assertRedirects(
post, reverse("admin:admin_views_customarticle_changelist")
)
self.assertEqual(CustomArticle.objects.all().count(), 1)
article_pk = CustomArticle.objects.all()[0].pk
        # Test custom change, delete, and object history templates.
response = self.client.get(
reverse("admin:admin_views_customarticle_change", args=(article_pk,))
)
self.assertTemplateUsed(response, "custom_admin/change_form.html")
response = self.client.get(
reverse("admin:admin_views_customarticle_delete", args=(article_pk,))
)
self.assertTemplateUsed(response, "custom_admin/delete_confirmation.html")
response = self.client.post(
reverse("admin:admin_views_customarticle_changelist"),
data={
"index": 0,
"action": ["delete_selected"],
"_selected_action": ["1"],
},
)
self.assertTemplateUsed(
response, "custom_admin/delete_selected_confirmation.html"
)
response = self.client.get(
reverse("admin:admin_views_customarticle_history", args=(article_pk,))
)
self.assertTemplateUsed(response, "custom_admin/object_history.html")
# A custom popup response template may be specified by
# ModelAdmin.popup_response_template.
response = self.client.post(
reverse("admin:admin_views_customarticle_add") + "?%s=1" % IS_POPUP_VAR,
{
"content": "<p>great article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
IS_POPUP_VAR: "1",
},
)
self.assertEqual(response.template_name, "custom_admin/popup_response.html")
def test_extended_bodyclass_template_change_form(self):
"""
The admin/change_form.html template uses block.super in the
bodyclass block.
"""
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_change_password_template(self):
user = User.objects.get(username="super")
response = self.client.get(
reverse("admin:auth_user_password_change", args=(user.id,))
)
        # The auth/user/change_password.html template uses block.super in the
        # bodyclass block.
self.assertContains(response, "bodyclass_consistency_check ")
        # When a site has multiple passwords in the browser's password manager,
        # a browser pop-up asks which user the new password is for. To prevent
        # this, the username is added to the change password form.
self.assertContains(
response, '<input type="text" name="username" value="super" class="hidden">'
)
def test_extended_bodyclass_template_index(self):
"""
The admin/index.html template uses block.super in the bodyclass block.
"""
response = self.client.get(reverse("admin:index"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_change_list(self):
"""
        The admin/change_list.html template uses block.super
        in the bodyclass block.
"""
response = self.client.get(reverse("admin:admin_views_article_changelist"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_template_login(self):
"""
The admin/login.html template uses block.super in the
bodyclass block.
"""
self.client.logout()
response = self.client.get(reverse("admin:login"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_template_delete_confirmation(self):
"""
The admin/delete_confirmation.html template uses
block.super in the bodyclass block.
"""
group = Group.objects.create(name="foogroup")
response = self.client.get(reverse("admin:auth_group_delete", args=(group.id,)))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_template_delete_selected_confirmation(self):
"""
The admin/delete_selected_confirmation.html template uses
block.super in bodyclass block.
"""
group = Group.objects.create(name="foogroup")
post_data = {
"action": "delete_selected",
"selected_across": "0",
"index": "0",
"_selected_action": group.id,
}
response = self.client.post(reverse("admin:auth_group_changelist"), post_data)
self.assertEqual(response.context["site_header"], "Django administration")
self.assertContains(response, "bodyclass_consistency_check ")
def test_filter_with_custom_template(self):
"""
A custom template can be used to render an admin filter.
"""
response = self.client.get(reverse("admin:admin_views_color2_changelist"))
self.assertTemplateUsed(response, "custom_filter_template.html")
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewFormUrlTest(TestCase):
current_app = "admin3"
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_change_form_URL_has_correct_value(self):
"""
        The change view exposes form_url in response.context.
"""
response = self.client.get(
reverse(
"admin:admin_views_section_change",
args=(self.s1.pk,),
current_app=self.current_app,
)
)
self.assertIn(
"form_url", response.context, msg="form_url not present in response.context"
)
self.assertEqual(response.context["form_url"], "pony")
def test_initial_data_can_be_overridden(self):
"""
The behavior for setting initial form data can be overridden in the
ModelAdmin class. Usually, the initial value is set via the GET params.
"""
response = self.client.get(
reverse("admin:admin_views_restaurant_add", current_app=self.current_app),
{"name": "test_value"},
)
        # This would be the usual behaviour.
self.assertNotContains(response, 'value="test_value"')
        # This is the overridden behaviour.
self.assertContains(response, 'value="overridden_value"')
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminJavaScriptTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_js_minified_only_if_debug_is_false(self):
"""
The minified versions of the JS files are only used when DEBUG is False.
"""
with override_settings(DEBUG=False):
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertNotContains(response, "vendor/jquery/jquery.js")
self.assertContains(response, "vendor/jquery/jquery.min.js")
self.assertContains(response, "prepopulate.js")
self.assertContains(response, "actions.js")
self.assertContains(response, "collapse.js")
self.assertContains(response, "inlines.js")
with override_settings(DEBUG=True):
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertContains(response, "vendor/jquery/jquery.js")
self.assertNotContains(response, "vendor/jquery/jquery.min.js")
self.assertContains(response, "prepopulate.js")
self.assertContains(response, "actions.js")
self.assertContains(response, "collapse.js")
self.assertContains(response, "inlines.js")
@override_settings(ROOT_URLCONF="admin_views.urls")
class SaveAsTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_as_duplication(self):
"""'save as' creates a new person"""
post_data = {"_saveasnew": "", "name": "John M", "gender": 1, "age": 42}
response = self.client.post(
reverse("admin:admin_views_person_change", args=(self.per1.pk,)), post_data
)
self.assertEqual(len(Person.objects.filter(name="John M")), 1)
self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
new_person = Person.objects.latest("id")
self.assertRedirects(
response, reverse("admin:admin_views_person_change", args=(new_person.pk,))
)
def test_save_as_continue_false(self):
"""
Saving a new object using "Save as new" redirects to the changelist
instead of the change view when ModelAdmin.save_as_continue=False.
"""
post_data = {"_saveasnew": "", "name": "John M", "gender": 1, "age": 42}
url = reverse(
"admin:admin_views_person_change",
args=(self.per1.pk,),
current_app=site2.name,
)
response = self.client.post(url, post_data)
self.assertEqual(len(Person.objects.filter(name="John M")), 1)
self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
self.assertRedirects(
response,
reverse("admin:admin_views_person_changelist", current_app=site2.name),
)
def test_save_as_new_with_validation_errors(self):
"""
When you click "Save as new" and have a validation error,
you only see the "Save as new" button and not the other save buttons,
and that only the "Save as" button is visible.
"""
response = self.client.post(
reverse("admin:admin_views_person_change", args=(self.per1.pk,)),
{
"_saveasnew": "",
"gender": "invalid",
"_addanother": "fail",
},
)
self.assertContains(response, "Please correct the errors below.")
self.assertFalse(response.context["show_save_and_add_another"])
self.assertFalse(response.context["show_save_and_continue"])
self.assertTrue(response.context["show_save_as_new"])
def test_save_as_new_with_validation_errors_with_inlines(self):
parent = Parent.objects.create(name="Father")
child = Child.objects.create(parent=parent, name="Child")
response = self.client.post(
reverse("admin:admin_views_parent_change", args=(parent.pk,)),
{
"_saveasnew": "Save as new",
"child_set-0-parent": parent.pk,
"child_set-0-id": child.pk,
"child_set-0-name": "Child",
"child_set-INITIAL_FORMS": 1,
"child_set-MAX_NUM_FORMS": 1000,
"child_set-MIN_NUM_FORMS": 0,
"child_set-TOTAL_FORMS": 4,
"name": "_invalid",
},
)
self.assertContains(response, "Please correct the error below.")
self.assertFalse(response.context["show_save_and_add_another"])
self.assertFalse(response.context["show_save_and_continue"])
self.assertTrue(response.context["show_save_as_new"])
def test_save_as_new_with_inlines_with_validation_errors(self):
parent = Parent.objects.create(name="Father")
child = Child.objects.create(parent=parent, name="Child")
response = self.client.post(
reverse("admin:admin_views_parent_change", args=(parent.pk,)),
{
"_saveasnew": "Save as new",
"child_set-0-parent": parent.pk,
"child_set-0-id": child.pk,
"child_set-0-name": "_invalid",
"child_set-INITIAL_FORMS": 1,
"child_set-MAX_NUM_FORMS": 1000,
"child_set-MIN_NUM_FORMS": 0,
"child_set-TOTAL_FORMS": 4,
"name": "Father",
},
)
self.assertContains(response, "Please correct the error below.")
self.assertFalse(response.context["show_save_and_add_another"])
self.assertFalse(response.context["show_save_and_continue"])
self.assertTrue(response.context["show_save_as_new"])
@override_settings(ROOT_URLCONF="admin_views.urls")
class CustomModelAdminTest(AdminViewBasicTestCase):
def test_custom_admin_site_login_form(self):
self.client.logout()
response = self.client.get(reverse("admin2:index"), follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
login = self.client.post(
reverse("admin2:login"),
{
REDIRECT_FIELD_NAME: reverse("admin2:index"),
"username": "customform",
"password": "secret",
},
follow=True,
)
self.assertIsInstance(login, TemplateResponse)
self.assertContains(login, "custom form error")
self.assertContains(login, "path/to/media.css")
def test_custom_admin_site_login_template(self):
self.client.logout()
response = self.client.get(reverse("admin2:index"), follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/login.html")
self.assertContains(response, "Hello from a custom login template")
def test_custom_admin_site_logout_template(self):
response = self.client.get(reverse("admin2:logout"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/logout.html")
self.assertContains(response, "Hello from a custom logout template")
def test_custom_admin_site_index_view_and_template(self):
response = self.client.get(reverse("admin2:index"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/index.html")
self.assertContains(response, "Hello from a custom index template *bar*")
def test_custom_admin_site_app_index_view_and_template(self):
response = self.client.get(reverse("admin2:app_list", args=("admin_views",)))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/app_index.html")
self.assertContains(response, "Hello from a custom app_index template")
def test_custom_admin_site_password_change_template(self):
response = self.client.get(reverse("admin2:password_change"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/password_change_form.html")
self.assertContains(
response, "Hello from a custom password change form template"
)
def test_custom_admin_site_password_change_with_extra_context(self):
response = self.client.get(reverse("admin2:password_change"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/password_change_form.html")
self.assertContains(response, "eggs")
def test_custom_admin_site_password_change_done_template(self):
response = self.client.get(reverse("admin2:password_change_done"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/password_change_done.html")
self.assertContains(
response, "Hello from a custom password change done template"
)
def test_custom_admin_site_view(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse("admin2:my_view"))
self.assertEqual(response.content, b"Django is a magical pony!")
def test_pwd_change_custom_template(self):
self.client.force_login(self.superuser)
su = User.objects.get(username="super")
response = self.client.get(
reverse("admin4:auth_user_password_change", args=(su.pk,))
)
self.assertEqual(response.status_code, 200)
def get_perm(Model, codename):
"""Return the permission object, for the Model"""
ct = ContentType.objects.get_for_model(Model, for_concrete_model=False)
return Permission.objects.get(content_type=ct, codename=codename)
@override_settings(
ROOT_URLCONF="admin_views.urls",
# Test with the admin's documented list of required context processors.
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class AdminViewPermissionsTest(TestCase):
"""Tests for Admin Views Permissions."""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.viewuser = User.objects.create_user(
username="viewuser", password="secret", is_staff=True
)
cls.adduser = User.objects.create_user(
username="adduser", password="secret", is_staff=True
)
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.deleteuser = User.objects.create_user(
username="deleteuser", password="secret", is_staff=True
)
cls.joepublicuser = User.objects.create_user(
username="joepublic", password="secret"
)
cls.nostaffuser = User.objects.create_user(
username="nostaff", password="secret"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
another_section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
        # Set up permissions for the users who can view, add, change, and
        # delete.
opts = Article._meta
# User who can view Articles
cls.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("view", opts))
)
# User who can add Articles
cls.adduser.user_permissions.add(
get_perm(Article, get_permission_codename("add", opts))
)
# User who can change Articles
cls.changeuser.user_permissions.add(
get_perm(Article, get_permission_codename("change", opts))
)
cls.nostaffuser.user_permissions.add(
get_perm(Article, get_permission_codename("change", opts))
)
# User who can delete Articles
cls.deleteuser.user_permissions.add(
get_perm(Article, get_permission_codename("delete", opts))
)
cls.deleteuser.user_permissions.add(
get_perm(Section, get_permission_codename("delete", Section._meta))
)
        # Login POST dicts.
cls.index_url = reverse("admin:index")
cls.super_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "super",
"password": "secret",
}
cls.super_email_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "[email protected]",
"password": "secret",
}
cls.super_email_bad_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "[email protected]",
"password": "notsecret",
}
cls.adduser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "adduser",
"password": "secret",
}
cls.changeuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "changeuser",
"password": "secret",
}
cls.deleteuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "deleteuser",
"password": "secret",
}
cls.nostaff_login = {
REDIRECT_FIELD_NAME: reverse("has_permission_admin:index"),
"username": "nostaff",
"password": "secret",
}
cls.joepublic_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "joepublic",
"password": "secret",
}
cls.viewuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "viewuser",
"password": "secret",
}
cls.no_username_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"password": "secret",
}
def test_login(self):
"""
Make sure only staff members can log in.
Successful posts to the login page will redirect to the original url.
Unsuccessful attempts will continue to render the login page with
a 200 status code.
"""
login_url = "%s?next=%s" % (reverse("admin:login"), reverse("admin:index"))
# Super User
response = self.client.get(self.index_url)
self.assertRedirects(response, login_url)
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse("admin:logout"))
        # Test login with an email address instead of a username.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
        # Only correct passwords get a username hint.
login = self.client.post(login_url, self.super_email_bad_login)
self.assertContains(login, ERROR_MESSAGE)
new_user = User(username="jondoe", password="secret", email="[email protected]")
new_user.save()
        # Ensure that a user with multiple email addresses doesn't get a 500.
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
# View User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.viewuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse("admin:logout"))
# Add User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.adduser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse("admin:logout"))
# Change User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.changeuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse("admin:logout"))
# Delete User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.deleteuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse("admin:logout"))
        # A regular user should not be able to log in.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.joepublic_login)
self.assertContains(login, ERROR_MESSAGE)
# Requests without username should not return 500 errors.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.no_username_login)
self.assertEqual(login.status_code, 200)
self.assertFormError(login, "form", "username", ["This field is required."])
def test_login_redirect_for_direct_get(self):
"""
Login redirect should be to the admin index page when going directly to
/admin/login/.
"""
response = self.client.get(reverse("admin:login"))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context[REDIRECT_FIELD_NAME], reverse("admin:index"))
def test_login_has_permission(self):
        # A regular user should not be able to log in.
response = self.client.get(reverse("has_permission_admin:index"))
self.assertEqual(response.status_code, 302)
login = self.client.post(
reverse("has_permission_admin:login"), self.joepublic_login
)
self.assertContains(login, "permission denied")
        # A user with permissions should be able to log in.
response = self.client.get(reverse("has_permission_admin:index"))
self.assertEqual(response.status_code, 302)
login = self.client.post(
reverse("has_permission_admin:login"), self.nostaff_login
)
self.assertRedirects(login, reverse("has_permission_admin:index"))
self.assertFalse(login.context)
self.client.get(reverse("has_permission_admin:logout"))
        # Staff should be able to log in.
response = self.client.get(reverse("has_permission_admin:index"))
self.assertEqual(response.status_code, 302)
login = self.client.post(
reverse("has_permission_admin:login"),
{
REDIRECT_FIELD_NAME: reverse("has_permission_admin:index"),
"username": "deleteuser",
"password": "secret",
},
)
self.assertRedirects(login, reverse("has_permission_admin:index"))
self.assertFalse(login.context)
self.client.get(reverse("has_permission_admin:logout"))
def test_login_successfully_redirects_to_original_URL(self):
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
query_string = "the-answer=42"
redirect_url = "%s?%s" % (self.index_url, query_string)
new_next = {REDIRECT_FIELD_NAME: redirect_url}
post_data = self.super_login.copy()
post_data.pop(REDIRECT_FIELD_NAME)
login = self.client.post(
"%s?%s" % (reverse("admin:login"), urlencode(new_next)), post_data
)
self.assertRedirects(login, redirect_url)
def test_double_login_is_not_allowed(self):
"""Regression test for #19327"""
login_url = "%s?next=%s" % (reverse("admin:login"), reverse("admin:index"))
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
# Logging in with non-admin user fails
login = self.client.post(login_url, self.joepublic_login)
self.assertContains(login, ERROR_MESSAGE)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
# Logging in with admin user while already logged in
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse("admin:logout"))
def test_login_page_notice_for_non_staff_users(self):
"""
A logged-in non-staff user trying to access the admin index should be
presented with the login page and a hint indicating that the current
user doesn't have access to it.
"""
hint_template = "You are authenticated as {}"
# Anonymous user should not be shown the hint
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, "login-form")
self.assertNotContains(response, hint_template.format(""), status_code=200)
# Non-staff user should be shown the hint
self.client.force_login(self.nostaffuser)
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, "login-form")
self.assertContains(
response, hint_template.format(self.nostaffuser.username), status_code=200
)
def test_add_view(self):
"""Test add view restricts access and actually adds items."""
add_dict = {
"title": "Dรธm ikke",
"content": "<p>great article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
"section": self.s1.pk,
}
# Change User should not have access to add articles
self.client.force_login(self.changeuser)
        # Make sure the view removes the test cookie.
self.assertIs(self.client.session.test_cookie_worked(), False)
response = self.client.get(reverse("admin:admin_views_article_add"))
self.assertEqual(response.status_code, 403)
# Try POST just to make sure
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.get(reverse("admin:logout"))
# View User should not have access to add articles
self.client.force_login(self.viewuser)
response = self.client.get(reverse("admin:admin_views_article_add"))
self.assertEqual(response.status_code, 403)
# Try POST just to make sure
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
# Now give the user permission to add but not change.
self.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("add", Article._meta))
)
response = self.client.get(reverse("admin:admin_views_article_add"))
self.assertEqual(response.context["title"], "Add article")
self.assertContains(response, "<title>Add article | Django site admin</title>")
self.assertContains(
response, '<input type="submit" value="Save and view" name="_continue">'
)
post = self.client.post(
reverse("admin:admin_views_article_add"), add_dict, follow=False
)
self.assertEqual(post.status_code, 302)
self.assertEqual(Article.objects.count(), 4)
article = Article.objects.latest("pk")
response = self.client.get(
reverse("admin:admin_views_article_change", args=(article.pk,))
)
self.assertContains(
response,
'<li class="success">The article โDรธm ikkeโ was added successfully.</li>',
)
article.delete()
self.client.get(reverse("admin:logout"))
        # The add user may log in and POST to the add view; they are then
        # redirected to the admin root.
self.client.force_login(self.adduser)
addpage = self.client.get(reverse("admin:admin_views_article_add"))
change_list_link = '› <a href="%s">Articles</a>' % reverse(
"admin:admin_views_article_changelist"
)
self.assertNotContains(
addpage,
change_list_link,
msg_prefix=(
"User restricted to add permission is given link to change list view "
"in breadcrumbs."
),
)
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), 4)
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(mail.outbox[0].subject, "Greetings from a created object")
self.client.get(reverse("admin:logout"))
# The addition was logged correctly
addition_log = LogEntry.objects.all()[0]
new_article = Article.objects.last()
article_ct = ContentType.objects.get_for_model(Article)
self.assertEqual(addition_log.user_id, self.adduser.pk)
self.assertEqual(addition_log.content_type_id, article_ct.pk)
self.assertEqual(addition_log.object_id, str(new_article.pk))
self.assertEqual(addition_log.object_repr, "Dรธm ikke")
self.assertEqual(addition_log.action_flag, ADDITION)
self.assertEqual(addition_log.get_change_message(), "Added.")
# Super can add too, but is redirected to the change list view
self.client.force_login(self.superuser)
addpage = self.client.get(reverse("admin:admin_views_article_add"))
self.assertContains(
addpage,
change_list_link,
msg_prefix=(
"Unrestricted user is not given link to change list view in "
"breadcrumbs."
),
)
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertRedirects(post, reverse("admin:admin_views_article_changelist"))
self.assertEqual(Article.objects.count(), 5)
self.client.get(reverse("admin:logout"))
        # Refs #8509 - if a normal user is already logged in, it is possible
        # to switch to the superuser without error.
self.client.force_login(self.joepublicuser)
# Check and make sure that if user expires, data still persists
self.client.force_login(self.superuser)
        # Make sure the view removes the test cookie.
self.assertIs(self.client.session.test_cookie_worked(), False)
@mock.patch("django.contrib.admin.options.InlineModelAdmin.has_change_permission")
def test_add_view_with_view_only_inlines(self, has_change_permission):
"""User with add permission to a section but view-only for inlines."""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("add", Section._meta))
)
self.client.force_login(self.viewuser)
# Valid POST creates a new section.
data = {
"name": "New obj",
"article_set-TOTAL_FORMS": 0,
"article_set-INITIAL_FORMS": 0,
}
response = self.client.post(reverse("admin:admin_views_section_add"), data)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(Section.objects.latest("id").name, data["name"])
# InlineModelAdmin.has_change_permission()'s obj argument is always
# None during object add.
self.assertEqual(
[obj for (request, obj), _ in has_change_permission.call_args_list],
[None, None],
)
def test_change_view(self):
"""Change view should restrict access and allow users to edit items."""
change_dict = {
"title": "Ikke fordรธmt",
"content": "<p>edited article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
"section": self.s1.pk,
}
article_change_url = reverse(
"admin:admin_views_article_change", args=(self.a1.pk,)
)
article_changelist_url = reverse("admin:admin_views_article_changelist")
        # The add user should not be able to view the list of articles or
        # change any of them.
self.client.force_login(self.adduser)
response = self.client.get(article_changelist_url)
self.assertEqual(response.status_code, 403)
response = self.client.get(article_change_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(article_change_url, change_dict)
self.assertEqual(post.status_code, 403)
self.client.get(reverse("admin:logout"))
        # The view user can view articles but not make changes.
self.client.force_login(self.viewuser)
response = self.client.get(article_changelist_url)
self.assertContains(
response,
"<title>Select article to view | Django site admin</title>",
)
self.assertContains(response, "<h1>Select article to view</h1>")
self.assertEqual(response.context["title"], "Select article to view")
response = self.client.get(article_change_url)
self.assertContains(response, "<title>View article | Django site admin</title>")
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(response, "<label>Extra form field:</label>")
self.assertContains(
response,
'<a href="/test_admin/admin/admin_views/article/" class="closelink">Close'
"</a>",
)
self.assertEqual(response.context["title"], "View article")
post = self.client.post(article_change_url, change_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(
Article.objects.get(pk=self.a1.pk).content, "<p>Middle content</p>"
)
self.client.get(reverse("admin:logout"))
        # The change user can view all items and edit them.
self.client.force_login(self.changeuser)
response = self.client.get(article_changelist_url)
self.assertEqual(response.context["title"], "Select article to change")
self.assertContains(
response,
"<title>Select article to change | Django site admin</title>",
)
self.assertContains(response, "<h1>Select article to change</h1>")
response = self.client.get(article_change_url)
self.assertEqual(response.context["title"], "Change article")
self.assertContains(
response,
"<title>Change article | Django site admin</title>",
)
self.assertContains(response, "<h1>Change article</h1>")
post = self.client.post(article_change_url, change_dict)
self.assertRedirects(post, article_changelist_url)
self.assertEqual(
Article.objects.get(pk=self.a1.pk).content, "<p>edited article</p>"
)
        # One error in the form should produce the singular error message;
        # multiple errors, the plural one.
change_dict["title"] = ""
post = self.client.post(article_change_url, change_dict)
self.assertContains(
post,
"Please correct the error below.",
msg_prefix=(
"Singular error message not found in response to post with one error"
),
)
change_dict["content"] = ""
post = self.client.post(article_change_url, change_dict)
self.assertContains(
post,
"Please correct the errors below.",
msg_prefix=(
"Plural error message not found in response to post with multiple "
"errors"
),
)
self.client.get(reverse("admin:logout"))
# Test redirection when using row-level change permissions. Refs #11513.
r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
r3 = RowLevelChangePermissionModel.objects.create(id=3, name="odd id mult 3")
r6 = RowLevelChangePermissionModel.objects.create(id=6, name="even id mult 3")
change_url_1 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r1.pk,)
)
change_url_2 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r2.pk,)
)
change_url_3 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r3.pk,)
)
change_url_6 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r6.pk,)
)
logins = [
self.superuser,
self.viewuser,
self.adduser,
self.changeuser,
self.deleteuser,
]
for login_user in logins:
with self.subTest(login_user.username):
self.client.force_login(login_user)
response = self.client.get(change_url_1)
self.assertEqual(response.status_code, 403)
response = self.client.post(change_url_1, {"name": "changed"})
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=1).name, "odd id"
)
self.assertEqual(response.status_code, 403)
response = self.client.get(change_url_2)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_2, {"name": "changed"})
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=2).name, "changed"
)
self.assertRedirects(response, self.index_url)
response = self.client.get(change_url_3)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_3, {"name": "changed"})
self.assertEqual(response.status_code, 403)
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=3).name,
"odd id mult 3",
)
response = self.client.get(change_url_6)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_6, {"name": "changed"})
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=6).name, "changed"
)
self.assertRedirects(response, self.index_url)
self.client.get(reverse("admin:logout"))
for login_user in [self.joepublicuser, self.nostaffuser]:
with self.subTest(login_user.username):
self.client.force_login(login_user)
response = self.client.get(change_url_1, follow=True)
self.assertContains(response, "login-form")
response = self.client.post(
change_url_1, {"name": "changed"}, follow=True
)
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=1).name, "odd id"
)
self.assertContains(response, "login-form")
response = self.client.get(change_url_2, follow=True)
self.assertContains(response, "login-form")
response = self.client.post(
change_url_2, {"name": "changed again"}, follow=True
)
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=2).name, "changed"
)
self.assertContains(response, "login-form")
self.client.get(reverse("admin:logout"))
def test_change_view_without_object_change_permission(self):
"""
The object should be read-only if the user has permission to view it
and change objects of that type but not to change the current object.
"""
change_url = reverse("admin9:admin_views_article_change", args=(self.a1.pk,))
self.client.force_login(self.viewuser)
response = self.client.get(change_url)
self.assertEqual(response.context["title"], "View article")
self.assertContains(response, "<title>View article | Django site admin</title>")
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(
response,
'<a href="/test_admin/admin9/admin_views/article/" class="closelink">Close'
"</a>",
)
def test_change_view_save_as_new(self):
"""
'Save as new' should raise PermissionDenied for users without the 'add'
permission.
"""
change_dict_save_as_new = {
"_saveasnew": "Save as new",
"title": "Ikke fordรธmt",
"content": "<p>edited article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
"section": self.s1.pk,
}
article_change_url = reverse(
"admin:admin_views_article_change", args=(self.a1.pk,)
)
# Add user can perform "Save as new".
article_count = Article.objects.count()
self.client.force_login(self.adduser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), article_count + 1)
self.client.logout()
# Change user cannot perform "Save as new" (no 'add' permission).
article_count = Article.objects.count()
self.client.force_login(self.changeuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), article_count)
# User with both add and change permissions should be redirected to the
# change page for the newly created object.
article_count = Article.objects.count()
self.client.force_login(self.superuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(Article.objects.count(), article_count + 1)
new_article = Article.objects.latest("id")
self.assertRedirects(
post, reverse("admin:admin_views_article_change", args=(new_article.pk,))
)
def test_change_view_with_view_only_inlines(self):
"""
User with change permission to a section but view-only for inlines.
"""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("change", Section._meta))
)
self.client.force_login(self.viewuser)
# GET shows inlines.
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
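# One form per existing article; no extra blank forms are offered while
# the inlines are view-only.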
self.assertEqual(len(formset.forms), 3)
# Valid POST changes the name.
data = {
"name": "Can edit name with view-only inlines",
"article_set-TOTAL_FORMS": 3,
"article_set-INITIAL_FORMS": 3,
}
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data["name"])
# Invalid POST reshows inlines.
del data["name"]
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 3)
def test_change_view_with_view_and_add_inlines(self):
"""User has view and add permissions on the inline model."""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("change", Section._meta))
)
self.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("add", Article._meta))
)
self.client.force_login(self.viewuser)
# GET shows inlines.
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
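# Three initial forms plus three extra blank forms, now that the user
# may add articles.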
self.assertEqual(len(formset.forms), 6)
# Valid POST creates a new article.
data = {
"name": "Can edit name with view-only inlines",
"article_set-TOTAL_FORMS": 6,
"article_set-INITIAL_FORMS": 3,
"article_set-3-id": [""],
"article_set-3-title": ["A title"],
"article_set-3-content": ["Added content"],
"article_set-3-date_0": ["2008-3-18"],
"article_set-3-date_1": ["11:54:58"],
"article_set-3-section": [str(self.s1.pk)],
}
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data["name"])
self.assertEqual(Article.objects.count(), 4)
# Invalid POST reshows inlines.
del data["name"]
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 6)
def test_change_view_with_view_and_delete_inlines(self):
"""User has view and delete permissions on the inline model."""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("change", Section._meta))
)
self.client.force_login(self.viewuser)
data = {
"name": "Name is required.",
"article_set-TOTAL_FORMS": 6,
"article_set-INITIAL_FORMS": 3,
"article_set-0-id": [str(self.a1.pk)],
"article_set-0-DELETE": ["on"],
}
# Inline POST details are ignored without delete permission.
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Article.objects.count(), 3)
# Deletion successful when delete permission is added.
self.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("delete", Article._meta))
)
data = {
"name": "Name is required.",
"article_set-TOTAL_FORMS": 6,
"article_set-INITIAL_FORMS": 3,
"article_set-0-id": [str(self.a1.pk)],
"article_set-0-DELETE": ["on"],
}
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Article.objects.count(), 2)
def test_delete_view(self):
"""Delete view should restrict access and actually delete items."""
delete_dict = {"post": "yes"}
delete_url = reverse("admin:admin_views_article_delete", args=(self.a1.pk,))
# add user should not be able to delete articles
self.client.force_login(self.adduser)
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(delete_url, delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.logout()
# view user should not be able to delete articles
self.client.force_login(self.viewuser)
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(delete_url, delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.logout()
# Delete user can delete
self.client.force_login(self.deleteuser)
response = self.client.get(
reverse("admin:admin_views_section_delete", args=(self.s1.pk,))
)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 3</li>")
# test response contains link to related Article
self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
response = self.client.get(delete_url)
self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 1</li>")
post = self.client.post(delete_url, delete_dict)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), 2)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Greetings from a deleted object")
article_ct = ContentType.objects.get_for_model(Article)
logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION)
self.assertEqual(logged.object_id, str(self.a1.pk))
def test_delete_view_with_no_default_permissions(self):
"""
The delete view allows users to delete collected objects without a
'delete' permission (ReadOnlyPizza.Meta.default_permissions is empty).
"""
pizza = ReadOnlyPizza.objects.create(name="Double Cheese")
delete_url = reverse("admin:admin_views_readonlypizza_delete", args=(pizza.pk,))
self.client.force_login(self.adduser)
response = self.client.get(delete_url)
self.assertContains(response, "admin_views/readonlypizza/%s/" % pizza.pk)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Read only pizzas: 1</li>")
post = self.client.post(delete_url, {"post": "yes"})
self.assertRedirects(
post, reverse("admin:admin_views_readonlypizza_changelist")
)
self.assertEqual(ReadOnlyPizza.objects.count(), 0)
def test_delete_view_nonexistent_obj(self):
self.client.force_login(self.deleteuser)
url = reverse("admin:admin_views_article_delete", args=("nonexistent",))
response = self.client.get(url, follow=True)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["article with ID โnonexistentโ doesnโt exist. Perhaps it was deleted?"],
)
def test_history_view(self):
"""History view should restrict access."""
# add user should not be able to view the list of articles or change any of them
self.client.force_login(self.adduser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=(self.a1.pk,))
)
self.assertEqual(response.status_code, 403)
self.client.get(reverse("admin:logout"))
# view user can view all items
self.client.force_login(self.viewuser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=(self.a1.pk,))
)
self.assertEqual(response.status_code, 200)
self.client.get(reverse("admin:logout"))
# change user can view all items and edit them
self.client.force_login(self.changeuser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=(self.a1.pk,))
)
self.assertEqual(response.status_code, 200)
# Test redirection when using row-level change permissions. Refs #11513.
rl1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
rl2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
logins = [
self.superuser,
self.viewuser,
self.adduser,
self.changeuser,
self.deleteuser,
]
for login_user in logins:
with self.subTest(login_user.username):
self.client.force_login(login_user)
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl1.pk,),
)
response = self.client.get(url)
self.assertEqual(response.status_code, 403)
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl2.pk,),
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.client.get(reverse("admin:logout"))
for login_user in [self.joepublicuser, self.nostaffuser]:
with self.subTest(login_user.username):
self.client.force_login(login_user)
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl1.pk,),
)
response = self.client.get(url, follow=True)
self.assertContains(response, "login-form")
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl2.pk,),
)
response = self.client.get(url, follow=True)
self.assertContains(response, "login-form")
self.client.get(reverse("admin:logout"))
def test_history_view_bad_url(self):
self.client.force_login(self.changeuser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=("foo",)), follow=True
)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["article with ID โfooโ doesnโt exist. Perhaps it was deleted?"],
)
def test_conditionally_show_add_section_link(self):
"""
The foreign key widget should only show the "add related" button if the
user has permission to add that related item.
"""
self.client.force_login(self.adduser)
# The user can't add sections yet, so they shouldn't see the "add section" link.
url = reverse("admin:admin_views_article_add")
add_link_text = "add_id_section"
response = self.client.get(url)
self.assertNotContains(response, add_link_text)
# Allow the user to add sections too. Now they can see the "add section" link.
user = User.objects.get(username="adduser")
perm = get_perm(Section, get_permission_codename("add", Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertContains(response, add_link_text)
def test_conditionally_show_change_section_link(self):
"""
The foreign key widget should only show the "change related" button if
the user has permission to change that related item.
"""
def get_change_related(response):
return (
response.context["adminform"]
.form.fields["section"]
.widget.can_change_related
)
self.client.force_login(self.adduser)
# The user can't change sections yet, so they shouldn't see the
# "change section" link.
url = reverse("admin:admin_views_article_add")
change_link_text = "change_id_section"
response = self.client.get(url)
self.assertFalse(get_change_related(response))
self.assertNotContains(response, change_link_text)
# Allow the user to change sections too. Now they can see the
# "change section" link.
user = User.objects.get(username="adduser")
perm = get_perm(Section, get_permission_codename("change", Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_change_related(response))
self.assertContains(response, change_link_text)
def test_conditionally_show_delete_section_link(self):
"""
The foreign key widget should only show the "delete related" button if
the user has permission to delete that related item.
"""
def get_delete_related(response):
return (
response.context["adminform"]
.form.fields["sub_section"]
.widget.can_delete_related
)
self.client.force_login(self.adduser)
# The user can't delete sections yet, so they shouldn't see the
# "delete section" link.
url = reverse("admin:admin_views_article_add")
delete_link_text = "delete_id_sub_section"
response = self.client.get(url)
self.assertFalse(get_delete_related(response))
self.assertNotContains(response, delete_link_text)
# Allow the user to delete sections too. Now they can see the
# "delete section" link.
user = User.objects.get(username="adduser")
perm = get_perm(Section, get_permission_codename("delete", Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_delete_related(response))
self.assertContains(response, delete_link_text)
def test_disabled_permissions_when_logged_in(self):
self.client.force_login(self.superuser)
superuser = User.objects.get(username="super")
superuser.is_active = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, "Log out")
response = self.client.get(reverse("secure_view"), follow=True)
self.assertContains(response, 'id="login-form"')
def test_disabled_staff_permissions_when_logged_in(self):
self.client.force_login(self.superuser)
superuser = User.objects.get(username="super")
superuser.is_staff = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, "Log out")
response = self.client.get(reverse("secure_view"), follow=True)
self.assertContains(response, 'id="login-form"')
def test_app_list_permissions(self):
"""
If a user has no module perms, the app list returns a 404.
"""
opts = Article._meta
change_user = User.objects.get(username="changeuser")
permission = get_perm(Article, get_permission_codename("change", opts))
self.client.force_login(self.changeuser)
# the user has no module permissions
change_user.user_permissions.remove(permission)
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertEqual(response.status_code, 404)
# the user now has module permissions
change_user.user_permissions.add(permission)
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertEqual(response.status_code, 200)
def test_shortcut_view_only_available_to_staff(self):
"""
Only admin users should be able to use the admin shortcut view.
"""
model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey)
obj = ModelWithStringPrimaryKey.objects.create(string_pk="foo")
shortcut_url = reverse("admin:view_on_site", args=(model_ctype.pk, obj.pk))
# Not logged in: we should see the login page.
response = self.client.get(shortcut_url, follow=True)
self.assertTemplateUsed(response, "admin/login.html")
# Logged in? Redirect.
self.client.force_login(self.superuser)
response = self.client.get(shortcut_url, follow=False)
# Can't use self.assertRedirects() because User.get_absolute_url() is silly.
self.assertEqual(response.status_code, 302)
# The domain may vary depending on whether the contrib.sites tests also ran.
self.assertRegex(response.url, "http://(testserver|example.com)/dummy/foo/")
def test_has_module_permission(self):
"""
has_module_permission() returns True for all users who
have any permission for that module (add, change, or delete), so that
the module is displayed on the admin index page.
"""
self.client.force_login(self.superuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.viewuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
def test_overriding_has_module_permission(self):
"""
If has_module_permission() always returns False, the module shouldn't
be displayed on the admin index page for any users.
"""
articles = Article._meta.verbose_name_plural.title()
sections = Section._meta.verbose_name_plural.title()
index_url = reverse("admin7:index")
self.client.force_login(self.superuser)
response = self.client.get(index_url)
self.assertContains(response, sections)
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.viewuser)
response = self.client.get(index_url)
self.assertNotContains(response, "admin_views")
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(index_url)
self.assertNotContains(response, "admin_views")
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(index_url)
self.assertNotContains(response, "admin_views")
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(index_url)
self.assertNotContains(response, articles)
# The app list displays Sections but not Articles as the latter has
# ModelAdmin.has_module_permission() = False.
self.client.force_login(self.superuser)
response = self.client.get(reverse("admin7:app_list", args=("admin_views",)))
self.assertContains(response, sections)
self.assertNotContains(response, articles)
def test_post_save_message_no_forbidden_links_visible(self):
"""
Post-save message shouldn't contain a link to the change form if the
user doesn't have the change permission.
"""
self.client.force_login(self.adduser)
# Emulate Article creation for user with add-only permission.
post_data = {
"title": "Fun & games",
"content": "Some content",
"date_0": "2015-10-31",
"date_1": "16:35:00",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_article_add"), post_data, follow=True
)
self.assertContains(
response,
'<li class="success">The article โFun & gamesโ was added successfully.'
"</li>",
html=True,
)
@override_settings(
ROOT_URLCONF="admin_views.urls",
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class AdminViewProxyModelPermissionsTests(TestCase):
"""Tests for proxy models permissions in the admin."""
@classmethod
def setUpTestData(cls):
cls.viewuser = User.objects.create_user(
username="viewuser", password="secret", is_staff=True
)
cls.adduser = User.objects.create_user(
username="adduser", password="secret", is_staff=True
)
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.deleteuser = User.objects.create_user(
username="deleteuser", password="secret", is_staff=True
)
# Setup permissions.
opts = UserProxy._meta
cls.viewuser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("view", opts))
)
cls.adduser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("add", opts))
)
cls.changeuser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("change", opts))
)
cls.deleteuser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("delete", opts))
)
# UserProxy instances.
cls.user_proxy = UserProxy.objects.create(
username="user_proxy", password="secret"
)
def test_add(self):
self.client.force_login(self.adduser)
url = reverse("admin:admin_views_userproxy_add")
data = {
"username": "can_add",
"password": "secret",
"date_joined_0": "2019-01-15",
"date_joined_1": "16:59:10",
}
response = self.client.post(url, data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(UserProxy.objects.filter(username="can_add").exists())
def test_view(self):
self.client.force_login(self.viewuser)
response = self.client.get(reverse("admin:admin_views_userproxy_changelist"))
self.assertContains(response, "<h1>Select user proxy to view</h1>")
response = self.client.get(
reverse("admin:admin_views_userproxy_change", args=(self.user_proxy.pk,))
)
self.assertContains(response, "<h1>View user proxy</h1>")
self.assertContains(response, '<div class="readonly">user_proxy</div>')
def test_change(self):
self.client.force_login(self.changeuser)
data = {
"password": self.user_proxy.password,
"username": self.user_proxy.username,
"date_joined_0": self.user_proxy.date_joined.strftime("%Y-%m-%d"),
"date_joined_1": self.user_proxy.date_joined.strftime("%H:%M:%S"),
"first_name": "first_name",
}
url = reverse("admin:admin_views_userproxy_change", args=(self.user_proxy.pk,))
response = self.client.post(url, data)
self.assertRedirects(
response, reverse("admin:admin_views_userproxy_changelist")
)
self.assertEqual(
UserProxy.objects.get(pk=self.user_proxy.pk).first_name, "first_name"
)
def test_delete(self):
self.client.force_login(self.deleteuser)
url = reverse("admin:admin_views_userproxy_delete", args=(self.user_proxy.pk,))
response = self.client.post(url, {"post": "yes"}, follow=True)
self.assertEqual(response.status_code, 200)
self.assertFalse(UserProxy.objects.filter(pk=self.user_proxy.pk).exists())
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewsNoUrlTest(TestCase):
"""Regression test for #17333"""
@classmethod
def setUpTestData(cls):
# User who can change Reports
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.changeuser.user_permissions.add(
get_perm(Report, get_permission_codename("change", Report._meta))
)
def test_no_standard_modeladmin_urls(self):
"""Admin index views don't break when user's ModelAdmin removes standard urls"""
self.client.force_login(self.changeuser)
r = self.client.get(reverse("admin:index"))
# we shouldn't get a 500 error caused by a NoReverseMatch
self.assertEqual(r.status_code, 200)
self.client.get(reverse("admin:logout"))
@skipUnlessDBFeature("can_defer_constraint_checks")
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewDeletedObjectsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.deleteuser = User.objects.create_user(
username="deleteuser", password="secret", is_staff=True
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.v1 = Villain.objects.create(name="Adam")
cls.v2 = Villain.objects.create(name="Sue")
cls.sv1 = SuperVillain.objects.create(name="Bob")
cls.pl1 = Plot.objects.create(
name="World Domination", team_leader=cls.v1, contact=cls.v2
)
cls.pl2 = Plot.objects.create(
name="World Peace", team_leader=cls.v2, contact=cls.v2
)
cls.pl3 = Plot.objects.create(
name="Corn Conspiracy", team_leader=cls.v1, contact=cls.v1
)
cls.pd1 = PlotDetails.objects.create(details="almost finished", plot=cls.pl1)
cls.sh1 = SecretHideout.objects.create(
location="underground bunker", villain=cls.v1
)
cls.sh2 = SecretHideout.objects.create(
location="floating castle", villain=cls.sv1
)
cls.ssh1 = SuperSecretHideout.objects.create(
location="super floating castle!", supervillain=cls.sv1
)
cls.cy1 = CyclicOne.objects.create(name="I am recursive", two_id=1)
cls.cy2 = CyclicTwo.objects.create(name="I am recursive too", one_id=1)
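# cy1 and cy2 reference each other before both rows exist, hence the
# can_defer_constraint_checks requirement on this test class.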
def setUp(self):
self.client.force_login(self.superuser)
def test_nesting(self):
"""
Objects should be nested to display the relationships that
cause them to be scheduled for deletion.
"""
pattern = re.compile(
r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*'
r'<li>Plot details: <a href="%s">almost finished</a>'
% (
reverse("admin:admin_views_plot_change", args=(self.pl1.pk,)),
reverse("admin:admin_views_plotdetails_change", args=(self.pd1.pk,)),
)
)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v1.pk,))
)
self.assertRegex(response.content.decode(), pattern)
def test_cyclic(self):
"""
Cyclic relationships should still cause each object to only be
listed once.
"""
one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % (
reverse("admin:admin_views_cyclicone_change", args=(self.cy1.pk,)),
)
two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % (
reverse("admin:admin_views_cyclictwo_change", args=(self.cy2.pk,)),
)
response = self.client.get(
reverse("admin:admin_views_cyclicone_delete", args=(self.cy1.pk,))
)
self.assertContains(response, one, 1)
self.assertContains(response, two, 1)
def test_perms_needed(self):
self.client.logout()
delete_user = User.objects.get(username="deleteuser")
delete_user.user_permissions.add(
get_perm(Plot, get_permission_codename("delete", Plot._meta))
)
self.client.force_login(self.deleteuser)
response = self.client.get(
reverse("admin:admin_views_plot_delete", args=(self.pl1.pk,))
)
self.assertContains(
response,
"your account doesn't have permission to delete the following types of "
"objects",
)
self.assertContains(response, "<li>plot details</li>")
def test_protected(self):
q = Question.objects.create(question="Why?")
a1 = Answer.objects.create(question=q, answer="Because.")
a2 = Answer.objects.create(question=q, answer="Yes.")
response = self.client.get(
reverse("admin:admin_views_question_delete", args=(q.pk,))
)
self.assertContains(
response, "would require deleting the following protected related objects"
)
self.assertContains(
response,
'<li>Answer: <a href="%s">Because.</a></li>'
% reverse("admin:admin_views_answer_change", args=(a1.pk,)),
)
self.assertContains(
response,
'<li>Answer: <a href="%s">Yes.</a></li>'
% reverse("admin:admin_views_answer_change", args=(a2.pk,)),
)
def test_post_delete_protected(self):
"""
A POST request to delete protected objects should display the page
which says the deletion is prohibited.
"""
q = Question.objects.create(question="Why?")
Answer.objects.create(question=q, answer="Because.")
response = self.client.post(
reverse("admin:admin_views_question_delete", args=(q.pk,)), {"post": "yes"}
)
self.assertEqual(Question.objects.count(), 1)
self.assertContains(
response, "would require deleting the following protected related objects"
)
def test_restricted(self):
album = Album.objects.create(title="Amaryllis")
song = Song.objects.create(album=album, name="Unity")
response = self.client.get(
reverse("admin:admin_views_album_delete", args=(album.pk,))
)
self.assertContains(
response,
"would require deleting the following protected related objects",
)
self.assertContains(
response,
'<li>Song: <a href="%s">Unity</a></li>'
% reverse("admin:admin_views_song_change", args=(song.pk,)),
)
def test_post_delete_restricted(self):
album = Album.objects.create(title="Amaryllis")
Song.objects.create(album=album, name="Unity")
response = self.client.post(
reverse("admin:admin_views_album_delete", args=(album.pk,)),
{"post": "yes"},
)
self.assertEqual(Album.objects.count(), 1)
self.assertContains(
response,
"would require deleting the following protected related objects",
)
def test_not_registered(self):
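# SecretHideout isn't registered with the admin site, so it's listed as
# plain text rather than as a change-form link.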
should_contain = """<li>Secret hideout: underground bunker"""
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v1.pk,))
)
self.assertContains(response, should_contain, 1)
def test_multiple_fkeys_to_same_model(self):
"""
If a deleted object has two relationships from another model,
both of those should be followed in looking for related
objects to delete.
"""
should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse(
"admin:admin_views_plot_change", args=(self.pl1.pk,)
)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v1.pk,))
)
self.assertContains(response, should_contain)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v2.pk,))
)
self.assertContains(response, should_contain)
def test_multiple_fkeys_to_same_instance(self):
"""
If a deleted object has two relationships pointing to it from
another object, the other object should still only be listed
once.
"""
should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse(
"admin:admin_views_plot_change", args=(self.pl2.pk,)
)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v2.pk,))
)
self.assertContains(response, should_contain, 1)
def test_inheritance(self):
"""
In the case of an inherited model, if either the child or
parent-model instance is deleted, both instances are listed
for deletion, as well as any relationships they have.
"""
should_contain = [
'<li>Villain: <a href="%s">Bob</a>'
% reverse("admin:admin_views_villain_change", args=(self.sv1.pk,)),
'<li>Super villain: <a href="%s">Bob</a>'
% reverse("admin:admin_views_supervillain_change", args=(self.sv1.pk,)),
"<li>Secret hideout: floating castle",
"<li>Super secret hideout: super floating castle!",
]
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.sv1.pk,))
)
for should in should_contain:
self.assertContains(response, should, 1)
response = self.client.get(
reverse("admin:admin_views_supervillain_delete", args=(self.sv1.pk,))
)
for should in should_contain:
self.assertContains(response, should, 1)
def test_generic_relations(self):
"""
If a deleted object has GenericForeignKeys pointing to it,
those objects should be listed for deletion.
"""
plot = self.pl3
tag = FunkyTag.objects.create(content_object=plot, name="hott")
should_contain = '<li>Funky tag: <a href="%s">hott' % reverse(
"admin:admin_views_funkytag_change", args=(tag.id,)
)
response = self.client.get(
reverse("admin:admin_views_plot_delete", args=(plot.pk,))
)
self.assertContains(response, should_contain)
def test_generic_relations_with_related_query_name(self):
"""
If a deleted object has GenericForeignKey with
GenericRelation(related_query_name='...') pointing to it, those objects
should be listed for deletion.
"""
bookmark = Bookmark.objects.create(name="djangoproject")
tag = FunkyTag.objects.create(content_object=bookmark, name="django")
tag_url = reverse("admin:admin_views_funkytag_change", args=(tag.id,))
should_contain = '<li>Funky tag: <a href="%s">django' % tag_url
response = self.client.get(
reverse("admin:admin_views_bookmark_delete", args=(bookmark.pk,))
)
self.assertContains(response, should_contain)
def test_delete_view_uses_get_deleted_objects(self):
"""The delete view uses ModelAdmin.get_deleted_objects()."""
book = Book.objects.create(name="Test Book")
response = self.client.get(
reverse("admin2:admin_views_book_delete", args=(book.pk,))
)
# BookAdmin.get_deleted_objects() returns custom text.
self.assertContains(response, "a deletable object")
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestGenericRelations(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.v1 = Villain.objects.create(name="Adam")
cls.pl3 = Plot.objects.create(
name="Corn Conspiracy", team_leader=cls.v1, contact=cls.v1
)
def setUp(self):
self.client.force_login(self.superuser)
def test_generic_content_object_in_list_display(self):
FunkyTag.objects.create(content_object=self.pl3, name="hott")
response = self.client.get(reverse("admin:admin_views_funkytag_changelist"))
self.assertContains(response, "%s</td>" % self.pl3)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewStringPrimaryKeyTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
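# A string primary key packed with characters that need quoting when
# used in URLs.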
cls.pk = (
"abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 "
r"""-_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`"""
)
cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk)
content_type_pk = ContentType.objects.get_for_model(
ModelWithStringPrimaryKey
).pk
user_pk = cls.superuser.pk
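# action_flag=2 is django.contrib.admin.models.CHANGE.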
LogEntry.objects.log_action(
user_pk,
content_type_pk,
cls.pk,
cls.pk,
2,
change_message="Changed something",
)
def setUp(self):
self.client.force_login(self.superuser)
def test_get_history_view(self):
"""
Retrieving the history for an object via the urlencoded form of its
primary key should work.
Refs #12349, #18550.
"""
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_history", args=(self.pk,)
)
)
self.assertContains(response, escape(self.pk))
self.assertContains(response, "Changed something")
def test_get_change_view(self):
"Retrieving the object using urlencoded form of primary key should work"
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=(self.pk,)
)
)
self.assertContains(response, escape(self.pk))
def test_changelist_to_changeform_link(self):
"""
Link to the changeform of the object in changelist should use reverse()
and be quoted.
"""
response = self.client.get(
reverse("admin:admin_views_modelwithstringprimarykey_changelist")
)
# This URL comes through reverse(), so it is URL-quoted and iri_to_uri()-encoded.
pk_final_url = escape(iri_to_uri(quote(self.pk)))
change_url = reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=("__fk__",)
).replace("__fk__", pk_final_url)
should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (
change_url,
escape(self.pk),
)
self.assertContains(response, should_contain)
def test_recentactions_link(self):
"""
The link from the recent actions list referring to the changeform of
the object should be quoted.
"""
response = self.client.get(reverse("admin:index"))
link = reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=(quote(self.pk),)
)
should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
self.assertContains(response, should_contain)
def test_deleteconfirmation_link(self):
""" "
The link from the delete confirmation page referring back to the
changeform of the object should be quoted.
"""
url = reverse(
"admin:admin_views_modelwithstringprimarykey_delete", args=(quote(self.pk),)
)
response = self.client.get(url)
# This URL comes through reverse(), so it is URL-quoted and iri_to_uri()-encoded.
change_url = reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=("__fk__",)
).replace("__fk__", escape(iri_to_uri(quote(self.pk))))
should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk))
self.assertContains(response, should_contain)
def test_url_conflicts_with_add(self):
"A model with a primary key that ends with add or is `add` should be visible"
add_model = ModelWithStringPrimaryKey.objects.create(
pk="i have something to add"
)
add_model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(add_model.pk),),
)
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add")
add_url = reverse("admin:admin_views_modelwithstringprimarykey_add")
change_url = reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(add_model2.pk),),
)
self.assertNotEqual(add_url, change_url)
def test_url_conflicts_with_delete(self):
"A model with a primary key that ends with delete should be visible"
delete_model = ModelWithStringPrimaryKey(pk="delete")
delete_model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(delete_model.pk),),
)
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_url_conflicts_with_history(self):
"A model with a primary key that ends with history should be visible"
history_model = ModelWithStringPrimaryKey(pk="history")
history_model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(history_model.pk),),
)
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_shortcut_view_with_escaping(self):
"'View on site should' work properly with char fields"
model = ModelWithStringPrimaryKey(pk="abc_123")
model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(model.pk),),
)
)
should_contain = '/%s/" class="viewsitelink">' % model.pk
self.assertContains(response, should_contain)
def test_change_view_history_link(self):
"""Object history button link should work and contain the pk value quoted."""
url = reverse(
"admin:%s_modelwithstringprimarykey_change"
% ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),),
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
expected_link = reverse(
"admin:%s_modelwithstringprimarykey_history"
% ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),),
)
self.assertContains(
response, '<a href="%s" class="historylink"' % escape(expected_link)
)
def test_redirect_on_add_view_continue_button(self):
"""As soon as an object is added using "Save and continue editing"
button, the user should be redirected to the object's change_view.
In case primary key is a string containing some special characters
like slash or underscore, these characters must be escaped (see #22266)
"""
response = self.client.post(
reverse("admin:admin_views_modelwithstringprimarykey_add"),
{
"string_pk": "123/history",
"_continue": "1", # Save and continue editing
},
)
self.assertEqual(response.status_code, 302) # temporary redirect
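# django.contrib.admin.utils.quote() escapes "/" as "_2F" (an
# underscore followed by the character's hex code).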
self.assertIn("/123_2Fhistory/", response.headers["location"]) # PK is quoted
@override_settings(ROOT_URLCONF="admin_views.urls")
class SecureViewTests(TestCase):
"""
Test behavior of a view protected by the staff_member_required decorator.
"""
def test_secure_view_shows_login_if_not_logged_in(self):
secure_url = reverse("secure_view")
response = self.client.get(secure_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), secure_url)
)
response = self.client.get(secure_url, follow=True)
self.assertTemplateUsed(response, "admin/login.html")
self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)
def test_staff_member_required_decorator_works_with_argument(self):
"""
Staff_member_required decorator works with an argument
(redirect_field_name).
"""
secure_url = "/test_admin/admin/secure-view2/"
response = self.client.get(secure_url)
self.assertRedirects(
response, "%s?myfield=%s" % (reverse("admin:login"), secure_url)
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewUnicodeTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.b1 = Book.objects.create(name="Lærdommer")
cls.p1 = Promo.objects.create(name="<Promo for Lærdommer>", book=cls.b1)
cls.chap1 = Chapter.objects.create(
title="Norske bostaver æøå skaper problemer",
content="<p>Svært frustrerende med UnicodeDecodeError</p>",
book=cls.b1,
)
cls.chap2 = Chapter.objects.create(
title="Kjærlighet",
content="<p>La kjærligheten til de lidende seire.</p>",
book=cls.b1,
)
cls.chap3 = Chapter.objects.create(
title="Kjærlighet", content="<p>Noe innhold</p>", book=cls.b1
)
cls.chap4 = ChapterXtra1.objects.create(
chap=cls.chap1, xtra="<Xtra(1) Norske bostaver æøå skaper problemer>"
)
cls.chap5 = ChapterXtra1.objects.create(
chap=cls.chap2, xtra="<Xtra(1) Kjærlighet>"
)
cls.chap6 = ChapterXtra1.objects.create(
chap=cls.chap3, xtra="<Xtra(1) Kjærlighet>"
)
cls.chap7 = ChapterXtra2.objects.create(
chap=cls.chap1, xtra="<Xtra(2) Norske bostaver æøå skaper problemer>"
)
cls.chap8 = ChapterXtra2.objects.create(
chap=cls.chap2, xtra="<Xtra(2) Kjærlighet>"
)
cls.chap9 = ChapterXtra2.objects.create(
chap=cls.chap3, xtra="<Xtra(2) Kjærlighet>"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_unicode_edit(self):
"""
A test to ensure that POST on edit_view handles non-ASCII characters.
"""
post_data = {
"name": "Test lรฆrdommer",
# inline data
"chapter_set-TOTAL_FORMS": "6",
"chapter_set-INITIAL_FORMS": "3",
"chapter_set-MAX_NUM_FORMS": "0",
"chapter_set-0-id": self.chap1.pk,
"chapter_set-0-title": "Norske bostaver รฆรธรฅ skaper problemer",
"chapter_set-0-content": (
"<p>Svรฆrt frustrerende med UnicodeDecodeError</p>"
),
"chapter_set-1-id": self.chap2.id,
"chapter_set-1-title": "Kjรฆrlighet.",
"chapter_set-1-content": (
"<p>La kjรฆrligheten til de lidende seire.</p>"
),
"chapter_set-2-id": self.chap3.id,
"chapter_set-2-title": "Need a title.",
"chapter_set-2-content": "<p>Newest content</p>",
"chapter_set-3-id": "",
"chapter_set-3-title": "",
"chapter_set-3-content": "",
"chapter_set-4-id": "",
"chapter_set-4-title": "",
"chapter_set-4-content": "",
"chapter_set-5-id": "",
"chapter_set-5-title": "",
"chapter_set-5-content": "",
}
response = self.client.post(
reverse("admin:admin_views_book_change", args=(self.b1.pk,)), post_data
)
self.assertEqual(response.status_code, 302)  # successful POST redirects
def test_unicode_delete(self):
"""
The delete_view handles non-ASCII characters
"""
delete_dict = {"post": "yes"}
delete_url = reverse("admin:admin_views_book_delete", args=(self.b1.pk,))
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 200)
response = self.client.post(delete_url, delete_dict)
self.assertRedirects(response, reverse("admin:admin_views_book_changelist"))
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewListEditable(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False)
cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_inheritance(self):
Podcast.objects.create(
name="This Week in Django", release_date=datetime.date.today()
)
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
self.assertEqual(response.status_code, 200)
def test_inheritance_2(self):
Vodcast.objects.create(name="This Week in Django", released=True)
response = self.client.get(reverse("admin:admin_views_vodcast_changelist"))
self.assertEqual(response.status_code, 200)
def test_custom_pk(self):
Language.objects.create(iso="en", name="English", english_name="English")
response = self.client.get(reverse("admin:admin_views_language_changelist"))
self.assertEqual(response.status_code, 200)
def test_changelist_input_html(self):
response = self.client.get(reverse("admin:admin_views_person_changelist"))
# 2 inputs per object (the editable field and the hidden id field) = 6
# 4 management form hidden fields = 4
# 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
# main form submit button = 1
# search field and search submit button = 2
# CSRF field = 1
# field to track 'select all' across paginated views = 1
# nav sidebar filter input = 1
# 6 + 4 + 4 + 1 + 2 + 1 + 1 + 1 = 20 inputs
self.assertContains(response, "<input", count=20)
# 1 select per object (3) plus the action dropdown = 4 selects
self.assertContains(response, "<select", count=4)
def test_post_messages(self):
# Ticket 12707: Saving inline editable should not show admin
# action warnings
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_person_changelist"), data, follow=True
)
self.assertEqual(len(response.context["messages"]), 1)
def test_post_submission(self):
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
}
self.client.post(reverse("admin:admin_views_person_changelist"), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
# test a filtered page
data = {
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "2",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per1.pk),
"form-0-gender": "1",
"form-0-alive": "checked",
"form-1-id": str(self.per3.pk),
"form-1-gender": "1",
"form-1-alive": "checked",
"_save": "Save",
}
self.client.post(
reverse("admin:admin_views_person_changelist") + "?gender__exact=1", data
)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
# test a searched page
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per1.pk),
"form-0-gender": "1",
"_save": "Save",
}
self.client.post(
reverse("admin:admin_views_person_changelist") + "?q=john", data
)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
def test_non_field_errors(self):
"""
Non-field errors are displayed for each of the forms in the
changelist's formset.
"""
fd1 = FoodDelivery.objects.create(
reference="123", driver="bill", restaurant="thai"
)
fd2 = FoodDelivery.objects.create(
reference="456", driver="bill", restaurant="india"
)
fd3 = FoodDelivery.objects.create(
reference="789", driver="bill", restaurant="pizza"
)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "pizza",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_fooddelivery_changelist"), data
)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
"with this Driver and Restaurant already exists.</li></ul></td></tr>",
1,
html=True,
)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
# Same data also.
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "thai",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_fooddelivery_changelist"), data
)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
"with this Driver and Restaurant already exists.</li></ul></td></tr>",
2,
html=True,
)
def test_non_form_errors(self):
# test if non-form errors are handled; ticket #12716
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per2.pk),
"form-0-alive": "1",
"form-0-gender": "2",
# The form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_person_changelist"), data
)
self.assertContains(response, "Grace is not a Zombie")
def test_non_form_errors_is_errorlist(self):
# test if non-form errors are correctly handled; ticket #12878
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per2.pk),
"form-0-alive": "1",
"form-0-gender": "2",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_person_changelist"), data
)
non_form_errors = response.context["cl"].formset.non_form_errors()
self.assertIsInstance(non_form_errors, ErrorList)
self.assertEqual(
str(non_form_errors),
str(ErrorList(["Grace is not a Zombie"], error_class="nonform")),
)
def test_list_editable_ordering(self):
collector = Collector.objects.create(id=1, name="Frederick Clegg")
Category.objects.create(id=1, order=1, collector=collector)
Category.objects.create(id=2, order=2, collector=collector)
Category.objects.create(id=3, order=0, collector=collector)
Category.objects.create(id=4, order=0, collector=collector)
# NB: The order values must be changed so that the items are reordered.
data = {
"form-TOTAL_FORMS": "4",
"form-INITIAL_FORMS": "4",
"form-MAX_NUM_FORMS": "0",
"form-0-order": "14",
"form-0-id": "1",
"form-0-collector": "1",
"form-1-order": "13",
"form-1-id": "2",
"form-1-collector": "1",
"form-2-order": "1",
"form-2-id": "3",
"form-2-collector": "1",
"form-3-order": "0",
"form-3-id": "4",
"form-3-collector": "1",
# The form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_category_changelist"), data
)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# The order values have been applied to the right objects
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
def test_list_editable_pagination(self):
"""
Pagination works for list_editable items.
"""
UnorderedObject.objects.create(id=1, name="Unordered object #1")
UnorderedObject.objects.create(id=2, name="Unordered object #2")
UnorderedObject.objects.create(id=3, name="Unordered object #3")
response = self.client.get(
reverse("admin:admin_views_unorderedobject_changelist")
)
self.assertContains(response, "Unordered object #3")
self.assertContains(response, "Unordered object #2")
self.assertNotContains(response, "Unordered object #1")
response = self.client.get(
reverse("admin:admin_views_unorderedobject_changelist") + "?p=2"
)
self.assertNotContains(response, "Unordered object #3")
self.assertNotContains(response, "Unordered object #2")
self.assertContains(response, "Unordered object #1")
def test_list_editable_action_submit(self):
# List editable changes should not be executed if the action "Go" button is
# used to submit the form.
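# "index": "0" mimics clicking the first action form's Go button; since
# no "_save" key is posted, the submission is treated as an action and
# the list_editable data must be ignored.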
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"index": "0",
"_selected_action": ["3"],
"action": ["", "delete_selected"],
}
self.client.post(reverse("admin:admin_views_person_changelist"), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1)
def test_list_editable_action_choices(self):
# List editable changes should be executed if the "Save" button is
# used to submit the form - any action choices should be ignored.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
"_selected_action": ["1"],
"action": ["", "delete_selected"],
}
self.client.post(reverse("admin:admin_views_person_changelist"), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
def test_list_editable_popup(self):
"""
Fields should not be list-editable in popups.
"""
response = self.client.get(reverse("admin:admin_views_person_changelist"))
self.assertNotEqual(response.context["cl"].list_editable, ())
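# IS_POPUP_VAR is "_popup"; rendering the changelist as a popup
# disables list_editable.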
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?%s" % IS_POPUP_VAR
)
self.assertEqual(response.context["cl"].list_editable, ())
def test_pk_hidden_fields(self):
"""
Hidden pk fields aren't rendered in the table body; their
corresponding human-readable value is shown there instead. The hidden
pk inputs are rendered separately (outside the table) and only once.
"""
story1 = Story.objects.create(
title="The adventures of Guido", content="Once upon a time in Djangoland..."
)
story2 = Story.objects.create(
title="Crouching Tiger, Hidden Python",
content="The Python was sneaking into...",
)
response = self.client.get(reverse("admin:admin_views_story_changelist"))
# Only one hidden field per object, rendered separately from the table.
self.assertContains(response, 'id="id_form-0-id"', 1)
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(
response,
'<div class="hiddenfields">\n'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n'
"</div>" % (story2.id, story1.id),
html=True,
)
self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1)
self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1)
def test_pk_hidden_fields_with_list_display_links(self):
"""Similarly as test_pk_hidden_fields, but when the hidden pk fields are
referenced in list_display_links.
Refs #12475.
"""
story1 = OtherStory.objects.create(
title="The adventures of Guido",
content="Once upon a time in Djangoland...",
)
story2 = OtherStory.objects.create(
title="Crouching Tiger, Hidden Python",
content="The Python was sneaking into...",
)
link1 = reverse("admin:admin_views_otherstory_change", args=(story1.pk,))
link2 = reverse("admin:admin_views_otherstory_change", args=(story2.pk,))
response = self.client.get(reverse("admin:admin_views_otherstory_changelist"))
# Only one hidden field, in a separate place than the table.
self.assertContains(response, 'id="id_form-0-id"', 1)
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(
response,
'<div class="hiddenfields">\n'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n'
"</div>" % (story2.id, story1.id),
html=True,
)
self.assertContains(
response,
'<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id),
1,
)
self.assertContains(
response,
'<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id),
1,
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminSearchTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.joepublicuser = User.objects.create_user(
username="joepublic", password="secret"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False)
cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
Person.objects.create(name="John Doe", gender=1)
Person.objects.create(name='John O"Hara', gender=1)
Person.objects.create(name="John O'Hara", gender=1)
cls.t1 = Recommender.objects.create()
cls.t2 = Recommendation.objects.create(the_recommender=cls.t1)
cls.t3 = Recommender.objects.create()
cls.t4 = Recommendation.objects.create(the_recommender=cls.t3)
cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text="Bar")
cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text="Foo")
cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text="Few")
cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text="Bas")
def setUp(self):
self.client.force_login(self.superuser)
def test_search_on_sibling_models(self):
"A search that mentions sibling models"
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=bar"
)
# confirm the search returned 1 object
self.assertContains(response, "\n1 recommendation\n")
def test_with_fk_to_field(self):
"""
The to_field GET parameter is preserved when a search is performed.
Refs #10918.
"""
response = self.client.get(
reverse("admin:auth_user_changelist") + "?q=joe&%s=id" % TO_FIELD_VAR
)
self.assertContains(response, "\n1 user\n")
self.assertContains(
response,
'<input type="hidden" name="%s" value="id">' % TO_FIELD_VAR,
html=True,
)
def test_exact_matches(self):
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=bar"
)
# confirm the search returned one object
self.assertContains(response, "\n1 recommendation\n")
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=ba"
)
# confirm the search returned zero objects
self.assertContains(response, "\n0 recommendations\n")
def test_beginning_matches(self):
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?q=Gui"
)
# confirm the search returned one object
self.assertContains(response, "\n1 person\n")
self.assertContains(response, "Guido")
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?q=uido"
)
# confirm the search returned zero objects
self.assertContains(response, "\n0 persons\n")
self.assertNotContains(response, "Guido")
def test_pluggable_search(self):
PluggableSearchPerson.objects.create(name="Bob", age=10)
PluggableSearchPerson.objects.create(name="Amy", age=20)
response = self.client.get(
reverse("admin:admin_views_pluggablesearchperson_changelist") + "?q=Bob"
)
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Bob")
response = self.client.get(
reverse("admin:admin_views_pluggablesearchperson_changelist") + "?q=20"
)
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Amy")
def test_reset_link(self):
"""
Test presence of reset link in search bar ("1 result (_x total_)").
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
# + 1 for total count
with self.assertNumQueries(5):
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?q=Gui"
)
self.assertContains(
response,
"""<span class="small quiet">1 result (<a href="?">6 total</a>)</span>""",
html=True,
)
def test_no_total_count(self):
"""
#8408 -- "Show all" should be displayed instead of the total count if
ModelAdmin.show_full_result_count is False.
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
with self.assertNumQueries(4):
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=bar"
)
self.assertContains(
response,
"""<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""",
html=True,
)
self.assertTrue(response.context["cl"].show_admin_actions)
def test_search_with_spaces(self):
url = reverse("admin:admin_views_person_changelist") + "?q=%s"
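        # Phrases wrapped in double or single quotes are matched exactly;
        # a quote character inside a phrase must be escaped with a backslash.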
tests = [
('"John Doe"', 1),
("'John Doe'", 1),
("John Doe", 0),
('"John Doe" John', 1),
("'John Doe' John", 1),
("John Doe John", 0),
('"John Do"', 1),
("'John Do'", 1),
("'John O'Hara'", 0),
("'John O\\'Hara'", 1),
('"John O"Hara"', 0),
('"John O\\"Hara"', 1),
]
for search, hits in tests:
with self.subTest(search=search):
response = self.client.get(url % search)
self.assertContains(response, "\n%s person" % hits)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminInheritedInlinesTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_inline(self):
"""
Inline models which inherit from a common parent are correctly handled.
"""
foo_user = "foo username"
bar_user = "bar username"
name_re = re.compile(b'name="(.*?)"')
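        # Captures the value of every name="..." attribute in the response.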
# test the add case
response = self.client.get(reverse("admin:admin_views_persona_add"))
names = name_re.findall(response.content)
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
# test the add case
post_data = {
"name": "Test Name",
# inline data
"accounts-TOTAL_FORMS": "1",
"accounts-INITIAL_FORMS": "0",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": foo_user,
"accounts-2-TOTAL_FORMS": "1",
"accounts-2-INITIAL_FORMS": "0",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": bar_user,
}
response = self.client.post(reverse("admin:admin_views_persona_add"), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
persona_id = Persona.objects.all()[0].id
foo_id = FooAccount.objects.all()[0].id
bar_id = BarAccount.objects.all()[0].id
# test the edit case
response = self.client.get(
reverse("admin:admin_views_persona_change", args=(persona_id,))
)
names = name_re.findall(response.content)
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
post_data = {
"name": "Test Name",
"accounts-TOTAL_FORMS": "2",
"accounts-INITIAL_FORMS": "1",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": "%s-1" % foo_user,
"accounts-0-account_ptr": str(foo_id),
"accounts-0-persona": str(persona_id),
"accounts-2-TOTAL_FORMS": "2",
"accounts-2-INITIAL_FORMS": "1",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": "%s-1" % bar_user,
"accounts-2-0-account_ptr": str(bar_id),
"accounts-2-0-persona": str(persona_id),
}
response = self.client.post(
reverse("admin:admin_views_persona_change", args=(persona_id,)), post_data
)
self.assertEqual(response.status_code, 302)
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestCustomChangeList(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_custom_changelist(self):
"""
Validate that a custom ChangeList class can be used (#9749)
"""
# Insert some data
post_data = {"name": "First Gadget"}
response = self.client.post(reverse("admin:admin_views_gadget_add"), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
# Hit the page once to get messages out of the queue message list
response = self.client.get(reverse("admin:admin_views_gadget_changelist"))
# Data is still not visible on the page
response = self.client.get(reverse("admin:admin_views_gadget_changelist"))
self.assertNotContains(response, "First Gadget")
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestInlineNotEditable(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_GET_parent_add(self):
"""
        The parent add view renders successfully with its inlines
        (regression check for InlineModelAdmin).
"""
response = self.client.get(reverse("admin:admin_views_parent_add"))
self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminCustomQuerysetTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.pks = [EmptyModel.objects.create().id for i in range(3)]
def setUp(self):
self.client.force_login(self.superuser)
self.super_login = {
REDIRECT_FIELD_NAME: reverse("admin:index"),
"username": "super",
"password": "secret",
}
def test_changelist_view(self):
response = self.client.get(reverse("admin:admin_views_emptymodel_changelist"))
for i in self.pks:
if i > 1:
self.assertContains(response, "Primary key = %s" % i)
else:
self.assertNotContains(response, "Primary key = %s" % i)
def test_changelist_view_count_queries(self):
# create 2 Person objects
Person.objects.create(name="person1", gender=1)
Person.objects.create(name="person2", gender=2)
changelist_url = reverse("admin:admin_views_person_changelist")
# 5 queries are expected: 1 for the session, 1 for the user,
# 2 for the counts and 1 for the objects on the page
with self.assertNumQueries(5):
resp = self.client.get(changelist_url)
self.assertEqual(resp.context["selection_note"], "0 of 2 selected")
self.assertEqual(resp.context["selection_note_all"], "All 2 selected")
with self.assertNumQueries(5):
extra = {"q": "not_in_name"}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context["selection_note"], "0 of 0 selected")
self.assertEqual(resp.context["selection_note_all"], "All 0 selected")
with self.assertNumQueries(5):
extra = {"q": "person"}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context["selection_note"], "0 of 2 selected")
self.assertEqual(resp.context["selection_note_all"], "All 2 selected")
with self.assertNumQueries(5):
extra = {"gender__exact": "1"}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context["selection_note"], "0 of 1 selected")
self.assertEqual(resp.context["selection_note_all"], "1 selected")
def test_change_view(self):
for i in self.pks:
url = reverse("admin:admin_views_emptymodel_change", args=(i,))
response = self.client.get(url, follow=True)
if i > 1:
self.assertEqual(response.status_code, 200)
else:
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["empty model with ID โ1โ doesnโt exist. Perhaps it was deleted?"],
)
def test_add_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __str__ method
self.assertEqual(CoverLetter.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"author": "Candidate, Best",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_coverletter_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name
pk = CoverLetter.objects.all()[0].pk
self.assertContains(
response,
'<li class="success">The cover letter โ<a href="%s">'
"Candidate, Best</a>โ was added successfully.</li>"
% reverse("admin:admin_views_coverletter_change", args=(pk,)),
html=True,
)
# model has no __str__ method
self.assertEqual(ShortMessage.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"content": "What's this SMS thing?",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_shortmessage_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name
sm = ShortMessage.objects.all()[0]
self.assertContains(
response,
'<li class="success">The short message โ<a href="%s">'
"%s</a>โ was added successfully.</li>"
% (reverse("admin:admin_views_shortmessage_change", args=(sm.pk,)), sm),
html=True,
)
def test_add_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __str__ method
self.assertEqual(Telegram.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "Urgent telegram",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_telegram_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name
pk = Telegram.objects.all()[0].pk
self.assertContains(
response,
'<li class="success">The telegram โ<a href="%s">'
"Urgent telegram</a>โ was added successfully.</li>"
% reverse("admin:admin_views_telegram_change", args=(pk,)),
html=True,
)
# model has no __str__ method
self.assertEqual(Paper.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_paper_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name
p = Paper.objects.all()[0]
self.assertContains(
response,
'<li class="success">The paper โ<a href="%s">'
"%s</a>โ was added successfully.</li>"
% (reverse("admin:admin_views_paper_change", args=(p.pk,)), p),
html=True,
)
def test_edit_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __str__ method
cl = CoverLetter.objects.create(author="John Doe")
self.assertEqual(CoverLetter.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_coverletter_change", args=(cl.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"author": "John Doe II",
"_save": "Save",
}
url = reverse("admin:admin_views_coverletter_change", args=(cl.pk,))
response = self.client.post(url, post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name. Instance
# representation is set by model's __str__()
self.assertContains(
response,
'<li class="success">The cover letter โ<a href="%s">'
"John Doe II</a>โ was changed successfully.</li>"
% reverse("admin:admin_views_coverletter_change", args=(cl.pk,)),
html=True,
)
# model has no __str__ method
sm = ShortMessage.objects.create(content="This is expensive")
self.assertEqual(ShortMessage.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_shortmessage_change", args=(sm.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"content": "Too expensive",
"_save": "Save",
}
url = reverse("admin:admin_views_shortmessage_change", args=(sm.pk,))
response = self.client.post(url, post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by __str__().
self.assertContains(
response,
'<li class="success">The short message โ<a href="%s">'
"%s</a>โ was changed successfully.</li>"
% (reverse("admin:admin_views_shortmessage_change", args=(sm.pk,)), sm),
html=True,
)
def test_edit_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __str__ method
t = Telegram.objects.create(title="First Telegram")
self.assertEqual(Telegram.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_telegram_change", args=(t.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "Telegram without typo",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_telegram_change", args=(t.pk,)),
post_data,
follow=True,
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name. The instance
# representation is set by model's __str__()
self.assertContains(
response,
'<li class="success">The telegram โ<a href="%s">'
"Telegram without typo</a>โ was changed successfully.</li>"
% reverse("admin:admin_views_telegram_change", args=(t.pk,)),
html=True,
)
# model has no __str__ method
p = Paper.objects.create(title="My Paper Title")
self.assertEqual(Paper.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_paper_change", args=(p.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_paper_change", args=(p.pk,)),
post_data,
follow=True,
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by __str__().
self.assertContains(
response,
'<li class="success">The paper โ<a href="%s">'
"%s</a>โ was changed successfully.</li>"
% (reverse("admin:admin_views_paper_change", args=(p.pk,)), p),
html=True,
)
def test_history_view_custom_qs(self):
"""
Custom querysets are considered for the admin history view.
"""
self.client.post(reverse("admin:login"), self.super_login)
FilteredManager.objects.create(pk=1)
FilteredManager.objects.create(pk=2)
response = self.client.get(
reverse("admin:admin_views_filteredmanager_changelist")
)
self.assertContains(response, "PK=1")
self.assertContains(response, "PK=2")
self.assertEqual(
self.client.get(
reverse("admin:admin_views_filteredmanager_history", args=(1,))
).status_code,
200,
)
self.assertEqual(
self.client.get(
reverse("admin:admin_views_filteredmanager_history", args=(2,))
).status_code,
200,
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminInlineFileUploadTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
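        # Write a 2 MiB dummy file to disk to act as the pre-existing image.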
file1 = tempfile.NamedTemporaryFile(suffix=".file1")
file1.write(b"a" * (2**21))
filename = file1.name
file1.close()
cls.gallery = Gallery.objects.create(name="Test Gallery")
cls.picture = Picture.objects.create(
name="Test Picture",
image=filename,
gallery=cls.gallery,
)
def setUp(self):
self.client.force_login(self.superuser)
def test_form_has_multipart_enctype(self):
response = self.client.get(
reverse("admin:admin_views_gallery_change", args=(self.gallery.id,))
)
self.assertIs(response.context["has_file_field"], True)
self.assertContains(response, MULTIPART_ENCTYPE)
def test_inline_file_upload_edit_validation_error_post(self):
"""
Inline file uploads correctly display prior data (#10002).
"""
post_data = {
"name": "Test Gallery",
"pictures-TOTAL_FORMS": "2",
"pictures-INITIAL_FORMS": "1",
"pictures-MAX_NUM_FORMS": "0",
"pictures-0-id": str(self.picture.id),
"pictures-0-gallery": str(self.gallery.id),
"pictures-0-name": "Test Picture",
"pictures-0-image": "",
"pictures-1-id": "",
"pictures-1-gallery": str(self.gallery.id),
"pictures-1-name": "Test Picture 2",
"pictures-1-image": "",
}
response = self.client.post(
reverse("admin:admin_views_gallery_change", args=(self.gallery.id,)),
post_data,
)
self.assertContains(response, b"Currently")
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminInlineTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.collector = Collector.objects.create(pk=1, name="John Fowles")
def setUp(self):
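        # Baseline POST payload: a formset management form (TOTAL_FORMS,
        # INITIAL_FORMS, MAX_NUM_FORMS) plus three blank forms for every
        # inline on the Collector admin. Individual tests fill in the
        # fields for the inline under test.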
self.post_data = {
"name": "Test Name",
"widget_set-TOTAL_FORMS": "3",
"widget_set-INITIAL_FORMS": "0",
"widget_set-MAX_NUM_FORMS": "0",
"widget_set-0-id": "",
"widget_set-0-owner": "1",
"widget_set-0-name": "",
"widget_set-1-id": "",
"widget_set-1-owner": "1",
"widget_set-1-name": "",
"widget_set-2-id": "",
"widget_set-2-owner": "1",
"widget_set-2-name": "",
"doohickey_set-TOTAL_FORMS": "3",
"doohickey_set-INITIAL_FORMS": "0",
"doohickey_set-MAX_NUM_FORMS": "0",
"doohickey_set-0-owner": "1",
"doohickey_set-0-code": "",
"doohickey_set-0-name": "",
"doohickey_set-1-owner": "1",
"doohickey_set-1-code": "",
"doohickey_set-1-name": "",
"doohickey_set-2-owner": "1",
"doohickey_set-2-code": "",
"doohickey_set-2-name": "",
"grommet_set-TOTAL_FORMS": "3",
"grommet_set-INITIAL_FORMS": "0",
"grommet_set-MAX_NUM_FORMS": "0",
"grommet_set-0-code": "",
"grommet_set-0-owner": "1",
"grommet_set-0-name": "",
"grommet_set-1-code": "",
"grommet_set-1-owner": "1",
"grommet_set-1-name": "",
"grommet_set-2-code": "",
"grommet_set-2-owner": "1",
"grommet_set-2-name": "",
"whatsit_set-TOTAL_FORMS": "3",
"whatsit_set-INITIAL_FORMS": "0",
"whatsit_set-MAX_NUM_FORMS": "0",
"whatsit_set-0-owner": "1",
"whatsit_set-0-index": "",
"whatsit_set-0-name": "",
"whatsit_set-1-owner": "1",
"whatsit_set-1-index": "",
"whatsit_set-1-name": "",
"whatsit_set-2-owner": "1",
"whatsit_set-2-index": "",
"whatsit_set-2-name": "",
"fancydoodad_set-TOTAL_FORMS": "3",
"fancydoodad_set-INITIAL_FORMS": "0",
"fancydoodad_set-MAX_NUM_FORMS": "0",
"fancydoodad_set-0-doodad_ptr": "",
"fancydoodad_set-0-owner": "1",
"fancydoodad_set-0-name": "",
"fancydoodad_set-0-expensive": "on",
"fancydoodad_set-1-doodad_ptr": "",
"fancydoodad_set-1-owner": "1",
"fancydoodad_set-1-name": "",
"fancydoodad_set-1-expensive": "on",
"fancydoodad_set-2-doodad_ptr": "",
"fancydoodad_set-2-owner": "1",
"fancydoodad_set-2-name": "",
"fancydoodad_set-2-expensive": "on",
"category_set-TOTAL_FORMS": "3",
"category_set-INITIAL_FORMS": "0",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "",
"category_set-0-id": "",
"category_set-0-collector": "1",
"category_set-1-order": "",
"category_set-1-id": "",
"category_set-1-collector": "1",
"category_set-2-order": "",
"category_set-2-id": "",
"category_set-2-collector": "1",
}
self.client.force_login(self.superuser)
def test_simple_inline(self):
"A simple model can be saved as inlines"
# First add a new inline
self.post_data["widget_set-0-name"] = "Widget 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
widget_id = Widget.objects.all()[0].id
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="widget_set-0-id"')
# No file or image fields, no enctype on the forms
self.assertIs(response.context["has_file_field"], False)
self.assertNotContains(response, MULTIPART_ENCTYPE)
# Now resave that inline
self.post_data["widget_set-INITIAL_FORMS"] = "1"
self.post_data["widget_set-0-id"] = str(widget_id)
self.post_data["widget_set-0-name"] = "Widget 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
# Now modify that inline
self.post_data["widget_set-INITIAL_FORMS"] = "1"
self.post_data["widget_set-0-id"] = str(widget_id)
self.post_data["widget_set-0-name"] = "Widget 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated")
def test_explicit_autofield_inline(self):
"""
A model with an explicit autofield primary key can be saved as inlines.
"""
# First add a new inline
self.post_data["grommet_set-0-name"] = "Grommet 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="grommet_set-0-code"')
# Now resave that inline
self.post_data["grommet_set-INITIAL_FORMS"] = "1"
self.post_data["grommet_set-0-code"] = str(Grommet.objects.all()[0].code)
self.post_data["grommet_set-0-name"] = "Grommet 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# Now modify that inline
self.post_data["grommet_set-INITIAL_FORMS"] = "1"
self.post_data["grommet_set-0-code"] = str(Grommet.objects.all()[0].code)
self.post_data["grommet_set-0-name"] = "Grommet 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated")
def test_char_pk_inline(self):
"A model with a character PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data["doohickey_set-0-code"] = "DH1"
self.post_data["doohickey_set-0-name"] = "Doohickey 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="doohickey_set-0-code"')
# Now resave that inline
self.post_data["doohickey_set-INITIAL_FORMS"] = "1"
self.post_data["doohickey_set-0-code"] = "DH1"
self.post_data["doohickey_set-0-name"] = "Doohickey 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# Now modify that inline
self.post_data["doohickey_set-INITIAL_FORMS"] = "1"
self.post_data["doohickey_set-0-code"] = "DH1"
self.post_data["doohickey_set-0-name"] = "Doohickey 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated")
def test_integer_pk_inline(self):
"A model with an integer PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data["whatsit_set-0-index"] = "42"
self.post_data["whatsit_set-0-name"] = "Whatsit 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="whatsit_set-0-index"')
# Now resave that inline
self.post_data["whatsit_set-INITIAL_FORMS"] = "1"
self.post_data["whatsit_set-0-index"] = "42"
self.post_data["whatsit_set-0-name"] = "Whatsit 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# Now modify that inline
self.post_data["whatsit_set-INITIAL_FORMS"] = "1"
self.post_data["whatsit_set-0-index"] = "42"
self.post_data["whatsit_set-0-name"] = "Whatsit 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated")
def test_inherited_inline(self):
"An inherited model can be saved as inlines. Regression for #11042"
# First add a new inline
self.post_data["fancydoodad_set-0-name"] = "Fancy Doodad 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
doodad_pk = FancyDoodad.objects.all()[0].pk
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"')
# Now resave that inline
self.post_data["fancydoodad_set-INITIAL_FORMS"] = "1"
self.post_data["fancydoodad_set-0-doodad_ptr"] = str(doodad_pk)
self.post_data["fancydoodad_set-0-name"] = "Fancy Doodad 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
# Now modify that inline
self.post_data["fancydoodad_set-INITIAL_FORMS"] = "1"
self.post_data["fancydoodad_set-0-doodad_ptr"] = str(doodad_pk)
self.post_data["fancydoodad_set-0-name"] = "Fancy Doodad 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated")
def test_ordered_inline(self):
"""
        An inline with an editable ordering field is updated correctly.
"""
# Create some objects with an initial ordering
Category.objects.create(id=1, order=1, collector=self.collector)
Category.objects.create(id=2, order=2, collector=self.collector)
Category.objects.create(id=3, order=0, collector=self.collector)
Category.objects.create(id=4, order=0, collector=self.collector)
# NB: The order values must be changed so that the items are reordered.
self.post_data.update(
{
"name": "Frederick Clegg",
"category_set-TOTAL_FORMS": "7",
"category_set-INITIAL_FORMS": "4",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "14",
"category_set-0-id": "1",
"category_set-0-collector": "1",
"category_set-1-order": "13",
"category_set-1-id": "2",
"category_set-1-collector": "1",
"category_set-2-order": "1",
"category_set-2-id": "3",
"category_set-2-collector": "1",
"category_set-3-order": "0",
"category_set-3-id": "4",
"category_set-3-collector": "1",
"category_set-4-order": "",
"category_set-4-id": "",
"category_set-4-collector": "1",
"category_set-5-order": "",
"category_set-5-id": "",
"category_set-5-collector": "1",
"category_set-6-order": "",
"category_set-6-id": "",
"category_set-6-collector": "1",
}
)
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# The order values have been applied to the right objects
self.assertEqual(self.collector.category_set.count(), 4)
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
@override_settings(ROOT_URLCONF="admin_views.urls")
class NeverCacheTests(TestCase):
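    # get_max_age() returns the max-age value parsed from the Cache-Control
    # header, or None if the header is absent; never-cached views report 0.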
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
def setUp(self):
self.client.force_login(self.superuser)
def test_admin_index(self):
"Check the never-cache status of the main index"
response = self.client.get(reverse("admin:index"))
self.assertEqual(get_max_age(response), 0)
def test_app_index(self):
"Check the never-cache status of an application index"
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertEqual(get_max_age(response), 0)
def test_model_index(self):
"Check the never-cache status of a model index"
response = self.client.get(reverse("admin:admin_views_fabric_changelist"))
self.assertEqual(get_max_age(response), 0)
def test_model_add(self):
"Check the never-cache status of a model add page"
response = self.client.get(reverse("admin:admin_views_fabric_add"))
self.assertEqual(get_max_age(response), 0)
def test_model_view(self):
"Check the never-cache status of a model edit page"
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(get_max_age(response), 0)
def test_model_history(self):
"Check the never-cache status of a model history page"
response = self.client.get(
reverse("admin:admin_views_section_history", args=(self.s1.pk,))
)
self.assertEqual(get_max_age(response), 0)
def test_model_delete(self):
"Check the never-cache status of a model delete page"
response = self.client.get(
reverse("admin:admin_views_section_delete", args=(self.s1.pk,))
)
self.assertEqual(get_max_age(response), 0)
def test_login(self):
"Check the never-cache status of login views"
self.client.logout()
response = self.client.get(reverse("admin:index"))
self.assertEqual(get_max_age(response), 0)
def test_logout(self):
"Check the never-cache status of logout view"
response = self.client.get(reverse("admin:logout"))
self.assertEqual(get_max_age(response), 0)
def test_password_change(self):
"Check the never-cache status of the password change view"
self.client.logout()
response = self.client.get(reverse("admin:password_change"))
self.assertIsNone(get_max_age(response))
def test_password_change_done(self):
"Check the never-cache status of the password change done view"
response = self.client.get(reverse("admin:password_change_done"))
self.assertIsNone(get_max_age(response))
def test_JS_i18n(self):
"Check the never-cache status of the JavaScript i18n view"
response = self.client.get(reverse("admin:jsi18n"))
self.assertIsNone(get_max_age(response))
@override_settings(ROOT_URLCONF="admin_views.urls")
class PrePopulatedTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_prepopulated_on(self):
response = self.client.get(reverse("admin:admin_views_prepopulatedpost_add"))
self.assertContains(response, ""id": "#id_slug"")
self.assertContains(
response, ""dependency_ids": ["#id_title"]"
)
self.assertContains(
response,
""id": "#id_prepopulatedsubpost_set-0-subslug"",
)
def test_prepopulated_off(self):
response = self.client.get(
reverse("admin:admin_views_prepopulatedpost_change", args=(self.p1.pk,))
)
self.assertContains(response, "A Long Title")
        self.assertNotContains(response, "&quot;id&quot;: &quot;#id_slug&quot;")
self.assertNotContains(
response, ""dependency_ids": ["#id_title"]"
)
self.assertNotContains(
response,
""id": "#id_prepopulatedsubpost_set-0-subslug"",
)
@override_settings(USE_THOUSAND_SEPARATOR=True)
def test_prepopulated_maxlength_localized(self):
"""
Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure
that maxLength (in the JavaScript) is rendered without separators.
"""
response = self.client.get(
reverse("admin:admin_views_prepopulatedpostlargeslug_add")
)
self.assertContains(response, ""maxLength": 1000") # instead of 1,000
def test_view_only_add_form(self):
"""
PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'
which is present in the add view, even if the
ModelAdmin.has_change_permission() returns False.
"""
response = self.client.get(reverse("admin7:admin_views_prepopulatedpost_add"))
self.assertContains(response, "data-prepopulated-fields=")
self.assertContains(response, ""id": "#id_slug"")
def test_view_only_change_form(self):
"""
PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'. That
doesn't break a view-only change view.
"""
response = self.client.get(
reverse("admin7:admin_views_prepopulatedpost_change", args=(self.p1.pk,))
)
self.assertContains(response, 'data-prepopulated-fields="[]"')
self.assertContains(response, '<div class="readonly">%s</div>' % self.p1.slug)
@override_settings(ROOT_URLCONF="admin_views.urls")
class SeleniumTests(AdminSeleniumTestCase):
available_apps = ["admin_views"] + AdminSeleniumTestCase.available_apps
def setUp(self):
self.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
self.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def test_login_button_centered(self):
from selenium.webdriver.common.by import By
self.selenium.get(self.live_server_url + reverse("admin:login"))
button = self.selenium.find_element(By.CSS_SELECTOR, ".submit-row input")
offset_left = button.get_property("offsetLeft")
offset_right = button.get_property("offsetParent").get_property(
"offsetWidth"
) - (offset_left + button.get_property("offsetWidth"))
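        # The button is horizontally centered when the gaps to its left and
        # right inside its offsetParent are (almost) equal.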
# Use assertAlmostEqual to avoid pixel rounding errors.
self.assertAlmostEqual(offset_left, offset_right, delta=3)
def test_prepopulated_fields(self):
"""
The JavaScript-automated prepopulated fields work with the main form
and with stacked and tabular inlines.
Refs #13068, #9264, #9983, #9784.
"""
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_mainprepopulated_add")
)
self.wait_for(".select2")
# Main form ----------------------------------------------------------
self.selenium.find_element(By.ID, "id_pubdate").send_keys("2012-02-18")
self.select_option("#id_status", "option two")
self.selenium.find_element(By.ID, "id_name").send_keys(
" the mAin nรMรซ and it's awฮตลกomeฤฑฤฑฤฑฤฐ"
)
slug1 = self.selenium.find_element(By.ID, "id_slug1").get_attribute("value")
slug2 = self.selenium.find_element(By.ID, "id_slug2").get_attribute("value")
slug3 = self.selenium.find_element(By.ID, "id_slug3").get_attribute("value")
self.assertEqual(slug1, "the-main-name-and-its-awesomeiiii-2012-02-18")
self.assertEqual(slug2, "option-two-the-main-name-and-its-awesomeiiii")
self.assertEqual(
slug3, "the-main-n\xe0m\xeb-and-its-aw\u03b5\u0161ome\u0131\u0131\u0131i"
)
# Stacked inlines with fieldsets -------------------------------------
# Initial inline
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-pubdate"
).send_keys("2011-12-17")
self.select_option("#id_relatedprepopulated_set-0-status", "option one")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-name"
).send_keys(" here is a sลคฤรkeรฐ inline ! ")
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-slug2"
).get_attribute("value")
self.assertEqual(slug1, "here-is-a-stacked-inline-2011-12-17")
self.assertEqual(slug2, "option-one-here-is-a-stacked-inline")
initial_select2_inputs = self.selenium.find_elements(
By.CLASS_NAME, "select2-selection"
)
# Inline formsets have empty/invisible forms.
# Only the 4 visible select2 inputs are initialized.
num_initial_select2_inputs = len(initial_select2_inputs)
self.assertEqual(num_initial_select2_inputs, 4)
# Add an inline
self.selenium.find_elements(By.LINK_TEXT, "Add another Related prepopulated")[
0
].click()
self.assertEqual(
len(self.selenium.find_elements(By.CLASS_NAME, "select2-selection")),
num_initial_select2_inputs + 2,
)
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-pubdate"
).send_keys("1999-01-25")
self.select_option("#id_relatedprepopulated_set-1-status", "option two")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-name"
).send_keys(
" now you haVe anรถther sลคฤรkeรฐ inline with a very ... "
"loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog "
"text... "
)
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-slug2"
).get_attribute("value")
# 50 characters maximum for slug1 field
self.assertEqual(slug1, "now-you-have-another-stacked-inline-with-a-very-lo")
# 60 characters maximum for slug2 field
self.assertEqual(
slug2, "option-two-now-you-have-another-stacked-inline-with-a-very-l"
)
# Tabular inlines ----------------------------------------------------
# Initial inline
element = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-status"
)
self.selenium.execute_script("window.scrollTo(0, %s);" % element.location["y"])
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-pubdate"
).send_keys("1234-12-07")
self.select_option("#id_relatedprepopulated_set-2-0-status", "option two")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-name"
).send_keys("And now, with a tรbลฑlaล inline !!!")
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-slug2"
).get_attribute("value")
self.assertEqual(slug1, "and-now-with-a-tabular-inline-1234-12-07")
self.assertEqual(slug2, "option-two-and-now-with-a-tabular-inline")
# Add an inline
# Button may be outside the browser frame.
element = self.selenium.find_elements(
By.LINK_TEXT, "Add another Related prepopulated"
)[1]
self.selenium.execute_script("window.scrollTo(0, %s);" % element.location["y"])
element.click()
self.assertEqual(
len(self.selenium.find_elements(By.CLASS_NAME, "select2-selection")),
num_initial_select2_inputs + 4,
)
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-pubdate"
).send_keys("1981-08-22")
self.select_option("#id_relatedprepopulated_set-2-1-status", "option one")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-name"
        ).send_keys(r'tÃbűlaŕ inline with ignored ;"&*^\%$#@-/`~ characters')
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-slug2"
).get_attribute("value")
self.assertEqual(slug1, "tabular-inline-with-ignored-characters-1981-08-22")
self.assertEqual(slug2, "option-one-tabular-inline-with-ignored-characters")
# Add an inline without an initial inline.
# The button is outside of the browser frame.
self.selenium.execute_script("window.scrollTo(0, document.body.scrollHeight);")
self.selenium.find_elements(By.LINK_TEXT, "Add another Related prepopulated")[
2
].click()
self.assertEqual(
len(self.selenium.find_elements(By.CLASS_NAME, "select2-selection")),
num_initial_select2_inputs + 6,
)
# Stacked Inlines without fieldsets ----------------------------------
# Initial inline.
row_id = "id_relatedprepopulated_set-4-0-"
self.selenium.find_element(By.ID, f"{row_id}pubdate").send_keys("2011-12-12")
self.select_option(f"#{row_id}status", "option one")
self.selenium.find_element(By.ID, f"{row_id}name").send_keys(
" sลคฤรkeรฐ inline ! "
)
slug1 = self.selenium.find_element(By.ID, f"{row_id}slug1").get_attribute(
"value"
)
slug2 = self.selenium.find_element(By.ID, f"{row_id}slug2").get_attribute(
"value"
)
self.assertEqual(slug1, "stacked-inline-2011-12-12")
self.assertEqual(slug2, "option-one")
# Add inline.
self.selenium.find_elements(
By.LINK_TEXT,
"Add another Related prepopulated",
)[3].click()
row_id = "id_relatedprepopulated_set-4-1-"
self.selenium.find_element(By.ID, f"{row_id}pubdate").send_keys("1999-01-20")
self.select_option(f"#{row_id}status", "option two")
self.selenium.find_element(By.ID, f"{row_id}name").send_keys(
" now you haVe anรถther sลคฤรkeรฐ inline with a very loooong "
)
slug1 = self.selenium.find_element(By.ID, f"{row_id}slug1").get_attribute(
"value"
)
slug2 = self.selenium.find_element(By.ID, f"{row_id}slug2").get_attribute(
"value"
)
self.assertEqual(slug1, "now-you-have-another-stacked-inline-with-a-very-lo")
self.assertEqual(slug2, "option-two")
# Save and check that everything is properly stored in the database
with self.wait_page_loaded():
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.assertEqual(MainPrepopulated.objects.all().count(), 1)
MainPrepopulated.objects.get(
name=" the mAin nรMรซ and it's awฮตลกomeฤฑฤฑฤฑฤฐ",
pubdate="2012-02-18",
status="option two",
slug1="the-main-name-and-its-awesomeiiii-2012-02-18",
slug2="option-two-the-main-name-and-its-awesomeiiii",
slug3="the-main-nร mรซ-and-its-awฮตลกomeฤฑฤฑฤฑi",
)
self.assertEqual(RelatedPrepopulated.objects.all().count(), 6)
RelatedPrepopulated.objects.get(
name=" here is a sลคฤรkeรฐ inline ! ",
pubdate="2011-12-17",
status="option one",
slug1="here-is-a-stacked-inline-2011-12-17",
slug2="option-one-here-is-a-stacked-inline",
)
RelatedPrepopulated.objects.get(
# 75 characters in name field
name=(
" now you haVe anรถther sลคฤรkeรฐ inline with a very ... "
"loooooooooooooooooo"
),
pubdate="1999-01-25",
status="option two",
slug1="now-you-have-another-stacked-inline-with-a-very-lo",
slug2="option-two-now-you-have-another-stacked-inline-with-a-very-l",
)
RelatedPrepopulated.objects.get(
name="And now, with a tรbลฑlaล inline !!!",
pubdate="1234-12-07",
status="option two",
slug1="and-now-with-a-tabular-inline-1234-12-07",
slug2="option-two-and-now-with-a-tabular-inline",
)
RelatedPrepopulated.objects.get(
            name=r'tÃbűlaŕ inline with ignored ;"&*^\%$#@-/`~ characters',
pubdate="1981-08-22",
status="option one",
slug1="tabular-inline-with-ignored-characters-1981-08-22",
slug2="option-one-tabular-inline-with-ignored-characters",
)
def test_populate_existing_object(self):
"""
The prepopulation works for existing objects too, as long as
the original field is empty (#19082).
"""
from selenium.webdriver.common.by import By
# Slugs are empty to start with.
item = MainPrepopulated.objects.create(
name=" this is the mAin nรMรซ",
pubdate="2012-02-18",
status="option two",
slug1="",
slug2="",
)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
object_url = self.live_server_url + reverse(
"admin:admin_views_mainprepopulated_change", args=(item.id,)
)
self.selenium.get(object_url)
self.selenium.find_element(By.ID, "id_name").send_keys(" the best")
# The slugs got prepopulated since they were originally empty
slug1 = self.selenium.find_element(By.ID, "id_slug1").get_attribute("value")
slug2 = self.selenium.find_element(By.ID, "id_slug2").get_attribute("value")
self.assertEqual(slug1, "this-is-the-main-name-the-best-2012-02-18")
self.assertEqual(slug2, "option-two-this-is-the-main-name-the-best")
# Save the object
with self.wait_page_loaded():
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.get(object_url)
self.selenium.find_element(By.ID, "id_name").send_keys(" hello")
        # The slugs were not re-prepopulated since they were no longer empty.
slug1 = self.selenium.find_element(By.ID, "id_slug1").get_attribute("value")
slug2 = self.selenium.find_element(By.ID, "id_slug2").get_attribute("value")
self.assertEqual(slug1, "this-is-the-main-name-the-best-2012-02-18")
self.assertEqual(slug2, "option-two-this-is-the-main-name-the-best")
def test_collapsible_fieldset(self):
"""
        The 'collapse' class in a fieldset's definition allows showing and
        hiding the corresponding field section.
"""
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_article_add")
)
self.assertFalse(self.selenium.find_element(By.ID, "id_title").is_displayed())
self.selenium.find_elements(By.LINK_TEXT, "Show")[0].click()
self.assertTrue(self.selenium.find_element(By.ID, "id_title").is_displayed())
self.assertEqual(
self.selenium.find_element(By.ID, "fieldsetcollapser0").text, "Hide"
)
def test_selectbox_height_collapsible_fieldset(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super",
password="secret",
login_url=reverse("admin7:index"),
)
url = self.live_server_url + reverse("admin7:admin_views_pizza_add")
self.selenium.get(url)
self.selenium.find_elements(By.LINK_TEXT, "Show")[0].click()
filter_box = self.selenium.find_element(By.ID, "id_toppings_filter")
from_box = self.selenium.find_element(By.ID, "id_toppings_from")
to_box = self.selenium.find_element(By.ID, "id_toppings_to")
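        # The "chosen" box should be exactly as tall as the filter input and
        # the "available" box stacked together.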
self.assertEqual(
to_box.get_property("offsetHeight"),
(
filter_box.get_property("offsetHeight")
+ from_box.get_property("offsetHeight")
),
)
def test_selectbox_height_not_collapsible_fieldset(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super",
password="secret",
login_url=reverse("admin7:index"),
)
url = self.live_server_url + reverse("admin7:admin_views_question_add")
self.selenium.get(url)
filter_box = self.selenium.find_element(By.ID, "id_related_questions_filter")
from_box = self.selenium.find_element(By.ID, "id_related_questions_from")
to_box = self.selenium.find_element(By.ID, "id_related_questions_to")
self.assertEqual(
to_box.get_property("offsetHeight"),
(
filter_box.get_property("offsetHeight")
+ from_box.get_property("offsetHeight")
),
)
def test_first_field_focus(self):
"""JavaScript-assisted auto-focus on first usable form field."""
from selenium.webdriver.common.by import By
# First form field has a single widget
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
with self.wait_page_loaded():
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_picture_add")
)
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element(By.ID, "id_name"),
)
# First form field has a MultiWidget
with self.wait_page_loaded():
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_reservation_add")
)
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element(By.ID, "id_start_date_0"),
)
def test_cancel_delete_confirmation(self):
"Cancelling the deletion of an object takes the user back one page."
from selenium.webdriver.common.by import By
pizza = Pizza.objects.create(name="Double Cheese")
url = reverse("admin:admin_views_pizza_change", args=(pizza.id,))
full_url = self.live_server_url + url
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(full_url)
self.selenium.find_element(By.CLASS_NAME, "deletelink").click()
# Click 'cancel' on the delete page.
self.selenium.find_element(By.CLASS_NAME, "cancel-link").click()
# Wait until we're back on the change page.
self.wait_for_text("#content h1", "Change pizza")
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
def test_cancel_delete_related_confirmation(self):
"""
Cancelling the deletion of an object with relations takes the user back
one page.
"""
from selenium.webdriver.common.by import By
pizza = Pizza.objects.create(name="Double Cheese")
topping1 = Topping.objects.create(name="Cheddar")
topping2 = Topping.objects.create(name="Mozzarella")
pizza.toppings.add(topping1, topping2)
url = reverse("admin:admin_views_pizza_change", args=(pizza.id,))
full_url = self.live_server_url + url
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(full_url)
self.selenium.find_element(By.CLASS_NAME, "deletelink").click()
# Click 'cancel' on the delete page.
self.selenium.find_element(By.CLASS_NAME, "cancel-link").click()
# Wait until we're back on the change page.
self.wait_for_text("#content h1", "Change pizza")
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
self.assertEqual(Topping.objects.count(), 2)
def test_list_editable_popups(self):
"""
list_editable foreign keys have add/change popups.
"""
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
s1 = Section.objects.create(name="Test section")
Article.objects.create(
title="foo",
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=s1,
)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_article_changelist")
)
# Change popup
self.selenium.find_element(By.ID, "change_id_form-0-section").click()
self.wait_for_and_switch_to_popup()
self.wait_for_text("#content h1", "Change section")
name_input = self.selenium.find_element(By.ID, "id_name")
name_input.clear()
name_input.send_keys("<i>edited section</i>")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# Hide sidebar.
toggle_button = self.selenium.find_element(
By.CSS_SELECTOR, "#toggle-nav-sidebar"
)
toggle_button.click()
select = Select(self.selenium.find_element(By.ID, "id_form-0-section"))
self.assertEqual(select.first_selected_option.text, "<i>edited section</i>")
# Rendered select2 input.
select2_display = self.selenium.find_element(
By.CLASS_NAME, "select2-selection__rendered"
)
        # Clear button (×\n) is included in text.
        self.assertEqual(select2_display.text, "×\n<i>edited section</i>")
# Add popup
self.selenium.find_element(By.ID, "add_id_form-0-section").click()
self.wait_for_and_switch_to_popup()
self.wait_for_text("#content h1", "Add section")
self.selenium.find_element(By.ID, "id_name").send_keys("new section")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_form-0-section"))
self.assertEqual(select.first_selected_option.text, "new section")
select2_display = self.selenium.find_element(
By.CLASS_NAME, "select2-selection__rendered"
)
        # Clear button (×\n) is included in text.
        self.assertEqual(select2_display.text, "×\nnew section")
def test_inline_uuid_pk_edit_with_popup(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
parent = ParentWithUUIDPK.objects.create(title="test")
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_change",
args=(related_with_parent.id,),
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "change_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_parent"))
self.assertEqual(select.first_selected_option.text, str(parent.id))
self.assertEqual(
select.first_selected_option.get_attribute("value"), str(parent.id)
)
def test_inline_uuid_pk_add_with_popup(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url
+ reverse("admin:admin_views_relatedwithuuidpkmodel_add")
)
self.selenium.find_element(By.ID, "add_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.ID, "id_title").send_keys("test")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_parent"))
uuid_id = str(ParentWithUUIDPK.objects.first().id)
self.assertEqual(select.first_selected_option.text, uuid_id)
self.assertEqual(select.first_selected_option.get_attribute("value"), uuid_id)
def test_inline_uuid_pk_delete_with_popup(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
parent = ParentWithUUIDPK.objects.create(title="test")
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_change",
args=(related_with_parent.id,),
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "delete_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.XPATH, '//input[@value="Yes, Iโm sure"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_parent"))
self.assertEqual(ParentWithUUIDPK.objects.count(), 0)
self.assertEqual(select.first_selected_option.text, "---------")
self.assertEqual(select.first_selected_option.get_attribute("value"), "")
def test_inline_with_popup_cancel_delete(self):
"""Clicking ""No, take me back" on a delete popup closes the window."""
from selenium.webdriver.common.by import By
parent = ParentWithUUIDPK.objects.create(title="test")
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_change",
args=(related_with_parent.id,),
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "delete_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.XPATH, '//a[text()="No, take me back"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertEqual(len(self.selenium.window_handles), 1)
def test_list_editable_raw_id_fields(self):
from selenium.webdriver.common.by import By
parent = ParentWithUUIDPK.objects.create(title="test")
parent2 = ParentWithUUIDPK.objects.create(title="test2")
RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_changelist",
current_app=site2.name,
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "lookup_id_form-0-parent").click()
self.wait_for_and_switch_to_popup()
# Select "parent2" in the popup.
self.selenium.find_element(By.LINK_TEXT, str(parent2.pk)).click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# The newly selected pk should appear in the raw id input.
value = self.selenium.find_element(By.ID, "id_form-0-parent").get_attribute(
"value"
)
self.assertEqual(value, str(parent2.pk))
def test_input_element_font(self):
"""
Browsers' default stylesheets override the font of inputs. The admin
adds additional CSS to handle this.
"""
from selenium.webdriver.common.by import By
self.selenium.get(self.live_server_url + reverse("admin:login"))
element = self.selenium.find_element(By.ID, "id_username")
# Some browsers quote the fonts, some don't.
fonts = [
font.strip().strip('"')
for font in element.value_of_css_property("font-family").split(",")
]
self.assertEqual(
fonts,
["Roboto", "Lucida Grande", "Verdana", "Arial", "sans-serif"],
)
def test_search_input_filtered_page(self):
from selenium.webdriver.common.by import By
Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
Person.objects.create(name="Grace Hopper", gender=1, alive=False)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
person_url = reverse("admin:admin_views_person_changelist") + "?q=Gui"
self.selenium.get(self.live_server_url + person_url)
self.assertGreater(
self.selenium.find_element(By.ID, "searchbar").rect["width"],
50,
)
def test_related_popup_index(self):
"""
Create a chain of 'self' related objects via popups.
"""
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin:admin_views_box_add", current_app=site.name)
self.selenium.get(self.live_server_url + add_url)
base_window = self.selenium.current_window_handle
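# Keep a handle on each window as it opens so the test can walk back
# outward, saving one popup at a time.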
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup()
popup_window_test = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=3)
popup_window_test2 = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test2")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=4)
self.selenium.find_element(By.ID, "id_title").send_keys("test3")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(popup_window_test2)
select = Select(self.selenium.find_element(By.ID, "id_next_box"))
next_box_id = str(Box.objects.get(title="test3").id)
self.assertEqual(
select.first_selected_option.get_attribute("value"), next_box_id
)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(popup_window_test)
select = Select(self.selenium.find_element(By.ID, "id_next_box"))
next_box_id = str(Box.objects.get(title="test2").id)
self.assertEqual(
select.first_selected_option.get_attribute("value"), next_box_id
)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(base_window)
select = Select(self.selenium.find_element(By.ID, "id_next_box"))
next_box_id = str(Box.objects.get(title="test").id)
self.assertEqual(
select.first_selected_option.get_attribute("value"), next_box_id
)
def test_related_popup_incorrect_close(self):
"""
Clean up child popups when closing a parent popup.
"""
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin:admin_views_box_add", current_app=site.name)
self.selenium.get(self.live_server_url + add_url)
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup()
test_window = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=3)
test2_window = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test2")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=4)
self.assertEqual(len(self.selenium.window_handles), 4)
self.selenium.switch_to.window(test2_window)
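# Saving the middle popup should dismiss it and its orphaned child popup,
# leaving only the base window and the first popup.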
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.wait_until(lambda d: len(d.window_handles) == 2, 1)
self.assertEqual(len(self.selenium.window_handles), 2)
# Close final popup to clean up test.
self.selenium.switch_to.window(test_window)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.wait_until(lambda d: len(d.window_handles) == 1, 1)
self.selenium.switch_to.window(self.selenium.window_handles[-1])
def test_hidden_fields_small_window(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super",
password="secret",
login_url=reverse("admin:index"),
)
self.selenium.get(self.live_server_url + reverse("admin:admin_views_story_add"))
field_title = self.selenium.find_element(By.CLASS_NAME, "field-title")
current_size = self.selenium.get_window_size()
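# The title field is rendered hidden; it must stay hidden at both a
# desktop-sized and a mobile-sized viewport.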
try:
self.selenium.set_window_size(1024, 768)
self.assertIs(field_title.is_displayed(), False)
self.selenium.set_window_size(767, 575)
self.assertIs(field_title.is_displayed(), False)
finally:
self.selenium.set_window_size(current_size["width"], current_size["height"])
@override_settings(ROOT_URLCONF="admin_views.urls")
class ReadonlyTest(AdminFieldExtractionMixin, TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_readonly_get(self):
response = self.client.get(reverse("admin:admin_views_post_add"))
self.assertNotContains(response, 'name="posted"')
# 3 fields + 2 submit buttons + 5 inline management form fields + 2
# hidden fields for inlines + 1 field for the inline + 2 empty form
# fields + 1 CSRF token = 16 inputs.
self.assertContains(response, "<input", count=16)
self.assertContains(response, formats.localize(datetime.date.today()))
self.assertContains(response, "<label>Awesomeness level:</label>")
self.assertContains(response, "Very awesome.")
self.assertContains(response, "Unknown coolness.")
self.assertContains(response, "foo")
# Multiline text in a readonly field gets <br> tags
self.assertContains(response, "Multiline<br>test<br>string")
self.assertContains(
response,
'<div class="readonly">Multiline<br>html<br>content</div>',
html=True,
)
self.assertContains(response, "InlineMultiline<br>test<br>string")
self.assertContains(
response,
formats.localize(datetime.date.today() - datetime.timedelta(days=7)),
)
self.assertContains(response, '<div class="form-row field-coolness">')
self.assertContains(response, '<div class="form-row field-awesomeness_level">')
self.assertContains(response, '<div class="form-row field-posted">')
self.assertContains(response, '<div class="form-row field-value">')
self.assertContains(response, '<div class="form-row">')
self.assertContains(response, '<div class="help">', 3)
self.assertContains(
response,
'<div class="help">Some help text for the title (with Unicode ล ฤฤลฝฤลพลกฤ)'
"</div>",
html=True,
)
self.assertContains(
response,
'<div class="help">Some help text for the content (with Unicode ล ฤฤลฝฤลพลกฤ)'
"</div>",
html=True,
)
self.assertContains(
response,
'<div class="help">Some help text for the date (with Unicode ล ฤฤลฝฤลพลกฤ)'
"</div>",
html=True,
)
p = Post.objects.create(
title="I worked on readonly_fields", content="Its good stuff"
)
response = self.client.get(
reverse("admin:admin_views_post_change", args=(p.pk,))
)
self.assertContains(response, "%d amount of cool" % p.pk)
def test_readonly_text_field(self):
p = Post.objects.create(
title="Readonly test",
content="test",
readonly_content="test\r\n\r\ntest\r\n\r\ntest\r\n\r\ntest",
)
Link.objects.create(
url="http://www.djangoproject.com",
post=p,
readonly_link_content="test\r\nlink",
)
response = self.client.get(
reverse("admin:admin_views_post_change", args=(p.pk,))
)
# Checking readonly field.
self.assertContains(response, "test<br><br>test<br><br>test<br><br>test")
# Checking readonly field in inline.
self.assertContains(response, "test<br>link")
def test_readonly_post(self):
data = {
"title": "Django Got Readonly Fields",
"content": "This is an incredible development.",
"link_set-TOTAL_FORMS": "1",
"link_set-INITIAL_FORMS": "0",
"link_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse("admin:admin_views_post_add"), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 1)
p = Post.objects.get()
self.assertEqual(p.posted, datetime.date.today())
data["posted"] = "10-8-1990" # some date that's not today
response = self.client.post(reverse("admin:admin_views_post_add"), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 2)
p = Post.objects.order_by("-id")[0]
self.assertEqual(p.posted, datetime.date.today())
def test_readonly_manytomany(self):
"Regression test for #13004"
response = self.client.get(reverse("admin:admin_views_pizza_add"))
self.assertEqual(response.status_code, 200)
def test_user_password_change_limited_queryset(self):
su = User.objects.filter(is_superuser=True)[0]
response = self.client.get(
reverse("admin2:auth_user_password_change", args=(su.pk,))
)
self.assertEqual(response.status_code, 404)
def test_change_form_renders_correct_null_choice_value(self):
"""
Regression test for #17911.
"""
choice = Choice.objects.create(choice=None)
response = self.client.get(
reverse("admin:admin_views_choice_change", args=(choice.pk,))
)
self.assertContains(
response, '<div class="readonly">No opinion</div>', html=True
)
def _test_readonly_foreignkey_links(self, admin_site):
"""
ForeignKey readonly fields render as links if the target model is
registered in admin.
"""
chapter = Chapter.objects.create(
title="Chapter 1",
content="content",
book=Book.objects.create(name="Book 1"),
)
language = Language.objects.create(iso="_40", name="Test")
obj = ReadOnlyRelatedField.objects.create(
chapter=chapter,
language=language,
user=self.superuser,
)
response = self.client.get(
reverse(
f"{admin_site}:admin_views_readonlyrelatedfield_change", args=(obj.pk,)
),
)
# Related ForeignKey object registered in admin.
user_url = reverse(f"{admin_site}:auth_user_change", args=(self.superuser.pk,))
self.assertContains(
response,
'<div class="readonly"><a href="%s">super</a></div>' % user_url,
html=True,
)
# Related ForeignKey with the string primary key registered in admin.
language_url = reverse(
f"{admin_site}:admin_views_language_change",
args=(quote(language.pk),),
)
self.assertContains(
response,
'<div class="readonly"><a href="%s">_40</a></div>' % language_url,
html=True,
)
# Related ForeignKey object not registered in admin.
self.assertContains(
response, '<div class="readonly">Chapter 1</div>', html=True
)
def test_readonly_foreignkey_links_default_admin_site(self):
self._test_readonly_foreignkey_links("admin")
def test_readonly_foreignkey_links_custom_admin_site(self):
self._test_readonly_foreignkey_links("namespaced_admin")
def test_readonly_manytomany_backwards_ref(self):
"""
Regression test for #16433 - backwards references for related objects
broke if the related field is read-only due to the help_text attribute
"""
topping = Topping.objects.create(name="Salami")
pizza = Pizza.objects.create(name="Americano")
pizza.toppings.add(topping)
response = self.client.get(reverse("admin:admin_views_topping_add"))
self.assertEqual(response.status_code, 200)
def test_readonly_manytomany_forwards_ref(self):
topping = Topping.objects.create(name="Salami")
pizza = Pizza.objects.create(name="Americano")
pizza.toppings.add(topping)
response = self.client.get(
reverse("admin:admin_views_pizza_change", args=(pizza.pk,))
)
self.assertContains(response, "<label>Toppings:</label>", html=True)
self.assertContains(response, '<div class="readonly">Salami</div>', html=True)
def test_readonly_onetoone_backwards_ref(self):
"""
Can reference a reverse OneToOneField in ModelAdmin.readonly_fields.
"""
v1 = Villain.objects.create(name="Adam")
pl = Plot.objects.create(name="Test Plot", team_leader=v1, contact=v1)
pd = PlotDetails.objects.create(details="Brand New Plot", plot=pl)
response = self.client.get(
reverse("admin:admin_views_plotproxy_change", args=(pl.pk,))
)
field = self.get_admin_readonly_field(response, "plotdetails")
pd_url = reverse("admin:admin_views_plotdetails_change", args=(pd.pk,))
self.assertEqual(field.contents(), '<a href="%s">Brand New Plot</a>' % pd_url)
# The reverse relation also works if the OneToOneField is null.
pd.plot = None
pd.save()
response = self.client.get(
reverse("admin:admin_views_plotproxy_change", args=(pl.pk,))
)
field = self.get_admin_readonly_field(response, "plotdetails")
self.assertEqual(field.contents(), "-") # default empty value
def test_readonly_field_overrides(self):
"""
Regression test for #22087 - ModelForm Meta overrides are ignored by
AdminReadonlyField
"""
p = FieldOverridePost.objects.create(title="Test Post", content="Test Content")
response = self.client.get(
reverse("admin:admin_views_fieldoverridepost_change", args=(p.pk,))
)
self.assertContains(
response, '<div class="help">Overridden help text for the date</div>'
)
self.assertContains(
response,
'<label for="id_public">Overridden public label:</label>',
html=True,
)
self.assertNotContains(
response, "Some help text for the date (with Unicode ล ฤฤลฝฤลพลกฤ)"
)
def test_correct_autoescaping(self):
"""
Make sure that non-field readonly elements are properly autoescaped (#24461)
"""
section = Section.objects.create(name="<a>evil</a>")
response = self.client.get(
reverse("admin:admin_views_section_change", args=(section.pk,))
)
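# The section name must appear only in escaped form.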
self.assertNotContains(response, "<a>evil</a>", status_code=200)
self.assertContains(response, "<a>evil</a>", status_code=200)
def test_label_suffix_translated(self):
pizza = Pizza.objects.create(name="Americano")
url = reverse("admin:admin_views_pizza_change", args=(pizza.pk,))
with self.settings(LANGUAGE_CODE="fr"):
response = self.client.get(url)
self.assertContains(response, "<label>Toppings\u00A0:</label>", html=True)
@override_settings(ROOT_URLCONF="admin_views.urls")
class LimitChoicesToInAdminTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_limit_choices_to_as_callable(self):
"""Test for ticket 2445 changes to admin."""
threepwood = Character.objects.create(
username="threepwood",
last_action=datetime.datetime.today() + datetime.timedelta(days=1),
)
marley = Character.objects.create(
username="marley",
last_action=datetime.datetime.today() - datetime.timedelta(days=1),
)
response = self.client.get(reverse("admin:admin_views_stumpjoke_add"))
# The allowed option should appear twice; the limited option should not appear.
self.assertContains(response, threepwood.username, count=2)
self.assertNotContains(response, marley.username)
@override_settings(ROOT_URLCONF="admin_views.urls")
class RawIdFieldsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_limit_choices_to(self):
"""Regression test for 14880"""
actor = Actor.objects.create(name="Palin", age=27)
Inquisition.objects.create(expected=True, leader=actor, country="England")
Inquisition.objects.create(expected=False, leader=actor, country="Spain")
response = self.client.get(reverse("admin:admin_views_sketch_add"))
# Find the link
m = re.search(
rb'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"', response.content
)
self.assertTrue(m) # Got a match
popup_url = m[1].decode().replace("&amp;", "&")
# Handle relative links
popup_url = urljoin(response.request["PATH_INFO"], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step also tests integers, strings and booleans in the
# lookup query string; in the model, the inquisition field has a
# limit_choices_to option that includes a filter on a string field
# (inquisition__actor__name), a filter on an integer field
# (inquisition__actor__age), and a filter on a boolean field
# (inquisition__expected).
response2 = self.client.get(popup_url)
self.assertContains(response2, "Spain")
self.assertNotContains(response2, "England")
def test_limit_choices_to_isnull_false(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get(reverse("admin:admin_views_sketch_add"))
# Find the link
m = re.search(
rb'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"', response.content
)
self.assertTrue(m) # Got a match
popup_url = m[1].decode().replace("&amp;", "&")
# Handle relative links
popup_url = urljoin(response.request["PATH_INFO"], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step tests that field__isnull=0 is parsed correctly from the
# lookup query string; in the model, the defendant0 field has a
# limit_choices_to option that includes "actor__title__isnull=False".
response2 = self.client.get(popup_url)
self.assertContains(response2, "Kilbraken")
self.assertNotContains(response2, "Palin")
def test_limit_choices_to_isnull_true(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get(reverse("admin:admin_views_sketch_add"))
# Find the link
m = re.search(
rb'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"', response.content
)
self.assertTrue(m) # Got a match
popup_url = m[1].decode().replace("&amp;", "&")
# Handle relative links
popup_url = urljoin(response.request["PATH_INFO"], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step tests that field__isnull=1 is parsed correctly from the
# lookup query string; in the model, the defendant1 field has a
# limit_choices_to option that includes "actor__title__isnull=True".
response2 = self.client.get(popup_url)
self.assertNotContains(response2, "Kilbraken")
self.assertContains(response2, "Palin")
def test_list_display_method_same_name_as_reverse_accessor(self):
"""
Should be able to use a ModelAdmin method in list_display that has the
same name as a reverse model field ("sketch" in this case).
"""
actor = Actor.objects.create(name="Palin", age=27)
Inquisition.objects.create(expected=True, leader=actor, country="England")
response = self.client.get(reverse("admin:admin_views_inquisition_changelist"))
self.assertContains(response, "list-display-sketch")
@override_settings(ROOT_URLCONF="admin_views.urls")
class UserAdminTest(TestCase):
"""
Tests user CRUD functionality.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.adduser = User.objects.create_user(
username="adduser", password="secret", is_staff=True
)
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False)
cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_button(self):
user_count = User.objects.count()
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
},
)
new_user = User.objects.get(username="newuser")
self.assertRedirects(
response, reverse("admin:auth_user_change", args=(new_user.pk,))
)
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_save_continue_editing_button(self):
user_count = User.objects.count()
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
"_continue": "1",
},
)
new_user = User.objects.get(username="newuser")
new_user_url = reverse("admin:auth_user_change", args=(new_user.pk,))
self.assertRedirects(response, new_user_url, fetch_redirect_response=False)
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
response = self.client.get(new_user_url)
self.assertContains(
response,
'<li class="success">The user โ<a href="%s">'
"%s</a>โ was added successfully. You may edit it again below.</li>"
% (new_user_url, new_user),
html=True,
)
def test_password_mismatch(self):
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "mismatch",
},
)
self.assertEqual(response.status_code, 200)
self.assertFormError(response, "adminform", "password", [])
self.assertFormError(
response,
"adminform",
"password2",
["The two password fields didnโt match."],
)
def test_user_fk_add_popup(self):
"""
User addition through a FK popup should return the appropriate
JavaScript response.
"""
response = self.client.get(reverse("admin:admin_views_album_add"))
self.assertContains(response, reverse("admin:auth_user_add"))
self.assertContains(
response,
'class="related-widget-wrapper-link add-related" id="add_id_owner"',
)
response = self.client.get(
reverse("admin:auth_user_add") + "?%s=1" % IS_POPUP_VAR
)
self.assertNotContains(response, 'name="_continue"')
self.assertNotContains(response, 'name="_addanother"')
data = {
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
IS_POPUP_VAR: "1",
"_save": "1",
}
response = self.client.post(
reverse("admin:auth_user_add") + "?%s=1" % IS_POPUP_VAR, data, follow=True
)
self.assertContains(response, ""obj": "newuser"")
def test_user_fk_change_popup(self):
"""
User change through a FK popup should return the appropriate JavaScript
response.
"""
response = self.client.get(reverse("admin:admin_views_album_add"))
self.assertContains(
response, reverse("admin:auth_user_change", args=("__fk__",))
)
self.assertContains(
response,
'class="related-widget-wrapper-link change-related" id="change_id_owner"',
)
user = User.objects.get(username="changeuser")
url = (
reverse("admin:auth_user_change", args=(user.pk,)) + "?%s=1" % IS_POPUP_VAR
)
response = self.client.get(url)
self.assertNotContains(response, 'name="_continue"')
self.assertNotContains(response, 'name="_addanother"')
data = {
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
"last_login_0": "2007-05-30",
"last_login_1": "13:20:10",
"date_joined_0": "2007-05-30",
"date_joined_1": "13:20:10",
IS_POPUP_VAR: "1",
"_save": "1",
}
response = self.client.post(url, data, follow=True)
self.assertContains(response, ""obj": "newuser"")
self.assertContains(response, ""action": "change"")
def test_user_fk_delete_popup(self):
"""
User deletion through a FK popup should return the appropriate
JavaScript response.
"""
response = self.client.get(reverse("admin:admin_views_album_add"))
self.assertContains(
response, reverse("admin:auth_user_delete", args=("__fk__",))
)
self.assertContains(
response,
'class="related-widget-wrapper-link change-related" id="change_id_owner"',
)
user = User.objects.get(username="changeuser")
url = (
reverse("admin:auth_user_delete", args=(user.pk,)) + "?%s=1" % IS_POPUP_VAR
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
data = {
"post": "yes",
IS_POPUP_VAR: "1",
}
response = self.client.post(url, data, follow=True)
self.assertContains(response, ""action": "delete"")
def test_save_add_another_button(self):
user_count = User.objects.count()
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
"_addanother": "1",
},
)
new_user = User.objects.order_by("-id")[0]
self.assertRedirects(response, reverse("admin:auth_user_add"))
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_user_permission_performance(self):
u = User.objects.all()[0]
# Don't depend on a warm cache, see #17377.
ContentType.objects.clear_cache()
with self.assertNumQueries(10):
response = self.client.get(reverse("admin:auth_user_change", args=(u.pk,)))
self.assertEqual(response.status_code, 200)
def test_form_url_present_in_context(self):
u = User.objects.all()[0]
response = self.client.get(
reverse("admin3:auth_user_password_change", args=(u.pk,))
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["form_url"], "pony")
@override_settings(ROOT_URLCONF="admin_views.urls")
class GroupAdminTest(TestCase):
"""
Tests group CRUD functionality.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_button(self):
group_count = Group.objects.count()
response = self.client.post(
reverse("admin:auth_group_add"),
{
"name": "newgroup",
},
)
Group.objects.order_by("-id")[0]
self.assertRedirects(response, reverse("admin:auth_group_changelist"))
self.assertEqual(Group.objects.count(), group_count + 1)
def test_group_permission_performance(self):
g = Group.objects.create(name="test_group")
# Ensure no queries are skipped due to cached content type for Group.
ContentType.objects.clear_cache()
with self.assertNumQueries(8):
response = self.client.get(reverse("admin:auth_group_change", args=(g.pk,)))
self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF="admin_views.urls")
class CSSTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_field_prefix_css_classes(self):
"""
Fields have a CSS class name with a 'field-' prefix.
"""
response = self.client.get(reverse("admin:admin_views_post_add"))
# The main form
self.assertContains(response, 'class="form-row field-title"')
self.assertContains(response, 'class="form-row field-content"')
self.assertContains(response, 'class="form-row field-public"')
self.assertContains(response, 'class="form-row field-awesomeness_level"')
self.assertContains(response, 'class="form-row field-coolness"')
self.assertContains(response, 'class="form-row field-value"')
self.assertContains(response, 'class="form-row"') # The lambda function
# The tabular inline
self.assertContains(response, '<td class="field-url">')
self.assertContains(response, '<td class="field-posted">')
def test_index_css_classes(self):
"""
CSS class names are used for each app and model on the admin index
pages (#17050).
"""
# General index page
response = self.client.get(reverse("admin:index"))
self.assertContains(response, '<div class="app-admin_views module')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
# App index page
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertContains(response, '<div class="app-admin_views module')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
def test_app_model_in_form_body_class(self):
"""
Ensure app and model tag are correctly read by change_form template
"""
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_list_body_class(self):
"""
Ensure app and model tag are correctly read by change_list template
"""
response = self.client.get(reverse("admin:admin_views_section_changelist"))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_delete_confirmation_body_class(self):
"""
Ensure app and model tag are correctly read by delete_confirmation
template
"""
response = self.client.get(
reverse("admin:admin_views_section_delete", args=(self.s1.pk,))
)
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_app_index_body_class(self):
"""
Ensure app and model tag are correctly read by app_index template
"""
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertContains(response, '<body class=" dashboard app-admin_views')
def test_app_model_in_delete_selected_confirmation_body_class(self):
"""
Ensure app and model tag are correctly read by
delete_selected_confirmation template
"""
action_data = {
ACTION_CHECKBOX_NAME: [self.s1.pk],
"action": "delete_selected",
"index": 0,
}
response = self.client.post(
reverse("admin:admin_views_section_changelist"), action_data
)
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_changelist_field_classes(self):
"""
Cells of the change list table should contain the field name in their
class attribute.
"""
Podcast.objects.create(name="Django Dose", release_date=datetime.date.today())
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
self.assertContains(response, '<th class="field-name">')
self.assertContains(response, '<td class="field-release_date nowrap">')
self.assertContains(response, '<td class="action-checkbox">')
try:
import docutils
except ImportError:
docutils = None
@unittest.skipUnless(docutils, "no docutils installed.")
@override_settings(ROOT_URLCONF="admin_views.urls")
@modify_settings(
INSTALLED_APPS={"append": ["django.contrib.admindocs", "django.contrib.flatpages"]}
)
class AdminDocsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_tags(self):
response = self.client.get(reverse("django-admindocs-tags"))
# The builtin tag group exists
self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True)
# A builtin tag exists in both the index and detail
self.assertContains(
response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True
)
self.assertContains(
response,
'<li><a href="#built_in-autoescape">autoescape</a></li>',
html=True,
)
# An app tag exists in both the index and detail
self.assertContains(
response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True
)
self.assertContains(
response,
'<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>',
html=True,
)
# The admin list tag group exists
self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True)
# An admin list tag exists in both the index and detail
self.assertContains(
response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True
)
self.assertContains(
response,
'<li><a href="#admin_list-admin_actions">admin_actions</a></li>',
html=True,
)
def test_filters(self):
response = self.client.get(reverse("django-admindocs-filters"))
# The builtin filter group exists
self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True)
# A builtin filter exists in both the index and detail
self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True)
self.assertContains(
response, '<li><a href="#built_in-add">add</a></li>', html=True
)
@override_settings(
ROOT_URLCONF="admin_views.urls",
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class ValidXHTMLTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_lang_name_present(self):
with translation.override(None):
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertNotContains(response, ' lang=""')
self.assertNotContains(response, ' xml:lang=""')
@override_settings(ROOT_URLCONF="admin_views.urls", USE_THOUSAND_SEPARATOR=True)
class DateHierarchyTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def assert_non_localized_year(self, response, year):
"""
The year is not localized with USE_THOUSAND_SEPARATOR (#15234).
"""
self.assertNotContains(response, formats.number_format(year))
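# With USE_THOUSAND_SEPARATOR, number_format(2000) would be "2,000"; the
# hierarchy links must render the raw, unlocalized year.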
def assert_contains_year_link(self, response, date):
self.assertContains(response, '?release_date__year=%d"' % date.year)
def assert_contains_month_link(self, response, date):
self.assertContains(
response,
'?release_date__month=%d&amp;release_date__year=%d"'
% (date.month, date.year),
)
def assert_contains_day_link(self, response, date):
self.assertContains(
response,
"?release_date__day=%d&"
'release_date__month=%d&release_date__year=%d"'
% (date.day, date.month, date.year),
)
def test_empty(self):
"""
No date hierarchy links display with empty changelist.
"""
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
self.assertNotContains(response, "release_date__year=")
self.assertNotContains(response, "release_date__month=")
self.assertNotContains(response, "release_date__day=")
def test_single(self):
"""
Single day-level date hierarchy appears for single object.
"""
DATE = datetime.date(2000, 6, 30)
Podcast.objects.create(release_date=DATE)
url = reverse("admin:admin_views_podcast_changelist")
response = self.client.get(url)
self.assert_contains_day_link(response, DATE)
self.assert_non_localized_year(response, 2000)
def test_within_month(self):
"""
day-level links appear for changelist within single month.
"""
DATES = (
datetime.date(2000, 6, 30),
datetime.date(2000, 6, 15),
datetime.date(2000, 6, 3),
)
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse("admin:admin_views_podcast_changelist")
response = self.client.get(url)
for date in DATES:
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_within_year(self):
"""
month-level links appear for changelist within single year.
"""
DATES = (
datetime.date(2000, 1, 30),
datetime.date(2000, 3, 15),
datetime.date(2000, 5, 3),
)
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse("admin:admin_views_podcast_changelist")
response = self.client.get(url)
# no day-level links
self.assertNotContains(response, "release_date__day=")
for date in DATES:
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_multiple_years(self):
"""
year-level links appear for year-spanning changelist.
"""
DATES = (
datetime.date(2001, 1, 30),
datetime.date(2003, 3, 15),
datetime.date(2005, 5, 3),
)
for date in DATES:
Podcast.objects.create(release_date=date)
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
# no day/month-level links
self.assertNotContains(response, "release_date__day=")
self.assertNotContains(response, "release_date__month=")
for date in DATES:
self.assert_contains_year_link(response, date)
# and make sure GET parameters still behave correctly
for date in DATES:
url = "%s?release_date__year=%d" % (
reverse("admin:admin_views_podcast_changelist"),
date.year,
)
response = self.client.get(url)
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
url = "%s?release_date__year=%d&release_date__month=%d" % (
reverse("admin:admin_views_podcast_changelist"),
date.year,
date.month,
)
response = self.client.get(url)
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
def test_related_field(self):
questions_data = (
# (posted data, number of answers),
(datetime.date(2001, 1, 30), 0),
(datetime.date(2003, 3, 15), 1),
(datetime.date(2005, 5, 3), 2),
)
for date, answer_count in questions_data:
question = Question.objects.create(posted=date)
for i in range(answer_count):
question.answer_set.create()
response = self.client.get(reverse("admin:admin_views_answer_changelist"))
for date, answer_count in questions_data:
link = '?question__posted__year=%d"' % date.year
if answer_count > 0:
self.assertContains(response, link)
else:
self.assertNotContains(response, link)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminCustomSaveRelatedTests(TestCase):
"""
One can easily customize the way related objects are saved.
Refs #16115.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_should_be_able_to_edit_related_objects_on_add_view(self):
post = {
"child_set-TOTAL_FORMS": "3",
"child_set-INITIAL_FORMS": "0",
"name": "Josh Stone",
"child_set-0-name": "Paul",
"child_set-1-name": "Catherine",
}
self.client.post(reverse("admin:admin_views_parent_add"), post)
self.assertEqual(1, Parent.objects.count())
self.assertEqual(2, Child.objects.count())
children_names = list(
Child.objects.order_by("name").values_list("name", flat=True)
)
self.assertEqual("Josh Stone", Parent.objects.latest("id").name)
self.assertEqual(["Catherine Stone", "Paul Stone"], children_names)
def test_should_be_able_to_edit_related_objects_on_change_view(self):
parent = Parent.objects.create(name="Josh Stone")
paul = Child.objects.create(parent=parent, name="Paul")
catherine = Child.objects.create(parent=parent, name="Catherine")
post = {
"child_set-TOTAL_FORMS": "5",
"child_set-INITIAL_FORMS": "2",
"name": "Josh Stone",
"child_set-0-name": "Paul",
"child_set-0-id": paul.id,
"child_set-1-name": "Catherine",
"child_set-1-id": catherine.id,
}
self.client.post(
reverse("admin:admin_views_parent_change", args=(parent.id,)), post
)
children_names = list(
Child.objects.order_by("name").values_list("name", flat=True)
)
self.assertEqual("Josh Stone", Parent.objects.latest("id").name)
self.assertEqual(["Catherine Stone", "Paul Stone"], children_names)
def test_should_be_able_to_edit_related_objects_on_changelist_view(self):
parent = Parent.objects.create(name="Josh Rock")
Child.objects.create(parent=parent, name="Paul")
Child.objects.create(parent=parent, name="Catherine")
post = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": parent.id,
"form-0-name": "Josh Stone",
"_save": "Save",
}
self.client.post(reverse("admin:admin_views_parent_changelist"), post)
children_names = list(
Child.objects.order_by("name").values_list("name", flat=True)
)
self.assertEqual("Josh Stone", Parent.objects.latest("id").name)
self.assertEqual(["Catherine Stone", "Paul Stone"], children_names)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewLogoutTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def test_logout(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse("admin:logout"))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "registration/logged_out.html")
self.assertEqual(response.request["PATH_INFO"], reverse("admin:logout"))
self.assertFalse(response.context["has_permission"])
self.assertNotContains(
response, "user-tools"
) # user-tools div shouldn't be visible.
def test_client_logout_url_can_be_used_to_login(self):
response = self.client.get(reverse("admin:logout"))
self.assertEqual(
response.status_code, 302
) # we should be redirected to the login page.
# follow the redirect and test results.
response = self.client.get(reverse("admin:logout"), follow=True)
self.assertContains(
response,
'<input type="hidden" name="next" value="%s">' % reverse("admin:index"),
)
self.assertTemplateUsed(response, "admin/login.html")
self.assertEqual(response.request["PATH_INFO"], reverse("admin:login"))
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminUserMessageTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def send_message(self, level):
"""
Helper that posts to the dummy test action and asserts that a message
with the given level has appeared in the response.
"""
action_data = {
ACTION_CHECKBOX_NAME: [1],
"action": "message_%s" % level,
"index": 0,
}
response = self.client.post(
reverse("admin:admin_views_usermessenger_changelist"),
action_data,
follow=True,
)
self.assertContains(
response, '<li class="%s">Test %s</li>' % (level, level), html=True
)
@override_settings(MESSAGE_LEVEL=10) # Set to DEBUG for this request
def test_message_debug(self):
self.send_message("debug")
def test_message_info(self):
self.send_message("info")
def test_message_success(self):
self.send_message("success")
def test_message_warning(self):
self.send_message("warning")
def test_message_error(self):
self.send_message("error")
def test_message_extra_tags(self):
action_data = {
ACTION_CHECKBOX_NAME: [1],
"action": "message_extra_tags",
"index": 0,
}
response = self.client.post(
reverse("admin:admin_views_usermessenger_changelist"),
action_data,
follow=True,
)
self.assertContains(
response, '<li class="extra_tag info">Test tags</li>', html=True
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminKeepChangeListFiltersTests(TestCase):
admin_site = site
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.joepublicuser = User.objects.create_user(
username="joepublic", password="secret"
)
def setUp(self):
self.client.force_login(self.superuser)
def assertURLEqual(self, url1, url2, msg_prefix=""):
"""
Assert that two URLs are equal despite the ordering
of their querystring. Refs #22360.
"""
parsed_url1 = urlparse(url1)
path1 = parsed_url1.path
parsed_qs1 = dict(parse_qsl(parsed_url1.query))
parsed_url2 = urlparse(url2)
path2 = parsed_url2.path
parsed_qs2 = dict(parse_qsl(parsed_url2.query))
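# _changelist_filters is itself a urlencoded querystring; parse the nested
# value too so its internal ordering doesn't affect equality.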
for parsed_qs in [parsed_qs1, parsed_qs2]:
if "_changelist_filters" in parsed_qs:
changelist_filters = parsed_qs["_changelist_filters"]
parsed_filters = dict(parse_qsl(changelist_filters))
parsed_qs["_changelist_filters"] = parsed_filters
self.assertEqual(path1, path2)
self.assertEqual(parsed_qs1, parsed_qs2)
def test_assert_url_equal(self):
# Test equality.
change_user_url = reverse(
"admin:auth_user_change", args=(self.joepublicuser.pk,)
)
self.assertURLEqual(
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
)
# Test inequality.
with self.assertRaises(AssertionError):
self.assertURLEqual(
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D1%26is_superuser__exact%3D1".format(change_user_url),
)
# Ignore scheme and host.
self.assertURLEqual(
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
)
# Ignore ordering of querystring.
self.assertURLEqual(
"{}?is_staff__exact=0&is_superuser__exact=0".format(
reverse("admin:auth_user_changelist")
),
"{}?is_superuser__exact=0&is_staff__exact=0".format(
reverse("admin:auth_user_changelist")
),
)
# Ignore ordering of _changelist_filters.
self.assertURLEqual(
"{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"{}?_changelist_filters="
"is_superuser__exact%3D0%26is_staff__exact%3D0".format(change_user_url),
)
def get_changelist_filters(self):
return {
"is_superuser__exact": 0,
"is_staff__exact": 0,
}
def get_changelist_filters_querystring(self):
return urlencode(self.get_changelist_filters())
def get_preserved_filters_querystring(self):
return urlencode(
{"_changelist_filters": self.get_changelist_filters_querystring()}
)
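# The admin round-trips the active changelist filters through the
# _changelist_filters GET parameter on add/change/delete/history views.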
def get_sample_user_id(self):
return self.joepublicuser.pk
def get_changelist_url(self):
return "%s?%s" % (
reverse("admin:auth_user_changelist", current_app=self.admin_site.name),
self.get_changelist_filters_querystring(),
)
def get_add_url(self, add_preserved_filters=True):
url = reverse("admin:auth_user_add", current_app=self.admin_site.name)
if add_preserved_filters:
url = "%s?%s" % (url, self.get_preserved_filters_querystring())
return url
def get_change_url(self, user_id=None, add_preserved_filters=True):
if user_id is None:
user_id = self.get_sample_user_id()
url = reverse(
"admin:auth_user_change", args=(user_id,), current_app=self.admin_site.name
)
if add_preserved_filters:
url = "%s?%s" % (url, self.get_preserved_filters_querystring())
return url
def get_history_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse(
"admin:auth_user_history",
args=(user_id,),
current_app=self.admin_site.name,
),
self.get_preserved_filters_querystring(),
)
def get_delete_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse(
"admin:auth_user_delete",
args=(user_id,),
current_app=self.admin_site.name,
),
self.get_preserved_filters_querystring(),
)
def test_changelist_view(self):
response = self.client.get(self.get_changelist_url())
self.assertEqual(response.status_code, 200)
# Check the `change_view` link has the correct querystring.
detail_link = re.search(
'<a href="(.*?)">{}</a>'.format(self.joepublicuser.username),
response.content.decode(),
)
self.assertURLEqual(detail_link[1], self.get_change_url())
def test_change_view(self):
# Get the `change_view`.
response = self.client.get(self.get_change_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form action="(.*?)" method="post" id="user_form" novalidate>',
response.content.decode(),
)
self.assertURLEqual(
form_action[1], "?%s" % self.get_preserved_filters_querystring()
)
# Check the history link.
history_link = re.search(
'<a href="(.*?)" class="historylink">History</a>', response.content.decode()
)
self.assertURLEqual(history_link[1], self.get_history_url())
# Check the delete link.
delete_link = re.search(
'<a href="(.*?)" class="deletelink">Delete</a>', response.content.decode()
)
self.assertURLEqual(delete_link[1], self.get_delete_url())
# Test redirect on "Save".
post_data = {
"username": "joepublic",
"last_login_0": "2007-05-30",
"last_login_1": "13:20:10",
"date_joined_0": "2007-05-30",
"date_joined_1": "13:20:10",
}
post_data["_save"] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_changelist_url())
post_data.pop("_save")
# Test redirect on "Save and continue".
post_data["_continue"] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_change_url())
post_data.pop("_continue")
# Test redirect on "Save and add new".
post_data["_addanother"] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_add_url())
post_data.pop("_addanother")
def test_change_view_without_preserved_filters(self):
response = self.client.get(self.get_change_url(add_preserved_filters=False))
# The action attribute is omitted.
self.assertContains(response, '<form method="post" id="user_form" novalidate>')
def test_add_view(self):
# Get the `add_view`.
response = self.client.get(self.get_add_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form action="(.*?)" method="post" id="user_form" novalidate>',
response.content.decode(),
)
self.assertURLEqual(
form_action[1], "?%s" % self.get_preserved_filters_querystring()
)
post_data = {
"username": "dummy",
"password1": "test",
"password2": "test",
}
# Test redirect on "Save".
post_data["_save"] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(
response, self.get_change_url(User.objects.get(username="dummy").pk)
)
post_data.pop("_save")
# Test redirect on "Save and continue".
post_data["username"] = "dummy2"
post_data["_continue"] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(
response, self.get_change_url(User.objects.get(username="dummy2").pk)
)
post_data.pop("_continue")
# Test redirect on "Save and add new".
post_data["username"] = "dummy3"
post_data["_addanother"] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(response, self.get_add_url())
post_data.pop("_addanother")
def test_add_view_without_preserved_filters(self):
response = self.client.get(self.get_add_url(add_preserved_filters=False))
# The action attribute is omitted.
self.assertContains(response, '<form method="post" id="user_form" novalidate>')
def test_delete_view(self):
# Test redirect on "Delete".
response = self.client.post(self.get_delete_url(), {"post": "yes"})
self.assertRedirects(response, self.get_changelist_url())
def test_url_prefix(self):
context = {
"preserved_filters": self.get_preserved_filters_querystring(),
"opts": User._meta,
}
prefixes = ("", "/prefix/", "/ๅพๅฐ/")
for prefix in prefixes:
with self.subTest(prefix=prefix), override_script_prefix(prefix):
url = reverse(
"admin:auth_user_changelist", current_app=self.admin_site.name
)
self.assertURLEqual(
self.get_changelist_url(),
add_preserved_filters(context, url),
)
class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests):
admin_site = site2
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestLabelVisibility(TestCase):
"""#11277 -Labels of hidden fields in admin were not hidden."""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_all_fields_visible(self):
response = self.client.get(reverse("admin:admin_views_emptymodelvisible_add"))
self.assert_fieldline_visible(response)
self.assert_field_visible(response, "first")
self.assert_field_visible(response, "second")
def test_all_fields_hidden(self):
response = self.client.get(reverse("admin:admin_views_emptymodelhidden_add"))
self.assert_fieldline_hidden(response)
self.assert_field_hidden(response, "first")
self.assert_field_hidden(response, "second")
def test_mixin(self):
response = self.client.get(reverse("admin:admin_views_emptymodelmixin_add"))
self.assert_fieldline_visible(response)
self.assert_field_hidden(response, "first")
self.assert_field_visible(response, "second")
def assert_field_visible(self, response, field_name):
self.assertContains(response, '<div class="fieldBox field-%s">' % field_name)
def assert_field_hidden(self, response, field_name):
self.assertContains(
response, '<div class="fieldBox field-%s hidden">' % field_name
)
def assert_fieldline_visible(self, response):
self.assertContains(response, '<div class="form-row field-first field-second">')
def assert_fieldline_hidden(self, response):
self.assertContains(response, '<div class="form-row hidden')
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewOnSiteTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = State.objects.create(name="New York")
cls.s2 = State.objects.create(name="Illinois")
cls.s3 = State.objects.create(name="California")
cls.c1 = City.objects.create(state=cls.s1, name="New York")
cls.c2 = City.objects.create(state=cls.s2, name="Chicago")
cls.c3 = City.objects.create(state=cls.s3, name="San Francisco")
cls.r1 = Restaurant.objects.create(city=cls.c1, name="Italian Pizza")
cls.r2 = Restaurant.objects.create(city=cls.c1, name="Boulevard")
cls.r3 = Restaurant.objects.create(city=cls.c2, name="Chinese Dinner")
cls.r4 = Restaurant.objects.create(city=cls.c2, name="Angels")
cls.r5 = Restaurant.objects.create(city=cls.c2, name="Take Away")
cls.r6 = Restaurant.objects.create(city=cls.c3, name="The Unknown Restaurant")
cls.w1 = Worker.objects.create(work_at=cls.r1, name="Mario", surname="Rossi")
cls.w2 = Worker.objects.create(
work_at=cls.r1, name="Antonio", surname="Bianchi"
)
cls.w3 = Worker.objects.create(work_at=cls.r1, name="John", surname="Doe")
def setUp(self):
self.client.force_login(self.superuser)
def test_add_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
run validation even if validation is contingent on parent form data.
Also, assertFormError() and assertFormsetError() are usable for admin
forms and formsets.
"""
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {
"family_name": "Test1",
"dependentchild_set-TOTAL_FORMS": "1",
"dependentchild_set-INITIAL_FORMS": "0",
"dependentchild_set-MAX_NUM_FORMS": "1",
"dependentchild_set-0-id": "",
"dependentchild_set-0-parent": "",
"dependentchild_set-0-family_name": "Test2",
}
response = self.client.post(
reverse("admin:admin_views_parentwithdependentchildren_add"), post_data
)
self.assertFormError(
response, "adminform", "some_required_info", ["This field is required."]
)
msg = (
"The form 'adminform' in context 0 does not contain the non-field error "
"'Error'"
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormError(response, "adminform", None, ["Error"])
self.assertFormsetError(
response,
"inline_admin_formset",
0,
None,
[
"Children must share a family name with their parents in this "
"contrived test case"
],
)
msg = (
"The formset 'inline_admin_formset' in context 22 does not contain any "
"non-form errors."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(
response, "inline_admin_formset", None, None, ["Error"]
)
def test_change_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
run validation even if validation is contingent on parent form data
"""
pwdc = ParentWithDependentChildren.objects.create(
some_required_info=6, family_name="Test1"
)
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {
"family_name": "Test2",
"dependentchild_set-TOTAL_FORMS": "1",
"dependentchild_set-INITIAL_FORMS": "0",
"dependentchild_set-MAX_NUM_FORMS": "1",
"dependentchild_set-0-id": "",
"dependentchild_set-0-parent": str(pwdc.id),
"dependentchild_set-0-family_name": "Test1",
}
response = self.client.post(
reverse(
"admin:admin_views_parentwithdependentchildren_change", args=(pwdc.id,)
),
post_data,
)
self.assertFormError(
response, "adminform", "some_required_info", ["This field is required."]
)
self.assertFormsetError(
response,
"inline_admin_formset",
0,
None,
[
"Children must share a family name with their parents in this "
"contrived test case"
],
)
def test_check(self):
"The view_on_site value is either a boolean or a callable"
try:
admin = CityAdmin(City, AdminSite())
CityAdmin.view_on_site = True
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = False
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = lambda obj: obj.get_absolute_url()
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = []
self.assertEqual(
admin.check(),
[
Error(
"The value of 'view_on_site' must be a callable or a boolean "
"value.",
obj=CityAdmin,
id="admin.E025",
),
],
)
finally:
# Restore the original values for the benefit of other tests.
CityAdmin.view_on_site = True
def test_false(self):
"The 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(
reverse("admin:admin_views_restaurant_change", args=(self.r1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertNotContains(
response, reverse("admin:view_on_site", args=(content_type_pk, 1))
)
def test_true(self):
"The default behavior is followed if view_on_site is True"
response = self.client.get(
reverse("admin:admin_views_city_change", args=(self.c1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertContains(
response, reverse("admin:view_on_site", args=(content_type_pk, self.c1.pk))
)
def test_callable(self):
"The right link is displayed if view_on_site is a callable"
response = self.client.get(
reverse("admin:admin_views_worker_change", args=(self.w1.pk,))
)
self.assertContains(
response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name)
)
def test_missing_get_absolute_url(self):
"None is returned if model doesn't have get_absolute_url"
model_admin = ModelAdmin(Worker, None)
self.assertIsNone(model_admin.get_view_on_site_url(Worker()))
@override_settings(ROOT_URLCONF="admin_views.urls")
class InlineAdminViewOnSiteTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = State.objects.create(name="New York")
cls.s2 = State.objects.create(name="Illinois")
cls.s3 = State.objects.create(name="California")
cls.c1 = City.objects.create(state=cls.s1, name="New York")
cls.c2 = City.objects.create(state=cls.s2, name="Chicago")
cls.c3 = City.objects.create(state=cls.s3, name="San Francisco")
cls.r1 = Restaurant.objects.create(city=cls.c1, name="Italian Pizza")
cls.r2 = Restaurant.objects.create(city=cls.c1, name="Boulevard")
cls.r3 = Restaurant.objects.create(city=cls.c2, name="Chinese Dinner")
cls.r4 = Restaurant.objects.create(city=cls.c2, name="Angels")
cls.r5 = Restaurant.objects.create(city=cls.c2, name="Take Away")
cls.r6 = Restaurant.objects.create(city=cls.c3, name="The Unknown Restaurant")
cls.w1 = Worker.objects.create(work_at=cls.r1, name="Mario", surname="Rossi")
cls.w2 = Worker.objects.create(
work_at=cls.r1, name="Antonio", surname="Bianchi"
)
cls.w3 = Worker.objects.create(work_at=cls.r1, name="John", surname="Doe")
def setUp(self):
self.client.force_login(self.superuser)
def test_false(self):
"The 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(
reverse("admin:admin_views_state_change", args=(self.s1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertNotContains(
response, reverse("admin:view_on_site", args=(content_type_pk, self.c1.pk))
)
def test_true(self):
"The 'View on site' button is displayed if view_on_site is True"
response = self.client.get(
reverse("admin:admin_views_city_change", args=(self.c1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertContains(
response, reverse("admin:view_on_site", args=(content_type_pk, self.r1.pk))
)
def test_callable(self):
"The right link is displayed if view_on_site is a callable"
response = self.client.get(
reverse("admin:admin_views_restaurant_change", args=(self.r1.pk,))
)
self.assertContains(
response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name)
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class GetFormsetsWithInlinesArgumentTest(TestCase):
"""
#23934 - When adding a new model instance in the admin, the 'obj' argument
of get_formsets_with_inlines() should be None. When changing, it should be
equal to the existing model instance.
The GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception
if obj is not None during add_view or obj is None during change_view.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_explicitly_provided_pk(self):
post_data = {"name": "1"}
response = self.client.post(
reverse("admin:admin_views_explicitlyprovidedpk_add"), post_data
)
self.assertEqual(response.status_code, 302)
post_data = {"name": "2"}
response = self.client.post(
reverse("admin:admin_views_explicitlyprovidedpk_change", args=(1,)),
post_data,
)
self.assertEqual(response.status_code, 302)
def test_implicitly_generated_pk(self):
post_data = {"name": "1"}
response = self.client.post(
reverse("admin:admin_views_implicitlygeneratedpk_add"), post_data
)
self.assertEqual(response.status_code, 302)
post_data = {"name": "2"}
response = self.client.post(
reverse("admin:admin_views_implicitlygeneratedpk_change", args=(1,)),
post_data,
)
self.assertEqual(response.status_code, 302)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminSiteFinalCatchAllPatternTests(TestCase):
"""
Verifies the behaviour of the admin catch-all view.
* Anonymous/non-staff users are redirected to login for all URLs, whether
otherwise valid or not.
* APPEND_SLASH is applied for staff if needed.
* Otherwise Http404.
* Catch-all view disabled via AdminSite.final_catch_all_view.
"""
def test_unknown_url_redirects_login_if_not_authenticated(self):
unknown_url = "/test_admin/admin/unknown/"
response = self.client.get(unknown_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), unknown_url)
)
def test_unknown_url_404_if_authenticated(self):
superuser = User.objects.create_superuser(
username="super",
password="secret",
email="[email protected]",
)
self.client.force_login(superuser)
unknown_url = "/test_admin/admin/unknown/"
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_known_url_redirects_login_if_not_authenticated(self):
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), known_url)
)
def test_known_url_missing_slash_redirects_login_if_not_authenticated(self):
known_url = reverse("admin:admin_views_article_changelist")[:-1]
response = self.client.get(known_url)
# Redirects with the next URL also missing the slash.
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), known_url)
)
def test_non_admin_url_shares_url_prefix(self):
url = reverse("non_admin")[:-1]
response = self.client.get(url)
# Redirects with the next URL also missing the slash.
self.assertRedirects(response, "%s?next=%s" % (reverse("admin:login"), url))
def test_url_without_trailing_slash_if_not_authenticated(self):
url = reverse("admin:article_extra_json")
response = self.client.get(url)
self.assertRedirects(response, "%s?next=%s" % (reverse("admin:login"), url))
def test_unknown_url_without_trailing_slash_if_not_authenticated(self):
url = reverse("admin:article_extra_json")[:-1]
response = self.client.get(url)
self.assertRedirects(response, "%s?next=%s" % (reverse("admin:login"), url))
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_unknown_url(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
unknown_url = "/test_admin/admin/unknown/"
response = self.client.get(unknown_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response, known_url, status_code=301, target_status_code=403
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_script_name(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1], SCRIPT_NAME="/prefix/")
self.assertRedirects(
response,
"/prefix" + known_url,
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True, FORCE_SCRIPT_NAME="/prefix/")
def test_missing_slash_append_slash_true_force_script_name(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response,
"/prefix" + known_url,
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_non_staff_user(self):
user = User.objects.create_user(
username="user",
password="secret",
email="[email protected]",
is_staff=False,
)
self.client.force_login(user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response,
"/test_admin/admin/login/?next=/test_admin/admin/admin_views/article",
)
@override_settings(APPEND_SLASH=False)
def test_missing_slash_append_slash_false(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_single_model_no_append_slash(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin9:admin_views_actor_changelist")
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
# Same tests as above with final_catch_all_view=False.
def test_unknown_url_404_if_not_authenticated_without_final_catch_all_view(self):
unknown_url = "/test_admin/admin10/unknown/"
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_unknown_url_404_if_authenticated_without_final_catch_all_view(self):
superuser = User.objects.create_superuser(
username="super",
password="secret",
email="[email protected]",
)
self.client.force_login(superuser)
unknown_url = "/test_admin/admin10/unknown/"
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_known_url_redirects_login_if_not_auth_without_final_catch_all_view(
self,
):
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin10:login"), known_url)
)
def test_known_url_missing_slash_redirects_with_slash_if_not_auth_no_catch_all_view(
self,
):
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response, known_url, status_code=301, fetch_redirect_response=False
)
def test_non_admin_url_shares_url_prefix_without_final_catch_all_view(self):
url = reverse("non_admin10")
response = self.client.get(url[:-1])
self.assertRedirects(response, url, status_code=301)
def test_url_no_trailing_slash_if_not_auth_without_final_catch_all_view(
self,
):
url = reverse("admin10:article_extra_json")
response = self.client.get(url)
self.assertRedirects(response, "%s?next=%s" % (reverse("admin10:login"), url))
def test_unknown_url_no_trailing_slash_if_not_auth_without_final_catch_all_view(
self,
):
url = reverse("admin10:article_extra_json")[:-1]
response = self.client.get(url)
# Matches test_admin/admin10/admin_views/article/<path:object_id>/
self.assertRedirects(
response, url + "/", status_code=301, fetch_redirect_response=False
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_unknown_url_without_final_catch_all_view(
self,
):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
unknown_url = "/test_admin/admin10/unknown/"
response = self.client.get(unknown_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_without_final_catch_all_view(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response, known_url, status_code=301, target_status_code=403
)
@override_settings(APPEND_SLASH=False)
def test_missing_slash_append_slash_false_without_final_catch_all_view(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
# Outside admin.
def test_non_admin_url_404_if_not_authenticated(self):
unknown_url = "/unknown/"
response = self.client.get(unknown_url)
# Does not redirect to the admin login.
self.assertEqual(response.status_code, 404)
|
py | 7df96fb80c5213e0926278a6be1133c542a29f5c | # Pyrogram - Telegram MTProto API Client Library for Python
# Copyright (C) 2017-2021 Dan <https://github.com/delivrance>
#
# This file is part of Pyrogram.
#
# Pyrogram is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyrogram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyrogram. If not, see <http://www.gnu.org/licenses/>.
from io import BytesIO
from pyrogram.raw.core.primitives import Int, Long, Int128, Int256, Bool, Bytes, String, Double, Vector
from pyrogram.raw.core import TLObject
from pyrogram import raw
from typing import List, Union, Any
# # # # # # # # # # # # # # # # # # # # # # # #
# !!! WARNING !!! #
# This is a generated file! #
# All changes made in this file will be lost! #
# # # # # # # # # # # # # # # # # # # # # # # #
class InputSecureFileLocation(TLObject): # type: ignore
"""This object is a constructor of the base type :obj:`~pyrogram.raw.base.InputFileLocation`.
Details:
- Layer: ``126``
- ID: ``0xcbc7ee28``
Parameters:
id: ``int`` ``64-bit``
access_hash: ``int`` ``64-bit``
"""
__slots__: List[str] = ["id", "access_hash"]
ID = 0xcbc7ee28
QUALNAME = "types.InputSecureFileLocation"
def __init__(self, *, id: int, access_hash: int) -> None:
self.id = id # long
self.access_hash = access_hash # long
@staticmethod
def read(data: BytesIO, *args: Any) -> "InputSecureFileLocation":
# No flags
id = Long.read(data)
access_hash = Long.read(data)
return InputSecureFileLocation(id=id, access_hash=access_hash)
def write(self) -> bytes:
data = BytesIO()
data.write(Int(self.ID, False))
# No flags
data.write(Long(self.id))
data.write(Long(self.access_hash))
return data.getvalue()
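# Illustrative round-trip (a sketch, not part of the generated file; the
# id/access_hash values below are made up):
#
#     loc = InputSecureFileLocation(id=123456789, access_hash=987654321)
#     payload = loc.write()
#     # skip the 4-byte constructor ID that write() prepends
#     parsed = InputSecureFileLocation.read(BytesIO(payload[4:]))
#     assert (parsed.id, parsed.access_hash) == (loc.id, loc.access_hash)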
|
py | 7df9703e62e73095285d835812fb9b4f54675d35 | from frappe import permissions, _dict
from frappe.permissions import rights
def allow_everything():
'''
returns a dict with access to everything
eg. {"read": 1, "write": 1, ...}
'''
perm = _dict({ptype: 1 for ptype in rights})
return perm
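# Illustrative effect of the monkey-patch applied below (a sketch; assumes
# frappe's `rights` includes "read" and "write", and that `_dict` allows
# attribute access):
#
#     perm = permissions.allow_everything()
#     assert perm.read == 1 and perm.write == 1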
permissions.allow_everything = allow_everything |
py | 7df9704b55382dcc2762d2efc3fb7c6fe84cb5e5 | from __future__ import print_function
import math
import Queue
import sys
import random
def make(filename):
""" build a graph """
fin = open(filename, "r")
linenum = 0
verts = 0
edges = 0
graph = None
no_weight = False
for line in fin:
# print ("line " + line)
values = line.split("\t")
# print ("values " + str(values))
# strip the wack n if present
try:
for i in values:
# print ("i [" + i + "]")
i = float(str(i).strip("\n"))
except Exception as ex:
print ("\nError parsing the file. This is probably from using spaces instead of tabs.")
print("Exiting...\n")
# print(ex)
raise ex
# if first get graph verts n edges
if linenum == 0:
verts = int(values[0])
edges = int(values[1])
graph = Graph(int(verts), int(edges))
else: # else connect the verts
node1 = int(values[0])
node2 = int(values[1])
graph.connect(node1, node2)
if len(values) == 3:
weight = float(values[2])
graph.add_cost(node1, node2, weight)
else:
no_weight = True
linenum += 1
if no_weight:
print ("\nThe file you passed does not contain measures for weighted edges.")
print ("Please make sure this is correct.\n")
fin.close()
return graph
class GraphException(Exception):
""" make a graph ex """
def __str__(self):
return repr(self.message)
### not used, just messing with python overloading
class Matrix:
""" make a matrix """
def __init__(self, r, c):
self.rows = r
self.cols = c
self.data = [[0 for _ in range(self.cols)] for _ in range(self.rows)]
def __getitem__(self, key):
print ("key: " + str(key))
return self.data[key]
def __setitem__(self, key, value):
print ("set key: " + str(key) + " val: " + str(value))
self.data[key] = value
def output(self):
for i in range(self.rows):
row = ""
for j in range(self.cols):
row += (str(self.data[i][j]) + " ")
print (row + "\n")
def set(self, a, b, val):
self.data[a][b] = val
def fill(self, value):
for i in range(self.rows):
for j in range(self.cols):
self.set(i, j, value)
class Graph:
def __init__(self, vs, es, hasWeights=False):
self.verts = vs
self.edges = es
self.data = [[0 for x in range(self.verts)] for y in range(self.verts)]
self.isWeighted = bool(hasWeights)
# init all weights to "infinity"
self.weights = [[sys.maxint for x in range(self.verts)] for y in range(self.verts)]
def __getitem__(self, key):
return self.data[key]
def output(self):
for i in range(self.verts):
row = ""
for j in range(self.verts):
row += (str(self.data[i][j]) + " ")
print( row + "\n")
def add_cost(self, a, b, weight):
self.weights[a][b] = float(weight)
self.weights[b][a] = float(weight)
def connect(self, a, b, weight=None):
self.data[a][b] = 1
self.data[b][a] = 1
if weight is not None:
self.add_cost(a, b, weight)
def remove(self, a, b):
self.data[a][b] = 0
self.data[b][a] = 0
def density(self):
if self.edges == 0 or self.verts < 2:  # avoid division by zero below
return 0
else:
top = 2 * float(self.edges)
bottom = float(self.verts) * float(self.verts - 1)
return round((top/bottom), 5)
def edge_cost(self, a, b):
return self.weights[a][b]
# run a bfs
def bfs(self, start):
visited = list()
queue = Queue.Queue()
queue.put(start)
while not queue.empty():
vert = queue.get()
if vert not in visited:
visited.append(vert)
for index in range(0, len(self.data[vert])):
if self.data[vert][index] == 1:
queue.put(index)
return visited
# run a dfs
def dfs(self, start):
visited = list()
stack = list()
stack.append(start)
while len(stack):
vert = stack.pop()
if vert not in visited:
visited.append(vert)
for index in range(0, len(self.data[vert])):
if self.data[vert][index] == 1:
stack.append(index)
return visited
def dij_path(self, start, end):
# note: distances are relaxed in BFS order, and "path" is the DFS visit
# order truncated at `end` -- it is not a reconstructed shortest path
if end >= self.verts:
raise GraphException("Cannot find a vertex that is not in the graph")
visited = list()
dists = [sys.maxint for x in range(self.verts)]
dists[start] = 0
search = self.dfs(start)
path = list()
queue = Queue.Queue()
queue.put(start)
while not queue.empty():
vert = queue.get()
if vert not in visited:
visited.append(vert)
for index in range(0, len(self.data[vert])):
if self.data[vert][index] == 1:
queue.put(index)
if (dists[vert] + self.weights[vert][index]) < dists[index]:
# print("its less")
dists[index] = dists[vert] + self.weights[vert][index]
if dists[vert] == sys.maxint:
# print("inf, setting to", self.weights[vert][index])
dists[index] = self.weights[vert][index]
# path.append(vert)
for i in search:
path.append(i)
if i == end:
break
return {"distance": dists[end], "path": path}
def comps(self):
ret = set()
seen = set()
while ( len(seen) != len(self.data) ):
for index in range(0, len(self.data[0])):
if index not in seen:
conns = frozenset(self.dfs(index))
seen = seen | conns ## union the sets
ret.add(conns)
return ret
def degree(self, switch):
target = 0
if (switch == "min"):
target = self.verts - 1
if ( target < 0 ):
target = 0
for i in range(self.verts):
tmp = 0
for j in range(self.verts):
tmp += self.data[i][j]
if (switch == "max"):
if (tmp > target):
target = tmp
elif(switch == "min"):
if ( tmp < target):
target = tmp
else:
print (GraphException("Invalid switch passed to degree."))
return target
def order_verts(self, direction):
deg_bound = 0
degs = list()
for i in range( self.verts ):
deg = 0
for j in range(self.verts):
if self.data[i][j] == 1:
deg += 1
degs.append([i,deg])
if direction == "max":
degs = sorted(degs, key=lambda tup: tup[1])
degs.reverse()
elif direction == "min":
degs = sorted(degs, key=lambda tup: tup[1])
elif direction == "random":
degs = random.sample(degs, len(degs))
else:
raise GraphException("Invalid direction passed to order_verts: " + direction)
# pluck out the vert numbers and drop the deg used to order
degs = [i for [i,j] in degs]
return degs
def color(self, direction):
vert_set = None
try:
vert_set = self.order_verts(direction=direction)
except GraphException as ex:
print ("Cannot continue, invalid direction given")
raise ex
except Exception as generalEx:
raise GraphException(generalEx)
colors = set()
current_color = 1
colored = dict() ## dict[vert]: color
colors.add(0)
try:
for vert in vert_set:
valid_colors = set()
valid_colors = valid_colors | colors # make all colors initially valid
if vert not in colored:
for i in range(self.verts):
if self.data[vert][i] == 0:
continue
neighbor = i
if neighbor in colored.keys():
try:
# print "neighbor color:", colored[neighbor], "valid color:", colored[neighbor] in valid_colors
if colored[neighbor] in valid_colors:
# remove the neighbor color from valid list
valid_colors.remove(colored[neighbor])
except Exception as ex:
print ("neighbor check error for", neighbor)
raise ex
try:
if len(valid_colors) == 0:
colors.add(current_color)
colored[vert] = current_color
current_color += 1
else:
colored[vert] = min(valid_colors)
except Exception as ex:
print("assign error")
raise ex
else:
print ("vert", vert, "already colored")
# print colored
# print "took", len(colors), "different colors"
return { "number": len(colors), "colors": colors }
except Exception as ex:
raise ex
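# Usage sketch (illustrative; "graph.txt" is a placeholder for a
# tab-separated file whose first line is "<num_verts>\t<num_edges>" and
# whose remaining lines are "<node1>\t<node2>[\t<weight>]"):
#
#     g = make("graph.txt")
#     print(g.density())
#     print(g.bfs(0))            # breadth-first visit order from vertex 0
#     print(g.dij_path(0, 3))    # {"distance": ..., "path": [...]}
#     print(g.color("max"))      # greedy coloring, highest-degree first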
|
py | 7df97138967d3b73eae75b3e6cfa08f35d7de04f | """ACIL_GetImage is a module developed for the internal use of the Applied Chest Imaging Laboratory to download
cases stored in MAD server via ssh.
It works on Unix/Mac/Windows, and it uses an internal SSH key created specifically for this purpose, so it
doesn't require the user to have an authorized SSH key installed.
First version: Jorge Onieva (ACIL, [email protected]). Sept 2014"""
import os, sys
from __main__ import vtk, qt, ctk, slicer
from collections import OrderedDict
import subprocess
# Add the CIP common library to the path if it has not been loaded yet
try:
from CIP.logic.SlicerUtil import SlicerUtil
except Exception as ex:
currentpath = os.path.dirname(os.path.realpath(__file__))
# We assume that CIP_Common is in the development structure
path = os.path.normpath(currentpath + '/../../Scripted/CIP_Common')
if not os.path.exists(path):
# We assume that CIP is a subfolder (Slicer behaviour)
path = os.path.normpath(currentpath + '/CIP')
sys.path.append(path)
print(("The following path was manually added to the PythonPath in CIP_GetImage: " + path))
from CIP.logic.SlicerUtil import SlicerUtil
from CIP.logic import Util
import CIP.ui as CIPUI
class CIP_GetImage:
"""Load cases from a SSH server or other device"""
def __init__(self, parent):
"""Constructor for main class"""
self.parent = parent
#ScriptedLoadableModule.__init__(self, parent)
self.parent.title = "CIP GetImage"
self.parent.categories = ["Chest Imaging Platform.Modules"]
self.parent.dependencies = []
self.parent.contributors = ["Jorge Onieva", "Applied Chest Imaging Laboratory", "Brigham and Women's Hospital"]
self.parent.helpText = "This is an internal module to load images from MAD repository via SSH"
self.parent.acknowledgementText = SlicerUtil.ACIL_AcknowledgementText
class CIP_GetImageWidget:
"""Visual object"""
# Study ids. Convention: Descriptive text (key) / Name of the folder in the server
studyIds = OrderedDict()
studyIds["Study 1"] = "Study1"
studyIds["Study 2"] = "Study2"
studyIds["Other"] = "Other"
# Image types. You can add as many as different volume types you have
# Convention:
# Descriptive text (key)
# Files extension (example: "processed").
imageTypes = OrderedDict()
imageTypes["CT"] = "" # Default. No extension
imageTypes["CT Processed"] = "processed" # Default. No extension
# Label maps types. Idem
# Convention:
# Descriptive text (key)
# Checked by default
# Files extension (example: case_partialLungLabelMap.nrrd)
labelMapTypes = OrderedDict()
labelMapTypes["Partial Lung"] = (False, "_partialLungLabelMap")
labelMapTypes["Body Composition"] = (False, "_bodyComposition")
labelMapTypes["Body Composition (interactive)"] = (False, "_interactiveBodyComposition")
def __init__(self, parent = None):
"""Widget constructor (existing module)"""
if not parent:
self.parent = slicer.qMRMLWidget()
self.parent.setLayout(qt.QVBoxLayout())
self.parent.setMRMLScene(slicer.mrmlScene)
else:
self.parent = parent
self.layout = self.parent.layout()
if not parent:
self.setup()
self.parent.show()
def setup(self):
"""Init the widget """
self.modulePath = SlicerUtil.getModuleFolder("CIP_GetImage")
self.resourcesPath = os.path.join(self.modulePath, "CIP_GetImage_Resources")
self.StudyId = ""
self.logic = CIP_GetImageLogic(self.modulePath)
# Widget to load cases faster
self.loadSaveDatabuttonsWidget = CIPUI.LoadSaveDataWidget(parentWidget=self.parent)
self.loadSaveDatabuttonsWidget.setup(moduleName="CIP_GetImage")
#
# Obligatory parameters area
#
parametersCollapsibleButton = ctk.ctkCollapsibleButton()
parametersCollapsibleButton.text = "Image data"
self.layout.addWidget(parametersCollapsibleButton)
parametersFormLayout = qt.QFormLayout(parametersCollapsibleButton)
# Study radio buttons
label = qt.QLabel()
label.text = "Select the study:"
parametersFormLayout.addRow(label)
self.rbgStudy=qt.QButtonGroup()
for key in self.studyIds:
rbStudyid = qt.QRadioButton(key)
self.rbgStudy.addButton(rbStudyid)
parametersFormLayout.addWidget(rbStudyid)
self.txtOtherStudy = qt.QLineEdit()
self.txtOtherStudy.hide()
parametersFormLayout.addWidget(self.txtOtherStudy)
# Case id
self.txtCaseId = qt.QLineEdit()
parametersFormLayout.addRow("Case ID ", self.txtCaseId)
# Image types
label = qt.QLabel()
label.text = "Select the images that you want to load:"
parametersFormLayout.addRow(label)
self.cbsImageTypes = []
for key in self.imageTypes:
check = qt.QCheckBox()
check.checked = True
check.setText(key)
parametersFormLayout.addWidget(check)
self.cbsImageTypes.append(check)
# Label maps
label = qt.QLabel()
label.text = "Select the label maps that you want to load:"
parametersFormLayout.addRow(label)
# Labelmap types checkboxes
self.cbsLabelMapTypes = []
for key in self.labelMapTypes:
check = qt.QCheckBox()
check.setText(key)
check.checked = self.labelMapTypes[key][0]
parametersFormLayout.addWidget(check)
self.cbsLabelMapTypes.append(check)
# Load image Button
self.downloadButton = qt.QPushButton("Download")
self.downloadButton.toolTip = "Load the image"
#self.downloadButton.enabled = False
self.downloadButton.setStyleSheet("background-color: green; font-weight:bold; color:white" )
parametersFormLayout.addRow(self.downloadButton)
self.downloadButton.connect('clicked (bool)', self.onDownloadButton)
# Information message
self.lblDownloading = qt.QLabel()
self.lblDownloading.text = "Downloading images. Please wait..."
self.lblDownloading.hide()
parametersFormLayout.addRow(self.lblDownloading)
#
# Optional Parameters
#
optionalParametersCollapsibleButton = ctk.ctkCollapsibleButton()
optionalParametersCollapsibleButton.text = "Optional parameters"
self.layout.addWidget(optionalParametersCollapsibleButton)
optionalParametersFormLayout = qt.QFormLayout(optionalParametersCollapsibleButton)
# Local storage (Slicer temporary path)
self.localStoragePath = "{0}/CIP".format(slicer.app.temporaryPath)
if not os.path.exists(self.localStoragePath):
os.makedirs(self.localStoragePath)
# Make sure that everybody has write permissions (sometimes there are problems because of umask)
os.chmod(self.localStoragePath, 0o777)
self.storagePathButton = ctk.ctkDirectoryButton()
self.storagePathButton.directory = self.localStoragePath
optionalParametersFormLayout.addRow("Local directory: ", self.storagePathButton)
# Connection type (SSH, "normal")
label = qt.QLabel()
label.text = "Connection type:"
optionalParametersFormLayout.addRow(label)
self.rbgConnectionType=qt.QButtonGroup()
self.rbSSH = qt.QRadioButton("SSH (secure connection)")
self.rbSSH.setChecked(True)
self.rbgConnectionType.addButton(self.rbSSH)
optionalParametersFormLayout.addWidget(self.rbSSH)
self.rbCP = qt.QRadioButton("Common")
self.rbgConnectionType.addButton(self.rbCP)
optionalParametersFormLayout.addWidget(self.rbCP)
# SSH Server login
self.txtServer = qt.QLineEdit()
s = SlicerUtil.settingGetOrSetDefault("CIP_GetImage", "server", "This is your ssh user and server. Example: [email protected]")
self.txtServer.text = s # This is your ssh user and server. Example: [email protected]"
optionalParametersFormLayout.addRow("Server:", self.txtServer)
# Server root path
self.txtServerpath = qt.QLineEdit()
s = SlicerUtil.settingGetOrSetDefault("CIP_GetImage", "serverRootPath", "This is your root path to search for files. Ex: /Cases/Processed")
self.txtServerpath.text = s # This is your root path to search for files. Ex: /Cases/Processed
optionalParametersFormLayout.addRow("Server root path:", self.txtServerpath)
# SSH Private key
self.txtPrivateKeySSH = qt.QLineEdit()
s = SlicerUtil.settingGetOrSetDefault("CIP_GetImage", "sshKey", "")
self.txtPrivateKeySSH.text = s # this is the full path to your ssh key if you need it. Be aware of Unix/Windows compatibility (hint: use os.path.join)
# Please notice that you won't need a SSH key if your computer already has one locally installed"
optionalParametersFormLayout.addRow("SSH private key (leave blank for computer's default): ", self.txtPrivateKeySSH)
# Cache mode
self.cbCacheMode = qt.QCheckBox("Cache mode activated")
self.cbCacheMode.setChecked(True) # Cache mode is activated by default
optionalParametersFormLayout.addRow("", self.cbCacheMode)
# Clean cache Button
self.cleanCacheButton = qt.QPushButton("Clean cache")
self.cleanCacheButton.toolTip = "Remove all the local cached files"
optionalParametersFormLayout.addRow(self.cleanCacheButton)
optionalParametersCollapsibleButton.collapsed = True
if SlicerUtil.IsDevelopment:
# reload button
self.reloadButton = qt.QPushButton("Reload (just development)")
self.reloadButton.toolTip = "Reload this module (for development purposes)."
self.reloadButton.name = "Reload"
self.layout.addWidget(self.reloadButton)
self.reloadButton.connect('clicked()', self.onReload)
# Add vertical spacer
self.layout.addStretch(1)
# Connections
self.rbgStudy.connect("buttonClicked (QAbstractButton*)", self.onRbStudyClicked)
self.txtOtherStudy.connect("textEdited (QString)", self.onTxtOtherStudyEdited)
self.rbgConnectionType.connect("buttonClicked (QAbstractButton*)", self.onRbgConnectionType)
self.storagePathButton.connect("directorySelected(QString)", self.onTmpDirChanged)
self.cleanCacheButton.connect('clicked (bool)', self.onCleanCacheButtonClicked)
def saveSettings(self):
"""Save the current values in settings to reuse it in future sessions"""
SlicerUtil.setSetting("CIP_GetImage", "sshKey", self.txtPrivateKeySSH.text)
SlicerUtil.setSetting("CIP_GetImage", "server", self.txtServer.text)
SlicerUtil.setSetting("CIP_GetImage", "serverRootPath", self.txtServerpath.text)
def cleanup(self):
self.saveSettings()
#
# Events handling
#
def onDownloadButton(self):
"""Click in download button"""
# Check if there is a Study and Case introduced
self.CaseId = self.txtCaseId.text.strip()
if self.CaseId and self.StudyId:
self.lblDownloading.show()
slicer.app.processEvents()
# Get the selected image types and label maps
imageTypes = [self.imageTypes[cb.text] for cb in [check for check in self.cbsImageTypes if check.isChecked()]]
labelMapExtensions = [self.labelMapTypes[cb.text] for cb in [check for check in self.cbsLabelMapTypes if check.isChecked()]]
result = self.logic.loadCase(self.txtServer.text, self.txtServerpath.text, self.StudyId, self.txtCaseId.text, imageTypes, labelMapExtensions, self.localStoragePath, self.cbCacheMode.checkState(), self.rbSSH.isChecked(), self.txtPrivateKeySSH.text)
self.lblDownloading.hide()
if (result == Util.ERROR):
self.msgBox = qt.QMessageBox(qt.QMessageBox.Warning, 'Error', "There was an error when downloading some of the images of this case. It is possible that some of the selected images were not available on the server. Please review the log console for more details.\nSuggested actions:\n-Empty cache\n-Restart Slicer")
self.msgBox.show()
else:
# Show info message
self.msgBox = qt.QMessageBox(qt.QMessageBox.Information, 'Attention', "Please make sure that you have selected a study and a case")
self.msgBox.show()
def onRbStudyClicked(self, button):
"""Study radio buttons clicked (any of them)"""
self.StudyId = self.studyIds[button.text]
self.txtOtherStudy.visible = (button.text == "Other")
if (self.txtOtherStudy.visible):
self.StudyId = self.txtOtherStudy.text.strip()
#self.checkDownloadButtonEnabled()
def onRbgConnectionType(self, button):
self.txtServer.enabled = self.txtPrivateKeySSH.enabled = self.rbSSH.isChecked()
#self.txtPrivateKeySSH.enabled = self.rbSSH.checked
def onTxtOtherStudyEdited(self, text):
"""Any letter typed in "Other study" text box """
self.StudyId = text
#self.checkDownloadButtonEnabled()
def onCleanCacheButtonClicked(self):
"""Clean cache button clicked. Remove all the files in the current local storage path directory"""
import shutil
# Remove directory
shutil.rmtree(self.localStoragePath, ignore_errors=True)
# Recreate it (this is a safe method for symbolic links)
os.makedirs(self.localStoragePath)
# Make sure that everybody has write permissions (sometimes there are problems because of umask)
os.chmod(self.localStoragePath, 0o777)
def onTmpDirChanged(self, d):
print(("Temp dir changed. New dir: " + d))
self.localStoragePath = d
def onReload(self, moduleName="CIP_GetImage"):
"""Reload the module. Just for development purposes. This is a combination of the old and new style in modules writing"""
try:
slicer.util.reloadScriptedModule(moduleName)
except:
#Generic reload method for any scripted module.
#ModuleWizard will substitute correct default moduleName.
import imp, sys
widgetName = moduleName + "Widget"
# reload the source code
# - set source file path
# - load the module to the global space
filePath = eval('slicer.modules.%s.path' % moduleName.lower())
p = os.path.dirname(filePath)
if not sys.path.__contains__(p):
sys.path.insert(0,p)
fp = open(filePath, "r")
globals()[moduleName] = imp.load_module(
moduleName, fp, filePath, ('.py', 'r', imp.PY_SOURCE))
fp.close()
# rebuild the widget
# - find and hide the existing widget
# - create a new widget in the existing parent
# parent = slicer.util.findChildren(name='%s Reload' % moduleName)[0].parent()
parent = self.parent
for child in parent.children():
try:
child.hide()
except AttributeError:
pass
globals()[widgetName.lower()] = eval(
'globals()["%s"].%s(parent)' % (moduleName, widgetName))
globals()[widgetName.lower()].setup()
#
# CIP_GetImageLogic
# This class makes all the operations not related with the user interface (download and handle volumes, etc.)
#
class CIP_GetImageLogic:
def __init__(self, modulePath):
"""Constructor. Adapt the module full path to windows convention when necessary"""
#ScriptedLoadableModuleLogic.__init__(self)
self.modulePath = modulePath
def loadCase(self, server, serverPath, studyId, caseId, imageTypesExtensions, labelMapExtensions, localStoragePath, cacheOn, sshMode, privateKeySSH):
"""Load all the asked images for a case: main images and label maps.
Arguments:
- server -- User and name of the host. Default: [email protected]
- serverPath -- Root path for all the cases. Default: /mad/store-replicated/clients/copd/Processed
- studyId -- Code of the study. Ex: COPDGene
- caseId -- Case id (NOT patient! It will be extracted from here). Example: 12257B_INSP_STD_UIA_COPD
- imageTypesExtensions -- Extensions of the images that must be appended before 'nrrd' in the filename. Default is blank
- labelMapExtensions -- Extensions that must be appended to the file name to find the labelmap. Ex: _partialLungLabelMap
- localStoragePath -- Local folder where all the images will be downloaded
- cacheOn -- When True, the images are not downloaded if they already exist in local
- privateKeySSH -- Full path to the file that contains the private key used to connect with SSH to the server
Returns OK or ERROR
"""
try:
# Extract Patient Id
patientId = caseId.split('_')[0]
for ext in imageTypesExtensions:
locPath = self.downloadNrrdFile(server, serverPath, studyId, patientId, caseId, ext, localStoragePath, cacheOn, sshMode, privateKeySSH)
if (SlicerUtil.IsDevelopment): print("Loading volume stored in " + locPath)
slicer.util.loadVolume(locPath)
for ext in labelMapExtensions:
locPath = self.downloadNrrdFile(server, serverPath, studyId, patientId, caseId, ext[1], localStoragePath, cacheOn, sshMode, privateKeySSH)
if (SlicerUtil.IsDevelopment): print("Loading label map stored in " + locPath)
(code, vtkLabelmapVolumeNode) = slicer.util.loadLabelVolume(locPath, {}, returnNode=True) # Braces are needed for Windows compatibility... No comments...
return Util.OK
except Exception as exception:
print(exception)
return Util.ERROR
def mustSplit(self, labelMapStructure):
return labelMapStructure[3] is not None
def downloadNrrdFile(self, server, serverPath, studyId, patientId, caseId, ext, localStoragePath, cacheOn, sshMode=True, privateKeySSH=None):
"""Download Header and Raw data in a Nrrd file.
Returns the full local path for the nhdr file (header)
"""
localFile = "{0}/{1}{2}.nhdr".format(localStoragePath, caseId, ext)
# If cache mode is not activated or the file does not exist locally, proceed to download
if (not cacheOn or not os.path.isfile(localFile)):
error = False
try:
if os.path.isfile(localFile):
# Delete file previously to avoid confirmation messages
print("Remove cached files: " + localFile)
try:
os.remove(localFile)
os.remove("{0}/{1}{2}.raw.gz".format(localStoragePath, caseId, ext))
except:
print("Error when deleting local files ({0})".format(localFile))
# Make sure that the ssh key does not have too-open permissions if it is used (otherwise scp will return an error)
if privateKeySSH:
os.chmod(privateKeySSH, 0o600)
# Download header
if (os.sys.platform == "win32"):
localStoragePath = localStoragePath.replace('/', '\\') + '\\'
if sshMode:
if privateKeySSH:
privateKeyCommand = "-privatekey={0}".format(privateKeySSH)
else:
privateKeyCommand = ""
params = [("%s\\CIP_GetImage_Resources\\WinSCP.com" % self.modulePath) ,"/command", 'open {0} {1}'.format(server, privateKeyCommand), \
'get {0}/{1}/{2}/{3}/{3}{4}.nhdr {5}'.format(serverPath, studyId, patientId, caseId, ext, localStoragePath), "exit"]
else:
params = ['copy',"{0}\\{1}\\{2}\\{3}\\{3}{4}.nhdr".format(serverPath, studyId, patientId, caseId, ext), localStoragePath]
else:
# Unix
if sshMode:
keyCommand = ("-i %s " % privateKeySSH) if privateKeySSH else "" # Set a command if privateKeySsh has any value (non empty)
params = ['scp',"{0}{1}:{2}/{3}/{4}/{5}/{5}{6}.nhdr".format(keyCommand, server, serverPath, studyId, patientId, caseId, ext), localStoragePath]
else:
params = ['cp',"{0}/{1}/{2}/{3}/{3}{4}.nhdr".format(serverPath, studyId, patientId, caseId, ext), localStoragePath]
fullStrCommand = " ".join(params)
(result, output, error) = self.executeDownloadCommand(params)
if (result == Util.ERROR):
print("Error when executing download command. Params:")
print(params)
if error is None:
error = "Unknown error"
raise Exception(error)
# Download raw data (just update a parameter)
if (os.sys.platform == "win32"):
if sshMode: paramToModify = 3
else: paramToModify = 1
else:
# Unix
paramToModify = 1
# Replace the name of the parameter
params[paramToModify] = params[paramToModify].replace(".nhdr", ".raw.gz")
# Download the raw data
(result, output, error) = self.executeDownloadCommand(params)
if (result == Util.ERROR):
print ("Error when executing download command. Params:")
print (params)
if error is None:
error = "Unknown error"
raise Exception(error)
# If everything goes well, check the path of the Nrrd file to verify that the files have been correctly downloaded
missingFiles = ""
if not os.path.isfile(localFile):
missingFiles = missingFiles + localFile + ";"
if not os.path.isfile(localFile.replace(".nhdr", ".raw.gz")):
missingFiles = missingFiles + localFile.replace(".nhdr", ".raw.gz") + ";"
if missingFiles:
raise Exception("The download command did not return any error message, but the following files have not been downloaded: " + missingFiles)
except Exception as ex:
# There was en error in the preferred method. If we are in a Unix system, we will try the backup method
if os.sys.platform != "win32":
print(("There was an error when downloading some of the files: " + error))
print("Trying alternative method...")
self.executeDownloadCommand_Backup(fullStrCommand)
# If everything goes well, check the path of the Nrrd file to verify that the files have been correctly downloaded
missingFiles = ""
if not os.path.isfile(localFile): missingFiles = missingFiles + localFile + ";"
if not os.path.isfile(localFile.replace(".nhdr", ".raw.gz")): missingFiles = missingFiles + localFile.replace(".nhdr", ".raw.gz") + ";"
if missingFiles:
raise Exception("After a second attempt, the following files have not been downloaded: " + missingFiles)
print("Apparently it worked!")
else:
raise ex
else:
print("File {0} already cached".format(localFile))
# Return path to the Nrrd header file
return localFile
def executeDownloadCommand(self, params):
"""Execute a command to download fisically the file. It will be different depending on the current platform.
In Unix, we will use the "scp" command.
In Windows, we will use WinSCP tool (attached to the module in "Resources" folder)
It returns a tuple: OK/ERROR, StandardOutput, ErrorMessage"""
if SlicerUtil.IsDevelopment:
print ("Attempt to download with these params:")
print (params)
try:
out = err = None
if (os.sys.platform == "win32"):
# Hide console window
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
proc = subprocess.Popen(params, stdout=subprocess.PIPE, stderr=subprocess.PIPE, startupinfo=startupinfo)
print ("Launch process")
# Launch the process
(out, err) = proc.communicate()
print("End of process")
else:
# Preferred method.
proc = subprocess.Popen(params, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Launch the process
(out, err) = proc.communicate()
if SlicerUtil.IsDevelopment:
print("Out: " + out)
print("Err:" + err)
if err:
print("Error returned by system process: " + err)
except Exception as ex:
print("FATAL ERROR IN COPY PROCESS:")
print(ex)
# Fatal error
return (Util.ERROR, out, err)
# In Unix sometimes if there is some error, stderr will contain some value
if err:
return (Util.ERROR, out, err) # ERROR!
## Everything ok
return (Util.OK, out, err)
def executeDownloadCommand_Backup(self, command):
"""Backup function that will be used when the preferred method fails"""
subprocess.check_call(command, shell=True)
subprocess.check_call(command.replace(".nhdr", ".raw.gz"), shell=True)
|
py | 7df971d39788d10a3158175dcd5bbf0b7c51c332 | # Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from datadog_api_client.v1.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
)
def lazy_import():
from datadog_api_client.v1.model.synthetics_device_id import SyntheticsDeviceID
from datadog_api_client.v1.model.synthetics_test_options_monitor_options import SyntheticsTestOptionsMonitorOptions
from datadog_api_client.v1.model.synthetics_test_options_retry import SyntheticsTestOptionsRetry
globals()["SyntheticsDeviceID"] = SyntheticsDeviceID
globals()["SyntheticsTestOptionsMonitorOptions"] = SyntheticsTestOptionsMonitorOptions
globals()["SyntheticsTestOptionsRetry"] = SyntheticsTestOptionsRetry
class SyntheticsTestOptions(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {}
validations = {
("monitor_priority",): {
"inclusive_maximum": 5,
"inclusive_minimum": 1,
},
("tick_every",): {
"inclusive_maximum": 604800,
"inclusive_minimum": 30,
},
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
"accept_self_signed": (bool,), # noqa: E501
"allow_insecure": (bool,), # noqa: E501
"device_ids": ([SyntheticsDeviceID],), # noqa: E501
"disable_cors": (bool,), # noqa: E501
"follow_redirects": (bool,), # noqa: E501
"min_failure_duration": (int,), # noqa: E501
"min_location_failed": (int,), # noqa: E501
"monitor_name": (str,), # noqa: E501
"monitor_options": (SyntheticsTestOptionsMonitorOptions,), # noqa: E501
"monitor_priority": (int,), # noqa: E501
"no_screenshot": (bool,), # noqa: E501
"retry": (SyntheticsTestOptionsRetry,), # noqa: E501
"tick_every": (int,), # noqa: E501
}
discriminator = None
attribute_map = {
"accept_self_signed": "accept_self_signed", # noqa: E501
"allow_insecure": "allow_insecure", # noqa: E501
"device_ids": "device_ids", # noqa: E501
"disable_cors": "disableCors", # noqa: E501
"follow_redirects": "follow_redirects", # noqa: E501
"min_failure_duration": "min_failure_duration", # noqa: E501
"min_location_failed": "min_location_failed", # noqa: E501
"monitor_name": "monitor_name", # noqa: E501
"monitor_options": "monitor_options", # noqa: E501
"monitor_priority": "monitor_priority", # noqa: E501
"no_screenshot": "noScreenshot", # noqa: E501
"retry": "retry", # noqa: E501
"tick_every": "tick_every", # noqa: E501
}
read_only_vars = {}
_composed_schemas = {}
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""SyntheticsTestOptions - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
accept_self_signed (bool): For SSL test, whether or not the test should allow self signed certificates.. [optional] # noqa: E501
allow_insecure (bool): Allows loading insecure content for an HTTP request.. [optional] # noqa: E501
device_ids ([SyntheticsDeviceID]): For browser test, array with the different device IDs used to run the test.. [optional] # noqa: E501
disable_cors (bool): Whether or not to disable CORS mechanism.. [optional] # noqa: E501
follow_redirects (bool): For API HTTP test, whether or not the test should follow redirects.. [optional] # noqa: E501
min_failure_duration (int): Minimum amount of time in failure required to trigger an alert.. [optional] # noqa: E501
min_location_failed (int): Minimum number of locations in failure required to trigger an alert.. [optional] # noqa: E501
monitor_name (str): The monitor name is used for the alert title as well as for all monitor dashboard widgets and SLOs.. [optional] # noqa: E501
monitor_options (SyntheticsTestOptionsMonitorOptions): [optional] # noqa: E501
monitor_priority (int): Integer from 1 (high) to 5 (low) indicating alert severity.. [optional] # noqa: E501
no_screenshot (bool): Prevents saving screenshots of the steps.. [optional] # noqa: E501
retry (SyntheticsTestOptionsRetry): [optional] # noqa: E501
tick_every (int): The frequency at which to run the Synthetic test (in seconds).. [optional] # noqa: E501
"""
super().__init__(kwargs)
self._check_pos_args(args)
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""Helper creating a new instance from a response."""
self = super(SyntheticsTestOptions, cls)._from_openapi_data(kwargs)
self._check_pos_args(args)
return self
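# Illustrative construction (a sketch; the field values are assumptions that
# merely respect the documented validation bounds):
#
#     options = SyntheticsTestOptions(tick_every=300, monitor_priority=3)
#     assert options.tick_every == 300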
|
py | 7df971fd0299625e8fec90dc1f5dfa5f288d4a8f | import pytest
from thedarn.rules.az_cli import match, get_new_command
from thedarn.types import Command
no_suggestions = '''\
az provider: error: the following arguments are required: _subcommand
usage: az provider [-h] {list,show,register,unregister,operation} ...
'''
misspelled_command = '''\
az: 'providers' is not in the 'az' command group. See 'az --help'.
The most similar choice to 'providers' is:
provider
'''
misspelled_subcommand = '''\
az provider: 'lis' is not in the 'az provider' command group. See 'az provider --help'.
The most similar choice to 'lis' is:
list
'''
@pytest.mark.parametrize('command', [
Command('az providers', misspelled_command),
Command('az provider lis', misspelled_subcommand)])
def test_match(command):
assert match(command)
def test_not_match():
assert not match(Command('az provider', no_suggestions))
@pytest.mark.parametrize('command, result', [
(Command('az providers list', misspelled_command), ['az provider list']),
(Command('az provider lis', misspelled_subcommand), ['az provider list'])
])
def test_get_new_command(command, result):
assert get_new_command(command) == result
|
py | 7df972151c7ca3053e4b8ab364ad88f4fa6bce74 | """Provides device conditions for switches."""
from typing import List
import voluptuous as vol
from homeassistant.core import HomeAssistant
from homeassistant.components.device_automation import toggle_entity
from homeassistant.const import CONF_DOMAIN
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers.condition import ConditionCheckerType
from . import DOMAIN
CONDITION_SCHEMA = toggle_entity.CONDITION_SCHEMA.extend(
{vol.Required(CONF_DOMAIN): DOMAIN}
)
def async_condition_from_config(
config: ConfigType, config_validation: bool
) -> ConditionCheckerType:
"""Evaluate state based on configuration."""
if config_validation:
config = CONDITION_SCHEMA(config)
return toggle_entity.async_condition_from_config(config)
async def async_get_conditions(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device conditions."""
return await toggle_entity.async_get_conditions(hass, device_id, DOMAIN)
async def async_get_condition_capabilities(hass: HomeAssistant, config: dict) -> dict:
"""List condition capabilities."""
return await toggle_entity.async_get_condition_capabilities(hass, config)
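# Illustrative condition config consumed by async_condition_from_config
# (device_id/entity_id values are assumed; toggle_entity supports the
# "is_on"/"is_off" condition types):
# {"condition": "device", "domain": "switch", "device_id": "abc123",
#  "entity_id": "switch.kitchen", "type": "is_on"}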
|
py | 7df9729662d91f5acb3afd329f3f7b1ba5955721 | def countto0(x):
while x > 0:
print x
x -= 1
while x < 0:
print x
x += 1
def gettheno():
x = int(raw_input("From what number do you want to count to zero?"))
countto0(x)
#gettheno()
#-------------------------------------------------------------------------------
def countfromxtoy(x, y):
while x >= y:
print x
x -= 1
while x <= y:
print x
x += 1
#countfromxtoy(20, 10)
#-------------------------------------------------------------------------------
def addoddnoto0(x):
sumofx = 0
if x > 0:
while x > 0:
if x % 2 != 0:
sumofx += x
x -= 1
elif x < 0:
while x < 0:
if x % 2 != 0:
sumofx += x
x += 1
print sumofx
#addoddnoto0(-20)
#-------------------------------------------------------------------------------
def dots(w, h):
out = ""
x = 0
while x < h:
out += "."
x += 1
return out
print dots(15, 10)
|
py | 7df9734914867bec41247a1c0cb0183fc9e0d1f9 | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 3.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve(strict=True).parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '7ka8jr8^opxz2qjth27e)@vm9rnx1khw3fo-%)uargq_yz1vx_'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'polls.apps.PollsConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / 'templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/London' # 'UTC', 'Europe/London'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
|
py | 7df97383da1fb5859030c8afb292f472a23e2dc8 | _base_ = "./FlowNet512_1.5AugCosyAAEGray_AggressiveV2_Flat_Pbr_01_02MasterChefCan_bop_test.py"
OUTPUT_DIR = "output/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveV2_Flat_ycbvPbr_SO/09_10PottedMeatCan"
DATASETS = dict(TRAIN=("ycbv_010_potted_meat_can_train_pbr",))
|
py | 7df974397afcb7ed396cc963804e34117893297d | #!/usr/bin/python2
""" DEVICE CONFIGURATION FILES """
__all__ = ['options_snmp', 'options_config', 'options_database', 'options_threads']
# Import required python libraries
import os, sys
import time
from shutil import copyfile
from subprocess import Popen, PIPE
from stat import ST_MTIME
from utils import eprint, sprint, str_to_bool, ConfigFileError, DiffProcedureError
compare = str_to_bool(os.getenv('CONFIG_COMPARE'))
""" generate diff file for configurations in one folder """
no_duplicates = str_to_bool(os.getenv('CONFIG_NODUP'))
""" delete new configurations without changes """
folder = os.getenv('CONFIG_FOLDER')
""" folder to store device configurations in 'format' macro, fields syslog, l0ip """
file = os.getenv('CONFIG_FILE')
""" device configuration names in 'format' macro, fields syslog, l0ip, n """
diff = os.getenv('CONFIG_DIFF')
""" device configuration different file name in 'format' macro, fields syslog, l0ip """
_diff_cmp = ['/usr/bin/diff', '-qs']
_diff_out = ['/usr/bin/diff', os.getenv('CONFIG_DIFF_OPT')]
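# Hypothetical example values for the format macros above (real values
# come from the environment; 'syslog' is the parsed message dict):
#   CONFIG_FOLDER = '/var/backup/{l0ip}'
#   CONFIG_FILE   = '{syslog[SOURCEIP]}.{n}.cfg'
#   CONFIG_DIFF   = '{syslog[SOURCEIP]}.diff'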
def do_save(src, msg, opt):
l0ip = get_l0ip(msg['SOURCEIP'])
dst_folder = get_config_folder(msg, l0ip=l0ip)
if not os.path.exists(dst_folder):
os.makedirs(dst_folder)
os.chmod(dst_folder, 0o775)
os.chown(dst_folder, -1, 4)
dst = get_config_path(msg, config_folder=dst_folder, l0ip=l0ip)
if no_duplicates:
file_first, file_last = get_first_last(dst_folder)
if file_first is not None:
if file_last is None:
file_test = file_first
else:
file_test = file_last
if same(file_test, src, opt):
sprint("backup", msg['SOURCEIP'], "complete, configs are same")
return False
sprint("backup", msg['SOURCEIP'], 'complete', dst)
copyfile(src, dst)
os.chmod(dst, 0o664)
os.chown(dst, -1, 4)
return True
def do_compare(msg, opt=[]):
l0ip = get_l0ip(msg['SOURCEIP'])
diff_folder = folder.format(syslog=msg, l0ip=l0ip)
diff_path = get_diff_path(msg, config_folder=diff_folder, l0ip=l0ip)
file_first, file_last = get_first_last(diff_folder)
if (file_last is None):
return False
elif same(file_first, file_last, opt):
if os.path.exists(diff_path):
os.remove(diff_path)
return False
diff_cmd = _diff_out + opt + [file_first, file_last]
sprint("Generate difference", diff_path)
with open(diff_path, 'w') as df:
diff = Popen(diff_cmd, stdout = df)
diff.communicate()
if diff.returncode > 1:
raise DiffProcedureError(' '.join(diff_cmd))
os.chmod(diff_path, 0o664)
os.chown(diff_path, -1, 4)
return True
def get_first_last(folder):
files = os.listdir(folder)
if len(files) == 0:
return None, None
elif len(files) == 1:
return os.path.join(folder, files[0]), None
file_first = file_last = os.path.join(folder, files[0])
time_first = time_last = os.stat(file_first)[ST_MTIME]
paths = (os.path.join(folder, fn) for fn in files)
items = ((os.stat(path)[ST_MTIME], path) for path in paths)
for st, fp in items:
if st < time_first:
time_first = st
file_first = fp
elif st > time_last:
time_last = st
file_last = fp
return file_first, file_last
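# Usage sketch (hypothetical path): picks the oldest and newest files
# in a folder by modification time, e.g.
#   first, last = get_first_last('/var/backup/010.020.030.040')
# Returns (None, None) for an empty folder and (path, None) when the
# folder holds exactly one file.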
def same(first, second, opt=[]):
diff_cmd = _diff_cmp + opt + [first, second]
diff = Popen(diff_cmd, stdout=PIPE)
diff.communicate()
if diff.returncode < 2:
return diff.returncode == 0
raise DiffProcedureError(' '.join(diff_cmd))
def get_l0ip(ip):
return "{0[0]:0>3}.{0[1]:0>3}.{0[2]:0>3}.{0[3]:0>3}".format(ip.split('.'))
def get_config_folder(msg, l0ip=None):
if l0ip is None:
l0ip = get_l0ip(msg['SOURCEIP'])
return folder.format(syslog=msg, l0ip=l0ip)
def get_config_path(msg, config_folder=None, l0ip=None):
if l0ip is None:
l0ip = get_l0ip(msg['SOURCEIP'])
if config_folder is None:
config_folder = get_config_folder(msg, l0ip=l0ip)
n = 0
config_path = os.path.join(config_folder, file.format(syslog=msg, l0ip=l0ip, n=n))
start = time.time()
while os.path.exists(config_path):
n += 1
if n == sys.maxsize:
raise ConfigFileError("n value too large '{}'".format(n))
sec = time.time() - start
if sec > 10:
raise ConfigFileError("file name generation too long {} sec".format(sec))
config_path = os.path.join(config_folder, file.format(syslog=msg, l0ip=l0ip, n=n))
return config_path
def get_diff_path(msg, config_folder=None, l0ip=None):
if l0ip is None:
l0ip = get_l0ip(msg['SOURCEIP'])
if config_folder is None:
config_folder = get_config_folder(msg, l0ip=l0ip)
return os.path.join(config_folder, diff.format(syslog=msg, l0ip=l0ip))
|
py | 7df974afb018223df227840423fa4868d5e1a8e3 | import unittest
from dinosauron import dinosauron
from unittest.mock import create_autospec
class TestDinosauron(unittest.TestCase):
def setUp(self):
self.d = dinosauron.Dinosauron()
def test_dig_mdns(self):
mock = create_autospec(self.d.dig_mdns, return_value='')
args = '127.0.0.1'
mock(args)
mock.assert_called_once_with(args)
|
py | 7df975853d2ec7d43eb721a07fe8194c18d98f42 | from planer import tile, mapcoord
import planer as rt
import numpy as np
import scipy.ndimage as ndimg
root = '/'.join(__file__.split('\\')[:-1])+'/models'
def load(lang='ch'):
with open(root+('/ch', '/en')[lang=='en']+'_dict.txt', encoding='utf-8') as f:
globals()['lab_dic'] = np.array(f.read().split('\n') + [' '])
globals()['det_net'] = rt.InferenceSession(root+'/ppocr_mobilev2_det_%s.onnx'%lang)
globals()['rec_net'] = rt.InferenceSession(root+'/ppocr_mobilev2_rec_%s.onnx'%lang)
globals()['cls_net'] = rt.InferenceSession(root+'/ppocr_mobilev2_cls_all.onnx')
# get mask
@tile(glob=32)
def get_mask(img):
img = img[:,:,:3].astype('float32')/255
offset = [0.485, 0.456, 0.406]
offset = np.array(offset, dtype=np.float32)
img = img - offset[None,None,:]
img /= np.array([0.229, 0.224, 0.225])
img = img.transpose(2,0,1)[None,:]
return det_net.run(None, {'x':img})[0][0,0]
# find boxes from mask, and filter the bad boxes
def db_box(hot, thr=0.3, boxthr=0.7, sizethr=5, ratio=2):
lab, n = ndimg.label(hot > thr)
idx = np.arange(n) + 1
level = ndimg.mean(hot, lab, idx)
objs = ndimg.find_objects(lab, n)
boxes = []
for i, l, sli in zip(idx, level, objs):
if l < boxthr: continue
rcs = np.array(np.where(lab[sli]==i)).T
if rcs.shape[0] < sizethr**2: continue
o = rcs.mean(axis=0); rcs = rcs - o
vs, ds = np.linalg.eig(np.cov(rcs.T))
if vs[0]>vs[1]:
vs, ds = vs[[1,0]], ds[:,[1,0]]
if ds[0,1]<0: ds[:,1] *= -1
if np.cross(ds[:,0], ds[:,1])>0:
ds[:,0] *= -1
mar = vs.min() ** 0.5 * ratio * 2
rcs = np.linalg.inv(ds) @ rcs.T
minr, minc = rcs.min(axis=1) - mar
maxr, maxc = rcs.max(axis=1) + mar
if rcs.ptp(axis=1).min()<sizethr: continue
rs = [minr,minc,minr,maxc,
maxr,maxc,maxr,minc]
rec = ds @ np.array(rs).reshape(-1,2,1)
o += sli[0].start, sli[1].start
rec = rec.reshape(4,2) + o
first = np.argmin(rec.sum(axis=1))
if vs[1]/vs[0]>2 and first%2==0: first+=1
boxes.append(rec[(np.arange(5)+first)%4])
return np.array(boxes)
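# In short: db_box labels connected regions of the probability map,
# drops weak (mean score < boxthr) and tiny (< sizethr^2 pixels) blobs,
# fits an oriented rectangle to each remaining blob via PCA (the
# eigenvectors of the pixel covariance give the box axes), pads it by a
# margin proportional to the short-axis spread (ratio), and rotates the
# corner order so the first point is nearest the top-left, returning
# closed 5-point boxes.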
# extract text image from given box
def extract(img, box, height=32):
h = ((box[1]-box[0])**2).sum()**0.5
w = ((box[2]-box[1])**2).sum()**0.5
h, w = height, int(height * w / h)
rr = box[[0,3,1,2],0].reshape(2,2)
cc = box[[0,3,1,2],1].reshape(2,2)
rcs = np.mgrid[0:1:h*1j, 0:1:w*1j]
r2 = mapcoord(rr, *rcs, backend=np)
c2 = mapcoord(cc, *rcs, backend=np)
return mapcoord(img, r2, c2, backend=np)
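# Geometry sketch (assuming mapcoord performs bilinear coordinate
# interpolation, as its use here suggests): rr/cc hold the box corners
# as 2x2 grids and mgrid yields normalized (0..1) row/column positions,
# so r2/c2 interpolate the quadrilateral's interior coordinates, which
# are then sampled from the image -- warping a possibly rotated text
# box into an upright h x w rectangle.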
# batch extract by boxes
def extracts(img, boxes, height=32, mwidth=0):
rst = []
for box in boxes:
temp = extract(img, box, height)
temp = temp.astype(np.float32)
temp /= 128; temp -= 1
rst.append(temp)
ws = np.array([i.shape[1] for i in rst])
maxw = max([i.shape[1] for i in rst])
for i in range(len(rst)):
mar = maxw - rst[i].shape[1] + 10
rst[i] = np.pad(rst[i], [(0,0),(0,mar),(0,0)])
if mwidth>0: rst[i] = rst[i][:,:mwidth]
return np.array(rst).transpose(0,3,1,2), ws
# direction fixed
def fixdir(img, boxes):
x, ws = extracts(img, boxes, 48, 256)
y = cls_net.run(None, {'x':x})[0]
dirs = np.argmax(y, axis=1)
prob = np.max(y, axis=1)
for b,d,p in zip(boxes, dirs, prob):
if d and p>0.9: b[:] = b[[2,3,0,1,2]]
return dirs, np.max(y, axis=1)
# decode
def ctc_decode(x, blank=10):
x, p = x.argmax(axis=1), x.max(axis=1)
if x.max()==0: return 'nothing', 0
sep = (np.diff(x, prepend=[-1]) != 0)
lut = np.where(sep)[0][np.cumsum(sep)-1]
cal = np.arange(len(lut)) - (lut-1)
msk = np.hstack((sep[1:], x[-1:]>0))
msk = (msk>0) & ((x>0) | (cal>blank))
cont = ''.join(lab_dic[x[msk]-1])
return cont, p[msk].mean()
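# Worked sketch of the greedy CTC collapse above (assumed shape:
# x is (frames, classes), class 0 is the blank):
#   probs = np.zeros((6, 4), dtype=np.float32)
#   probs[[0, 1], 1] = 1.0   # two frames of class 1 -> one character
#   probs[[2, 3], 0] = 1.0   # short blank run -> dropped
#   probs[[4, 5], 2] = 1.0   # two frames of class 2 -> one character
#   ctc_decode(probs)        # -> (lab_dic[0] + lab_dic[1], 1.0)
# A blank run longer than `blank` frames is kept instead of dropped and
# decodes to lab_dic[-1] -- the ' ' entry appended in load() -- so long
# gaps between words become spaces.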
# recognize and decode every tensor
def recognize(img, boxes):
x, ws = extracts(img, boxes, 32)
cls = ws // 256
rsts = ['nothing'] * len(boxes)
for level in range(cls.max()+1):
idx = np.where(cls==level)[0]
if len(idx)==0: continue
subx = x[idx,:,:,:(level+1) * 256]
y = rec_net.run(None, {'x':subx})[0]
for i in range(len(y)):
rsts[idx[i]] = ctc_decode(y[i])
return rsts
def ocr(img, autodir=False, thr=0.3, boxthr=0.7,
sizethr=5, ratio=1.5, prothr=0.6):
hot = get_mask(img)
boxes = db_box(hot, thr, boxthr, sizethr, ratio)
if autodir: fixdir(img, boxes)
box_cont = zip(boxes, recognize(img, boxes))
rst = [(b.tolist(), *sp) for b,sp in box_cont]
return [i for i in rst if i[2]>prothr]
def test():
import matplotlib.pyplot as plt
from imageio import imread
img = imread(root + '/card.jpg')[:,:,:3]
conts = ocr(img, autodir=True)
plt.rcParams['font.sans-serif'] = ['SimHei']
plt.rcParams['axes.unicode_minus'] = False
plt.imshow(img)
for b,s,p in conts:
b = np.array(b)
plt.plot(*b.T[::-1], 'blue')
plt.text(*b[0,::-1]-5, s, color='red')
plt.show()
if __name__ == '__main__':
import planer
model = planer.load(__name__)
model.load('en')
model.test()
|
py | 7df975b1bdf8ef4c02fd972cbeea7479aae72933 | # File: hackertarget_connector.py
#
# Copyright (c) 2016-2022 Splunk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions
# and limitations under the License.
#
#
# Phantom imports
import ipaddress
import re
import time
import phantom.app as phantom
import requests
import simplejson as json
from phantom.action_result import ActionResult
from phantom.base_connector import BaseConnector
# THIS Connector imports
from hackertarget_consts import *
class HackerTargetConnector(BaseConnector):
# actions supported by this script
ACTION_ID_TRACEROUTE_IP = "traceroute_ip"
ACTION_ID_TRACEROUTE_DOMAIN = "traceroute_domain"
ACTION_ID_PING_IP = "ping_ip"
ACTION_ID_PING_DOMAIN = "ping_domain"
ACTION_ID_REVERSE_IP = "reverse_ip"
ACTION_ID_REVERSE_DOMAIN = "reverse_domain"
ACTION_ID_WHOIS_IP = "whois_ip"
ACTION_ID_WHOIS_DOMAIN = "whois_domain"
ACTION_ID_GEOLOCATE_IP = "geolocate_ip"
ACTION_ID_GEOLOCATE_DOMAIN = "geolocate_domain"
ACTION_ID_GET_HEADERS = "get_headers"
ACTION_ID_GET_LINKS = "get_links"
def __init__(self):
self.__id_to_name = {}
# Call the BaseConnectors init first
super(HackerTargetConnector, self).__init__()
def initialize(self):
""" Called once for every action, all member initializations occur here"""
config = self.get_config()
# Get the Base URL from the asset config and so some cleanup
self._base_url = config.get('base_url', HACKERTARGET_BASE_URL)
if self._base_url.endswith('/'):
self._base_url = self._base_url[:-1]
self._api_key = config.get('api_key', None)
# The host member extracts the host from the URL and is used in creating status messages
self._host = self._base_url[self._base_url.find('//') + 2:]
# The headers, initialize them here once and use them for all other REST calls
self._headers = {'Accept': 'application/json'}
self.set_validator('ipv6', self._is_ip)
# The common part after the base url, but before the specific endpoint
# Initialized here and used on every REST endpoint call
self._api_uri = HACKERTARGET_BASE_API
if self._api_uri.endswith('/'):
self._api_uri = self._api_uri[:-1]
return phantom.APP_SUCCESS
def _is_ip(self, ip_address):
""" Function that checks given address and return True if address is valid IPv4 or IPV6 address.
:param ip_address: IP address
:return: status (success/failure)
"""
input_ip_address = ip_address
try:
ipaddress.ip_address(input_ip_address)
except Exception as ex:
self.debug_print("Exception occurred in is_ip: {}".format(ex))
return False
return True
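# e.g. _is_ip('8.8.8.8') and _is_ip('2001:db8::1') return True, while
# _is_ip('not-an-ip') returns False; ipaddress.ip_address accepts both
# IPv4 and IPv6 literals and raises ValueError otherwise.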
def _test_connectivity(self, param):
""" Function that handles the test connectivity action, it is much simpler than other action handlers."""
# Connectivity
self.save_progress(phantom.APP_PROG_CONNECTING_TO_ELLIPSES, self._host)
action_result = ActionResult()
# Make the rest endpoint call
ret_val = self._ping_host(param={"ip": "8.8.8.8"})
# Process errors
if phantom.is_fail(ret_val):
# Dump error messages in the log
self.debug_print(action_result.get_message())
# Append the message to display
self.append_to_message(ERR_CONNECTIVITY_TEST)
# return error
return phantom.APP_ERROR
# Set the status of the connector result
return self.set_status_save_progress(phantom.APP_SUCCESS, SUCC_CONNECTIVITY_TEST)
def _make_rest_call(self, endpoint, action_result, headers=None, params=None, data=None, method="get"):
""" Function that makes the REST call to the device, generic function that can be called from various action handlers"""
if headers is None:
headers = {}
if params is None:
params = {}
# Create the headers
headers.update(self._headers)
if method in ['put', 'post']:
headers.update({'Content-Type': 'application/json'})
if self._api_key is not None:
params.update({'apikey': self._api_key})
# get or post or put, whatever the caller asked us to use, if not specified the default will be 'get'
request_func = getattr(requests, method, None)
# handle the error in case the caller specified a non-existent method
if not request_func:
return action_result.set_status(phantom.APP_ERROR, ERR_API_UNSUPPORTED_METHOD, method=method), None
# Make the call
retry_count = MAX_TIMEOUT_DEF
success = False
while not success and (retry_count > 0):
try:
"""
The complete url is made up of the base_url, the api url and the endpoint
The data is converted to json string format if present, else just set to None
"""
r = request_func(self._base_url + self._api_uri + endpoint,
data=json.dumps(data) if data else None,
headers=headers, # The headers to send in the HTTP call
params=params) # uri parameters if any
except Exception as e:
return action_result.set_status(phantom.APP_ERROR, ERR_SERVER_CONNECTION + ":{}".format(str(e))), None
if r.status_code == 200:
success = True
else:
time.sleep(SLEEP_SECS)
retry_count -= 1
if phantom.is_fail(r.status_code) or r.text is False or r.text == HACKERTARGET_INVALID_KEY:
self.debug_print('FAILURE: Found in the app response.\nResponse: {}'.format(r.text))
return phantom.APP_ERROR, r.text
if r.text:
if HACKERTARGET_INPUT_INVALID.lower() in r.text.lower() \
or HACKERTARGET_NO_RESULTS.lower() in r.text.lower() \
or HACKERTARGET_FAIL_ERROR in r.text:
self.debug_print('FAILURE: Found in the app response.\nResponse: {}'.format(r.text))
return phantom.APP_SUCCESS, r.text
# Handle/process any errors that we get back from the device
if r.status_code == 200:
# Success
return phantom.APP_SUCCESS, r.text
# Handle any special HTTP error codes here, many devices return an HTTP error code like 204. The requests module treats these as error
if r.status_code == 404:
message = ERR_FROM_SERVER.format(status=r.status_code, detail=ERR_404_MSG)
return action_result.set_status(phantom.APP_ERROR, message), None
# Failure
return action_result.set_status(phantom.APP_ERROR, ERR_FROM_SERVER.format(status=r.status_code, detail=r.text.encode('utf-8'))), None
def _geolocate_domain(self, param):
""" Action handler for the '_ping_host' action"""
# This is an action that needs to be represented by the ActionResult object
# So create one and add it to 'self' (i.e. add it to the BaseConnector)
# When the action_result is created this way, the parameter is also passed.
# Other things like the summary, data and status is set later on.
action_result = self.add_action_result(ActionResult(dict(param)))
# Endpoint
endpoint = HACKERTARGET_GEOIP_URI
# Progress
self.save_progress(USING_BASE_URL, base_url=self._base_url, api_uri=self._api_uri, endpoint=endpoint)
# Connectivity
self.save_progress(phantom.APP_PROG_CONNECTING_TO_ELLIPSES, self._host)
# parameters here
# host - hostname; required.
if param.get('ip'):
request_params = {'q': param.get('ip')}
else:
request_params = {'q': param.get('domain')}
# Make the rest call, note that if we try for cached and its not there, it will automatically go to start a new analysis.
# unless specified start a new as above.
try:
ret_val, response = self._make_rest_call(endpoint, action_result, params=request_params)
if ret_val:
if 'error' in response: # summary has been set to error per rest pull code, exit with success
if param.get('domain'):
response = "Error: Invalid input. Enter the valid domain."
return action_result.set_status(phantom.APP_SUCCESS, response)
else:
response_data = {'raw': response}
response = response.split('\n')
for line in response:
linedata = (line.strip().split(':'))
if len(linedata) > 1:
if "state" in linedata[0].lower(): # make same as maxmind
response_data['state_name'] = linedata[1].strip()
elif "city" in linedata[0].lower():
response_data['city_name'] = linedata[1].strip()
elif "country" in linedata[0].lower():
response_data['country_name'] = linedata[1].strip()
elif "ip" in linedata[0].lower():
response_data['ip'] = linedata[1].strip()
elif "latitude" in linedata[0].lower():
response_data['latitude'] = linedata[1].strip()
elif "longitude" in linedata[0].lower():
response_data['longitude'] = linedata[1].strip()
else:
response_data[linedata[0].strip().lower().replace(' ', '_')] = linedata[1].strip()
# Set the summary and response data
action_result.add_data(response_data)
action_result.set_summary({'latitude': response_data['latitude'], 'longitude': response_data['longitude']})
# Set the Status
return action_result.set_status(phantom.APP_SUCCESS)
else:
return action_result.set_status(phantom.APP_ERROR, response)
except Exception as e:
return action_result.set_status(phantom.APP_ERROR, "Unable to execute geolocate domain. Error:{0}".format(e)), None
def _reverse_domain(self, param):
""" Action handler for the '_reverse_domain' action"""
# This is an action that needs to be represented by the ActionResult object
# So create one and add it to 'self' (i.e. add it to the BaseConnector)
# When the action_result is created this way, the parameter is also passed.
# Other things like the summary, data and status is set later on.
action_result = self.add_action_result(ActionResult(dict(param)))
# parameters here
# host - hostname; required.
request_params = {'q': param.get('domain')}
endpoint = HACKERTARGET_REVERSEDNS_URI
# Progress
self.save_progress(USING_BASE_URL, base_url=self._base_url, api_uri=self._api_uri, endpoint=endpoint)
# Connectivity
self.save_progress(phantom.APP_PROG_CONNECTING_TO_ELLIPSES, self._host)
# Make the rest call, note that if we try for cached and its not there, it will automatically go to start a new analysis.
# unless specified start a new as above.
ret_val, response = self._make_rest_call(endpoint, action_result, params=request_params)
if ret_val:
error = False
for err in API_ERRORS:
if err in response:
error = True
break
if error: # summary has been set to error per rest pull code, exit with success
return action_result.set_status(phantom.APP_SUCCESS, response)
else:
response_data = {'raw': response, 'domain_names': []}
response = response.strip().split('\n')
tempresponse_data = {}
for line in response:
line = re.sub(r'\s', ',', line)
arr_list = line.split(',', 1)
if len(arr_list) > 1:
domain_name = arr_list[0]
ip_addrs = arr_list[1]
if domain_name in tempresponse_data.keys():
tempresponse_data[domain_name]['ip_addresses'].append(ip_addrs.split(','))
tempresponse_data[domain_name]['ip_count'] += len(ip_addrs.split(','))
else:
tempresponse_data[domain_name] = {'domain': domain_name,
'ip_addresses': ip_addrs.split(','), 'ip_count': len(ip_addrs.split(','))}
else:
self.debug_print("Skipping current response line - {}".format(line))
ip_count_total = 0
for domain_name in tempresponse_data.keys():
response_data['domain_names'].append(tempresponse_data[domain_name])
ip_count_total += tempresponse_data[domain_name]['ip_count']
# Set the summary and response data
action_result.add_data(response_data)
action_result.set_summary({'total_domains': len(tempresponse_data.keys()), 'total_ips': ip_count_total})
# Set the Status
return action_result.set_status(phantom.APP_SUCCESS)
else:
return action_result.set_status(phantom.APP_ERROR, response)
def _reverse_ip(self, param):
""" Action handler for the '_reverse_ip' action"""
# This is an action that needs to be represented by the ActionResult object
# So create one and add it to 'self' (i.e. add it to the BaseConnector)
# When the action_result is created this way, the parameter is also passed.
# Other things like the summary, data and status is set later on.
action_result = self.add_action_result(ActionResult(dict(param)))
# parameters here
# host - hostname; required.
request_params = {'q': param.get('ip')}
# endpoint = HACKERTARGET_REVERSEIP_URI - as of writing, reverse ip is busted, but can use reverse dns URI.
endpoint = HACKERTARGET_REVERSEIP_URI
# Progress
self.save_progress(USING_BASE_URL, base_url=self._base_url, api_uri=self._api_uri, endpoint=endpoint)
# Connectivity
self.save_progress(phantom.APP_PROG_CONNECTING_TO_ELLIPSES, self._host)
# Make the rest call, note that if we try for cached and its not there, it will automatically go to start a new analysis.
# unless specified start a new as above.
ret_val, response = self._make_rest_call(endpoint, action_result, params=request_params)
if ret_val:
error = False
for err in API_ERRORS:
if err in response:
error = True
break
if error: # summary has been set to error per rest pull code, exit with success
return action_result.set_status(phantom.APP_SUCCESS, response)
else:
response_data = {'raw': response, 'domain_names': []}
response = response.strip().split('\n')
for line in response:
response_data['domain_names'].append(line)
domain_count_total = len(response_data['domain_names'])
# Set the summary and response data
for domain in response_data['domain_names']:
action_result.add_data({'domain': domain})
action_result.set_summary({'total_domains': domain_count_total})
# Set the Status
return action_result.set_status(phantom.APP_SUCCESS)
else:
return action_result.set_status(phantom.APP_ERROR, response)
def _ping_host(self, param):
""" Action handler for the '_ping_host' action"""
# This is an action that needs to be represented by the ActionResult object
# So create one and add it to 'self' (i.e. add it to the BaseConnector)
# When the action_result is created this way, the parameter is also passed.
# Other things like the summary, data and status is set later on.
action_result = self.add_action_result(ActionResult(dict(param)))
# Endpoint
endpoint = HACKERTARGET_PING_URI
# Progress
self.save_progress(USING_BASE_URL, base_url=self._base_url, api_uri=self._api_uri, endpoint=endpoint)
# Connectivity
self.save_progress(phantom.APP_PROG_CONNECTING_TO_ELLIPSES, self._host)
# parameters here
# host - hostname; required.
if param.get('domain'):
request_params = {'q': param.get('domain')}
else:
request_params = {'q': param.get('ip')}
# Make the rest call, note that if we try for cached and its not there, it will automatically go to start a new analysis.
# unless specified start a new as above.
ret_val, response = self._make_rest_call(endpoint, action_result, params=request_params)
if ret_val:
error = False
for err in API_ERRORS:
if err in response:
error = True
break
if error: # summary has been set to error per rest pull code, exit with success
return action_result.set_status(phantom.APP_SUCCESS, response)
else:
response_data = {'raw': response}
response = response.split('\n')
for line in response:
if "Raw packets sent:" in line:
linedata = line.strip().split('|')
# self.debug_print('LINDATA: {}'.format(linedata))
response_data['sent'] = linedata[0].split(':')[1].strip()
response_data['succeeded'] = linedata[1].split(':')[1].strip()
response_data['failed'] = linedata[2].split(':')[1].strip().split(' ')[0].strip()
# Set the summary and response data
action_result.add_data(response_data)
action_result.set_summary({'sent': response_data['sent'][0],
'received': response_data['succeeded'][0], 'failed': response_data['failed'][0]})
# Set the Status
return action_result.set_status(phantom.APP_SUCCESS)
else:
return action_result.set_status(phantom.APP_ERROR, response)
def _whois_ip(self, param):
""" Action handler for the 'whois ip' action"""
# This is an action that needs to be represented by the ActionResult object
# So create one and add it to 'self' (i.e. add it to the BaseConnector)
# When the action_result is created this way, the parameter is also passed.
# Other things like the summary, data and status is set later on.
action_result = self.add_action_result(ActionResult(dict(param)))
# Endpoint
endpoint = HACKERTARGET_WHOIS_URI
# Progress
self.save_progress(USING_BASE_URL, base_url=self._base_url, api_uri=self._api_uri, endpoint=endpoint)
# Connectivity
self.save_progress(phantom.APP_PROG_CONNECTING_TO_ELLIPSES, self._host)
# parameters here
# host - hostname; required.
request_params = {'q': param.get('ip')}
# Make the rest call, note that if we try for cached and its not there, it will automatically go to start a new analysis.
# unless specified start a new as above.
ret_val, response = self._make_rest_call(endpoint, action_result, params=request_params)
if ret_val:
error = False
for err in API_ERRORS:
if err in response:
error = True
break
if error: # summary has been set to error per rest pull code, exit with success
return action_result.set_status(phantom.APP_SUCCESS, response)
else:
response_data = {'raw': response}
response = response.strip().split('\n')
for line in response:
if line.startswith('#'): # ignore comment line
continue
line = line.split(':', 1)
if len(line) > 1: # check if array is empty or contains key-value content
key = line[0].strip().replace(' ', '_')
if key in response_data:
response_data[key] += ', ' + line[1].strip()
else:
response_data[key] = line[1].strip()
# Set the summary and response data
action_result.add_data(response_data)
action_result.set_summary({"CIDR": response_data["CIDR"]})
# Set the Status
return action_result.set_status(phantom.APP_SUCCESS)
else:
return action_result.set_status(phantom.APP_ERROR, response)
def _whois_domain(self, param):
""" Action handler for the 'whois_domain' action"""
# This is an action that needs to be represented by the ActionResult object
# So create one and add it to 'self' (i.e. add it to the BaseConnector)
# When the action_result is created this way, the parameter is also passed.
# Other things like the summary, data and status is set later on.
action_result = self.add_action_result(ActionResult(dict(param)))
# Endpoint
endpoint = HACKERTARGET_WHOIS_URI
# Progress
self.save_progress(USING_BASE_URL, base_url=self._base_url, api_uri=self._api_uri, endpoint=endpoint)
# Connectivity
self.save_progress(phantom.APP_PROG_CONNECTING_TO_ELLIPSES, self._host)
# parameters here
# host - hostname; required.
request_params = {'q': param.get('domain')}
# Make the rest call, note that if we try for cached and its not there, it will automatically go to start a new analysis.
# unless specified start a new as above.
ret_val, response = self._make_rest_call(endpoint, action_result, params=request_params)
if ret_val:
error = False
for err in API_ERRORS:
if err in response:
error = True
break
if error: # summary has been set to error per rest pull code, exit with success
return action_result.set_status(phantom.APP_SUCCESS, response)
else:
response_data = {'raw': response}
response = response.strip().split('\n')
for line in response:
if line.startswith('>>>'):
break
line = line.split(':', 1)
if len(line) > 1: # check if array is empty or contains key-value content
key = line[0].strip().replace(' ', '_')
if key in response_data:
response_data[key] += ', ' + line[1].strip()
else:
response_data[key] = line[1].strip()
# Set the summary and response data
self.debug_print(response_data)
action_result.add_data(response_data)
action_result.set_summary({'Domain': response_data['Domain_Name']})
# Set the Status
return action_result.set_status(phantom.APP_SUCCESS)
else:
return action_result.set_status(phantom.APP_ERROR, response)
def _get_http_headers(self, param):
""" Action handler for the 'get_http_headers' action"""
# This is an action that needs to be represented by the ActionResult object
# So create one and add it to 'self' (i.e. add it to the BaseConnector)
# When the action_result is created this way, the parameter is also passed.
# Other things like the summary, data and status is set later on.
action_result = self.add_action_result(ActionResult(dict(param)))
# Endpoint
endpoint = HACKERTARGET_HTTPHEADERS_URI
# Progress
self.save_progress(USING_BASE_URL, base_url=self._base_url, api_uri=self._api_uri, endpoint=endpoint)
# Connectivity
self.save_progress(phantom.APP_PROG_CONNECTING_TO_ELLIPSES, self._host)
# parameters here
# host - hostname; required.
request_params = {'q': param.get('url')}
# Make the rest call, note that if we try for cached and its not there, it will automatically go to start a new analysis.
# unless specified start a new as above.
ret_val, response = self._make_rest_call(endpoint, action_result, params=request_params)
if ret_val:
if 'error: ' in response: # summary has been set to error per rest pull code, exit with success
return action_result.set_status(phantom.APP_SUCCESS, response)
else:
response_data = {'raw': response}
response_headers = response.strip().split('HTTP/')[1:]
response_data['headers'] = []
for response2 in response_headers:
response2 = response2.strip().split('\n')
response_data_temp = {}
for line in response2:
if ": " in line:
response_data_temp[line.split(': ', 1)[0].strip().replace(' ', '_')] = line.split(': ', 1)[1].strip()
elif len(line.split(' ')) > 2:
response_data_temp['http_version'] = line.split(' ')[0]
response_data_temp['response_code'] = line.split(' ')[1]
response_data['headers'].append(response_data_temp)
# Set the summary and response data
action_result.add_data(response_data)
action_result.set_summary({'header_count': len(response_data['headers'])})
# Set the Status
return action_result.set_status(phantom.APP_SUCCESS)
else:
return action_result.set_status(phantom.APP_ERROR, response)
def _get_http_links(self, param):
""" Action handler for the 'get_http_links' action"""
# This is an action that needs to be represented by the ActionResult object
# So create one and add it to 'self' (i.e. add it to the BaseConnector)
# When the action_result is created this way, the parameter is also passed.
# Other things like the summary, data and status is set later on.
action_result = self.add_action_result(ActionResult(dict(param)))
# Endpoint
endpoint = HACKERTARGET_PAGELINKS_URI
# Progress
self.save_progress(USING_BASE_URL, base_url=self._base_url, api_uri=self._api_uri, endpoint=endpoint)
# Connectivity
self.save_progress(phantom.APP_PROG_CONNECTING_TO_ELLIPSES, self._host)
# parameters here
# host - hostname; required.
request_params = {'q': param.get('url')}
# Make the rest call, note that if we try for cached and its not there, it will automatically go to start a new analysis.
# unless specified start a new as above.
ret_val, response = self._make_rest_call(endpoint, action_result, params=request_params)
if ret_val:
if 'error: ' in response: # summary has been set to error per rest pull code, exit with success
return action_result.set_status(phantom.APP_SUCCESS, response)
else:
response_data = {'raw': response}
response = response.strip().split('\n')
response_data['urls'] = []
for line in response:
if "http" in line:
response_data['urls'].append({'url': line})
# Set the summary and response data
action_result.add_data(response_data)
action_result.set_summary({'total_urls': len(response_data['urls'])})
# Set the Status
return action_result.set_status(phantom.APP_SUCCESS)
else:
return action_result.set_status(phantom.APP_ERROR, response)
def _traceroute_host(self, param):
""" Action handler for the 'run traceroute' action"""
# This is an action that needs to be represented by the ActionResult object
# So create one and add it to 'self' (i.e. add it to the BaseConnector)
# When the action_result is created this way, the parameter is also passed.
# Other things like the summary, data and status is set later on.
action_result = self.add_action_result(ActionResult(dict(param)))
# Endpoint
endpoint = HACKERTARGET_MTR_URI
# Progress
self.save_progress(USING_BASE_URL, base_url=self._base_url, api_uri=self._api_uri, endpoint=endpoint)
# Connectivity
self.save_progress(phantom.APP_PROG_CONNECTING_TO_ELLIPSES, self._host)
# parameters here
# host - hostname; required.
if param.get('ip'):
request_params = {'q': param.get('ip')}
else:
request_params = {'q': param.get('domain')}
# Make the rest call, note that if we try for cached and its not there, it will automatically go to start a new analysis.
# unless specified start a new as above.
ret_val, response = self._make_rest_call(endpoint, action_result, params=request_params)
if ret_val:
error = False
for err in API_ERRORS:
if err in response:
error = True
break
if error: # summary has been set to error per rest pull code, exit with success
return action_result.set_status(phantom.APP_SUCCESS, response)
else:
response_data = {'raw': response, 'hop': {}}
response = response.split('\n')
for line in response:
if "|--" in line:
linedata = " ".join(line.strip().split())
lineno = linedata.split('.')[0]
response_data['hop'][lineno] = {}
response_data['hop'][lineno]['raw'] = linedata
response_data['hop'][lineno]['host'] = response_data['hop'][lineno]['raw'].split(' ')[1]
response_data['hop'][lineno]['loss'] = response_data['hop'][lineno]['raw'].split(' ')[2]
response_data['hop'][lineno]['sent'] = response_data['hop'][lineno]['raw'].split(' ')[3]
response_data['hop'][lineno]['last'] = response_data['hop'][lineno]['raw'].split(' ')[4]
response_data['hop'][lineno]['avg'] = response_data['hop'][lineno]['raw'].split(' ')[5]
response_data['hop'][lineno]['best'] = response_data['hop'][lineno]['raw'].split(' ')[6]
response_data['hop'][lineno]['worst'] = response_data['hop'][lineno]['raw'].split(' ')[7]
response_data['hop'][lineno]['stdev'] = response_data['hop'][lineno]['raw'].split(' ')[8]
response_data['hop'][lineno]['hop'] = lineno
# Set the summary and response data
action_result.add_data(response_data)
action_result.set_summary({'total_hops': len(response_data['hop'])})
# Set the Status
return action_result.set_status(phantom.APP_SUCCESS)
else:
return action_result.set_status(phantom.APP_ERROR, response)
def handle_action(self, param):
"""Function that handles all the actions"""
# Get the action that we are supposed to carry out, set it in the connection result object
action = self.get_action_identifier()
# Intialize it to success
ret_val = phantom.APP_SUCCESS
if action == self.ACTION_ID_TRACEROUTE_IP:
ret_val = self._traceroute_host(param)
elif action == self.ACTION_ID_TRACEROUTE_DOMAIN:
ret_val = self._traceroute_host(param)
elif action == self.ACTION_ID_PING_IP:
ret_val = self._ping_host(param)
elif action == self.ACTION_ID_PING_DOMAIN:
ret_val = self._ping_host(param)
elif action == self.ACTION_ID_REVERSE_IP:
ret_val = self._reverse_ip(param)
elif action == self.ACTION_ID_REVERSE_DOMAIN:
ret_val = self._reverse_domain(param)
elif action == self.ACTION_ID_WHOIS_IP:
ret_val = self._whois_ip(param)
elif action == self.ACTION_ID_WHOIS_DOMAIN:
ret_val = self._whois_domain(param)
elif action == self.ACTION_ID_GEOLOCATE_IP:
ret_val = self._geolocate_domain(param)
elif action == self.ACTION_ID_GEOLOCATE_DOMAIN:
ret_val = self._geolocate_domain(param)
elif action == self.ACTION_ID_GET_HEADERS:
ret_val = self._get_http_headers(param)
elif action == self.ACTION_ID_GET_LINKS:
ret_val = self._get_http_links(param)
elif action == phantom.ACTION_ID_TEST_ASSET_CONNECTIVITY:
ret_val = self._test_connectivity(param)
return ret_val
if __name__ == '__main__':
""" Code that is executed when run in standalone debug mode
e.g.:
python2.7 ./hackertarget.py /tmp/hackertarget.json
"""
# Imports
import sys
import pudb
# Breakpoint at runtime
pudb.set_trace()
# The first param is the input json file
with open(sys.argv[1]) as f:
# Load the input json file
in_json = f.read()
in_json = json.loads(in_json)
print(json.dumps(in_json, indent=' ' * 4))
# Create the connector class object
connector = HackerTargetConnector()
# Set the member vars
connector.print_progress_message = True
# Call BaseConnector::_handle_action(...) to kickoff action handling.
ret_val = connector._handle_action(json.dumps(in_json), None)
# Dump the return value
print(ret_val)
sys.exit(0)
|
py | 7df976aeabccfb64726088b5d19b194a672d27dd | import json
import pathlib
from django.core.management.base import BaseCommand, CommandError, CommandParser
from dynamic_menu import models, utils
class Command(BaseCommand):
"""
This command will initialize the menu from a JSON file.
"""
help = 'Initialize the menu from a JSON file.'
def add_arguments(self, parser: CommandParser):
parser.add_argument('fixture', help='JSON file with data.',
nargs=1, type=str)
def handle(self, *args, **options):
# Reading arguments and parsing
if 'fixture' in options:
json_file = pathlib.Path(options.get('fixture', '')[0])
else:
json_file = pathlib.Path('')
if not json_file.exists():
raise CommandError('Fixture does not exist.')
if not json_file.is_file():
raise CommandError('Fixture is not a file.')
# Reading JSON
data = json.load(json_file.open())
# Creating entries
utils.populate(data, models.DynamicMenu)
self.stdout.write(self.style.SUCCESS('Menu initialized.'))
|
py | 7df97722a77e6291685d7618f09e4bfc68df1ee3 | from keras_contrib.layers.normalization.instancenormalization import InstanceNormalization
from keras.layers import Input, Dense, Reshape, Flatten, Dropout, Concatenate
from keras.layers import BatchNormalization, Activation, ZeroPadding2D, Add
from keras.layers.advanced_activations import PReLU, LeakyReLU
from keras.layers.convolutional import UpSampling2D, Conv2D
from keras.models import Sequential, Model
from srgan import SRGAN
from PIL import Image
import numpy as np
def build_generator():
def residual_block(layer_input, filters):
d = Conv2D(filters, kernel_size=3, strides=1, padding='same')(layer_input)
d = Activation('relu')(d)
d = BatchNormalization(momentum=0.8)(d)
d = Conv2D(filters, kernel_size=3, strides=1, padding='same')(d)
d = BatchNormalization(momentum=0.8)(d)
d = Add()([d, layer_input])
return d
def deconv2d(layer_input):
u = UpSampling2D(size=2)(layer_input)
u = Conv2D(256, kernel_size=3, strides=1, padding='same')(u)
u = Activation('relu')(u)
return u
img_lr = Input(shape=[None,None,3])
# Part 1: the low-resolution input first passes through a conv layer + ReLU.
c1 = Conv2D(64, kernel_size=9, strides=1, padding='same')(img_lr)
c1 = Activation('relu')(c1)
# Part 2: 16 residual blocks; each contains two conv + batch-norm + ReLU stages plus a residual (skip) connection.
r = residual_block(c1, 64)
for _ in range(15):
r = residual_block(r, 64)
# Part 3: upsampling; two 2x upsampling steps enlarge height and width to 4x the input, raising the resolution.
c2 = Conv2D(64, kernel_size=3, strides=1, padding='same')(r)
c2 = BatchNormalization(momentum=0.8)(c2)
c2 = Add()([c2, c1])
u1 = deconv2d(c2)
u2 = deconv2d(u1)
gen_hr = Conv2D(3, kernel_size=9, strides=1, padding='same', activation='tanh')(u2)
return Model(img_lr, gen_hr)
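# The generator maps an (h, w, 3) input scaled to [-1, 1] to a
# (4h, 4w, 3) output in [-1, 1] (tanh); the inference code below
# applies the matching /127.5 - 1 normalization on the way in and
# *0.5 + 0.5 then *255 denormalization on the way out.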
model = build_generator()
model.load_weights(r"weights\DIV\gen_epoch38500.h5")
before_image = Image.open(r"before.png")
new_image = Image.new('RGB', before_image.size, (128,128,128))
new_image.paste(before_image)
new_image = np.array(new_image)/127.5 - 1
new_image = np.expand_dims(new_image,axis=0)
fake = (model.predict(new_image)*0.5 + 0.5)*255
fake = Image.fromarray(np.uint8(fake[0]))
fake.save("out.png")
fake.show() |
py | 7df9776d185677d309d6ef378e4668ea1d29af6c | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class UsagePlan(pulumi.CustomResource):
api_stages: pulumi.Output[list]
"""
The associated API stages of the usage plan.
* `api_id` (`str`) - API Id of the associated API stage in a usage plan.
* `stage` (`str`) - API stage name of the associated API stage in a usage plan.
"""
arn: pulumi.Output[str]
"""
Amazon Resource Name (ARN)
"""
description: pulumi.Output[str]
"""
The description of a usage plan.
"""
name: pulumi.Output[str]
"""
The name of the usage plan.
"""
product_code: pulumi.Output[str]
"""
The AWS Marketplace product identifier to associate with the usage plan as a SaaS product on AWS Marketplace.
"""
quota_settings: pulumi.Output[dict]
"""
The quota settings of the usage plan.
* `limit` (`float`) - The maximum number of requests that can be made in a given time period.
* `offset` (`float`) - The number of requests subtracted from the given limit in the initial time period.
* `period` (`str`) - The time period in which the limit applies. Valid values are "DAY", "WEEK" or "MONTH".
"""
tags: pulumi.Output[dict]
"""
Key-value map of resource tags
"""
throttle_settings: pulumi.Output[dict]
"""
The throttling limits of the usage plan.
* `burstLimit` (`float`) - The API request burst limit, the maximum rate limit over a time ranging from one to a few seconds, depending upon whether the underlying token bucket is at its full capacity.
* `rate_limit` (`float`) - The API request steady-state rate limit.
"""
def __init__(__self__, resource_name, opts=None, api_stages=None, description=None, name=None, product_code=None, quota_settings=None, tags=None, throttle_settings=None, __props__=None, __name__=None, __opts__=None):
"""
Provides an API Gateway Usage Plan.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
myapi = aws.apigateway.RestApi("myapi")
dev = aws.apigateway.Deployment("dev",
rest_api=myapi.id,
stage_name="dev")
prod = aws.apigateway.Deployment("prod",
rest_api=myapi.id,
stage_name="prod")
my_usage_plan = aws.apigateway.UsagePlan("myUsagePlan",
api_stages=[
{
"api_id": myapi.id,
"stage": dev.stage_name,
},
{
"api_id": myapi.id,
"stage": prod.stage_name,
},
],
description="my description",
product_code="MYCODE",
quota_settings={
"limit": 20,
"offset": 2,
"period": "WEEK",
},
throttle_settings={
"burstLimit": 5,
"rate_limit": 10,
})
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[list] api_stages: The associated API stages of the usage plan.
:param pulumi.Input[str] description: The description of a usage plan.
:param pulumi.Input[str] name: The name of the usage plan.
:param pulumi.Input[str] product_code: The AWS Marketplace product identifier to associate with the usage plan as a SaaS product on AWS Marketplace.
:param pulumi.Input[dict] quota_settings: The quota settings of the usage plan.
:param pulumi.Input[dict] tags: Key-value map of resource tags
:param pulumi.Input[dict] throttle_settings: The throttling limits of the usage plan.
The **api_stages** object supports the following:
* `api_id` (`pulumi.Input[str]`) - API Id of the associated API stage in a usage plan.
* `stage` (`pulumi.Input[str]`) - API stage name of the associated API stage in a usage plan.
The **quota_settings** object supports the following:
* `limit` (`pulumi.Input[float]`) - The maximum number of requests that can be made in a given time period.
* `offset` (`pulumi.Input[float]`) - The number of requests subtracted from the given limit in the initial time period.
* `period` (`pulumi.Input[str]`) - The time period in which the limit applies. Valid values are "DAY", "WEEK" or "MONTH".
The **throttle_settings** object supports the following:
* `burstLimit` (`pulumi.Input[float]`) - The API request burst limit, the maximum rate limit over a time ranging from one to a few seconds, depending upon whether the underlying token bucket is at its full capacity.
* `rate_limit` (`pulumi.Input[float]`) - The API request steady-state rate limit.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['api_stages'] = api_stages
__props__['description'] = description
__props__['name'] = name
__props__['product_code'] = product_code
__props__['quota_settings'] = quota_settings
__props__['tags'] = tags
__props__['throttle_settings'] = throttle_settings
__props__['arn'] = None
super(UsagePlan, __self__).__init__(
'aws:apigateway/usagePlan:UsagePlan',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name, id, opts=None, api_stages=None, arn=None, description=None, name=None, product_code=None, quota_settings=None, tags=None, throttle_settings=None):
"""
Get an existing UsagePlan resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param str id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[list] api_stages: The associated API stages of the usage plan.
:param pulumi.Input[str] arn: Amazon Resource Name (ARN)
:param pulumi.Input[str] description: The description of a usage plan.
:param pulumi.Input[str] name: The name of the usage plan.
:param pulumi.Input[str] product_code: The AWS Marketplace product identifier to associate with the usage plan as a SaaS product on AWS Marketplace.
:param pulumi.Input[dict] quota_settings: The quota settings of the usage plan.
:param pulumi.Input[dict] tags: Key-value map of resource tags
:param pulumi.Input[dict] throttle_settings: The throttling limits of the usage plan.
The **api_stages** object supports the following:
* `api_id` (`pulumi.Input[str]`) - API Id of the associated API stage in a usage plan.
* `stage` (`pulumi.Input[str]`) - API stage name of the associated API stage in a usage plan.
The **quota_settings** object supports the following:
* `limit` (`pulumi.Input[float]`) - The maximum number of requests that can be made in a given time period.
* `offset` (`pulumi.Input[float]`) - The number of requests subtracted from the given limit in the initial time period.
* `period` (`pulumi.Input[str]`) - The time period in which the limit applies. Valid values are "DAY", "WEEK" or "MONTH".
The **throttle_settings** object supports the following:
* `burstLimit` (`pulumi.Input[float]`) - The API request burst limit, the maximum rate limit over a time ranging from one to a few seconds, depending upon whether the underlying token bucket is at its full capacity.
* `rate_limit` (`pulumi.Input[float]`) - The API request steady-state rate limit.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["api_stages"] = api_stages
__props__["arn"] = arn
__props__["description"] = description
__props__["name"] = name
__props__["product_code"] = product_code
__props__["quota_settings"] = quota_settings
__props__["tags"] = tags
__props__["throttle_settings"] = throttle_settings
return UsagePlan(resource_name, opts=opts, __props__=__props__)
def translate_output_property(self, prop):
return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
|
py | 7df9781fc8e3f2d7904f34838f1b8e6c0dd6b2b9 | """
API endpoint to deliver a JSON with all active languages of a region.
"""
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponse, JsonResponse
from cms.models import Region
def languages(_, region_slug):
"""
Function to add all languages related to a region to a JSON.
Args:
_ : Originally the request parameter, but it is not used in this case.
region_slug ([String]): Slug to identify the desired region.
Returns:
[String]: JSON with all used languages in a region.
"""
try:
region = Region.objects.get(slug=region_slug)
result = list(map(lambda l: {
'id': l.language.id,
'code': l.language.code,
'native_name': l.language.native_name,
'english_name': l.language.english_name,
'text_direction': l.language.text_direction,
}, region.language_tree_nodes.filter(active=True)))
return JsonResponse(result, safe=False) # Turn off Safe-Mode to allow serializing arrays
except ObjectDoesNotExist:
return HttpResponse(f'No Region found with name "{region_slug}".',
content_type='text/plain',
status=404)
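# Illustrative response body for a region with one active language
# (values assumed):
# [{"id": 1, "code": "en", "native_name": "English",
#   "english_name": "English", "text_direction": "ltr"}]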
|
py | 7df979045391f953ee2c7e9b5c681aa43553d1a9 | """
Details of how the data model objects are mapped onto the relational database
are encapsulated here.
"""
import logging
from sqlalchemy import ( and_, asc, Boolean, Column, DateTime, desc, false, ForeignKey, Integer,
MetaData, not_, Numeric, select, String, Table, Text, TEXT, true, Unicode, UniqueConstraint )
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.types import BigInteger
from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper, deferred
from sqlalchemy.orm.collections import attribute_mapped_collection
from galaxy import model
from galaxy.model.orm.engine_factory import build_engine
from galaxy.model.orm.now import now
from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType
from galaxy.model.base import ModelMapping
from galaxy.security import GalaxyRBACAgent
log = logging.getLogger( __name__ )
metadata = MetaData()
model.User.table = Table(
"galaxy_user", metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "email", TrimmedString( 255 ), index=True, nullable=False ),
Column( "username", TrimmedString( 255 ), index=True, unique=True ),
Column( "password", TrimmedString( 255 ), nullable=False ),
Column( "external", Boolean, default=False ),
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ),
Column( "disk_usage", Numeric( 15, 0 ), index=True ) ,
Column( "active", Boolean, index=True, default=True, nullable=False ),
Column( "activation_token", TrimmedString( 64 ), nullable=True, index=True ) )
model.UserAddress.table = Table(
"user_address", metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "desc", TrimmedString( 255 )),
Column( "name", TrimmedString( 255 ), nullable=False),
Column( "institution", TrimmedString( 255 )),
Column( "address", TrimmedString( 255 ), nullable=False),
Column( "city", TrimmedString( 255 ), nullable=False),
Column( "state", TrimmedString( 255 ), nullable=False),
Column( "postal_code", TrimmedString( 255 ), nullable=False),
Column( "country", TrimmedString( 255 ), nullable=False),
Column( "phone", TrimmedString( 255 )),
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ) )
model.UserOpenID.table = Table(
"galaxy_user_openid", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "openid", TEXT, index=True, unique=True ),
Column( "provider", TrimmedString( 255 ) ) )
model.PasswordResetToken.table = Table(
"password_reset_token", metadata,
Column( "token", String( 32 ), primary_key=True, unique=True, index=True ),
Column( "expiration_time", DateTime ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
model.History.table = Table(
"history", metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "name", TrimmedString( 255 ) ),
Column( "hid_counter", Integer, default=1 ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ),
Column( "importing", Boolean, index=True, default=False ),
Column( "genome_build", TrimmedString( 40 ) ),
Column( "importable", Boolean, default=False ),
Column( "slug", TEXT, index=True ),
Column( "published", Boolean, index=True, default=False ) )
model.HistoryUserShareAssociation.table = Table(
"history_user_share_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
model.HistoryDatasetAssociation.table = Table(
"history_dataset_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "state", TrimmedString( 64 ), index=True, key="_state" ),
Column( "copied_from_history_dataset_association_id", Integer,
ForeignKey( "history_dataset_association.id" ), nullable=True ),
Column( "copied_from_library_dataset_dataset_association_id", Integer,
ForeignKey( "library_dataset_dataset_association.id" ), nullable=True ),
Column( "name", TrimmedString( 255 ) ),
Column( "info", TrimmedString( 255 ) ),
Column( "blurb", TrimmedString( 255 ) ),
Column( "peek" , TEXT ),
Column( "tool_version" , TEXT ),
Column( "extension", TrimmedString( 64 ) ),
Column( "metadata", MetadataType(), key="_metadata" ),
Column( "parent_id", Integer, ForeignKey( "history_dataset_association.id" ), nullable=True ),
Column( "designation", TrimmedString( 255 ) ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "visible", Boolean ),
Column( "extended_metadata_id", Integer, ForeignKey( "extended_metadata.id" ), index=True ),
Column( "hid", Integer ),
Column( "purged", Boolean, index=True, default=False ),
Column( "hidden_beneath_collection_instance_id",
ForeignKey( "history_dataset_collection_association.id" ), nullable=True ) )
model.Dataset.table = Table(
"dataset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
Column( "state", TrimmedString( 64 ), index=True ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ),
Column( "purgable", Boolean, default=True ),
Column( "object_store_id", TrimmedString( 255 ), index=True ),
Column( "external_filename" , TEXT ),
Column( "_extra_files_path", TEXT ),
    Column( "file_size", Numeric( 15, 0 ) ),
    Column( "total_size", Numeric( 15, 0 ) ),
    Column( "uuid", UUIDType() ) )
# hda read access permission given by a user to a specific site (generally for external display applications)
model.HistoryDatasetAssociationDisplayAtAuthorization.table = Table(
"history_dataset_association_display_at_authorization", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "site", TrimmedString( 255 ) ) )
model.HistoryDatasetAssociationSubset.table = Table(
"history_dataset_association_subset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "history_dataset_association_subset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "location", Unicode(255), index=True) )
model.ImplicitlyConvertedDatasetAssociation.table = Table(
"implicitly_converted_dataset_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "hda_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True, nullable=True ),
Column( "hda_parent_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "ldda_parent_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "metadata_safe", Boolean, index=True, default=True ),
Column( "type", TrimmedString( 255 ) ) )
model.ValidationError.table = Table(
"validation_error", metadata,
Column( "id", Integer, primary_key=True ),
Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "message", TrimmedString( 255 ) ),
Column( "err_type", TrimmedString( 64 ) ),
Column( "attributes", TEXT ) )
model.Group.table = Table(
"galaxy_group", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", String( 255 ), index=True, unique=True ),
Column( "deleted", Boolean, index=True, default=False ) )
model.UserGroupAssociation.table = Table(
"user_group_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ) )
model.UserRoleAssociation.table = Table(
"user_role_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ) )
model.GroupRoleAssociation.table = Table(
"group_role_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ) )
model.Role.table = Table(
"role", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", String( 255 ), index=True, unique=True ),
Column( "description", TEXT ),
Column( "type", String( 40 ), index=True ),
Column( "deleted", Boolean, index=True, default=False ) )
model.UserQuotaAssociation.table = Table(
"user_quota_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "quota_id", Integer, ForeignKey( "quota.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ) )
model.GroupQuotaAssociation.table = Table(
"group_quota_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
Column( "quota_id", Integer, ForeignKey( "quota.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ) )
model.Quota.table = Table(
"quota", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", String( 255 ), index=True, unique=True ),
Column( "description", TEXT ),
Column( "bytes", BigInteger ),
Column( "operation", String( 8 ) ),
Column( "deleted", Boolean, index=True, default=False ) )
model.DefaultQuotaAssociation.table = Table(
"default_quota_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "type", String( 32 ), index=True, unique=True ),
Column( "quota_id", Integer, ForeignKey( "quota.id" ), index=True ) )
model.DatasetPermissions.table = Table(
"dataset_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "action", TEXT ),
Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.LibraryPermissions.table = Table(
"library_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "action", TEXT ),
Column( "library_id", Integer, ForeignKey( "library.id" ), nullable=True, index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.LibraryFolderPermissions.table = Table(
"library_folder_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "action", TEXT ),
Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.LibraryDatasetPermissions.table = Table(
"library_dataset_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "action", TEXT ),
Column( "library_dataset_id", Integer, ForeignKey( "library_dataset.id" ), nullable=True, index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.LibraryDatasetDatasetAssociationPermissions.table = Table(
"library_dataset_dataset_association_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "action", TEXT ),
Column( "library_dataset_dataset_association_id", Integer,
ForeignKey("library_dataset_dataset_association.id" ),
nullable=True, index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.DefaultUserPermissions.table = Table(
"default_user_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "action", TEXT ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.DefaultHistoryPermissions.table = Table(
"default_history_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "action", TEXT ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.LibraryDataset.table = Table(
"library_dataset", metadata,
Column( "id", Integer, primary_key=True ),
    # current version of dataset; if null, no current version is selected
Column( "library_dataset_dataset_association_id", Integer,
ForeignKey( "library_dataset_dataset_association.id", use_alter=True, name="library_dataset_dataset_association_id_fk" ),
nullable=True, index=True ),
Column( "folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
# not currently being used, but for possible future use
Column( "order_id", Integer ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
    # when not None/null this will supersede display in library (but not when imported into user's history?)
Column( "name", TrimmedString( 255 ), key="_name", index=True ),
    # when not None/null this will supersede display in library (but not when imported into user's history?)
Column( "info", TrimmedString( 255 ), key="_info" ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ) )
model.LibraryDatasetDatasetAssociation.table = Table(
"library_dataset_dataset_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "library_dataset_id", Integer, ForeignKey( "library_dataset.id" ), index=True ),
Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "state", TrimmedString( 64 ), index=True, key="_state" ),
Column( "copied_from_history_dataset_association_id", Integer,
ForeignKey( "history_dataset_association.id", use_alter=True, name='history_dataset_association_dataset_id_fkey' ),
nullable=True ),
Column( "copied_from_library_dataset_dataset_association_id", Integer,
ForeignKey( "library_dataset_dataset_association.id", use_alter=True, name='library_dataset_dataset_association_id_fkey' ),
nullable=True ),
Column( "name", TrimmedString( 255 ), index=True ),
Column( "info", TrimmedString( 255 ) ),
Column( "blurb", TrimmedString( 255 ) ),
Column( "peek" , TEXT ),
Column( "tool_version" , TEXT ),
Column( "extension", TrimmedString( 64 ) ),
Column( "metadata", MetadataType(), key="_metadata" ),
Column( "parent_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True ),
Column( "designation", TrimmedString( 255 ) ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "visible", Boolean ),
Column( "extended_metadata_id", Integer, ForeignKey( "extended_metadata.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "message", TrimmedString( 255 ) ) )
model.ExtendedMetadata.table = Table(
"extended_metadata", metadata,
Column( "id", Integer, primary_key=True ),
Column( "data", JSONType ) )
model.ExtendedMetadataIndex.table = Table(
"extended_metadata_index", metadata,
Column( "id", Integer, primary_key=True ),
Column( "extended_metadata_id", Integer,
ForeignKey("extended_metadata.id", onupdate="CASCADE", ondelete="CASCADE" ), index=True ),
Column( "path", String( 255 )),
Column( "value", TEXT))
model.Library.table = Table(
"library", metadata,
Column( "id", Integer, primary_key=True ),
Column( "root_folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", String( 255 ), index=True ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ),
Column( "description", TEXT ),
Column( "synopsis", TEXT ) )
model.LibraryFolder.table = Table(
"library_folder", metadata,
Column( "id", Integer, primary_key=True ),
Column( "parent_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TEXT, index=True ),
Column( "description", TEXT ),
Column( "order_id", Integer ), # not currently being used, but for possible future use
Column( "item_count", Integer ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ),
Column( "genome_build", TrimmedString( 40 ) ) )
model.LibraryInfoAssociation.table = Table(
"library_info_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "library_id", Integer, ForeignKey( "library.id" ), index=True ),
Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "inheritable", Boolean, index=True, default=False ),
Column( "deleted", Boolean, index=True, default=False ) )
model.LibraryFolderInfoAssociation.table = Table(
"library_folder_info_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "inheritable", Boolean, index=True, default=False ),
Column( "deleted", Boolean, index=True, default=False ) )
model.LibraryDatasetDatasetInfoAssociation.table = Table(
"library_dataset_dataset_info_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "library_dataset_dataset_association_id", Integer,
ForeignKey( "library_dataset_dataset_association.id" ), nullable=True, index=True ),
Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ) )
model.Job.table = Table(
"job", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
Column( "tool_id", String( 255 ) ),
Column( "tool_version", TEXT, default="1.0.0" ),
Column( "state", String( 64 ), index=True ),
Column( "info", TrimmedString( 255 ) ),
Column( "command_line", TEXT ),
Column( "param_filename", String( 1024 ) ),
Column( "runner_name", String( 255 ) ),
Column( "stdout", TEXT ),
Column( "stderr", TEXT ),
Column( "exit_code", Integer, nullable=True ),
Column( "traceback", TEXT ),
Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True, nullable=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=True ),
Column( "job_runner_name", String( 255 ) ),
Column( "job_runner_external_id", String( 255 ) ),
Column( "destination_id", String( 255 ), nullable=True ),
Column( "destination_params", JSONType, nullable=True ),
Column( "object_store_id", TrimmedString( 255 ), index=True ),
Column( "imported", Boolean, default=False, index=True ),
Column( "params", TrimmedString(255), index=True ),
Column( "handler", TrimmedString( 255 ), index=True ) )
model.JobStateHistory.table = Table(
"job_state_history", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "state", String( 64 ), index=True ),
Column( "info", TrimmedString( 255 ) ) )
model.JobParameter.table = Table(
"job_parameter", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "name", String(255) ),
Column( "value", TEXT ) )
model.JobToInputDatasetAssociation.table = Table(
"job_to_input_dataset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "name", String(255) ) )
model.JobToOutputDatasetAssociation.table = Table(
"job_to_output_dataset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "name", String(255) ) )
model.JobToInputDatasetCollectionAssociation.table = Table(
"job_to_input_dataset_collection", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
Column( "name", Unicode(255) ) )
model.JobToImplicitOutputDatasetCollectionAssociation.table = Table(
"job_to_implicit_output_dataset_collection", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "dataset_collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True ),
Column( "name", Unicode(255) ) )
model.JobToOutputDatasetCollectionAssociation.table = Table(
"job_to_output_dataset_collection", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
Column( "name", Unicode(255) ) )
model.JobToInputLibraryDatasetAssociation.table = Table(
"job_to_input_library_dataset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True ),
Column( "name", String(255) ) )
model.JobToOutputLibraryDatasetAssociation.table = Table(
"job_to_output_library_dataset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True ),
Column( "name", String(255) ) )
model.ImplicitlyCreatedDatasetCollectionInput.table = Table(
"implicitly_created_dataset_collection_inputs", metadata,
Column( "id", Integer, primary_key=True ),
Column( "dataset_collection_id", Integer,
ForeignKey( "history_dataset_collection_association.id" ), index=True ),
Column( "input_dataset_collection_id", Integer,
ForeignKey( "history_dataset_collection_association.id" ), index=True ),
Column( "name", Unicode(255) ) )
model.JobExternalOutputMetadata.table = Table(
"job_external_output_metadata", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "history_dataset_association_id", Integer,
ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
Column( "library_dataset_dataset_association_id", Integer,
ForeignKey( "library_dataset_dataset_association.id" ), index=True, nullable=True ),
Column( "is_valid", Boolean, default=True ),
Column( "filename_in", String( 255 ) ),
Column( "filename_out", String( 255 ) ),
Column( "filename_results_code", String( 255 ) ),
Column( "filename_kwds", String( 255 ) ),
Column( "filename_override_metadata", String( 255 ) ),
Column( "job_runner_external_pid", String( 255 ) ) )
model.JobExportHistoryArchive.table = Table(
"job_export_history_archive", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
Column( "compressed", Boolean, index=True, default=False ),
Column( "history_attrs_filename", TEXT ),
Column( "datasets_attrs_filename", TEXT ),
Column( "jobs_attrs_filename", TEXT ) )
model.JobImportHistoryArchive.table = Table(
"job_import_history_archive", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "archive_dir", TEXT ) )
JOB_METRIC_MAX_LENGTH = 1023
model.JobMetricText.table = Table(
"job_metric_text", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "plugin", Unicode(255) ),
Column( "metric_name", Unicode(255) ),
Column( "metric_value", Unicode(JOB_METRIC_MAX_LENGTH) ) )
model.TaskMetricText.table = Table(
"task_metric_text", metadata,
Column( "id", Integer, primary_key=True ),
Column( "task_id", Integer, ForeignKey( "task.id" ), index=True ),
Column( "plugin", Unicode(255) ),
Column( "metric_name", Unicode(255) ),
Column( "metric_value", Unicode(JOB_METRIC_MAX_LENGTH) ) )
model.JobMetricNumeric.table = Table(
"job_metric_numeric", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "plugin", Unicode(255) ),
Column( "metric_name", Unicode(255) ),
Column( "metric_value", Numeric( 22, 7 ) ) )
model.TaskMetricNumeric.table = Table(
"task_metric_numeric", metadata,
Column( "id", Integer, primary_key=True ),
Column( "task_id", Integer, ForeignKey( "task.id" ), index=True ),
Column( "plugin", Unicode(255) ),
Column( "metric_name", Unicode(255) ),
Column( "metric_value", Numeric( 22, 7 ) ) )
model.GenomeIndexToolData.table = Table(
"genome_index_tool_data", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "deferred_job_id", Integer, ForeignKey( "deferred_job.id" ), index=True ),
Column( "transfer_job_id", Integer, ForeignKey( "transfer_job.id" ), index=True ),
Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
Column( "fasta_path", String( 255 ) ),
Column( "created_time", DateTime, default=now ),
Column( "modified_time", DateTime, default=now, onupdate=now ),
Column( "indexer", String( 64 ) ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
model.Task.table = Table(
"task", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "execution_time", DateTime ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "state", String( 64 ), index=True ),
Column( "command_line", TEXT ),
Column( "param_filename", String( 1024 ) ),
Column( "runner_name", String( 255 ) ),
Column( "stdout", TEXT ),
Column( "stderr", TEXT ),
Column( "exit_code", Integer, nullable=True ),
Column( "info", TrimmedString( 255 ) ),
Column( "traceback", TEXT ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True, nullable=False ),
Column( "working_directory", String(1024)),
Column( "task_runner_name", String( 255 ) ),
Column( "task_runner_external_id", String( 255 ) ),
Column( "prepare_input_files_cmd", TEXT ) )
model.PostJobAction.table = Table(
"post_job_action", metadata,
Column( "id", Integer, primary_key=True ),
Column( "workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True, nullable=False ),
Column( "action_type", String(255), nullable=False ),
Column( "output_name", String(255), nullable=True ),
Column( "action_arguments", JSONType, nullable=True ) )
model.PostJobActionAssociation.table = Table(
"post_job_action_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True, nullable=False ),
Column( "post_job_action_id", Integer, ForeignKey( "post_job_action.id" ), index=True, nullable=False ) )
model.DeferredJob.table = Table(
"deferred_job", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "state", String( 64 ), index=True ),
Column( "plugin", String( 128 ), index=True ),
Column( "params", JSONType ) )
model.TransferJob.table = Table(
"transfer_job", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "state", String( 64 ), index=True ),
Column( "path", String( 1024 ) ),
Column( "info", TEXT ),
Column( "pid", Integer ),
Column( "socket", Integer ),
Column( "params", JSONType ) )
model.DatasetCollection.table = Table(
"dataset_collection", metadata,
Column( "id", Integer, primary_key=True ),
Column( "collection_type", Unicode(255), nullable=False ),
Column( "populated_state", TrimmedString( 64 ), default='ok', nullable=False ),
Column( "populated_state_message", TEXT ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ) )
model.HistoryDatasetCollectionAssociation.table = Table(
"history_dataset_collection_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "name", TrimmedString( 255 ) ),
Column( "hid", Integer ),
Column( "visible", Boolean ),
Column( "deleted", Boolean, default=False ),
Column( "copied_from_history_dataset_collection_association_id", Integer,
ForeignKey( "history_dataset_collection_association.id" ), nullable=True ),
Column( "implicit_output_name", Unicode(255), nullable=True ) )
model.LibraryDatasetCollectionAssociation.table = Table(
"library_dataset_collection_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True ),
Column( "folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
Column( "name", TrimmedString( 255 ) ),
Column( "deleted", Boolean, default=False ) )
model.DatasetCollectionElement.table = Table(
"dataset_collection_element", metadata,
Column( "id", Integer, primary_key=True ),
# Parent collection id describing what collection this element belongs to.
Column( "dataset_collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True, nullable=False ),
    # Child defined by this association - HDA, LDDA, or a nested child collection...
Column( "hda_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True, nullable=True ),
Column( "child_collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True, nullable=True ),
# Element index and identifier to define this parent-child relationship.
Column( "element_index", Integer ),
Column( "element_identifier", Unicode(255), ) )
model.Event.table = Table(
"event", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True, nullable=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=True ),
Column( "message", TrimmedString( 1024 ) ),
Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True, nullable=True ),
Column( "tool_id", String( 255 ) ) )
model.GalaxySession.table = Table(
"galaxy_session", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=True ),
Column( "remote_host", String( 255 ) ),
Column( "remote_addr", String( 255 ) ),
Column( "referer", TEXT ),
Column( "current_history_id", Integer, ForeignKey( "history.id" ), nullable=True ),
# unique 128 bit random number coerced to a string
Column( "session_key", TrimmedString( 255 ), index=True, unique=True ),
Column( "is_valid", Boolean, default=False ),
# saves a reference to the previous session so we have a way to chain them together
Column( "prev_session_id", Integer ),
Column( "disk_usage", Numeric( 15, 0 ), index=True ),
Column( "last_action", DateTime) )
model.GalaxySessionToHistoryAssociation.table = Table(
"galaxy_session_to_history", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ) )
model.StoredWorkflow.table = Table(
"stored_workflow", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
Column( "latest_workflow_id", Integer,
ForeignKey( "workflow.id", use_alter=True, name='stored_workflow_latest_workflow_id_fk' ), index=True ),
Column( "name", TEXT ),
Column( "deleted", Boolean, default=False ),
Column( "importable", Boolean, default=False ),
Column( "slug", TEXT, index=True ),
Column( "published", Boolean, index=True, default=False ) )
model.Workflow.table = Table(
"workflow", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
# workflows will belong to either a stored workflow or a parent/nesting workflow.
Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True, nullable=True ),
Column( "parent_workflow_id", Integer, ForeignKey( "workflow.id" ), index=True, nullable=True ),
Column( "name", TEXT ),
Column( "has_cycles", Boolean ),
Column( "has_errors", Boolean ),
Column( "uuid", UUIDType, nullable=True ) )
model.WorkflowStep.table = Table(
"workflow_step", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "workflow_id", Integer, ForeignKey( "workflow.id" ), index=True, nullable=False ),
Column( "subworkflow_id", Integer, ForeignKey( "workflow.id" ), index=True, nullable=True ),
Column( "type", String(64) ),
Column( "tool_id", TEXT ),
    # Reserved for future use
Column( "tool_version", TEXT ),
Column( "tool_inputs", JSONType ),
Column( "tool_errors", JSONType ),
Column( "position", JSONType ),
Column( "config", JSONType ),
Column( "order_index", Integer ),
Column( "uuid", UUIDType ),
# Column( "input_connections", JSONType ),
Column( "label", Unicode(255) ) )
model.WorkflowRequestStepState.table = Table(
"workflow_request_step_states", metadata,
Column( "id", Integer, primary_key=True ),
Column( "workflow_invocation_id", Integer,
ForeignKey("workflow_invocation.id", onupdate="CASCADE", ondelete="CASCADE" )),
Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id" )),
Column( "value", JSONType ) )
model.WorkflowRequestInputParameter.table = Table(
"workflow_request_input_parameters", metadata,
Column( "id", Integer, primary_key=True ),
Column( "workflow_invocation_id", Integer,
ForeignKey("workflow_invocation.id", onupdate="CASCADE", ondelete="CASCADE" )),
Column( "name", Unicode(255) ),
Column( "value", TEXT ),
Column( "type", Unicode(255) ) )
model.WorkflowRequestInputStepParmeter.table = Table(
"workflow_request_input_step_parameter", metadata,
Column( "id", Integer, primary_key=True ),
Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True ),
Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id") ),
Column( "parameter_value", JSONType ),
)
model.WorkflowRequestToInputDatasetAssociation.table = Table(
"workflow_request_to_input_dataset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "name", String(255) ),
Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True ),
Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id") ),
Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ) )
model.WorkflowRequestToInputDatasetCollectionAssociation.table = Table(
"workflow_request_to_input_collection_dataset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "name", String(255) ),
Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True ),
Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id") ),
Column( "dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ) )
model.WorkflowStepConnection.table = Table(
"workflow_step_connection", metadata,
Column( "id", Integer, primary_key=True ),
Column( "output_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
Column( "input_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
Column( "output_name", TEXT ),
Column( "input_name", TEXT ),
Column( "input_subworkflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
)
model.WorkflowOutput.table = Table(
"workflow_output", metadata,
Column( "id", Integer, primary_key=True ),
Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id"), index=True, nullable=False ),
Column( "output_name", String(255), nullable=True ),
Column( "label", Unicode(255) ),
Column( "uuid", UUIDType ),
)
model.WorkflowInvocation.table = Table(
"workflow_invocation", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "workflow_id", Integer, ForeignKey( "workflow.id" ), index=True, nullable=False ),
Column( "state", TrimmedString( 64 ), index=True ),
Column( "scheduler", TrimmedString( 255 ), index=True ),
Column( "handler", TrimmedString( 255 ), index=True ),
    Column( "uuid", UUIDType() ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ) )
model.WorkflowInvocationStep.table = Table(
"workflow_invocation_step", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True, nullable=False ),
Column( "workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True, nullable=False ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True, nullable=True ),
Column( "action", JSONType, nullable=True ) )
model.WorkflowInvocationToSubworkflowInvocationAssociation.table = Table(
"workflow_invocation_to_subworkflow_invocation_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True ),
Column( "subworkflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True ),
Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id") ),
)
model.StoredWorkflowUserShareAssociation.table = Table(
"stored_workflow_user_share_connection", metadata,
Column( "id", Integer, primary_key=True ),
Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
model.StoredWorkflowMenuEntry.table = Table(
"stored_workflow_menu_entry", metadata,
Column( "id", Integer, primary_key=True ),
Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "order_index", Integer ) )
model.MetadataFile.table = Table(
"metadata_file", metadata,
Column( "id", Integer, primary_key=True ),
Column( "name", TEXT ),
Column( "hda_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
Column( "lda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True, nullable=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
Column( "object_store_id", TrimmedString( 255 ), index=True ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ) )
model.FormDefinitionCurrent.table = Table(
"form_definition_current", metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "latest_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ) )
model.FormDefinition.table = Table(
"form_definition", metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), nullable=False ),
Column( "desc", TEXT ),
Column( "form_definition_current_id", Integer,
ForeignKey( "form_definition_current.id", name='for_def_form_def_current_id_fk', use_alter=True ), index=True ),
Column( "fields", JSONType() ),
Column( "type", TrimmedString( 255 ), index=True ),
Column( "layout", JSONType() ) )
model.ExternalService.table = Table(
"external_service", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), nullable=False ),
Column( "description", TEXT ),
Column( "external_service_type_id", TrimmedString( 255 ), nullable=False ),
Column( "version", TrimmedString( 255 ) ),
Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ) )
model.RequestType.table = Table(
"request_type", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), nullable=False ),
Column( "desc", TEXT ),
Column( "request_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "sample_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ) )
model.RequestTypeExternalServiceAssociation.table = Table(
"request_type_external_service_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True ),
Column( "external_service_id", Integer, ForeignKey( "external_service.id" ), index=True ) )
model.RequestTypePermissions.table = Table(
"request_type_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "action", TEXT ),
Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), nullable=True, index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.FormValues.table = Table(
"form_values", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "content", JSONType()) )
model.Request.table = Table(
"request", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), nullable=False ),
Column( "desc", TEXT ),
Column( "notification", JSONType() ),
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ) )
model.RequestEvent.table = Table(
"request_event", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "request_id", Integer, ForeignKey( "request.id" ), index=True ),
Column( "state", TrimmedString( 255 ), index=True ),
Column( "comment", TEXT ) )
model.Sample.table = Table(
"sample", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), nullable=False ),
Column( "desc", TEXT ),
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "request_id", Integer, ForeignKey( "request.id" ), index=True ),
Column( "bar_code", TrimmedString( 255 ), index=True ),
Column( "library_id", Integer, ForeignKey( "library.id" ), index=True ),
Column( "folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "workflow", JSONType, nullable=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), nullable=True ) )
model.SampleState.table = Table(
"sample_state", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), nullable=False ),
Column( "desc", TEXT ),
Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True ) )
model.SampleEvent.table = Table(
"sample_event", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "sample_id", Integer, ForeignKey( "sample.id" ), index=True ),
Column( "sample_state_id", Integer, ForeignKey( "sample_state.id" ), index=True ),
Column( "comment", TEXT ) )
model.SampleDataset.table = Table(
"sample_dataset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "sample_id", Integer, ForeignKey( "sample.id" ), index=True ),
Column( "name", TrimmedString( 255 ), nullable=False ),
Column( "file_path", TEXT ),
Column( "status", TrimmedString( 255 ), nullable=False ),
Column( "error_msg", TEXT ),
Column( "size", TrimmedString( 255 ) ),
Column( "external_service_id", Integer, ForeignKey( "external_service.id" ), index=True ) )
model.Run.table = Table(
"run", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "subindex", TrimmedString( 255 ), index=True ) )
model.RequestTypeRunAssociation.table = Table(
"request_type_run_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True, nullable=False ),
Column( "run_id", Integer, ForeignKey( "run.id" ), index=True, nullable=False ) )
model.SampleRunAssociation.table = Table(
"sample_run_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "sample_id", Integer, ForeignKey( "sample.id" ), index=True, nullable=False ),
Column( "run_id", Integer, ForeignKey( "run.id" ), index=True, nullable=False ) )
model.Page.table = Table(
"page", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
Column( "latest_revision_id", Integer,
ForeignKey( "page_revision.id", use_alter=True, name='page_latest_revision_id_fk' ), index=True ),
Column( "title", TEXT ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "importable", Boolean, index=True, default=False ),
Column( "slug", TEXT, unique=True, index=True ),
Column( "published", Boolean, index=True, default=False ) )
model.PageRevision.table = Table(
"page_revision", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "page_id", Integer, ForeignKey( "page.id" ), index=True, nullable=False ),
Column( "title", TEXT ),
Column( "content", TEXT ) )
model.PageUserShareAssociation.table = Table(
"page_user_share_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
model.Visualization.table = Table(
"visualization", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
Column( "latest_revision_id", Integer,
ForeignKey( "visualization_revision.id", use_alter=True, name='visualization_latest_revision_id_fk' ), index=True ),
Column( "title", TEXT ),
Column( "type", TEXT ),
Column( "dbkey", TEXT, index=True ),
Column( "deleted", Boolean, default=False, index=True ),
Column( "importable", Boolean, default=False, index=True ),
Column( "slug", TEXT, index=True ),
Column( "published", Boolean, default=False, index=True ) )
model.VisualizationRevision.table = Table(
"visualization_revision", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True, nullable=False ),
Column( "title", TEXT ),
Column( "dbkey", TEXT, index=True ),
Column( "config", JSONType ) )
model.VisualizationUserShareAssociation.table = Table(
"visualization_user_share_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
# Data Manager tables
model.DataManagerHistoryAssociation.table = Table(
"data_manager_history_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
model.DataManagerJobAssociation.table = Table(
"data_manager_job_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "data_manager_id", TEXT, index=True ) )
# Tagging tables.
model.Tag.table = Table(
"tag", metadata,
Column( "id", Integer, primary_key=True ),
Column( "type", Integer ),
Column( "parent_id", Integer, ForeignKey( "tag.id" ) ),
Column( "name", TrimmedString(255) ),
UniqueConstraint( "name" ) )
model.HistoryTagAssociation.table = Table(
"history_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True ),
Column( "value", TrimmedString(255), index=True ),
Column( "user_value", TrimmedString(255), index=True ) )
model.DatasetTagAssociation.table = Table(
"dataset_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True ),
Column( "value", TrimmedString(255), index=True ),
Column( "user_value", TrimmedString(255), index=True ) )
model.HistoryDatasetAssociationTagAssociation.table = Table(
"history_dataset_association_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True ),
Column( "value", TrimmedString(255), index=True ),
Column( "user_value", TrimmedString(255), index=True ) )
model.StoredWorkflowTagAssociation.table = Table(
"stored_workflow_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", Unicode(255), index=True ),
Column( "value", Unicode(255), index=True ),
Column( "user_value", Unicode(255), index=True ) )
model.PageTagAssociation.table = Table(
"page_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True ),
Column( "value", TrimmedString(255), index=True ),
Column( "user_value", TrimmedString(255), index=True ) )
model.WorkflowStepTagAssociation.table = Table(
"workflow_step_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", Unicode(255), index=True ),
Column( "value", Unicode(255), index=True ),
Column( "user_value", Unicode(255), index=True ) )
model.VisualizationTagAssociation.table = Table(
"visualization_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True ),
Column( "value", TrimmedString(255), index=True ),
Column( "user_value", TrimmedString(255), index=True ) )
model.HistoryDatasetCollectionTagAssociation.table = Table(
"history_dataset_collection_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_dataset_collection_id", Integer,
ForeignKey( "history_dataset_collection_association.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True ),
Column( "value", TrimmedString(255), index=True ),
Column( "user_value", TrimmedString(255), index=True ) )
model.LibraryDatasetCollectionTagAssociation.table = Table(
"library_dataset_collection_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "library_dataset_collection_id", Integer,
ForeignKey( "library_dataset_collection_association.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True ),
Column( "value", TrimmedString(255), index=True ),
Column( "user_value", TrimmedString(255), index=True ) )
model.ToolTagAssociation.table = Table(
"tool_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "tool_id", TrimmedString(255), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True ),
Column( "value", TrimmedString(255), index=True ),
Column( "user_value", TrimmedString(255), index=True ) )
# Annotation tables.
model.HistoryAnnotationAssociation.table = Table(
"history_annotation_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "annotation", TEXT, index=True ) )
model.HistoryDatasetAssociationAnnotationAssociation.table = Table(
"history_dataset_association_annotation_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_dataset_association_id", Integer,
ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "annotation", TEXT, index=True ) )
model.StoredWorkflowAnnotationAssociation.table = Table(
"stored_workflow_annotation_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "annotation", TEXT, index=True ) )
model.WorkflowStepAnnotationAssociation.table = Table(
"workflow_step_annotation_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "annotation", TEXT, index=True ) )
model.PageAnnotationAssociation.table = Table(
"page_annotation_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "annotation", TEXT, index=True ) )
model.VisualizationAnnotationAssociation.table = Table(
"visualization_annotation_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "annotation", TEXT, index=True ) )
model.HistoryDatasetCollectionAnnotationAssociation.table = Table(
"history_dataset_collection_annotation_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_dataset_collection_id", Integer,
ForeignKey( "history_dataset_collection_association.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "annotation", TEXT, index=True ) )
model.LibraryDatasetCollectionAnnotationAssociation.table = Table(
"library_dataset_collection_annotation_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "library_dataset_collection_id", Integer,
ForeignKey( "library_dataset_collection_association.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "annotation", TEXT, index=True ) )
# Ratings tables.
model.HistoryRatingAssociation.table = Table( "history_rating_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "rating", Integer, index=True ) )
model.HistoryDatasetAssociationRatingAssociation.table = Table(
"history_dataset_association_rating_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_dataset_association_id", Integer,
ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "rating", Integer, index=True ) )
model.StoredWorkflowRatingAssociation.table = Table(
"stored_workflow_rating_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "rating", Integer, index=True ) )
model.PageRatingAssociation.table = Table(
"page_rating_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "rating", Integer, index=True ) )
model.VisualizationRatingAssociation.table = Table(
"visualization_rating_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "rating", Integer, index=True ) )
model.HistoryDatasetCollectionRatingAssociation.table = Table(
"history_dataset_collection_rating_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_dataset_collection_id", Integer,
ForeignKey( "history_dataset_collection_association.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "rating", Integer, index=True ) )
model.LibraryDatasetCollectionRatingAssociation.table = Table(
"library_dataset_collection_rating_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "library_dataset_collection_id", Integer,
ForeignKey( "library_dataset_collection_association.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "rating", Integer, index=True ) )
# User tables.
model.UserPreference.table = Table(
"user_preference", metadata,
Column( "id", Integer, primary_key=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "name", Unicode( 255 ), index=True),
Column( "value", Text ) )
model.UserAction.table = Table(
"user_action", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True ),
Column( "action", Unicode( 255 ) ),
Column( "context", Unicode( 512 ) ),
Column( "params", Unicode( 1024 ) ) )
model.APIKeys.table = Table(
"api_keys", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "key", TrimmedString( 32 ), index=True, unique=True ) )
# With the tables defined we can define the mappers and setup the
# relationships between the model objects.
def simple_mapping( model, **kwds ):
mapper( model, model.table, properties=kwds )
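# simple_mapping is a thin wrapper around mapper(): each keyword argument
# becomes a relation property on the mapped class. For example, a hypothetical
#   simple_mapping( model.Foo, bars=relation( model.Bar ) )
# is equivalent to
#   mapper( model.Foo, model.Foo.table, properties=dict( bars=relation( model.Bar ) ) )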
mapper( model.Sample, model.Sample.table, properties=dict(
events=relation( model.SampleEvent,
backref="sample",
order_by=desc( model.SampleEvent.table.c.update_time ) ),
datasets=relation( model.SampleDataset,
backref="sample",
order_by=desc( model.SampleDataset.table.c.update_time ) ),
values=relation( model.FormValues,
primaryjoin=( model.Sample.table.c.form_values_id == model.FormValues.table.c.id ) ),
request=relation( model.Request,
primaryjoin=( model.Sample.table.c.request_id == model.Request.table.c.id ) ),
folder=relation( model.LibraryFolder,
primaryjoin=( model.Sample.table.c.folder_id == model.LibraryFolder.table.c.id ) ),
library=relation( model.Library,
primaryjoin=( model.Sample.table.c.library_id == model.Library.table.c.id ) ),
history=relation( model.History,
primaryjoin=( model.Sample.table.c.history_id == model.History.table.c.id ) ),
) )
mapper( model.FormValues, model.FormValues.table, properties=dict(
form_definition=relation( model.FormDefinition,
primaryjoin=( model.FormValues.table.c.form_definition_id == model.FormDefinition.table.c.id ) )
) )
mapper( model.Request, model.Request.table, properties=dict(
values=relation( model.FormValues,
primaryjoin=( model.Request.table.c.form_values_id == model.FormValues.table.c.id ) ),
type=relation( model.RequestType,
primaryjoin=( model.Request.table.c.request_type_id == model.RequestType.table.c.id ) ),
user=relation( model.User,
primaryjoin=( model.Request.table.c.user_id == model.User.table.c.id ),
backref="requests" ),
samples=relation( model.Sample,
primaryjoin=( model.Request.table.c.id == model.Sample.table.c.request_id ),
order_by=asc( model.Sample.table.c.id ) ),
events=relation( model.RequestEvent,
backref="request",
order_by=desc( model.RequestEvent.table.c.update_time ) )
) )
mapper( model.RequestEvent, model.RequestEvent.table, properties=None )
mapper( model.ExternalService, model.ExternalService.table, properties=dict(
form_definition=relation( model.FormDefinition,
primaryjoin=( model.ExternalService.table.c.form_definition_id == model.FormDefinition.table.c.id ) ),
form_values=relation( model.FormValues,
primaryjoin=( model.ExternalService.table.c.form_values_id == model.FormValues.table.c.id ) )
) )
mapper( model.RequestType, model.RequestType.table, properties=dict(
states=relation( model.SampleState,
backref="request_type",
primaryjoin=( model.RequestType.table.c.id == model.SampleState.table.c.request_type_id ),
order_by=asc( model.SampleState.table.c.update_time ) ),
request_form=relation( model.FormDefinition,
primaryjoin=( model.RequestType.table.c.request_form_id == model.FormDefinition.table.c.id ) ),
sample_form=relation( model.FormDefinition,
primaryjoin=( model.RequestType.table.c.sample_form_id == model.FormDefinition.table.c.id ) ),
) )
mapper( model.RequestTypeExternalServiceAssociation, model.RequestTypeExternalServiceAssociation.table, properties=dict(
request_type=relation( model.RequestType,
primaryjoin=( ( model.RequestTypeExternalServiceAssociation.table.c.request_type_id == model.RequestType.table.c.id ) ),
backref="external_service_associations" ),
external_service=relation( model.ExternalService,
primaryjoin=( model.RequestTypeExternalServiceAssociation.table.c.external_service_id == model.ExternalService.table.c.id ) )
) )
mapper( model.RequestTypePermissions, model.RequestTypePermissions.table, properties=dict(
request_type=relation( model.RequestType, backref="actions" ),
role=relation( model.Role, backref="request_type_actions" )
) )
mapper( model.FormDefinition, model.FormDefinition.table, properties=dict(
current=relation( model.FormDefinitionCurrent,
primaryjoin=( model.FormDefinition.table.c.form_definition_current_id == model.FormDefinitionCurrent.table.c.id ) )
) )
mapper( model.FormDefinitionCurrent, model.FormDefinitionCurrent.table, properties=dict(
forms=relation( model.FormDefinition,
backref='form_definition_current',
cascade="all, delete-orphan",
primaryjoin=( model.FormDefinitionCurrent.table.c.id == model.FormDefinition.table.c.form_definition_current_id ) ),
latest_form=relation( model.FormDefinition,
post_update=True,
primaryjoin=( model.FormDefinitionCurrent.table.c.latest_form_id == model.FormDefinition.table.c.id ) )
) )
mapper( model.SampleEvent, model.SampleEvent.table, properties=dict(
state=relation( model.SampleState,
primaryjoin=( model.SampleEvent.table.c.sample_state_id == model.SampleState.table.c.id ) ),
) )
mapper( model.SampleState, model.SampleState.table, properties=None )
mapper( model.SampleDataset, model.SampleDataset.table, properties=dict(
external_service=relation( model.ExternalService,
primaryjoin=( model.SampleDataset.table.c.external_service_id == model.ExternalService.table.c.id ) )
) )
mapper( model.SampleRunAssociation, model.SampleRunAssociation.table, properties=dict(
sample=relation( model.Sample, backref="runs", order_by=desc( model.Run.table.c.update_time ) ),
run=relation( model.Run, backref="samples", order_by=asc( model.Sample.table.c.id ) )
) )
mapper( model.RequestTypeRunAssociation, model.RequestTypeRunAssociation.table, properties=dict(
request_type=relation( model.RequestType, backref="run" ),
run=relation( model.Run, backref="request_type" )
) )
mapper( model.Run, model.Run.table, properties=dict(
template=relation( model.FormDefinition,
primaryjoin=( model.Run.table.c.form_definition_id == model.FormDefinition.table.c.id ) ),
info=relation( model.FormValues,
primaryjoin=( model.Run.table.c.form_values_id == model.FormValues.table.c.id ) )
) )
mapper( model.UserAddress, model.UserAddress.table, properties=dict(
user=relation( model.User,
primaryjoin=( model.UserAddress.table.c.user_id == model.User.table.c.id ),
backref='addresses',
order_by=desc( model.UserAddress.table.c.update_time ) ),
) )
mapper( model.UserOpenID, model.UserOpenID.table, properties=dict(
session=relation( model.GalaxySession,
primaryjoin=( model.UserOpenID.table.c.session_id == model.GalaxySession.table.c.id ),
backref='openids',
order_by=desc( model.UserOpenID.table.c.update_time ) ),
user=relation( model.User,
primaryjoin=( model.UserOpenID.table.c.user_id == model.User.table.c.id ),
backref='openids',
order_by=desc( model.UserOpenID.table.c.update_time ) )
) )
mapper( model.ValidationError, model.ValidationError.table )
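# HistoryDatasetAssociation below maps several self-referential relations
# (copied_from/copied_to, parent/children). For self-referential joins,
# SQLAlchemy needs remote_side to identify which side of the join is the
# referenced row, and uselist=False makes the many-to-one side a scalar.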
simple_mapping( model.HistoryDatasetAssociation,
dataset=relation( model.Dataset,
primaryjoin=( model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.dataset_id ), lazy=False ),
# .history defined in History mapper
copied_from_history_dataset_association=relation( model.HistoryDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociation.table.c.copied_from_history_dataset_association_id ==
model.HistoryDatasetAssociation.table.c.id ),
remote_side=[model.HistoryDatasetAssociation.table.c.id],
uselist=False ),
copied_to_history_dataset_associations=relation( model.HistoryDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociation.table.c.copied_from_history_dataset_association_id ==
model.HistoryDatasetAssociation.table.c.id ) ),
copied_from_library_dataset_dataset_association=relation(
model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id ==
model.LibraryDatasetDatasetAssociation.table.c.id ),
uselist=False ),
copied_to_library_dataset_dataset_associations=relation( model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id ==
model.LibraryDatasetDatasetAssociation.table.c.id ) ),
implicitly_converted_datasets=relation( model.ImplicitlyConvertedDatasetAssociation,
primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.hda_parent_id ==
model.HistoryDatasetAssociation.table.c.id ) ),
implicitly_converted_parent_datasets=relation( model.ImplicitlyConvertedDatasetAssociation,
primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.hda_id ==
model.HistoryDatasetAssociation.table.c.id ) ),
children=relation( model.HistoryDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociation.table.c.parent_id ==
model.HistoryDatasetAssociation.table.c.id ),
backref=backref( "parent",
primaryjoin=( model.HistoryDatasetAssociation.table.c.parent_id ==
model.HistoryDatasetAssociation.table.c.id ),
remote_side=[model.HistoryDatasetAssociation.table.c.id], uselist=False ) ),
visible_children=relation( model.HistoryDatasetAssociation,
primaryjoin=(
( model.HistoryDatasetAssociation.table.c.parent_id == model.HistoryDatasetAssociation.table.c.id ) &
( model.HistoryDatasetAssociation.table.c.visible == true() ) ),
remote_side=[model.HistoryDatasetAssociation.table.c.id] ),
tags=relation( model.HistoryDatasetAssociationTagAssociation,
order_by=model.HistoryDatasetAssociationTagAssociation.table.c.id,
backref='history_tag_associations' ),
annotations=relation( model.HistoryDatasetAssociationAnnotationAssociation,
order_by=model.HistoryDatasetAssociationAnnotationAssociation.table.c.id,
backref="hdas" ),
ratings=relation( model.HistoryDatasetAssociationRatingAssociation,
order_by=model.HistoryDatasetAssociationRatingAssociation.table.c.id,
backref="hdas" ),
extended_metadata=relation( model.ExtendedMetadata,
primaryjoin=( ( model.HistoryDatasetAssociation.table.c.extended_metadata_id ==
model.ExtendedMetadata.table.c.id ) ) ),
hidden_beneath_collection_instance=relation( model.HistoryDatasetCollectionAssociation,
primaryjoin=( ( model.HistoryDatasetAssociation.table.c.hidden_beneath_collection_instance_id ==
model.HistoryDatasetCollectionAssociation.table.c.id ) ),
uselist=False,
backref="hidden_dataset_instances"),
_metadata=deferred(model.HistoryDatasetAssociation.table.c._metadata)
)
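# _metadata is mapped with deferred() so the potentially large metadata blob
# is only loaded from the database when the attribute is first accessed.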
simple_mapping( model.Dataset,
history_associations=relation( model.HistoryDatasetAssociation,
primaryjoin=( model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.dataset_id ) ),
active_history_associations=relation( model.HistoryDatasetAssociation,
primaryjoin=(
( model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.dataset_id ) &
( model.HistoryDatasetAssociation.table.c.deleted == false() ) &
( model.HistoryDatasetAssociation.table.c.purged == false() ) ) ),
purged_history_associations=relation( model.HistoryDatasetAssociation,
primaryjoin=(
( model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.dataset_id ) &
( model.HistoryDatasetAssociation.table.c.purged == true() ) ) ),
library_associations=relation( model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.Dataset.table.c.id == model.LibraryDatasetDatasetAssociation.table.c.dataset_id ) ),
active_library_associations=relation( model.LibraryDatasetDatasetAssociation,
primaryjoin=(
( model.Dataset.table.c.id == model.LibraryDatasetDatasetAssociation.table.c.dataset_id ) &
( model.LibraryDatasetDatasetAssociation.table.c.deleted == false() ) ) ),
tags=relation( model.DatasetTagAssociation,
order_by=model.DatasetTagAssociation.table.c.id,
backref='datasets')
)
mapper( model.HistoryDatasetAssociationDisplayAtAuthorization, model.HistoryDatasetAssociationDisplayAtAuthorization.table, properties=dict(
history_dataset_association=relation( model.HistoryDatasetAssociation ),
user=relation( model.User )
) )
mapper( model.HistoryDatasetAssociationSubset, model.HistoryDatasetAssociationSubset.table, properties=dict(
hda=relation( model.HistoryDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociationSubset.table.c.history_dataset_association_id ==
model.HistoryDatasetAssociation.table.c.id ) ),
subset=relation( model.HistoryDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociationSubset.table.c.history_dataset_association_subset_id ==
model.HistoryDatasetAssociation.table.c.id ) )
) )
mapper( model.ImplicitlyConvertedDatasetAssociation, model.ImplicitlyConvertedDatasetAssociation.table, properties=dict(
parent_hda=relation( model.HistoryDatasetAssociation,
primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.hda_parent_id ==
model.HistoryDatasetAssociation.table.c.id ) ),
parent_ldda=relation( model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.ldda_parent_id ==
model.LibraryDatasetDatasetAssociation.table.c.id ) ),
dataset_ldda=relation( model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.ldda_id ==
model.LibraryDatasetDatasetAssociation.table.c.id ) ),
dataset=relation( model.HistoryDatasetAssociation,
primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.hda_id ==
model.HistoryDatasetAssociation.table.c.id ) )
) )
mapper( model.History, model.History.table, properties=dict(
galaxy_sessions=relation( model.GalaxySessionToHistoryAssociation ),
datasets=relation( model.HistoryDatasetAssociation,
backref="history",
order_by=asc( model.HistoryDatasetAssociation.table.c.hid ) ),
exports=relation( model.JobExportHistoryArchive,
primaryjoin=( model.JobExportHistoryArchive.table.c.history_id == model.History.table.c.id ),
order_by=desc( model.JobExportHistoryArchive.table.c.id ) ),
active_datasets=relation( model.HistoryDatasetAssociation,
primaryjoin=(
( model.HistoryDatasetAssociation.table.c.history_id == model.History.table.c.id ) &
not_( model.HistoryDatasetAssociation.table.c.deleted )
),
order_by=asc( model.HistoryDatasetAssociation.table.c.hid ),
viewonly=True ),
active_dataset_collections=relation( model.HistoryDatasetCollectionAssociation,
primaryjoin=(
( model.HistoryDatasetCollectionAssociation.table.c.history_id == model.History.table.c.id ) &
not_( model.HistoryDatasetCollectionAssociation.table.c.deleted )
),
order_by=asc( model.HistoryDatasetCollectionAssociation.table.c.hid ),
viewonly=True ),
visible_datasets=relation( model.HistoryDatasetAssociation,
primaryjoin=(
( model.HistoryDatasetAssociation.table.c.history_id == model.History.table.c.id ) &
not_( model.HistoryDatasetAssociation.table.c.deleted ) &
model.HistoryDatasetAssociation.table.c.visible
),
order_by=asc( model.HistoryDatasetAssociation.table.c.hid ),
viewonly=True ),
visible_dataset_collections=relation( model.HistoryDatasetCollectionAssociation,
primaryjoin=(
( model.HistoryDatasetCollectionAssociation.table.c.history_id == model.History.table.c.id ) &
not_( model.HistoryDatasetCollectionAssociation.table.c.deleted ) &
model.HistoryDatasetCollectionAssociation.table.c.visible
),
order_by=asc( model.HistoryDatasetCollectionAssociation.table.c.hid ),
viewonly=True ),
tags=relation( model.HistoryTagAssociation,
order_by=model.HistoryTagAssociation.table.c.id,
backref="histories" ),
annotations=relation( model.HistoryAnnotationAssociation,
order_by=model.HistoryAnnotationAssociation.table.c.id,
backref="histories" ),
ratings=relation( model.HistoryRatingAssociation,
order_by=model.HistoryRatingAssociation.table.c.id,
backref="histories" )
) )
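# The active_* and visible_* relations above are read-only filtered views over
# the same foreign key as `datasets`; viewonly=True keeps SQLAlchemy from
# flushing changes through them, so only `datasets` is writable.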
# Set up proxy so that
# History.users_shared_with
# returns a list of users that the history is shared with.
model.History.users_shared_with_dot_users = association_proxy( 'users_shared_with', 'user' )
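# Usage sketch: given the `users_shared_with` backref defined just below,
# history.users_shared_with is the list of HistoryUserShareAssociation rows,
# while history.users_shared_with_dot_users yields the User objects directly.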
mapper( model.HistoryUserShareAssociation, model.HistoryUserShareAssociation.table, properties=dict(
user=relation( model.User, backref='histories_shared_by_others' ),
history=relation( model.History, backref='users_shared_with' )
) )
mapper( model.User, model.User.table, properties=dict(
histories=relation( model.History,
backref="user",
order_by=desc( model.History.table.c.update_time ) ),
active_histories=relation( model.History,
primaryjoin=(
( model.History.table.c.user_id == model.User.table.c.id ) &
( not_( model.History.table.c.deleted ) )
),
order_by=desc( model.History.table.c.update_time ) ),
galaxy_sessions=relation( model.GalaxySession,
order_by=desc( model.GalaxySession.table.c.update_time ) ),
stored_workflow_menu_entries=relation( model.StoredWorkflowMenuEntry,
backref="user",
cascade="all, delete-orphan",
collection_class=ordering_list( 'order_index' ) ),
_preferences=relation( model.UserPreference,
backref="user",
collection_class=attribute_mapped_collection( 'name' ) ),
# addresses=relation( UserAddress,
# primaryjoin=( User.table.c.id == UserAddress.table.c.user_id ) ),
values=relation( model.FormValues,
primaryjoin=( model.User.table.c.form_values_id == model.FormValues.table.c.id ) ),
api_keys=relation( model.APIKeys,
backref="user",
order_by=desc( model.APIKeys.table.c.create_time ) ),
) )
mapper( model.PasswordResetToken, model.PasswordResetToken.table,
properties=dict( user=relation( model.User, backref="reset_tokens") ) )
# Set up proxy so that this syntax is possible:
# <user_obj>.preferences[pref_name] = pref_value
model.User.preferences = association_proxy( '_preferences', 'value', creator=model.UserPreference )
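# Usage sketch (illustrative key/value, not ones Galaxy necessarily defines):
#   user.preferences['theme'] = 'dark'
# transparently creates a UserPreference row named 'theme', and reading
# user.preferences['theme'] returns just the value string.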
mapper( model.Group, model.Group.table, properties=dict(
users=relation( model.UserGroupAssociation )
) )
mapper( model.UserGroupAssociation, model.UserGroupAssociation.table, properties=dict(
user=relation( model.User, backref="groups" ),
group=relation( model.Group, backref="members" )
) )
mapper( model.DefaultUserPermissions, model.DefaultUserPermissions.table, properties=dict(
user=relation( model.User, backref="default_permissions" ),
role=relation( model.Role )
) )
mapper( model.DefaultHistoryPermissions, model.DefaultHistoryPermissions.table, properties=dict(
history=relation( model.History, backref="default_permissions" ),
role=relation( model.Role )
) )
mapper( model.Role, model.Role.table, properties=dict(
users=relation( model.UserRoleAssociation ),
groups=relation( model.GroupRoleAssociation )
) )
mapper( model.UserRoleAssociation, model.UserRoleAssociation.table, properties=dict(
user=relation( model.User, backref="roles" ),
non_private_roles=relation(
model.User,
backref="non_private_roles",
primaryjoin=(
( model.User.table.c.id == model.UserRoleAssociation.table.c.user_id ) &
( model.UserRoleAssociation.table.c.role_id == model.Role.table.c.id ) &
not_( model.Role.table.c.name == model.User.table.c.email ) )
),
role=relation( model.Role )
) )
mapper( model.GroupRoleAssociation, model.GroupRoleAssociation.table, properties=dict(
group=relation( model.Group, backref="roles" ),
role=relation( model.Role )
) )
mapper( model.Quota, model.Quota.table, properties=dict(
users=relation( model.UserQuotaAssociation ),
groups=relation( model.GroupQuotaAssociation )
) )
mapper( model.UserQuotaAssociation, model.UserQuotaAssociation.table, properties=dict(
user=relation( model.User, backref="quotas" ),
quota=relation( model.Quota )
) )
mapper( model.GroupQuotaAssociation, model.GroupQuotaAssociation.table, properties=dict(
group=relation( model.Group, backref="quotas" ),
quota=relation( model.Quota )
) )
mapper( model.DefaultQuotaAssociation, model.DefaultQuotaAssociation.table, properties=dict(
quota=relation( model.Quota, backref="default" )
) )
mapper( model.DatasetPermissions, model.DatasetPermissions.table, properties=dict(
dataset=relation( model.Dataset, backref="actions" ),
role=relation( model.Role, backref="dataset_actions" )
) )
mapper( model.LibraryPermissions, model.LibraryPermissions.table, properties=dict(
library=relation( model.Library, backref="actions" ),
role=relation( model.Role, backref="library_actions" )
) )
mapper( model.LibraryFolderPermissions, model.LibraryFolderPermissions.table, properties=dict(
folder=relation( model.LibraryFolder, backref="actions" ),
role=relation( model.Role, backref="library_folder_actions" )
) )
mapper( model.LibraryDatasetPermissions, model.LibraryDatasetPermissions.table, properties=dict(
library_dataset=relation( model.LibraryDataset, backref="actions" ),
role=relation( model.Role, backref="library_dataset_actions" )
) )
mapper( model.LibraryDatasetDatasetAssociationPermissions, model.LibraryDatasetDatasetAssociationPermissions.table, properties=dict(
library_dataset_dataset_association=relation( model.LibraryDatasetDatasetAssociation, backref="actions" ),
role=relation( model.Role, backref="library_dataset_dataset_actions" )
) )
mapper( model.Library, model.Library.table, properties=dict(
root_folder=relation( model.LibraryFolder, backref=backref( "library_root" ) )
) )
mapper( model.ExtendedMetadata, model.ExtendedMetadata.table, properties=dict(
children=relation( model.ExtendedMetadataIndex,
primaryjoin=( model.ExtendedMetadataIndex.table.c.extended_metadata_id == model.ExtendedMetadata.table.c.id ),
backref=backref( "parent",
primaryjoin=( model.ExtendedMetadataIndex.table.c.extended_metadata_id == model.ExtendedMetadata.table.c.id ) ) )
) )
mapper( model.ExtendedMetadataIndex, model.ExtendedMetadataIndex.table, properties=dict(
extended_metadata=relation( model.ExtendedMetadata,
primaryjoin=( ( model.ExtendedMetadataIndex.table.c.extended_metadata_id == model.ExtendedMetadata.table.c.id ) ) )
) )
mapper( model.LibraryInfoAssociation, model.LibraryInfoAssociation.table, properties=dict(
library=relation( model.Library,
primaryjoin=(
( model.LibraryInfoAssociation.table.c.library_id == model.Library.table.c.id ) &
( not_( model.LibraryInfoAssociation.table.c.deleted ) )
),
backref="info_association" ),
template=relation( model.FormDefinition,
primaryjoin=( model.LibraryInfoAssociation.table.c.form_definition_id == model.FormDefinition.table.c.id ) ),
info=relation( model.FormValues,
primaryjoin=( model.LibraryInfoAssociation.table.c.form_values_id == model.FormValues.table.c.id ) )
) )
mapper( model.LibraryFolder, model.LibraryFolder.table, properties=dict(
folders=relation( model.LibraryFolder,
primaryjoin=( model.LibraryFolder.table.c.parent_id == model.LibraryFolder.table.c.id ),
order_by=asc( model.LibraryFolder.table.c.name ),
backref=backref( "parent",
primaryjoin=( model.LibraryFolder.table.c.parent_id == model.LibraryFolder.table.c.id ),
remote_side=[model.LibraryFolder.table.c.id] ) ),
active_folders=relation( model.LibraryFolder,
primaryjoin=(
( model.LibraryFolder.table.c.parent_id == model.LibraryFolder.table.c.id ) &
( not_( model.LibraryFolder.table.c.deleted ) )
),
order_by=asc( model.LibraryFolder.table.c.name ),
# """sqlalchemy.exc.ArgumentError: Error creating eager relationship 'active_folders'
# on parent class '<class 'galaxy.model.LibraryFolder'>' to child class '<class 'galaxy.model.LibraryFolder'>':
# Cant use eager loading on a self referential relationship."""
lazy=True,
viewonly=True ),
datasets=relation( model.LibraryDataset,
primaryjoin=( ( model.LibraryDataset.table.c.folder_id == model.LibraryFolder.table.c.id ) ),
order_by=asc( model.LibraryDataset.table.c._name ),
lazy=True,
viewonly=True ),
active_datasets=relation( model.LibraryDataset,
primaryjoin=(
( model.LibraryDataset.table.c.folder_id == model.LibraryFolder.table.c.id ) &
( not_( model.LibraryDataset.table.c.deleted ) )
),
order_by=asc( model.LibraryDataset.table.c._name ),
lazy=True,
viewonly=True )
) )
mapper( model.LibraryFolderInfoAssociation, model.LibraryFolderInfoAssociation.table, properties=dict(
folder=relation( model.LibraryFolder,
primaryjoin=(
( model.LibraryFolderInfoAssociation.table.c.library_folder_id == model.LibraryFolder.table.c.id ) &
( not_( model.LibraryFolderInfoAssociation.table.c.deleted ) )
),
backref="info_association" ),
template=relation( model.FormDefinition,
primaryjoin=( model.LibraryFolderInfoAssociation.table.c.form_definition_id == model.FormDefinition.table.c.id ) ),
info=relation( model.FormValues,
primaryjoin=( model.LibraryFolderInfoAssociation.table.c.form_values_id == model.FormValues.table.c.id ) )
) )
mapper( model.LibraryDataset, model.LibraryDataset.table, properties=dict(
folder=relation( model.LibraryFolder ),
library_dataset_dataset_association=relation( model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.LibraryDataset.table.c.library_dataset_dataset_association_id ==
model.LibraryDatasetDatasetAssociation.table.c.id ) ),
expired_datasets=relation( model.LibraryDatasetDatasetAssociation,
foreign_keys=[model.LibraryDataset.table.c.id, model.LibraryDataset.table.c.library_dataset_dataset_association_id ],
primaryjoin=(
( model.LibraryDataset.table.c.id == model.LibraryDatasetDatasetAssociation.table.c.library_dataset_id ) &
( not_( model.LibraryDataset.table.c.library_dataset_dataset_association_id ==
model.LibraryDatasetDatasetAssociation.table.c.id ) )
),
viewonly=True,
uselist=True )
) )
mapper( model.LibraryDatasetDatasetAssociation, model.LibraryDatasetDatasetAssociation.table, properties=dict(
dataset=relation( model.Dataset ),
library_dataset=relation( model.LibraryDataset,
primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.library_dataset_id == model.LibraryDataset.table.c.id ) ),
# user=relation( model.User.mapper ),
user=relation( model.User ),
copied_from_library_dataset_dataset_association=relation( model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id ==
model.LibraryDatasetDatasetAssociation.table.c.id ),
remote_side=[model.LibraryDatasetDatasetAssociation.table.c.id],
uselist=False ),
copied_to_library_dataset_dataset_associations=relation( model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id ==
model.LibraryDatasetDatasetAssociation.table.c.id ) ),
copied_from_history_dataset_association=relation( model.HistoryDatasetAssociation,
primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.copied_from_history_dataset_association_id ==
model.HistoryDatasetAssociation.table.c.id ),
uselist=False ),
copied_to_history_dataset_associations=relation( model.HistoryDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id ==
model.LibraryDatasetDatasetAssociation.table.c.id ) ),
implicitly_converted_datasets=relation( model.ImplicitlyConvertedDatasetAssociation,
primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.ldda_parent_id ==
model.LibraryDatasetDatasetAssociation.table.c.id ) ),
children=relation( model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.parent_id ==
model.LibraryDatasetDatasetAssociation.table.c.id ),
backref=backref( "parent",
primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.parent_id ==
model.LibraryDatasetDatasetAssociation.table.c.id ),
remote_side=[model.LibraryDatasetDatasetAssociation.table.c.id] ) ),
visible_children=relation( model.LibraryDatasetDatasetAssociation,
primaryjoin=(
( model.LibraryDatasetDatasetAssociation.table.c.parent_id == model.LibraryDatasetDatasetAssociation.table.c.id ) &
( model.LibraryDatasetDatasetAssociation.table.c.visible == true() )
),
remote_side=[model.LibraryDatasetDatasetAssociation.table.c.id] ),
extended_metadata=relation( model.ExtendedMetadata,
primaryjoin=( ( model.LibraryDatasetDatasetAssociation.table.c.extended_metadata_id == model.ExtendedMetadata.table.c.id ) )
),
_metadata=deferred(model.LibraryDatasetDatasetAssociation.table.c._metadata)
) )
mapper( model.LibraryDatasetDatasetInfoAssociation, model.LibraryDatasetDatasetInfoAssociation.table, properties=dict(
library_dataset_dataset_association=relation( model.LibraryDatasetDatasetAssociation,
primaryjoin=(
( model.LibraryDatasetDatasetInfoAssociation.table.c.library_dataset_dataset_association_id ==
model.LibraryDatasetDatasetAssociation.table.c.id ) &
( not_( model.LibraryDatasetDatasetInfoAssociation.table.c.deleted ) )
),
backref="info_association" ),
template=relation( model.FormDefinition,
primaryjoin=( model.LibraryDatasetDatasetInfoAssociation.table.c.form_definition_id == model.FormDefinition.table.c.id ) ),
info=relation( model.FormValues,
primaryjoin=( model.LibraryDatasetDatasetInfoAssociation.table.c.form_values_id == model.FormValues.table.c.id ) )
) )
mapper( model.JobToInputDatasetAssociation, model.JobToInputDatasetAssociation.table, properties=dict(
job=relation( model.Job ),
dataset=relation( model.HistoryDatasetAssociation,
lazy=False,
backref="dependent_jobs" )
) )
mapper( model.JobToOutputDatasetAssociation, model.JobToOutputDatasetAssociation.table, properties=dict(
job=relation( model.Job ),
dataset=relation( model.HistoryDatasetAssociation,
lazy=False )
) )
mapper( model.JobToInputDatasetCollectionAssociation, model.JobToInputDatasetCollectionAssociation.table, properties=dict(
job=relation( model.Job ),
dataset_collection=relation( model.HistoryDatasetCollectionAssociation,
lazy=False,
backref="dependent_jobs" )
) )
mapper( model.JobToOutputDatasetCollectionAssociation, model.JobToOutputDatasetCollectionAssociation.table, properties=dict(
job=relation( model.Job ),
dataset_collection_instance=relation( model.HistoryDatasetCollectionAssociation,
lazy=False,
backref="output_dataset_collection_instances" )
) )
mapper( model.JobToImplicitOutputDatasetCollectionAssociation, model.JobToImplicitOutputDatasetCollectionAssociation.table, properties=dict(
job=relation( model.Job ),
dataset_collection=relation( model.DatasetCollection,
backref="output_dataset_collections" )
) )
mapper( model.JobToInputLibraryDatasetAssociation, model.JobToInputLibraryDatasetAssociation.table, properties=dict(
job=relation( model.Job ),
dataset=relation( model.LibraryDatasetDatasetAssociation,
lazy=False,
backref="dependent_jobs" )
) )
mapper( model.JobToOutputLibraryDatasetAssociation, model.JobToOutputLibraryDatasetAssociation.table, properties=dict(
job=relation( model.Job ),
dataset=relation( model.LibraryDatasetDatasetAssociation,
lazy=False )
) )
simple_mapping( model.JobStateHistory,
job=relation( model.Job, backref="state_history" ) )
simple_mapping( model.JobMetricText,
job=relation( model.Job, backref="text_metrics" ) )
simple_mapping( model.TaskMetricText,
task=relation( model.Task, backref="text_metrics" ) )
simple_mapping( model.JobMetricNumeric,
job=relation( model.Job, backref="numeric_metrics" ) )
simple_mapping( model.TaskMetricNumeric,
task=relation( model.Task, backref="numeric_metrics" ) )
simple_mapping( model.ImplicitlyCreatedDatasetCollectionInput,
input_dataset_collection=relation( model.HistoryDatasetCollectionAssociation,
primaryjoin=( ( model.HistoryDatasetCollectionAssociation.table.c.id ==
model.ImplicitlyCreatedDatasetCollectionInput.table.c.input_dataset_collection_id ) ),
# backref="implicitly_created_dataset_collections",
),
)
mapper( model.JobParameter, model.JobParameter.table )
mapper( model.JobExternalOutputMetadata, model.JobExternalOutputMetadata.table, properties=dict(
job=relation( model.Job ),
history_dataset_association=relation( model.HistoryDatasetAssociation, lazy=False ),
library_dataset_dataset_association=relation( model.LibraryDatasetDatasetAssociation, lazy=False )
) )
mapper( model.JobExportHistoryArchive, model.JobExportHistoryArchive.table, properties=dict(
job=relation( model.Job ),
history=relation( model.History ),
dataset=relation( model.Dataset )
) )
mapper( model.JobImportHistoryArchive, model.JobImportHistoryArchive.table, properties=dict(
job=relation( model.Job ),
history=relation( model.History )
) )
mapper( model.GenomeIndexToolData, model.GenomeIndexToolData.table, properties=dict(
job=relation( model.Job, backref='job' ),
dataset=relation( model.Dataset ),
user=relation( model.User ),
deferred=relation( model.DeferredJob, backref='deferred_job' ),
transfer=relation( model.TransferJob, backref='transfer_job' )
) )
mapper( model.PostJobAction, model.PostJobAction.table, properties=dict(
workflow_step=relation( model.WorkflowStep,
backref='post_job_actions',
primaryjoin=( model.WorkflowStep.table.c.id == model.PostJobAction.table.c.workflow_step_id ) )
) )
mapper( model.PostJobActionAssociation, model.PostJobActionAssociation.table, properties=dict(
job=relation( model.Job ),
post_job_action=relation( model.PostJobAction )
) )
mapper( model.Job, model.Job.table, properties=dict(
# user=relation( model.User.mapper ),
user=relation( model.User ),
galaxy_session=relation( model.GalaxySession ),
history=relation( model.History ),
library_folder=relation( model.LibraryFolder, lazy=True ),
parameters=relation( model.JobParameter, lazy=True ),
input_datasets=relation( model.JobToInputDatasetAssociation ),
output_datasets=relation( model.JobToOutputDatasetAssociation, lazy=True ),
output_dataset_collection_instances=relation( model.JobToOutputDatasetCollectionAssociation, lazy=True ),
output_dataset_collections=relation( model.JobToImplicitOutputDatasetCollectionAssociation, lazy=True ),
post_job_actions=relation( model.PostJobActionAssociation, lazy=False ),
input_library_datasets=relation( model.JobToInputLibraryDatasetAssociation ),
output_library_datasets=relation( model.JobToOutputLibraryDatasetAssociation, lazy=True ),
external_output_metadata=relation( model.JobExternalOutputMetadata, lazy=True ),
tasks=relation( model.Task )
) )
mapper( model.Task, model.Task.table, properties=dict(
job=relation( model.Job )
) )
mapper( model.DeferredJob, model.DeferredJob.table, properties={} )
mapper( model.TransferJob, model.TransferJob.table, properties={} )
simple_mapping( model.DatasetCollection,
elements=relation( model.DatasetCollectionElement,
primaryjoin=( model.DatasetCollection.table.c.id == model.DatasetCollectionElement.table.c.dataset_collection_id ),
remote_side=[ model.DatasetCollectionElement.table.c.dataset_collection_id ],
backref="collection",
order_by=model.DatasetCollectionElement.table.c.element_index )
)
simple_mapping( model.HistoryDatasetCollectionAssociation,
collection=relation( model.DatasetCollection ),
history=relation( model.History,
backref='dataset_collections' ),
copied_from_history_dataset_collection_association=relation( model.HistoryDatasetCollectionAssociation,
primaryjoin=( model.HistoryDatasetCollectionAssociation.table.c.copied_from_history_dataset_collection_association_id ==
model.HistoryDatasetCollectionAssociation.table.c.id ),
remote_side=[model.HistoryDatasetCollectionAssociation.table.c.id],
uselist=False ),
copied_to_history_dataset_collection_associations=relation( model.HistoryDatasetCollectionAssociation,
primaryjoin=( model.HistoryDatasetCollectionAssociation.table.c.copied_from_history_dataset_collection_association_id ==
model.HistoryDatasetCollectionAssociation.table.c.id ) ),
implicit_input_collections=relation( model.ImplicitlyCreatedDatasetCollectionInput,
primaryjoin=( ( model.HistoryDatasetCollectionAssociation.table.c.id ==
model.ImplicitlyCreatedDatasetCollectionInput.table.c.dataset_collection_id ) ),
backref="dataset_collection",
),
tags=relation( model.HistoryDatasetCollectionTagAssociation,
order_by=model.HistoryDatasetCollectionTagAssociation.table.c.id,
backref='dataset_collections' ),
annotations=relation( model.HistoryDatasetCollectionAnnotationAssociation,
order_by=model.HistoryDatasetCollectionAnnotationAssociation.table.c.id,
backref="dataset_collections" ),
ratings=relation( model.HistoryDatasetCollectionRatingAssociation,
order_by=model.HistoryDatasetCollectionRatingAssociation.table.c.id,
backref="dataset_collections" )
)
simple_mapping( model.LibraryDatasetCollectionAssociation,
collection=relation( model.DatasetCollection ),
folder=relation( model.LibraryFolder,
backref='dataset_collections' ),
tags=relation( model.LibraryDatasetCollectionTagAssociation,
order_by=model.LibraryDatasetCollectionTagAssociation.table.c.id,
backref='dataset_collections' ),
annotations=relation( model.LibraryDatasetCollectionAnnotationAssociation,
order_by=model.LibraryDatasetCollectionAnnotationAssociation.table.c.id,
backref="dataset_collections" ),
ratings=relation( model.LibraryDatasetCollectionRatingAssociation,
order_by=model.LibraryDatasetCollectionRatingAssociation.table.c.id,
backref="dataset_collections" ) )
simple_mapping( model.DatasetCollectionElement,
hda=relation( model.HistoryDatasetAssociation,
primaryjoin=( model.DatasetCollectionElement.table.c.hda_id == model.HistoryDatasetAssociation.table.c.id ) ),
ldda=relation( model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.DatasetCollectionElement.table.c.ldda_id == model.LibraryDatasetDatasetAssociation.table.c.id ) ),
child_collection=relation( model.DatasetCollection,
primaryjoin=( model.DatasetCollectionElement.table.c.child_collection_id == model.DatasetCollection.table.c.id ) ) )
mapper( model.Event, model.Event.table, properties=dict(
history=relation( model.History ),
galaxy_session=relation( model.GalaxySession ),
# user=relation( model.User.mapper ) ) )
user=relation( model.User )
) )
mapper( model.GalaxySession, model.GalaxySession.table, properties=dict(
histories=relation( model.GalaxySessionToHistoryAssociation ),
current_history=relation( model.History ),
# user=relation( model.User.mapper ) ) )
user=relation( model.User )
) )
mapper( model.GalaxySessionToHistoryAssociation, model.GalaxySessionToHistoryAssociation.table, properties=dict(
galaxy_session=relation( model.GalaxySession ),
history=relation( model.History )
) )
mapper( model.Workflow, model.Workflow.table, properties=dict(
steps=relation( model.WorkflowStep,
backref='workflow',
primaryjoin=( ( model.Workflow.table.c.id == model.WorkflowStep.table.c.workflow_id ) ),
order_by=asc( model.WorkflowStep.table.c.order_index ),
cascade="all, delete-orphan",
lazy=False )
) )
mapper( model.WorkflowStep, model.WorkflowStep.table, properties=dict(
subworkflow=relation( model.Workflow,
primaryjoin=( ( model.Workflow.table.c.id == model.WorkflowStep.table.c.subworkflow_id ) ),
backref="parent_workflow_steps"),
tags=relation( model.WorkflowStepTagAssociation,
order_by=model.WorkflowStepTagAssociation.table.c.id,
backref="workflow_steps" ),
annotations=relation( model.WorkflowStepAnnotationAssociation,
order_by=model.WorkflowStepAnnotationAssociation.table.c.id,
backref="workflow_steps" )
) )
mapper( model.WorkflowOutput, model.WorkflowOutput.table, properties=dict(
workflow_step=relation( model.WorkflowStep,
backref='workflow_outputs',
primaryjoin=( model.WorkflowStep.table.c.id == model.WorkflowOutput.table.c.workflow_step_id ) )
) )
mapper( model.WorkflowStepConnection, model.WorkflowStepConnection.table, properties=dict(
input_step=relation( model.WorkflowStep,
backref="input_connections",
cascade="all",
primaryjoin=( model.WorkflowStepConnection.table.c.input_step_id == model.WorkflowStep.table.c.id ) ),
input_subworkflow_step=relation( model.WorkflowStep,
backref=backref("parent_workflow_input_connections", uselist=True),
primaryjoin=( model.WorkflowStepConnection.table.c.input_subworkflow_step_id == model.WorkflowStep.table.c.id ),
),
output_step=relation( model.WorkflowStep,
backref="output_connections",
cascade="all",
primaryjoin=( model.WorkflowStepConnection.table.c.output_step_id == model.WorkflowStep.table.c.id ) ),
) )
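# A WorkflowStepConnection thus ties together up to three steps: the consuming
# input_step, the producing output_step, and, when the input feeds a
# subworkflow, the corresponding step inside the child workflow
# (input_subworkflow_step).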
mapper( model.StoredWorkflow, model.StoredWorkflow.table, properties=dict(
user=relation( model.User,
primaryjoin=( model.User.table.c.id == model.StoredWorkflow.table.c.user_id ),
backref='stored_workflows' ),
workflows=relation( model.Workflow,
backref='stored_workflow',
cascade="all, delete-orphan",
primaryjoin=( model.StoredWorkflow.table.c.id == model.Workflow.table.c.stored_workflow_id ) ),
latest_workflow=relation( model.Workflow,
post_update=True,
primaryjoin=( model.StoredWorkflow.table.c.latest_workflow_id == model.Workflow.table.c.id ),
lazy=False ),
tags=relation( model.StoredWorkflowTagAssociation,
order_by=model.StoredWorkflowTagAssociation.table.c.id,
backref="stored_workflows" ),
owner_tags=relation( model.StoredWorkflowTagAssociation,
primaryjoin=(
and_( model.StoredWorkflow.table.c.id == model.StoredWorkflowTagAssociation.table.c.stored_workflow_id,
model.StoredWorkflow.table.c.user_id == model.StoredWorkflowTagAssociation.table.c.user_id )
),
order_by=model.StoredWorkflowTagAssociation.table.c.id ),
annotations=relation( model.StoredWorkflowAnnotationAssociation,
order_by=model.StoredWorkflowAnnotationAssociation.table.c.id,
backref="stored_workflows" ),
ratings=relation( model.StoredWorkflowRatingAssociation,
order_by=model.StoredWorkflowRatingAssociation.table.c.id,
backref="stored_workflows" )
) )
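# latest_workflow uses post_update=True because StoredWorkflow and Workflow
# reference each other; SQLAlchemy breaks the circular dependency by writing
# latest_workflow_id in a second UPDATE after both rows have been inserted.
# The latest_revision relations on Page and Visualization below use the same
# trick.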
# Set up proxy so that
# StoredWorkflow.users_shared_with
# returns a list of users that the workflow is shared with.
model.StoredWorkflow.users_shared_with_dot_users = association_proxy( 'users_shared_with', 'user' )
mapper( model.StoredWorkflowUserShareAssociation, model.StoredWorkflowUserShareAssociation.table, properties=dict(
user=relation( model.User,
backref='workflows_shared_by_others' ),
stored_workflow=relation( model.StoredWorkflow,
backref='users_shared_with' )
) )
mapper( model.StoredWorkflowMenuEntry, model.StoredWorkflowMenuEntry.table, properties=dict(
stored_workflow=relation( model.StoredWorkflow )
) )
mapper( model.WorkflowInvocation, model.WorkflowInvocation.table, properties=dict(
history=relation( model.History ),
input_parameters=relation( model.WorkflowRequestInputParameter ),
step_states=relation( model.WorkflowRequestStepState ),
input_step_parameters=relation( model.WorkflowRequestInputStepParmeter ),
input_datasets=relation( model.WorkflowRequestToInputDatasetAssociation ),
input_dataset_collections=relation( model.WorkflowRequestToInputDatasetCollectionAssociation ),
subworkflow_invocations=relation( model.WorkflowInvocationToSubworkflowInvocationAssociation,
primaryjoin=( ( model.WorkflowInvocationToSubworkflowInvocationAssociation.table.c.workflow_invocation_id == model.WorkflowInvocation.table.c.id ) ),
backref=backref("parent_workflow_invocation", uselist=False),
uselist=True,
),
steps=relation( model.WorkflowInvocationStep,
backref='workflow_invocation',
lazy=False ),
workflow=relation( model.Workflow )
) )
mapper( model.WorkflowInvocationToSubworkflowInvocationAssociation, model.WorkflowInvocationToSubworkflowInvocationAssociation.table, properties=dict(
subworkflow_invocation=relation( model.WorkflowInvocation,
primaryjoin=( ( model.WorkflowInvocationToSubworkflowInvocationAssociation.table.c.subworkflow_invocation_id == model.WorkflowInvocation.table.c.id ) ),
backref="parent_workflow_invocation_association",
uselist=False,
),
workflow_step=relation( model.WorkflowStep ),
) )
mapper( model.WorkflowInvocationStep, model.WorkflowInvocationStep.table, properties=dict(
workflow_step=relation( model.WorkflowStep ),
job=relation( model.Job,
backref=backref( 'workflow_invocation_step',
uselist=False ) )
) )
simple_mapping( model.WorkflowRequestInputParameter,
workflow_invocation=relation( model.WorkflowInvocation ) )
simple_mapping( model.WorkflowRequestStepState,
workflow_invocation=relation( model.WorkflowInvocation ),
workflow_step=relation( model.WorkflowStep ) )
simple_mapping( model.WorkflowRequestInputStepParmeter,
workflow_invocation=relation( model.WorkflowInvocation ),
workflow_step=relation( model.WorkflowStep ) )
simple_mapping( model.WorkflowRequestToInputDatasetAssociation,
workflow_invocation=relation( model.WorkflowInvocation ),
workflow_step=relation( model.WorkflowStep ),
dataset=relation( model.HistoryDatasetAssociation ) )
simple_mapping( model.WorkflowRequestToInputDatasetCollectionAssociation,
workflow_invocation=relation( model.WorkflowInvocation ),
workflow_step=relation( model.WorkflowStep ),
dataset_collection=relation( model.HistoryDatasetCollectionAssociation ) )
mapper( model.MetadataFile, model.MetadataFile.table, properties=dict(
history_dataset=relation( model.HistoryDatasetAssociation ),
library_dataset=relation( model.LibraryDatasetDatasetAssociation )
) )
mapper( model.PageRevision, model.PageRevision.table )
mapper( model.Page, model.Page.table, properties=dict(
user=relation( model.User ),
revisions=relation( model.PageRevision,
backref='page',
cascade="all, delete-orphan",
primaryjoin=( model.Page.table.c.id == model.PageRevision.table.c.page_id ) ),
latest_revision=relation( model.PageRevision,
post_update=True,
primaryjoin=( model.Page.table.c.latest_revision_id == model.PageRevision.table.c.id ),
lazy=False ),
tags=relation( model.PageTagAssociation,
order_by=model.PageTagAssociation.table.c.id,
backref="pages" ),
annotations=relation( model.PageAnnotationAssociation,
order_by=model.PageAnnotationAssociation.table.c.id,
backref="pages" ),
ratings=relation( model.PageRatingAssociation,
order_by=model.PageRatingAssociation.table.c.id,
backref="pages" )
) )
# Set up proxy so that
# Page.users_shared_with
# returns a list of users that the page is shared with.
model.Page.users_shared_with_dot_users = association_proxy( 'users_shared_with', 'user' )
mapper( model.PageUserShareAssociation, model.PageUserShareAssociation.table,
properties=dict( user=relation( model.User, backref='pages_shared_by_others' ),
page=relation( model.Page, backref='users_shared_with' ) ) )
mapper( model.VisualizationRevision, model.VisualizationRevision.table )
mapper( model.Visualization, model.Visualization.table, properties=dict(
user=relation( model.User ),
revisions=relation( model.VisualizationRevision,
backref='visualization',
cascade="all, delete-orphan",
primaryjoin=( model.Visualization.table.c.id == model.VisualizationRevision.table.c.visualization_id ) ),
latest_revision=relation( model.VisualizationRevision,
post_update=True,
primaryjoin=( model.Visualization.table.c.latest_revision_id == model.VisualizationRevision.table.c.id ),
lazy=False ),
tags=relation( model.VisualizationTagAssociation,
order_by=model.VisualizationTagAssociation.table.c.id,
backref="visualizations" ),
annotations=relation( model.VisualizationAnnotationAssociation,
order_by=model.VisualizationAnnotationAssociation.table.c.id,
backref="visualizations" ),
ratings=relation( model.VisualizationRatingAssociation,
order_by=model.VisualizationRatingAssociation.table.c.id,
backref="visualizations" )
) )
# Set up proxy so that
# Visualization.users_shared_with
# returns a list of users that the visualization is shared with.
model.Visualization.users_shared_with_dot_users = association_proxy( 'users_shared_with', 'user' )
mapper( model.VisualizationUserShareAssociation, model.VisualizationUserShareAssociation.table, properties=dict(
user=relation( model.User,
backref='visualizations_shared_by_others' ),
visualization=relation( model.Visualization,
backref='users_shared_with' )
) )
# Tag mappings.
simple_mapping( model.Tag,
children=relation( model.Tag, backref=backref( 'parent', remote_side=[model.Tag.table.c.id] ) ) )
def tag_mapping( tag_association_class, backref_name ):
simple_mapping( tag_association_class, tag=relation( model.Tag, backref=backref_name), user=relation( model.User ) )
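# tag_mapping gives every *TagAssociation class the same shape: a .tag relation
# with a class-specific backref on Tag (e.g. Tag.tagged_histories) plus a .user
# relation.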
tag_mapping( model.HistoryTagAssociation, "tagged_histories" )
tag_mapping( model.DatasetTagAssociation, "tagged_datasets" )
tag_mapping( model.HistoryDatasetAssociationTagAssociation, "tagged_history_dataset_associations" )
tag_mapping( model.PageTagAssociation, "tagged_pages" )
tag_mapping( model.StoredWorkflowTagAssociation, "tagged_workflows" )
tag_mapping( model.WorkflowStepTagAssociation, "tagged_workflow_steps" )
tag_mapping( model.VisualizationTagAssociation, "tagged_visualizations" )
tag_mapping( model.HistoryDatasetCollectionTagAssociation, "tagged_history_dataset_collections" )
tag_mapping( model.LibraryDatasetCollectionTagAssociation, "tagged_library_dataset_collections" )
tag_mapping( model.ToolTagAssociation, "tagged_tools" )
# Annotation mappings.
def annotation_mapping( annotation_class, **kwds ):
kwds = dict( [ (key, relation( value ) ) for key, value in kwds.iteritems() ] )
simple_mapping( annotation_class, **dict(user=relation( model.User ), **kwds ) )
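# annotation_mapping builds the relation() objects itself, so callers pass bare
# model classes (e.g. history=model.History) rather than relation() instances.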
annotation_mapping( model.HistoryAnnotationAssociation, history=model.History )
annotation_mapping( model.HistoryDatasetAssociationAnnotationAssociation, hda=model.HistoryDatasetAssociation )
annotation_mapping( model.StoredWorkflowAnnotationAssociation, stored_workflow=model.StoredWorkflow )
annotation_mapping( model.WorkflowStepAnnotationAssociation, workflow_step=model.WorkflowStep )
annotation_mapping( model.PageAnnotationAssociation, page=model.Page )
annotation_mapping( model.VisualizationAnnotationAssociation, visualization=model.Visualization )
annotation_mapping( model.HistoryDatasetCollectionAnnotationAssociation,
history_dataset_collection=model.HistoryDatasetCollectionAssociation )
annotation_mapping( model.LibraryDatasetCollectionAnnotationAssociation,
library_dataset_collection=model.LibraryDatasetCollectionAssociation )
# Rating mappings.
def rating_mapping( rating_class, **kwds ):
kwds = dict( [ (key, relation( value ) ) for key, value in kwds.iteritems() ] )
simple_mapping( rating_class, **dict(user=relation( model.User ), **kwds ) )
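# rating_mapping mirrors annotation_mapping: bare model classes in, relation()
# properties out, always with an extra .user relation.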
rating_mapping( model.HistoryRatingAssociation, history=model.History )
rating_mapping( model.HistoryDatasetAssociationRatingAssociation, hda=model.HistoryDatasetAssociation )
rating_mapping( model.StoredWorkflowRatingAssociation, stored_workflow=model.StoredWorkflow )
rating_mapping( model.PageRatingAssociation, page=model.Page )
rating_mapping( model.VisualizationRatingAssociation, visualization=model.Visualization )
rating_mapping( model.HistoryDatasetCollectionRatingAssociation,
history_dataset_collection=model.HistoryDatasetCollectionAssociation )
rating_mapping( model.LibraryDatasetCollectionRatingAssociation,
                library_dataset_collection=model.LibraryDatasetCollectionAssociation )
# Data Manager tables
mapper( model.DataManagerHistoryAssociation, model.DataManagerHistoryAssociation.table, properties=dict(
history=relation( model.History ),
user=relation( model.User,
backref='data_manager_histories' )
) )
mapper( model.DataManagerJobAssociation, model.DataManagerJobAssociation.table, properties=dict(
job=relation( model.Job,
backref=backref( 'data_manager_association', uselist=False ),
uselist=False )
) )
# User tables.
mapper( model.UserPreference, model.UserPreference.table, properties={} )
mapper( model.UserAction, model.UserAction.table, properties=dict(
# user=relation( model.User.mapper )
user=relation( model.User )
) )
mapper( model.APIKeys, model.APIKeys.table, properties={} )
# model.HistoryDatasetAssociation.mapper.add_property( "creating_job_associations",
# relation( model.JobToOutputDatasetAssociation ) )
# model.LibraryDatasetDatasetAssociation.mapper.add_property( "creating_job_associations",
# relation( model.JobToOutputLibraryDatasetAssociation ) )
class_mapper( model.HistoryDatasetAssociation ).add_property(
"creating_job_associations", relation( model.JobToOutputDatasetAssociation ) )
class_mapper( model.LibraryDatasetDatasetAssociation ).add_property(
"creating_job_associations", relation( model.JobToOutputLibraryDatasetAssociation ) )
class_mapper( model.HistoryDatasetCollectionAssociation ).add_property(
"creating_job_associations", relation( model.JobToOutputDatasetCollectionAssociation ) )
# Helper methods.
def db_next_hid( self, n=1 ):
"""
db_next_hid( self )
Override __next_hid to generate from the database in a concurrency safe way.
    Loads the next hid from the DB and returns it.
    It also saves the future next hid back into the DB.
:rtype: int
:returns: the next history id
"""
conn = object_session( self ).connection()
table = self.table
trans = conn.begin()
try:
next_hid = select( [table.c.hid_counter], table.c.id == self.id, for_update=True ).scalar()
table.update( table.c.id == self.id ).execute( hid_counter=( next_hid + n ) )
trans.commit()
return next_hid
except:
trans.rollback()
raise
model.History._next_hid = db_next_hid
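# Example (hypothetical counter state): with hid_counter currently 7,
#   history._next_hid()     # returns 7 and advances the counter to 8
#   history._next_hid(n=3)  # returns 8 and advances the counter to 11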
def init( file_path, url, engine_options={}, create_tables=False, map_install_models=False,
database_query_profiling_proxy=False, object_store=None, trace_logger=None, use_pbkdf2=True ):
"""Connect mappings to the database"""
# Connect dataset to the file path
model.Dataset.file_path = file_path
# Connect dataset to object store
model.Dataset.object_store = object_store
# Use PBKDF2 password hashing?
model.User.use_pbkdf2 = use_pbkdf2
# Load the appropriate db module
engine = build_engine( url, engine_options, database_query_profiling_proxy, trace_logger )
# Connect the metadata to the database.
metadata.bind = engine
model_modules = [model]
if map_install_models:
import galaxy.model.tool_shed_install.mapping # noqa
from galaxy.model import tool_shed_install
model_modules.append(tool_shed_install)
result = ModelMapping(model_modules, engine=engine)
# Create tables if needed
if create_tables:
metadata.create_all()
# metadata.engine.commit()
result.create_tables = create_tables
# load local galaxy security policy
result.security_agent = GalaxyRBACAgent( result )
return result
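# Minimal usage sketch (hypothetical file path and connection URL):
#   mapping = init( "database/files", "sqlite:///universe.sqlite", create_tables=True )
#   rbac = mapping.security_agent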
|
py | 7df9792fdbcff4f9569b93c020d587e06aced6a3 | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test the case (reported by Jeff Petkau, bug #694744) where a target
is a source for another target with a scanner, which used to cause us
to push the file to the CacheDir after the build signature had already
been cleared (as a sign that the built file should now be rescanned).
"""
import os.path
import shutil
import TestSCons
test = TestSCons.TestSCons()
test.subdir('cache')
test.write('SConstruct', """\
DefaultEnvironment(tools=[])
import SCons
CacheDir(r'%s')
def docopy(target,source,env):
data = source[0].get_contents()
f = open(target[0].rfile().get_abspath(), "wb")
f.write(data)
f.close()
def sillyScanner(node, env, dirs):
print('This is never called (unless we build file.out)')
return []
SillyScanner = SCons.Scanner.Base(function = sillyScanner, skeys = ['.res'])
env = Environment(tools=[],
SCANNERS = [SillyScanner],
BUILDERS = {})
r = env.Command('file.res', 'file.ma', docopy)
env.Command('file.out', r, docopy)
# make r the default. Note that we don't even try to build file.out,
# and so SillyScanner never runs. The bug is the same if we build
# file.out, though.
Default(r)
""" % test.workpath('cache'))
test.write('file.ma', "file.ma\n")
test.run()
test.must_not_exist(test.workpath('cache', 'N', 'None'))
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
py | 7df979e3930de406a90c5e5a02c32ae968c6d3da | import cv2
import os
import numpy as np
import gc
import shutil
Zh={
"็": 0,"ๆฒช": 1,"ๆดฅ": 2,"ๆธ": 3,"ๅ": 4,"ๆ": 5,"่": 6,"่พฝ": 7,"ๅ": 8,"้ป": 9,"่": 10,"ๆต": 11,
"ไบฌ": 12,"้ฝ": 13,"่ตฃ": 14,"้ฒ": 15,"่ฑซ": 16,"้": 17,"ๆน": 18,"็ฒค": 19,"ๆก": 20,"็ผ": 21,
"ๅท": 22,"่ดต": 23,"ไบ": 24,"่ฅฟ": 25,"้": 26,"็": 27,"้": 28,"ๅฎ": 29,"ๆฐ": 30
}
Char={
"A" : 0,"B" : 1,"C" : 2,"D" : 3,"E" : 4,"F" : 5,"G" : 6,"H" : 7,"J" : 8,"K" : 9,"L" : 10,"M" : 11,
"N" : 12,"P" : 13,"Q" : 14,"R" : 15,"S" : 16,"T" : 17,"U" : 18,"V" : 19,"W" : 20,"X": 21,
"Y" : 22,"Z" : 23,"0" : 24,"1" : 25,"2" : 26,"3" : 27,"4" : 28,"5" : 29,"6" : 30,"7" : 31,
"8" : 32,"9" : 33
}
def find_char(string,key,start,time):
temp=start+1
for t in range(0,time):
temp=string.find(key,temp+1)
return temp
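# Example: find_char("a-b-c-d", "-", -1, 2) returns 3, the index of the
# second "-" found after position -1.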
def getKeysByValue(dictOfElements, valueToFind):
listOfKeys = str()
listOfItems = dictOfElements.items()
for item in listOfItems:
if item[1] == valueToFind:
listOfKeys=item[0]
return listOfKeys
def load_img(location,folder):
image=cv2.imread(location)
start=len(folder)+1
x_start=find_char(location,"-",start,2)
x_end=find_char(location,"&",start,1)
x=location[x_start+1:x_end]
x=int(x)
y_end=find_char(location,"_",start,2)
y=location[x_end+1:y_end]
y=int(y)
x1_end=find_char(location,"&",start,2)
x1=location[y_end+1:x1_end]
x1=int(x1)
y1_end=find_char(location,"-",start,3)
y1=location[x1_end+1:y1_end]
y1=int(y1)
image=image[y:y1,x:x1]
return image
def load_label(location):
key_list=[]
start=find_char(location,'-',-50,1)
end=find_char(location,'_',start,1)
key=location[start+1:end]
key=int(key)+34
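    # the province index is offset by 34 (the size of the Char map) so that
    # Zh and Char labels share one non-overlapping label space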
key_list.append(key)
for i in range(0,5):
start=find_char(location,'_',start,1)
end=find_char(location,'_',start,1)
key=location[start+1:end]
key=int(key)
key_list.append(key)
last=find_char(location,'-',start,1)
key=location[end+1:last]
key=int(key)
key_list.append(key)
return key_list
def gen_dataset(path,count,write_flag):
img_path=[]
img_data=[]
    for i in path:
        num = count // len(path)
        for root, dirs, filenames in os.walk(i):
            for name in filenames:
                if num > 0:
                    name = os.path.join(i, name)
                    img_path.append(name)
                    num -= 1
label_data=np.empty((len(img_path),7))
num=0
char_label=[]
if write_flag==1:
if not os.path.exists("/home/ly0kos/WD/tensorflow/CCPD2019/ccpd_train/"):
os.mkdir("/home/ly0kos/WD/tensorflow/CCPD2019/ccpd_train/")
write_path="/home/ly0kos/WD/tensorflow/CCPD2019/ccpd_train/"
else:
shutil.rmtree("/home/ly0kos/WD/tensorflow/CCPD2019/ccpd_train/")
os.mkdir("/home/ly0kos/WD/tensorflow/CCPD2019/ccpd_train/")
write_path="/home/ly0kos/WD/tensorflow/CCPD2019/ccpd_train/"
elif write_flag==2:
if not os.path.exists("/home/ly0kos/WD/tensorflow/CCPD2019/ccpd_test/"):
os.mkdir("/home/ly0kos/WD/tensorflow/CCPD2019/ccpd_test/")
write_path="/home/ly0kos/WD/tensorflow/CCPD2019/ccpd_test/"
else:
shutil.rmtree("/home/ly0kos/WD/tensorflow/CCPD2019/ccpd_test/")
os.mkdir("/home/ly0kos/WD/tensorflow/CCPD2019/ccpd_test/")
write_path="/home/ly0kos/WD/tensorflow/CCPD2019/ccpd_test/"
for loc in img_path:
path=os.path.abspath(os.path.dirname(loc) + os.path.sep + ".")
image=load_img(loc,path)
image=cv2.resize(image,(128,128))
image=cv2.GaussianBlur(image,(3,3),0)
img_data.append(image)
label=load_label(loc) #label of number
label=np.asarray(label)
label_data[num]=label
char=getKeysByValue(Zh,label[0]-34) #decode filename
for i in range(1,7):
char+=getKeysByValue(Char,label[i])
char_label.append(char) #list of decoded filename
if write_flag!=0:
filename=write_path+char+".jpg"
cv2.imwrite(filename,image)
num+=1
if count>1:
count-=1
else:
img_data=np.asarray(img_data)
if write_flag==1:
return img_data,label_data
elif write_flag==2:
return img_data,label_data
else:
return img_data,label_data
img_data=np.asarray(img_data)
gc.collect()
return img_data,label_data
|
py | 7df97c45175ff10f6ff43dda372ac79fc1a51aba | import asyncio
from typing import Optional
import discord
from app import utils
from app.classes.bot import Bot
from app.i18n import t_
ZERO_WIDTH_SPACE = "\u200B"
def get_plain_text(
starboard: dict, orig_message: dict, points: int, guild: discord.Guild
) -> str:
forced = starboard["id"] in orig_message["forced"]
frozen = orig_message["frozen"]
emoji = utils.pretty_emoji_string([starboard["display_emoji"]], guild)
channel = f"<#{orig_message['channel_id']}>"
mention = starboard["ping"]
return str(
f"**{emoji} {points} | {channel}"
+ (f" | <@{orig_message['author_id']}>" if mention else "")
+ f"{' ๐' if forced else ''}{' โ๏ธ' if frozen else ''}**"
)
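# Example output (hypothetical ids; ping enabled, not forced or frozen):
#   "**⭐ 5 | <#1234> | <@5678>**"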
async def sbemojis(bot: Bot, guild_id: int) -> list[str]:
_emojis = await bot.db.fetch(
"""SELECT star_emojis FROM starboards
WHERE guild_id=$1""",
guild_id,
)
if _emojis:
emojis = [
emoji for record in _emojis for emoji in record["star_emojis"]
]
else:
emojis = []
return emojis
async def orig_message(bot: Bot, message_id: int) -> Optional[dict]:
starboard_message = await bot.db.sb_messages.get(message_id)
if starboard_message is not None:
return await bot.db.messages.get(starboard_message["orig_id"])
return await bot.db.messages.get(message_id)
async def embed_message(
bot: Bot, message: discord.Message, color: str = None, files: bool = True
) -> tuple[discord.Embed, list[discord.File]]:
nsfw = message.channel.is_nsfw()
content = utils.escmask(message.system_content)
urls = []
extra_attachments = []
image_used = False
thumbnail_used = False
for attachment in message.attachments:
if files:
try:
f = await attachment.to_file()
except (discord.Forbidden, discord.HTTPException):
f = None
else:
f = None
urls.append(
{
"name": attachment.filename,
"display_url": attachment.url,
"url": attachment.url,
"type": "upload",
"spoiler": attachment.is_spoiler(),
"file": f,
"show_link": True,
"thumbnail_only": False,
}
)
embed: discord.Embed
for embed in message.embeds:
if embed.type in ["rich", "article", "link"]:
if embed.title != embed.Empty:
if embed.url == embed.Empty:
content += f"\n\n__**{utils.escmd(embed.title)}**__\n"
else:
content += (
f"\n\n__**[{utils.escmd(embed.title)}]({embed.url})"
"**__\n"
)
else:
content += "\n"
content += (
(f"{embed.description}\n")
if embed.description != embed.Empty
else ""
)
for field in embed.fields:
name = f"\n**{utils.escmd(field.name)}**\n"
value = f"{field.value}\n"
content += name + value
if embed.footer.text is not embed.Empty:
content += f"\n{utils.escmd(embed.footer.text)}\n"
if embed.image.url is not embed.Empty:
urls.append(
{
"name": "Embed Image",
"url": embed.image.url,
"display_url": embed.image.url,
"type": "image",
"spoiler": False,
"show_link": False,
"thumbnail_only": False,
}
)
if embed.thumbnail.url is not embed.Empty:
urls.append(
{
"name": "Embed Thumbnail",
"url": embed.thumbnail.url,
"display_url": embed.thumbnail.url,
"type": "image",
"spoiler": False,
"show_link": False,
"thumbnail_only": True,
}
)
elif embed.type == "image":
if embed.url is not embed.Empty:
urls.append(
{
"name": "Image",
"display_url": embed.thumbnail.url,
"url": embed.url,
"type": "image",
"spoiler": False,
"show_link": True,
"thumbnail_only": False,
}
)
elif embed.type == "gifv":
if embed.url is not embed.Empty:
urls.append(
{
"name": "GIF",
"display_url": embed.thumbnail.url,
"url": embed.url,
"type": "gif",
"spoiler": False,
"show_link": True,
"thumbnail_only": True,
}
)
elif embed.type == "video":
if embed.url is not embed.Empty:
urls.append(
{
"name": embed.title,
"display_url": embed.thumbnail.url,
"url": embed.url,
"type": "video",
"spoiler": False,
"show_link": True,
"thumbnail_only": False,
}
)
    if len(content) > 2048:
        to_remove = len(content + " ...") - 2048
        content = content[:-to_remove] + " ..."
embed = discord.Embed(
color=bot.theme_color
if color is None
else int(color.replace("#", ""), 16),
description=content,
).set_author(name=str(message.author), icon_url=message.author.avatar_url)
ref_message = None
ref_jump = None
ref_author = None
if message.reference is not None:
if message.reference.resolved is None:
ref_message = await bot.cache.fetch_message(
message.reference.guild_id,
message.reference.channel_id,
message.reference.message_id,
)
if ref_message is None:
ref_content = t_("*Message was deleted*")
else:
ref_author = str(ref_message.author)
ref_content = ref_message.system_content
else:
ref_message = message.reference.resolved
if type(message.reference.resolved) is discord.Message:
ref_content = message.reference.resolved.system_content
ref_author = str(ref_message.author)
else:
ref_content = t_("*Message was deleted*")
if ref_content == "":
ref_content = t_("*File Only*")
embed.add_field(
name=f'Replying to {ref_author or t_("Unknown")}',
value=ref_content,
inline=False,
)
if type(ref_message) is discord.Message:
ref_jump = t_("**[Replying to {0}]({1})**\n").format(
ref_author, ref_message.jump_url
)
else:
ref_jump = t_(
"**[Replying to Unknown (deleted)]"
"(https://discord.com/channels/{0.guild_id}/"
"{0.channel_id}/{0.message_id})**\n"
).format(message.reference)
embed.add_field(
name=ZERO_WIDTH_SPACE,
value=str(
str(ref_jump if ref_message else "")
+ t_("**[Jump to Message]({0})**").format(message.jump_url),
),
inline=False,
)
image_types = ["png", "jpg", "jpeg", "gif", "gifv", "svg", "webp"]
for data in urls:
if data["type"] == "upload":
is_image = False
for t in image_types:
if data["url"].endswith(t):
is_image = True
break
added = False
if is_image and not nsfw and not data["spoiler"]:
if not image_used:
embed.set_image(url=data["display_url"])
image_used = True
added = True
if not added and data["file"] is not None:
f: discord.File = data["file"]
if nsfw:
f.filename = "SPOILER_" + f.filename
extra_attachments.append(f)
elif not nsfw:
if data["thumbnail_only"]:
if not thumbnail_used:
embed.set_thumbnail(url=data["display_url"])
thumbnail_used = True
elif not image_used:
embed.set_image(url=data["display_url"])
image_used = True
to_show = str(
"\n".join(
f"**[{d['name']}]({d['url']})**" for d in urls if d["show_link"]
)
)
if len(to_show) != 0:
embed.add_field(name=ZERO_WIDTH_SPACE, value=to_show)
embed.timestamp = message.created_at
return embed, extra_attachments
async def update_message(bot: Bot, message_id: int, guild_id: int) -> None:
sql_message = await bot.db.messages.get(message_id)
if not sql_message:
return
sql_starboards = await bot.db.starboards.get_many(guild_id)
sql_author = await bot.db.users.get(sql_message["author_id"])
all_tasks = []
if not sql_message["trashed"]:
for s in sql_starboards:
all_tasks.append(
asyncio.create_task(
handle_starboard(bot, s, sql_message, sql_author)
)
)
for t in all_tasks:
await t
else:
for s in sql_starboards:
await handle_trashed_message(bot, s, sql_message, sql_author)
async def set_points(bot: Bot, points: int, message_id: int) -> None:
await bot.db.execute(
"""UPDATE starboard_messages
SET points=$1 WHERE id=$2""",
points,
message_id,
)
async def calculate_points(bot: Bot, message: dict, starboard: dict) -> int:
_reactions = await bot.db.fetch(
"""SELECT * FROM reactions
WHERE message_id=$1""",
message["id"],
)
reaction_users = await bot.db.fetch(
"""SELECT * FROM reaction_users
WHERE reaction_id=any($1::BIGINT[])""",
[r["id"] for r in _reactions],
)
reactions = {}
for r in _reactions:
reactions[int(r["id"])] = r["emoji"]
used_users = set()
points = 0
for r in reaction_users:
if r["user_id"] in used_users:
continue
if reactions[int(r["reaction_id"])] not in starboard["star_emojis"]:
continue
if starboard["self_star"] is False:
if r["user_id"] == message["author_id"]:
continue
used_users.add(r["user_id"])
points += 1
return points
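# calculate_points example: three distinct users react with a configured star
# emoji; if one of them is the author and self_star is disabled, points == 2.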
async def handle_trashed_message(
bot: Bot, sql_starboard: dict, sql_message: dict, sql_author: dict
) -> None:
sql_starboard_message = await bot.db.fetchrow(
"""SELECT * FROM starboard_messages
WHERE orig_id=$1 AND starboard_id=$2""",
sql_message["id"],
sql_starboard["id"],
)
if not sql_starboard_message:
return
starboard_message = await bot.cache.fetch_message(
sql_message["guild_id"],
sql_starboard_message["starboard_id"],
sql_starboard_message["id"],
)
if starboard_message is None:
return
embed = discord.Embed(
title=t_("Trashed Message"),
description=t_(
"This message was trashed by a moderator. To untrash it, "
"run ```\nuntrash {0}-{1}```\nReason:```\n{2}```"
).format(
sql_message["channel_id"],
sql_message["id"],
utils.escmd(sql_message["trash_reason"]),
),
)
try:
await starboard_message.edit(embed=embed)
except discord.errors.NotFound:
pass
def try_regex(
bot: Bot, pattern: str, message: discord.Message
) -> Optional[bool]:
string = message.system_content
jump = message.jump_url
try:
if utils.safe_regex(string, pattern):
return True
except TimeoutError:
bot.dispatch(
"guild_log",
t_(
"I tried to match `{0}` to "
"[a message]({1}), but it took too long. "
"Try improving the efficiency of your regex. If "
"you need help, feel free to join the support server."
).format(pattern, jump),
"error",
message.guild,
)
return None
return False
async def handle_starboard(
bot: Bot, sql_starboard: dict, sql_message: dict, sql_author: dict
) -> None:
guild = bot.get_guild(sql_starboard["guild_id"])
starboard: discord.TextChannel = guild.get_channel(
int(sql_starboard["id"])
)
sql_starboard_message = await bot.db.fetchrow(
"""SELECT * FROM starboard_messages
WHERE orig_id=$1 AND starboard_id=$2""",
sql_message["id"],
sql_starboard["id"],
)
if not sql_message["frozen"] or sql_starboard_message is None:
points = await calculate_points(bot, sql_message, sql_starboard)
else:
points = sql_starboard_message["points"]
if sql_starboard_message is not None:
await set_points(bot, points, sql_starboard_message["id"])
message = await bot.cache.fetch_message(
int(sql_message["guild_id"]),
int(sql_message["channel_id"]),
int(sql_message["id"]),
)
blacklisted = sql_message["channel_id"] in sql_starboard["channel_bl"]
whitelisted = sql_message["channel_id"] in sql_starboard["channel_wl"]
if whitelisted:
blacklisted = False
add = False
edit = sql_starboard["link_edits"]
delete = False
if points >= sql_starboard["required"]:
add = True
elif points <= sql_starboard["required_remove"]:
delete = True
if (not sql_starboard["allow_bots"]) and sql_author["is_bot"]:
delete = True
add = False
if sql_starboard["link_deletes"] and (message is None):
delete = True
add = False
if blacklisted:
add = False
delete = True
if sql_message["is_nsfw"] and not starboard.is_nsfw() and not whitelisted:
add = False
delete = True
if message is not None:
if sql_starboard["regex"] != "":
if try_regex(bot, sql_starboard["regex"], message) is False:
add = False
delete = True
if sql_starboard["exclude_regex"] != "":
if try_regex(bot, sql_starboard["exclude_regex"], message) is True:
add = False
delete = True
if sql_message["frozen"]:
add = False
delete = False
if sql_starboard["id"] in sql_message["forced"]:
add = True
delete = False
if sql_starboard_message is not None:
starboard_message = await bot.cache.fetch_message(
int(sql_message["guild_id"]),
int(sql_starboard_message["starboard_id"]),
int(sql_starboard_message["id"]),
)
if starboard_message is None:
await bot.db.sb_messages.delete(sql_starboard_message["id"])
sql_starboard_message = None
else:
starboard_message = None
if delete and starboard_message is not None:
await bot.db.sb_messages.delete(starboard_message.id)
try:
await starboard_message.delete()
except discord.errors.NotFound:
pass
elif not delete:
guild = bot.get_guild(int(sql_message["guild_id"]))
plain_text = get_plain_text(sql_starboard, sql_message, points, guild)
if starboard_message is None and add and message:
embed, attachments = await embed_message(
bot, message, color=sql_starboard["color"]
)
# starboard = guild.get_channel(int(sql_starboard["id"]))
try:
m = await starboard.send(
plain_text,
embed=embed,
files=attachments,
allowed_mentions=discord.AllowedMentions(users=True),
)
except discord.Forbidden:
bot.dispatch(
"guild_log",
t_(
"I tried to send a starboard message to "
"{0}, but I'm missing the "
"proper permissions. Please make sure I have "
"the `Send Messages` permission."
).format(starboard.mention),
"error",
guild,
)
return
await bot.db.sb_messages.create(
m.id, message.id, sql_starboard["id"]
)
await set_points(bot, points, m.id)
if sql_starboard["autoreact"] is True:
for emoji in sql_starboard["star_emojis"]:
try:
emoji_id = int(emoji)
except ValueError:
emoji_id = None
if emoji_id:
emoji = discord.utils.get(guild.emojis, id=emoji_id)
try:
await m.add_reaction(emoji)
except discord.Forbidden:
bot.dispatch(
"guild_log",
t_(
"I tried to autoreact to a message on the "
"starboard, but I'm missing the proper "
"permissions. If you don't want me to "
"autoreact to messages, set the AutoReact "
"setting to False with `starboards cs "
"#{0} --autoReact False`."
).format(starboard.name),
"error",
guild,
)
elif starboard_message is not None and message:
try:
if edit:
embed, _ = await embed_message(
bot, message, color=sql_starboard["color"], files=False
)
await starboard_message.edit(
content=plain_text, embed=embed
)
else:
await starboard_message.edit(content=plain_text)
except discord.errors.NotFound:
pass
elif starboard_message is not None:
try:
await starboard_message.edit(content=plain_text)
except discord.errors.NotFound:
pass
|
py | 7df97c9b7eb799b2746ad20a9dec0455f7a73b3d | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-27 18:24
from __future__ import unicode_literals
import django.contrib.auth.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0023_checkin_app_version'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.ASCIIUsernameValidator()], verbose_name='username'),
),
]
|
py | 7df97d0554f74b7e1de7978d608ad3c14b246fbb | import tkinter
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
import nltk
from nltk.corpus import stopwords
from nltk.classify import SklearnClassifier
from wordcloud import WordCloud, STOPWORDS
import matplotlib.pyplot as plt
data = pd.read_csv('sentiment.csv')
# Keeping only the necessary columns
data = data[['text','sentiment']]
data.head()
data.describe()
data.sentiment.unique()
data['sentiment'].value_counts()
# Splitting the dataset into train and test set
train, test = train_test_split(data,test_size = 0.2)
# Removing neutral sentiments
train = train[train.sentiment != "Neutral"]
train.describe()
train.sentiment.value_counts()
test.describe()
test.sentiment.value_counts()
train.sentiment.unique()
test.sentiment.unique()
train_pos = train[ train['sentiment'] == 'Positive']
train_pos = train_pos['text']
train_neg = train[ train['sentiment'] == 'Negative']
train_neg = train_neg['text']
def wordcloud_draw(data, color = 'black'):
words = ' '.join(data)
cleaned_word = " ".join([word for word in words.split()
if 'http' not in word
and not word.startswith('@')
and not word.startswith('#')
and word != 'RT'
])
wordcloud = WordCloud(stopwords=STOPWORDS,
background_color=color,
width=2500,
height=2000
).generate(cleaned_word)
plt.figure(1,figsize=(13, 13))
plt.imshow(wordcloud)
plt.axis('off')
plt.show()
print("Positive words")
wordcloud_draw(train_pos,'white')
print("Negative words")
wordcloud_draw(train_neg)
tweets = []
stopwords_set = set(stopwords.words("english"))
for index, row in train.iterrows():
words_filtered = [e.lower() for e in row.text.split() if len(e) >= 3]
words_cleaned = [word for word in words_filtered
if 'http' not in word
and not word.startswith('@')
and not word.startswith('#')
and word != 'RT']
    words_without_stopwords = [word for word in words_cleaned if word not in stopwords_set]
tweets.append((words_without_stopwords, row.sentiment))
test_pos = test[ test['sentiment'] == 'Positive']
test_pos = test_pos['text']
test_neg = test[ test['sentiment'] == 'Negative']
test_neg = test_neg['text']
# Extracting word features
def get_words_in_tweets(tweets):
    all_words = []
    for (words, sentiment) in tweets:
        all_words.extend(words)
    return all_words
def get_word_features(wordlist):
wordlist = nltk.FreqDist(wordlist)
features = wordlist.keys()
return features
w_features = get_word_features(get_words_in_tweets(tweets))
def extract_features(document):
document_words = set(document)
features = {}
for word in w_features:
features['contains(%s)' % word] = (word in document_words)
return features
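# extract_features example: with w_features == ["great", "bad"],
# extract_features(["great", "day"]) -> {"contains(great)": True, "contains(bad)": False}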
wordcloud_draw(w_features)
# Training the Naive Bayes classifier
training_set = nltk.classify.apply_features(extract_features,tweets)
classifier = nltk.NaiveBayesClassifier.train(training_set)
neg_cnt = 0
pos_cnt = 0
for obj in test_neg:
res = classifier.classify(extract_features(obj.split()))
if(res == 'Negative'):
neg_cnt = neg_cnt + 1
for obj in test_pos:
res = classifier.classify(extract_features(obj.split()))
if(res == 'Positive'):
pos_cnt = pos_cnt + 1
print('[Negative]: %s/%s ' % (len(test_neg),neg_cnt))
print('[Positive]: %s/%s ' % (len(test_pos),pos_cnt))
# [Negative]: 1714/1617
# [Positive]: 431/155
|
py | 7df97dc6dda557102a1b1cd8e7b68a0ef5963b7d | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from x2paddle.decoder.tf_decoder import TFGraph, TFGraphNode
from x2paddle.core.program import PaddleGraph
from x2paddle.core.op_mapper import OpMapper
from x2paddle.core.util import *
from x2paddle import program
import traceback
import math
import inspect
import numpy
import sys
name_counter = dict()
def gen_name(op_name, var_name):
name = "{}_{}".format(op_name, var_name)
if name not in name_counter:
name_counter[name] = 0
else:
name_counter[name] += 1
name = name + '_' + str(name_counter[name])
return name
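# Example: gen_name("conv2d", "transpose") returns "conv2d_transpose" on the
# first call and "conv2d_transpose_1" on the second, keeping names unique.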
# compute padding size for SAME mode
def get_same_padding(in_size, kernel_size, stride):
new_size = int(math.ceil(in_size * 1.0 / stride))
pad_size = (new_size - 1) * stride + kernel_size - in_size
if pad_size < 0:
pad_size = 0
pad0 = int(pad_size / 2)
pad1 = pad_size - pad0
return [pad0, pad1]
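# Worked example: in_size=7, kernel_size=3, stride=2 ->
#   new_size = ceil(7/2) = 4, pad_size = (4-1)*2 + 3 - 7 = 2, so [1, 1]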
class TFOpMapper(OpMapper):
directly_map_ops = {
'Relu': ['paddle.nn.functional.relu'],
'Relu6': ['paddle.nn.functional.relu6'],
'Abs': ['paddle.abs'],
'Sigmoid': ['paddle.nn.functional.sigmoid'],
'Softmax': ['paddle.nn.functional.softmax'],
'Exp': ['paddle.exp'],
'Rsqrt': ['paddle.rsqrt'],
'Sqrt': ['paddle.sqrt'],
'swish_f32': ['paddle.nn.functional.swish'],
'Tanh': ['paddle.tanh'],
'Softplus': ['paddle.nn.functional.softplus'],
'LeakyRelu': ['paddle.nn.functional.leaky_relu',
dict(alpha='negative_slope')],
'Floor': ['paddle.floor'],
'Erf': ['paddle.erf'],
'Square': ['paddle.square']
}
elementwise_ops = {
'Add': 'paddle.add',
'AddV2': 'paddle.add',
'RealDiv': 'paddle.divide',
'DivNoNan': 'paddle.divide',
# TODO (syf): replace
'Sub': 'paddle.subtract',
'Maximum': 'paddle.maximum',
'Minimum': 'paddle.minimum',
'Mul': 'paddle.multiply',
'FloorDiv': 'paddle.floor_divide',
'FloorMod': 'paddle.floor_mod',
'LogicalAnd': 'logical_and',
}
bool_ops = {
'LessEqual': 'paddle.less_equal',
'GreaterEqual': 'paddle.greater_equal',
'Greater': 'paddle.greater_than',
'NotEqual': 'paddle.not_equal',
'Equal': 'paddle.equal',
}
def __init__(self, decoder):
super(TFOpMapper, self).__init__()
self.decoder = decoder
self.graph = decoder.tf_graph
if not self.op_checker():
raise Exception("Model is not supported yet.")
self.params = dict()
self.paddle_graph = PaddleGraph(parent_layer=None, graph_type="static", source_type="tf")
self.params_output2id = dict()
not_placeholder = list()
for name in self.graph.input_nodes:
if self.graph.get_node(
name).layer_type != "Placeholder" and self.graph.get_node(
name
).layer_type != "OneShotIterator" and self.graph.get_node(
name).layer_type != "IteratorV2":
not_placeholder.append(name)
for name in not_placeholder:
idx = self.graph.input_nodes.index(name)
del self.graph.input_nodes[idx]
self.paddle_graph.inputs = self.graph.input_nodes
self.paddle_graph.outputs = self.graph.output_nodes
print("Total nodes: {}".format(
sum([
isinstance(node, TFGraphNode)
for name, node in self.graph.node_map.items()
])))
print("Nodes converting ...")
for i, node_name in enumerate(self.graph.topo_sort):
sys.stderr.write("\rConverting node {} ... ".format(i + 1))
node = self.graph.get_node(node_name)
op = node.layer_type
if op in self.directly_map_ops:
self.directly_map(node)
elif op in self.elementwise_ops:
self.elementwise_map(node)
elif op in self.bool_ops:
self.bool_map(node)
elif hasattr(self, op):
func = getattr(self, op)
func(node)
print("\nNodes converted.")
self.paddle_graph.set_name(self.graph.graph_name)
self.paddle_graph.set_parameters(self.params)
def op_checker(self):
unsupported_ops = set()
for node_name in self.graph.topo_sort:
node = self.graph.get_node(node_name)
op = node.layer_type
if not hasattr(self, op) and \
op not in self.directly_map_ops and \
op not in self.elementwise_ops and \
op not in self.bool_ops:
unsupported_ops.add(op)
if len(unsupported_ops) == 0:
return True
else:
if len(unsupported_ops) > 0:
print("\n========= {} OPs are not supported yet ===========".format(
len(unsupported_ops)))
for op in unsupported_ops:
print("========== {} ============".format(op))
return False
def directly_map(self, node):
assert node.layer_type in self.directly_map_ops
op_info = self.directly_map_ops[node.layer_type]
input = self.graph.get_node(node.layer.input[0])
attr = dict()
for param in op_info[1:]:
tf_param_name = list(param.keys())[0]
pd_param_name = list(param.values())[0]
tf_param = node.get_attr(tf_param_name)
attr[pd_param_name] = tf_param
self.paddle_graph.add_layer(
kernel=op_info[0],
inputs={"x": input.name},
outputs=[node.name],
**attr)
def elementwise_map(self, node, op_type=None):
if op_type is None:
assert node.layer_type in self.elementwise_ops
op_type = self.elementwise_ops[node.layer_type]
x = self.graph.get_node(node.layer.input[0])
y = self.graph.get_node(node.layer.input[1])
x_shape = x.out_shapes[0]
y_shape = y.out_shapes[0]
layer_id = self.paddle_graph.add_layer(
kernel=op_type,
inputs={"x": x.name,
"y": y.name},
outputs=[node.name])
self.paddle_graph.layers[layer_id].input_shapes = {"x": x_shape, "y": y_shape}
def bool_map(self, node):
op_type = self.bool_ops[node.layer_type]
self.elementwise_map(node, op_type)
node.set_dtype("bool")
def Placeholder(self, node):
shape = node.out_shapes[0]
assert len(shape) != 0, "Unknown shape of input nodes[{}].".format(
node.layer_name)
dtype = node.dtype
self.paddle_graph.add_layer(
kernel="paddle.static.data",
inputs={},
outputs=[node.name],
dtype=string(dtype),
shape=shape,
name=string(node.name))
def Const(self, node):
shape = node.out_shapes[0]
dtype = node.dtype
value = node.value
if len(shape) == 0:
            assert value.size == 1, "Unexpected situation happened"
shape = [1]
if value == float('inf'):
value = "float('inf')"
self.paddle_graph.add_layer(
kernel="paddle.full",
inputs={},
outputs=[node.name],
dtype=string(dtype),
shape=[1],
fill_value=value)
return
self.params[node.name] = node.value
layer_id = self.paddle_graph.add_layer(
kernel="paddle.static.create_parameter",
inputs={},
outputs=[node.name],
dtype=string(dtype),
shape=shape,
name=string(node.name),
default_initializer="paddle.nn.initializer.Constant(value=0.0)")
self.params_output2id[node.name] = layer_id
def Transpose(self, node):
input = self.graph.get_node(node.layer.input[0])
perm = self.graph.get_node(node.layer.input[1])
assert perm.layer_type == "Const", "Perm of transpose OP should be Const"
perm = perm.value.tolist()
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": input.name},
outputs=[node.name],
perm=perm)
def Fill(self, node):
dims = self.graph.get_node(node.layer.input[0])
input_value = self.graph.get_node(node.layer.input[1])
inputs = dict()
attr = dict()
assert input_value.layer_type == "Const", "Value of fill OP should be Const"
if dims.layer_type == "Const":
attr["shape"] = dims.value.tolist()
else:
inputs["shape"] = dims.name
attr["dtype"] = string(input_value.dtype)
attr["fill_value"] = input_value.value
self.paddle_graph.add_layer(
"paddle.full",
inputs=inputs,
outputs=[node.name],
**attr)
if dims.layer_type != "Const":
self.paddle_graph.add_layer(
"paddle.reshape",
inputs={"x": node.name},
outputs=[node.name],
shape=node.out_shapes[0])
def DepthToSpace(self, node):
input = self.graph.get_node(node.layer.input[0])
block_size = node.get_attr("block_size")
data_format = node.get_attr("data_format").decode()
if data_format == "NHWC":
n, h, w, c = input.out_shapes[0]
else:
n, c, h, w = input.out_shapes[0]
input_name = input.name
if data_format == "NHWC":
transpose_name = gen_name("depth_to_space", "transpose")
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": input.name},
outputs=[transpose_name],
perm=[0, 3, 1, 2])
input_name = transpose_name
shape = [0, block_size * block_size, -1, h, w]
reshape_name = gen_name("depth_to_space", "reshape")
self.paddle_graph.add_layer(
kernel="paddle.reshape",
inputs={"x": input_name},
outputs=[reshape_name],
shape=shape)
transpose_name = gen_name("depth_to_space", "transpose")
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": reshape_name},
outputs=[transpose_name],
perm=[0, 2, 1, 3, 4])
reshape_name = gen_name("depth_to_space", "reshape")
self.paddle_graph.add_layer(
kernel="paddle.reshape",
inputs={"x": transpose_name},
outputs=[reshape_name],
shape=[0, c, h, w])
self.paddle_graph.add_layer(
kernel="paddle.nn.functional.pixel_shuffle",
inputs={"x": reshape_name},
outputs=[node.name],
upscale_factor=block_size)
if data_format == "NHWC":
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 2, 3, 1])
def Where(self, node):
if len(node.layer.input) == 1:
cond = self.graph.get_input_node(node, 0)
self.paddle_graph.add_layer(
"paddle.nonzero",
inputs={"x": cond.name},
outputs=[node.name])
else:
cond = self.graph.get_input_node(node, 0)
x = self.graph.get_input_node(node, 1)
y = self.graph.get_input_node(node, 2)
self.paddle_graph.add_layer(
"paddle.where",
inputs={"condition": cond.name,
"x": x.name,
"y": y.name},
outputs=[node.name])
def Neg(self, node):
input = self.graph.get_input_node(node, 0)
self.paddle_graph.add_layer(
"paddle.scale",
inputs={"x": input.name},
outputs=[node.name],
scale=-1)
def MaxPool(self, node):
input = self.graph.get_node(node.layer.input[0])
k_size = node.get_attr("ksize")
strides = node.get_attr("strides")
data_format = node.get_attr("data_format").decode()
pad_mode = node.get_attr("padding").decode()
input_name = input.name
if data_format == "NHWC":
transpose_name = gen_name("max_pool", "transpose")
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": input.name},
outputs=[transpose_name],
perm=[0, 3, 1, 2])
strides = [strides[i] for i in [0, 3, 1, 2]]
k_size = [k_size[i] for i in [0, 3, 1, 2]]
input_name = transpose_name
self.paddle_graph.add_layer(
kernel="paddle.nn.functional.max_pool2d",
inputs={"x": input_name},
outputs=[node.name],
kernel_size=k_size[2:4],
stride=strides[2:4],
padding=string(pad_mode))
if data_format == "NHWC":
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 2, 3, 1])
def Conv2D(self, node):
input = self.graph.get_node(node.layer.input[0])
kernel = self.graph.get_node(node.layer.input[1])
k_size = kernel.out_shapes[0]
strides = node.get_attr("strides")
dilations = node.get_attr("dilations")
data_format = node.get_attr("data_format").decode()
pad_mode = node.get_attr("padding").decode()
if data_format == "NHWC":
n, h, w, c = input.out_shapes[0]
else:
n, c, h, w = input.out_shapes[0]
if kernel.layer_type == 'Const':
kernel_value = kernel.value
kernel_weight_name = kernel.name.replace('/', '_')
else:
kernel_value = self.decoder.infer_tensor(kernel, use_diff_inputs=False)
if kernel.layer_type == 'Split':
kernel_weight_name = "{}_{}_kernel".format(node.name,
kernel.name)
else:
kernel_weight_name = kernel.name.replace('/', '_')
self.params[kernel_weight_name] = numpy.transpose(kernel_value,
(3, 2, 0, 1))
self.paddle_graph.add_layer(
kernel="paddle.static.nn.create_parameter",
inputs={},
outputs=[kernel_weight_name],
shape=self.params[kernel_weight_name].shape,
dtype=string(str(self.params[kernel_weight_name].dtype)),
name=string(kernel_weight_name))
input_name = input.name
if data_format == "NHWC":
strides = [strides[i] for i in [0, 3, 1, 2]]
dilations = [dilations[i] for i in [0, 3, 1, 2]]
transpose_name = gen_name("conv2d", "transpose")
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": input.name},
outputs=[transpose_name],
perm=[0, 3, 1, 2])
input_name = transpose_name
if c == -1:
attr = {"shape": [0, k_size[2], 0, 0]}
self.paddle_graph.add_layer(
kernel="paddle.reshape",
inputs={"x": input_name},
outputs=[input_name],
shape=[0, k_size[2], 0, 0])
self.paddle_graph.add_layer(
kernel="paddle.nn.functional.conv2d",
inputs={"x": input_name, "weight": kernel_weight_name},
outputs=[node.name],
bias=None,
stride=strides[2:4],
dilation=dilations[2:4],
padding=string(pad_mode))
if data_format == "NHWC":
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 2, 3, 1])
def Conv3D(self, node):
input = self.graph.get_input_node(node, 0)
kernel = self.graph.get_input_node(node, 1)
k_size = kernel.out_shapes[0]
strides = node.get_attr("strides")
dilations = node.get_attr("dilations")
data_format = node.get_attr("data_format").decode()
pad_mode = node.get_attr("padding").decode()
if data_format == "NDHWC":
n, d, h, w, c = input.out_shapes[0]
else:
n, c, d, h, w = input.out_shapes[0]
if kernel.layer_type == 'Const':
kernel_value = kernel.value
kernel_weight_name = kernel.name.replace('/', '_')
else:
kernel_value = self.decoder.infer_tensor(kernel, use_diff_inputs=False)
if kernel.layer_type == 'Split':
kernel_weight_name = "{}_{}_kernel".format(node.name,
kernel.name)
else:
kernel_weight_name = kernel.name.replace('/', '_')
self.params[kernel_weight_name] = numpy.transpose(kernel_value,
(4, 3, 0, 1, 2))
self.paddle_graph.add_layer(
kernel="paddle.static.nn.create_parameter",
inputs={},
outputs=[kernel_weight_name],
shape=self.params[kernel_weight_name].shape,
dtype=string(str(self.params[kernel_weight_name].dtype)),
name=string(kernel_weight_name))
input_name = input.name
if data_format == "NDHWC":
strides = [strides[i] for i in [0, 4, 1, 2, 3]]
dilations = [dilations[i] for i in [0, 4, 1, 2, 3]]
transpose_name = gen_name("conv3d", "transpose")
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": input.name},
outputs=[transpose_name],
perm=[0, 4, 1, 2, 3])
input_name = transpose_name
if c == -1:
attr = {"shape": [0, k_size[2], 0, 0, 0]}
self.paddle_graph.add_layer(
kernel="paddle.reshape",
inputs={"x": input_name},
outputs=[input_name],
shape=[0, k_size[2], 0, 0, 0])
self.paddle_graph.add_layer(
kernel="paddle.nn.functional.conv3d",
inputs={"x": input_name, "weight": kernel_weight_name},
outputs=[node.name],
bias=None,
stride=strides[2:5],
dilation=dilations[2:5],
padding=string(pad_mode))
if data_format == "NDHWC":
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 2, 3, 4, 1])
def BiasAdd(self, node):
input = self.graph.get_node(node.layer.input[0])
bias = self.graph.get_node(node.layer.input[1])
self.paddle_graph.add_layer(
kernel="paddle.add",
inputs={"x": input.name,
"y": bias.name},
outputs=[node.name])
def FusedBatchNorm(self, node):
input = self.graph.get_node(node.layer.input[0])
gamma = self.graph.get_node(node.layer.input[1])
beta = self.graph.get_node(node.layer.input[2])
moving_mean = self.graph.get_node(node.layer.input[3])
moving_var = self.graph.get_node(node.layer.input[4])
data_format = node.get_attr("data_format").decode()
assert gamma.layer_type == "Const"
assert beta.layer_type == "Const"
assert moving_mean.layer_type == "Const"
assert moving_var.layer_type == "Const"
input_name = input.name
if data_format == "NHWC":
transpose_name = gen_name("batch_norm", "transpose")
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": input.name},
outputs=[transpose_name],
perm=[0, 3, 1, 2])
input_name = transpose_name
self.paddle_graph.add_layer(
kernel="paddle.nn.functional.batch_norm",
inputs={"x": input_name,
"running_mean": moving_mean.name,
"running_var": moving_var.name,
"weight": gamma.name,
"bias": beta.name},
outputs=[node.name],
epsilon=node.get_attr("epsilon"))
if data_format == "NHWC":
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 2, 3, 1])
def FusedBatchNormV3(self, node):
self.FusedBatchNorm(node)
def Mean(self, node):
input = self.graph.get_node(node.layer.input[0])
reduce_idx = self.graph.get_node(node.layer.input[1])
        assert reduce_idx.layer_type == "Const", "Only a Const parameter [reduce_idx] is supported"
dims = reduce_idx.value.tolist()
keep_dims = node.get_attr("keep_dims")
self.paddle_graph.add_layer(
kernel="paddle.mean",
inputs={"x": input.name},
outputs=[node.name],
axis=dims,
keepdim=keep_dims)
def Reshape(self, node):
input = self.graph.get_input_node(node, 0)
param = self.graph.get_input_node(node, 1)
input_name = input.name
if param.layer_type == "Const":
shape = param.value.tolist()
self.paddle_graph.add_layer(
kernel="paddle.reshape",
inputs={"x": input_name},
outputs=[node.name],
shape=shape)
else:
self.paddle_graph.add_layer(
kernel="paddle.reshape",
inputs={"x": input_name,
"shape": param.name},
outputs=[node.name])
if param.layer_type != "Const":
out_shape = numpy.array(node.out_shapes[0])
if (out_shape > 0).any():
out_shape[out_shape < 0] = 0
self.paddle_graph.add_layer(
kernel="paddle.reshape",
inputs={"x": node.name},
outputs=[node.name],
shape=out_shape.tolist())
def Pad(self, node):
input = self.graph.get_input_node(node, 0)
paddings = self.graph.get_input_node(node, 1)
assert paddings.layer_type == "Const", "Padding should be Const"
paddings = paddings.value.flatten().tolist()
self.paddle_graph.add_layer(
kernel="paddle.nn.functional.pad",
inputs={"x": input.name},
outputs=[node.name],
pad=paddings)
def MirrorPad(self, node):
self.Pad(node)
def PadV2(self, node):
self.Pad(node)
def Squeeze(self, node):
input = self.graph.get_input_node(node, 0)
squeeze_dims = node.get_attr('squeeze_dims')
self.paddle_graph.add_layer(
kernel="paddle.squeeze",
inputs={"x": input.name},
outputs=[node.name],
axis=squeeze_dims)
def Shape(self, node):
input = self.graph.get_input_node(node, 0)
input_name = input.name
self.paddle_graph.add_layer(
kernel="paddle.shape",
inputs={"input": input_name},
outputs=[node.name])
def Size(self, node):
input = self.graph.get_input_node(node, 0)
input_name = input.name
self.paddle_graph.add_layer(
kernel="paddle.shape",
inputs={"input": input_name},
outputs=[node.name])
self.paddle_graph.add_layer(
kernel="paddle.prod",
inputs={"x": node.name},
outputs=[node.name])
def Ceil(self, node):
input = self.graph.get_input_node(node, 0)
self.paddle_graph.add_layer(
kernel="paddle.ceil",
inputs={"x": input.name},
outputs=[node.name])
def ArgMax(self, node):
input = self.graph.get_input_node(node, 0)
axis = self.graph.get_input_node(node, 1)
        assert axis.layer_type == "Const", "ArgMax only supports a Const parameter"
axis = axis.value
self.paddle_graph.add_layer(
kernel="paddle.argmax",
inputs={"x": input.name},
outputs=[node.name],
axis=axis)
def TopKV2(self, node):
input = self.graph.get_input_node(node, 0)
k = self.graph.get_input_node(node, 1)
        assert k.layer_type == "Const", "TopKV2 only supports a Const parameter"
k = k.value
sort = node.get_attr('sorted')
self.paddle_graph.add_layer(
kernel="paddle.topk",
inputs={"x": input.name},
outputs=[node.name],
k=k,
sorted=sort)
def MatMul(self, node):
x = self.graph.get_input_node(node, 0)
y = self.graph.get_input_node(node, 1)
transpose_a = node.get_attr('transpose_a')
transpose_b = node.get_attr('transpose_b')
if transpose_a is None:
transpose_a = node.get_attr('adj_x')
if transpose_b is None:
transpose_b = node.get_attr('adj_y')
self.paddle_graph.add_layer(
kernel="paddle.matmul",
inputs={"x": x.name,
"y": y.name},
outputs=[node.name],
transpose_x=transpose_a,
transpose_y=transpose_b)
def BatchMatMul(self, node):
return self.MatMul(node)
def BatchMatMulV2(self, node):
return self.MatMul(node)
def DepthwiseConv2dNative(self, node):
input = self.graph.get_node(node.layer.input[0])
kernel = self.graph.get_node(node.layer.input[1])
assert kernel.layer_type == "Const", "Kernel of DepthwiseConv2DNative should be Const"
in_shape = input.out_shapes[0]
k_size = kernel.out_shapes[0]
strides = node.get_attr("strides")
dilations = node.get_attr("dilations")
data_format = node.get_attr("data_format").decode()
pad_mode = node.get_attr("padding").decode()
if len(kernel.outputs) == 1:
self.params[kernel.name] = numpy.transpose(self.params[kernel.name],
(2, 3, 0, 1))
layer = self.paddle_graph.layers[self.params_output2id[kernel.name]]
layer.attrs["shape"] = self.params[kernel.name].shape
else:
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": kernel.name},
outputs=[kernel.name],
perm=[2, 3, 0, 1])
input_name = input.name
if data_format == "NHWC":
in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
strides = [strides[i] for i in [0, 3, 1, 2]]
dilations = [dilations[i] for i in [0, 3, 1, 2]]
transpose_name = gen_name('depthwise_conv2d', 'transpose')
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": input.name},
outputs=[transpose_name],
perm=[0, 3, 1, 2])
input_name = transpose_name
self.paddle_graph.add_layer(
kernel="paddle.nn.functional.conv2d",
inputs={"x": input_name,
"weight": kernel.name},
outputs=[node.name],
stride=strides[2:4],
dilation=dilations[2:4],
groups=k_size[3] * in_shape[1],
padding=string(pad_mode),
bias=None)
if data_format == "NHWC":
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 2, 3, 1])
def AvgPool(self, node):
input = self.graph.get_input_node(node, 0)
k_size = node.get_attr("ksize")
strides = node.get_attr("strides")
data_format = node.get_attr("data_format").decode()
pad_mode = node.get_attr("padding").decode()
input_name = input.name
if data_format == "NHWC":
transpose_name = gen_name("avg_pool", "transpose")
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": input.name},
outputs=[transpose_name],
perm=[0, 3, 1, 2])
strides = [strides[i] for i in [0, 3, 1, 2]]
k_size = [k_size[i] for i in [0, 3, 1, 2]]
input_name = transpose_name
        # TODO(syf): this op's output differs slightly from TF's AvgPool.
self.paddle_graph.add_layer(
kernel="paddle.nn.functional.avg_pool2d",
inputs={"x": input_name},
outputs=[node.name],
kernel_size=k_size[2:4],
stride=strides[2:4],
padding=string(pad_mode))
if data_format == "NHWC":
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 2, 3, 1])
def Pack(self, node):
inputs_list = list()
for i in range(len(node.inputs)):
inputs_list.append(self.graph.get_input_node(node, i))
input_names = [i.name for i in inputs_list]
axis = node.get_attr("axis")
self.paddle_graph.add_layer(
kernel="paddle.stack",
inputs={"x": input_names},
outputs=[node.name],
axis=axis)
if len(node.out_shapes[0]) == 1:
self.paddle_graph.add_layer(
kernel="paddle.reshape",
inputs={"x": node.name},
outputs=[node.name],
shape=[-1])
def Unpack(self, node):
input = self.graph.get_input_node(node, 0)
axis = node.get_attr("axis")
num = node.get_attr("num")
shape = input.out_shapes[0]
input_name = input.name
if len(shape) == 1:
if shape[0] > 0 and num == shape[0]:
self.paddle_graph.add_layer(
kernel="paddle.unsqueeze",
inputs={"x": input.name},
outputs=[node.name],
axis=[0])
input_name = node.name
axis = 1
else:
raise Exception("Unexpected situation happend in Unpack OP")
layer_outputs = ["{}_p{}".format(node.layer_name, i) for i in range(num)]
if len(layer_outputs) == 1:
layer_outputs[0] = "[{}]".format(node.layer_name)
self.paddle_graph.add_layer(
kernel="paddle.unstack",
inputs={"x": input_name},
outputs=layer_outputs,
axis=axis,
num=num)
def ConcatV2(self, node):
inputs_list = list()
for i in range(len(node.inputs) - 1):
inputs_list.append(self.graph.get_input_node(node, i))
axis = self.graph.get_input_node(node, -1)
assert axis.layer_type == "Const", "axis for ConcatV2 must be type Const"
axis = axis.value
if axis < 0:
axis += len(inputs_list[0].out_shapes[0])
input_names = [i.name for i in inputs_list]
self.paddle_graph.add_layer(
kernel="paddle.concat",
inputs={"x": input_names},
outputs=[node.name],
axis=axis)
def Concat(self, node):
inputs_list = list()
for i in range(1, len(node.inputs)):
inputs_list.append(self.graph.get_input_node(node, i))
axis = self.graph.get_input_node(node, 0)
        assert axis.layer_type == "Const", "axis for Concat must be type Const"
axis = axis.value
if axis < 0:
axis += len(inputs_list[0].out_shapes[0])
input_names = [i.name for i in inputs_list]
self.paddle_graph.add_layer(
kernel="paddle.concat",
inputs={"x": input_names},
outputs=[node.name],
axis=axis)
def AddN(self, node):
inputs_list = list()
for i in range(len(node.inputs) - 1):
inputs_list.append(self.graph.get_input_node(node, i))
input_names = [i.name for i in inputs_list]
self.paddle_graph.add_layer(
kernel="paddle.add_n",
inputs={"inputs": input_names},
outputs=[node.name])
def StridedSlice(self, node):
input = self.graph.get_input_node(node, 0)
begin = self.graph.get_input_node(node, 1)
end = self.graph.get_input_node(node, 2)
strides = self.graph.get_input_node(node, 3)
if strides.layer_type == "Const":
strides = strides.value.tolist()
else:
strides = self.decoder.infer_tensor(strides)
if begin.layer_type == "Const":
begin = begin.value.tolist()
else:
begin = self.decoder.infer_tensor(begin)
if end.layer_type == "Const":
end = end.value.tolist()
else:
end = self.decoder.infer_tensor(end)
        assert len(set(strides)) == 1 and strides[
            0] == 1, "Only strides of 1 are supported in StridedSlice OP"
if len(begin) < len(input.out_shapes[0]):
begin = begin + [0] * (len(input.out_shapes[0]) - len(begin))
if len(end) < len(input.out_shapes[0]):
end = end + [0] * (len(input.out_shapes[0]) - len(end))
for i in range(len(end)):
if end[i] == 0:
end[i] = 999999
begin_mask = node.get_attr('begin_mask')
end_mask = node.get_attr('end_mask')
ellipsis_mask = node.get_attr('ellipsis_mask')
new_axis_mask = node.get_attr('new_axis_mask')
shrink_axis_mask = node.get_attr('shrink_axis_mask')
        assert ellipsis_mask == 0, "(OP:{} Name:{}) Only ellipsis_mask of 0 is supported [now: {}] in StridedSlice OP".format(
            node.layer_type, node.layer.name, ellipsis_mask)
# TODO codes without validation
# Use it carefully
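        # Mask semantics (per TF's StridedSlice): bit i of begin_mask/end_mask
        # means "ignore begin[i]/end[i] and slice from the start/to the end of
        # dim i"; bit i of shrink_axis_mask drops dim i from the output, and
        # bit i of new_axis_mask inserts a length-1 dim at position i.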
new_begin = list()
new_end = list()
new_axes = list()
shrink_axes = list()
for i, item in enumerate(begin):
mask = (new_axis_mask >> i) & 1
if mask != 0:
new_axes.append(i)
continue
mask = (shrink_axis_mask >> i) & 1
if mask != 0:
shrink_axes.append(i)
mask = (begin_mask >> i) & 1
if mask != 0:
new_begin.append(0)
else:
new_begin.append(item)
mask = (end_mask >> i) & 1
if mask != 0:
new_end.append(999999)
else:
new_end.append(end[i])
if input.dtype == "bool":
self.paddle_graph.add_layer(
"paddle.cast",
inputs={"x": input.name},
outputs=[input.name],
dtype=string("int32"))
self.paddle_graph.add_layer(
kernel="paddle.slice",
inputs={"input": input.name},
outputs=[node.name],
axes=[i for i in range(len(new_begin))],
starts=new_begin,
ends=new_end)
if input.dtype == "bool":
self.paddle_graph.add_layer(
"paddle.cast",
inputs={"x": node.name},
outputs=[node.name],
dtype=string("bool"))
if len(new_axes) > 0:
self.paddle_graph.add_layer(
kernel="paddle.unsqueeze",
inputs={"x": node.name},
outputs=[node.name],
axis=new_axes)
if len(shrink_axes) > 0:
if len(input.out_shapes[0]) + len(new_axes) <= 1:
pass
else:
self.paddle_graph.add_layer(
kernel="paddle.squeeze",
inputs={"x": node.name},
outputs=[node.name],
axis=shrink_axes)
def Prod(self, node):
input = self.graph.get_input_node(node, 0)
reduction_indices = self.graph.get_input_node(node, 1)
assert reduction_indices.layer_type == "Const"
keep_dims = node.get_attr('keep_dims')
axis = reduction_indices.value
self.paddle_graph.add_layer(
kernel="paddle.prod",
inputs={"x": input.name},
outputs=[node.layer_name],
keepdim=keep_dims,
axis=axis)
def Split(self, node):
dim = self.graph.get_input_node(node, 0)
input = self.graph.get_input_node(node, 1)
assert dim.layer_type == "Const"
num_split = node.get_attr('num_split')
dim = dim.value
self.paddle_graph.add_layer(
kernel="paddle.split",
inputs={"x": input.name},
outputs=[
"{}_p{}".format(node.layer_name, i) for i in range(num_split)
],
num_or_sections=num_split,
axis=dim)
def SplitV(self, node):
input = self.graph.get_input_node(node, 0)
size_splits = self.graph.get_input_node(node, 1)
assert size_splits.layer_type == "Const", "size_splits of SplitV OP should be Const"
size_splits = size_splits.value.tolist()
dim = self.graph.get_input_node(node, 2)
assert dim.layer_type == "Const", "dim of SplitV OP should be Const"
dim = dim.value
self.paddle_graph.add_layer(
kernel="paddle.split",
inputs={"x": input.name},
outputs=[
"{}_p{}".format(node.layer_name, i) for i in range(len(size_splits))
],
num_or_sections=size_splits,
axis=dim)
def Slice(self, node):
input = self.graph.get_input_node(node, 0)
begin = self.graph.get_input_node(node, 1)
size = self.graph.get_input_node(node, 2)
inputs = {"x": input.name}
attrs = {}
if begin.layer_type == "Const":
begin = begin.value.tolist()
attrs['offsets'] = begin
else:
# shape = begin.out_shapes[0]
# reshape_name = gen_name("slice", "reshape")
# self.paddle_graph.add_layer(
# kernel="fluid.layers.reshape",
# inputs={"x": begin.name},
# outputs=[reshape_name],
# shape=shape)
# inputs['offsets'] = reshape_name
begin = self.decoder.infer_tensor(begin, use_diff_inputs=False).tolist()
attrs['offsets'] = begin
if size.layer_type == "Const":
size = size.value.tolist()
attrs['shape'] = size
else:
shape = size.out_shapes[0]
reshape_name = gen_name("slice", "reshape")
self.paddle_graph.add_layer(
kernel="paddle.reshape",
inputs={"x": size.name},
outputs=[reshape_name],
shape=shape)
inputs['shape'] = reshape_name
self.paddle_graph.add_layer(
kernel="paddle.crop",
inputs=inputs,
outputs=[node.name],
**attrs)
def ResizeNearestNeighbor(self, node):
input = self.graph.get_input_node(node, 0)
resize_shape = self.graph.get_input_node(node, 1)
data_format = "NHWC"
inputs = {"x": input.name}
attrs = {"align_corners": node.get_attr("align_corners"),
"mode": string("nearest"),
"align_mode": 1}
if resize_shape.layer_type == "Const":
resize_shape = resize_shape.value.tolist()
attrs["size"] = resize_shape
else:
shape = resize_shape.out_shapes[0]
reshape_name = gen_name("resize_nearest", "reshape")
self.paddle_graph.add_layer(
kernel="paddle.reshape",
inputs={"x": resize_shape.name},
outputs=[reshape_name],
shape=shape)
inputs["size"] = reshape_name
if data_format == "NHWC":
transpose_name = gen_name("resize_nearest", "reshape")
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": input.name},
outputs=[transpose_name],
perm=[0, 3, 1, 2])
inputs["x"] = transpose_name
self.paddle_graph.add_layer(
kernel="paddle.nn.functional.interpolate",
inputs=inputs,
outputs=[node.name],
**attrs)
if data_format == "NHWC":
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 2, 3, 1])
def ResizeBilinear(self, node):
input = self.graph.get_input_node(node, 0)
resize_shape = self.graph.get_input_node(node, 1)
data_format = "NHWC"
inputs = {"x": input.name}
attrs = {"align_corners": node.get_attr("align_corners"),
"mode": string("bilinear"),
"align_mode": 1}
if resize_shape.layer_type == "Const":
resize_shape = resize_shape.value.tolist()
attrs["size"] = resize_shape
else:
shape = resize_shape.out_shapes[0]
reshape_name = gen_name("resize_bilinear", "reshape")
self.paddle_graph.add_layer(
kernel="paddle.reshape",
inputs={"x": resize_shape.name},
outputs=[reshape_name],
shape=shape)
inputs["size"] = reshape_name
if data_format == "NHWC":
transpose_name = gen_name("resize_bilinear", "reshape")
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": input.name},
outputs=[transpose_name],
perm=[0, 3, 1, 2])
inputs["x"] = transpose_name
self.paddle_graph.add_layer(
kernel="paddle.nn.functional.interpolate",
inputs=inputs,
outputs=[node.name],
**attrs)
if data_format == "NHWC":
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 2, 3, 1])
def Cast(self, node):
input = self.graph.get_input_node(node, 0)
dtype = node.dtype
self.paddle_graph.add_layer(
kernel="paddle.cast",
inputs={"x": input.name},
outputs=[node.name],
dtype=string(dtype))
def Sum(self, node):
input = self.graph.get_input_node(node, 0)
reduce_idx = self.graph.get_input_node(node, 1)
assert reduce_idx.layer_type == "Const", "Only support Const parameter[reduce_idx]"
keep_dims = node.get_attr("keep_dims")
dim = reduce_idx.value.tolist()
self.paddle_graph.add_layer(
kernel="paddle.sum",
inputs={"x": input.name},
outputs=[node.name],
axis=dim,
keepdim=keep_dims)
def Max(self, node):
input = self.graph.get_input_node(node, 0)
reduce_idx = self.graph.get_input_node(node, 1)
assert reduce_idx.layer_type == "Const", "Only support Const parameter[reduce_idx]"
keep_dims = node.get_attr("keep_dims")
dim = reduce_idx.value.tolist()
self.paddle_graph.add_layer(
kernel="paddle.max",
inputs={"x": input.name},
outputs=[node.name],
axis=dim,
keepdim=keep_dims)
def RandomUniform(self, node):
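        # TF's RandomUniform samples from [0, 1); the upper bound below is
        # kept just under 1 so the converted op stays inside that range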
shape = self.graph.get_input_node(node, 0)
if shape.layer_type == "Const":
shape = shape.value.tolist()
self.paddle_graph.add_layer(
kernel="paddle.uniform",
inputs={},
outputs=[node.name],
shape=shape,
min=0.0,
max=0.9999)
else:
self.paddle_graph.add_layer(
kernel="paddle.uniform",
inputs={'shape': shape.name},
outputs=[node.name],
min=0.0,
max=0.9999)
def Conv2DBackpropInput(self, node):
out_shape = self.graph.get_input_node(node, 0)
kernel = self.graph.get_input_node(node, 1)
input = self.graph.get_input_node(node, 2)
assert kernel.layer_type == "Const", "Kernel of Conv2DBackpropInput should be Const"
if out_shape.layer_type == "Const":
out_shape = out_shape.value.tolist()
else:
out_shape = self.decoder.infer_tensor(out_shape,
out_shape=node.out_shapes[0])
in_shape = input.out_shapes[0]
if in_shape.count(-1) > 2:
in_shape = self.decoder.infer_tensor(input, use_diff_inputs=False).shape
k_size = kernel.out_shapes[0]
if k_size.count(-1) > 2:
k_size = self.decoder.infer_tensor(kernel, use_diff_inputs=False).shape
pad_mode = node.get_attr("padding").decode()
strides = node.get_attr("strides")
dilations = node.get_attr("dilations")
data_format = node.get_attr("data_format").decode()
kernel_name = node.name + ".weight"
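        # TF stores transposed-conv kernels as [H, W, C_out, C_in]; reorder
        # to Paddle's expected [C_in, C_out, H, W] weight layout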
self.params[kernel_name] = numpy.transpose(kernel.value, (3, 2, 0, 1))
input_name = input.name
if data_format == "NHWC":
in_shape = [in_shape[i] for i in [0, 3, 1, 2]]
strides = [strides[i] for i in [0, 3, 1, 2]]
dilations = [dilations[i] for i in [0, 3, 1, 2]]
transpose_name = gen_name("conv2dbackpropinput", "transpose")
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": input.name},
outputs=[transpose_name],
perm=[0, 3, 1, 2])
input_name = transpose_name
self.paddle_graph.add_layer(
kernel="paddle.static.create_parameter",
inputs={},
outputs=["{}_{}".format(node.name, kernel_name).replace(".", "_")],
dtype=string(str(self.params[kernel_name].dtype)),
shape=self.params[kernel_name].shape,
name=string(kernel_name))
self.paddle_graph.add_layer(
kernel="paddle.nn.functional.conv2d_transpose",
inputs={"x": input_name,
"weight": "{}_{}".format(node.name, kernel_name).replace(".", "_")},
outputs=[node.name],
bias=None,
stride=strides[2:4],
dilation=dilations[2:4],
padding=string(pad_mode),
output_size=out_shape[1:3])
if data_format == "NHWC":
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 2, 3, 1])
def Tile(self, node):
input = self.graph.get_node(node.layer.input[0])
repeat_times = self.graph.get_node(node.layer.input[1])
inputs = {"x": input.name}
attr = dict()
if repeat_times.layer_type == "Const":
repeat_times = repeat_times.value.tolist()
attr["repeat_times"] = repeat_times
else:
inputs["repeat_times"] = repeat_times.name
self.paddle_graph.add_layer(
kernel="paddle.tile",
inputs=inputs,
outputs=[node.name],
**attr)
if not isinstance(repeat_times, list) and repeat_times.layer_type != "Const":
self.paddle_graph.add_layer(
kernel="paddle.reshape",
inputs={"x": node.name},
outputs=[node.name],
shape=node.out_shapes[0])
def Range(self, node):
start = self.graph.get_node(node.layer.input[0])
limit = self.graph.get_node(node.layer.input[1])
delta = self.graph.get_node(node.layer.input[2])
inputs = dict()
attr = dict()
dtype = 'int32'
if start.dtype.startswith('float'):
dtype = start.dtype
if start.layer_type == "Const":
attr["start"] = start.value
else:
inputs["start"] = start.name
if limit.dtype.startswith('float'):
dtype = limit.dtype
if limit.layer_type == "Const":
attr["end"] = limit.value
else:
inputs["end"] = limit.name
if delta.dtype.startswith('float'):
dtype = delta.dtype
if delta.layer_type == "Const":
attr["step"] = delta.value
else:
inputs["step"] = delta.name
node.set_dtype(dtype)
attr["dtype"] = string(node.dtype)
self.paddle_graph.add_layer(
kernel="paddle.arange",
inputs=inputs,
outputs=[node.name],
**attr)
if start.layer_type != "Const" or \
limit.layer_type != "Const" or \
delta.layer_type != "Const":
self.paddle_graph.add_layer(
kernel="paddle.reshape",
inputs={"x": node.name},
outputs=[node.name],
shape=node.out_shapes[0])
def SquaredDifference(self, node):
x = self.graph.get_input_node(node, 0)
y = self.graph.get_input_node(node, 1)
inputs = {"x": x.name, "y": y.name}
x_shape = x.out_shapes[0]
y_shape = y.out_shapes[0]
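        # (x - y)^2 is lowered to paddle.subtract followed by multiplying
        # the difference with itself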
# TODO(syf)
layer_id = self.paddle_graph.add_layer(
"paddle.subtract", inputs=inputs, outputs=[node.name])
self.paddle_graph.layers[layer_id].input_shapes = {"x": x_shape, "y": y_shape}
inputs = {"x": node.name, "y": node.name}
x_shape = node.out_shapes[0]
y_shape = node.out_shapes[0]
layer_id = self.paddle_graph.add_layer(
"paddle.multiply", inputs=inputs, outputs=[node.name])
self.paddle_graph.layers[layer_id].input_shapes = {"x": x_shape, "y": y_shape}
def OneHot(self, node):
input = self.graph.get_input_node(node, 0)
depth = self.graph.get_input_node(node, 1)
on_value = self.graph.get_input_node(node, 2)
off_value = self.graph.get_input_node(node, 3)
assert depth.layer_type == 'Const', 'Parameter depth should be Const in OneHot'
assert on_value.layer_type == 'Const', 'Parameter on_value should be Const in OneHot'
assert off_value.layer_type == 'Const', 'Parameter off_value should be Const in OneHot'
attr = {'depth': depth.value}
on_value = on_value.value
off_value = off_value.value
assert math.fabs(on_value -
1.0) < 1e-06, "on_value should be 1 in OneHot"
assert math.fabs(off_value -
0.0) < 1e-06, "off_value should be 0 in OneHot"
self.paddle_graph.add_layer(
"paddle.nn.functional.one_hot",
inputs={"x": input.name},
outputs=[node.name],
num_classes=depth.value)
def Pow(self, node):
x = self.graph.get_input_node(node, 0)
factor = self.graph.get_input_node(node, 1)
inputs = {"x": x.name}
attr = dict()
if factor.layer_type == 'Const':
attr["y"] = factor.value.tolist()
else:
inputs["y"] = factor.name
self.paddle_graph.add_layer(
"paddle.pow", inputs=inputs, outputs=[node.name], **attr)
def All(self, node):
input = self.graph.get_input_node(node, 0)
reduce_idx = self.graph.get_input_node(node, 1)
assert reduce_idx.layer_type == "Const", "Only support Const parameter[reduce_idx]"
attr = dict()
attr["axis"] = reduce_idx.value.tolist()
attr["keepdim"] = node.get_attr("keep_dims")
input_name = input.name
if input.dtype != "bool":
input_name = gen_name("all", "cast")
self.paddle_graph.add_layer(
"paddle.cast",
inputs={"x": input.name},
outputs=[input_name],
dtype=string("bool"))
self.paddle_graph.add_layer(
"paddle.all",
inputs={"x": input_name},
outputs=[node.name],
**attr)
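        # mark the output dtype as boolean (10 == DT_BOOL in TF's DataType enum)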
node.layer.attr['dtype'].type = 10
def GatherV2(self, node):
embeddings = self.graph.get_input_node(node, 0)
index = self.graph.get_input_node(node, 1)
axis = self.graph.get_input_node(node, 2)
assert axis.layer_type == 'Const', "Only support Const parameter[axis]"
axis = axis.value
index_name = index.name
if len(index.out_shapes[0]) != 1:
reshape_name = gen_name("gather", "reshape")
index_name = reshape_name
self.paddle_graph.add_layer(
"paddle.reshape",
inputs={"x": index.name},
outputs=[reshape_name],
shape=[-1])
inputs = {'x': embeddings.name, 'index': index_name}
self.paddle_graph.add_layer(
"paddle.gather",
inputs=inputs,
outputs=[node.name],
axis=axis)
if len(index.out_shapes[0]) != 1:
out_shape = node.out_shapes[0]
self.paddle_graph.add_layer(
kernel="paddle.reshape",
inputs={"x": node.name},
outputs=[node.name],
shape=out_shape)
def GatherNd(self, node):
x = self.graph.get_input_node(node, 0)
index = self.graph.get_input_node(node, 1)
inputs = {'x': x.name, 'index': index.name}
self.paddle_graph.add_layer(
"paddle.gather_nd",
inputs=inputs,
outputs=[node.name])
def ExpandDims(self, node):
x = self.graph.get_input_node(node, 0, copy=True)
y = self.graph.get_input_node(node, 1, copy=True)
inputs = {"x": x.name}
attr = dict()
if y.layer_type == 'Const':
dim = y.value.tolist()
if not isinstance(dim, list):
dim = [dim]
attr['axis'] = dim
else:
inputs['axis'] = y.name
self.paddle_graph.add_layer(
"paddle.unsqueeze",
inputs=inputs,
outputs=[node.name],
**attr)
|
py | 7df97f078ad2c5d1b8b3a69957265365c4d7ac87 | from textwrap import dedent
import pytest
from pylox.lox import Lox
TEST_SRC = dedent(
"""\
var a = "abc123";
print len(a); // expect: 6
var arr = array(0);
arr.append(1);
arr.append(2);
arr.append(2);
arr.append(3);
print len(arr); // expect: 4
print len(42); // expect: Object of type 'int' has no length.
"""
)
EXPECTED_STDOUTS = ["6", "4", "12:13: LoxRuntimeError: Object of type 'int' has no length."]
def test_len_builtin(capsys: pytest.CaptureFixture) -> None:
interpreter = Lox()
interpreter.run(TEST_SRC)
assert interpreter.had_error
assert interpreter.had_runtime_error
all_out = capsys.readouterr().out.splitlines()
assert all_out == EXPECTED_STDOUTS
|
py | 7df97f9d3cf9f901f3d8dcafafa7e4864861a725 | from __future__ import absolute_import, division, print_function
import random
import pandas as pd
from torch.nn import MSELoss, CrossEntropyLoss
from torch.utils.data import (DataLoader, RandomSampler, SequentialSampler,
TensorDataset)
from tqdm import tqdm_notebook as tqdm
from tqdm import trange
from nltk.tokenize import sent_tokenize
from finbert.utils import *
import numpy as np
import logging
from transformers.optimization import AdamW, get_linear_schedule_with_warmup
from transformers import AutoTokenizer
logger = logging.getLogger(__name__)
class Config(object):
"""The configuration class for training."""
def __init__(self,
data_dir,
bert_model,
model_dir,
max_seq_length=64,
train_batch_size=32,
eval_batch_size=32,
learning_rate=5e-5,
num_train_epochs=10.0,
warm_up_proportion=0.1,
no_cuda=False,
do_lower_case=True,
seed=42,
local_rank=-1,
gradient_accumulation_steps=1,
fp16=False,
output_mode='classification',
discriminate=True,
gradual_unfreeze=True,
encoder_no=12,
base_model='bert-base-uncased'):
"""
Parameters
----------
data_dir: str
Path for the training and evaluation datasets.
bert_model: BertModel
The BERT model to be used. For example: BertForSequenceClassification.from_pretrained(...)
model_dir: str
The path where the resulting model will be saved.
max_seq_length: int
The maximum length of the sequence to be used. Default value is 64.
train_batch_size: int
The batch size for the training. Default value is 32.
eval_batch_size: int
The batch size for the evaluation. Default value is 32.
learning_rate: float
            The learning rate. Default value is 5e-5.
        num_train_epochs: float
            Number of epochs to train. Default value is 10.
warm_up_proportion: float
During the training, the learning rate is linearly increased. This value determines when the learning rate
reaches the intended learning rate. Default value is 0.1.
no_cuda: bool
Determines whether to use gpu. Default is False.
do_lower_case: bool
Determines whether to make all training and evaluation examples lower case. Default is True.
seed: int
Random seed. Defaults to 42.
local_rank: int
Used for number of gpu's that will be utilized. If set -1, no distributed training will be done. Default
value is -1.
gradient_accumulation_steps: int
Number of gradient accumulations steps. Defaults to 1.
fp16: bool
Determines whether to use 16 bits for floats, instead of 32.
output_mode: 'classification' or 'regression'
Determines whether the task is classification or regression.
discriminate: bool
Determines whether to apply discriminative fine-tuning.
gradual_unfreeze: bool
Determines whether to gradually unfreeze lower and lower layers as the training goes on.
encoder_no: int
Starting from which layer the model is going to be finetuned. If set 12, whole model is going to be
fine-tuned. If set, for example, 6, only the last 6 layers will be fine-tuned.
"""
self.data_dir = data_dir
self.bert_model = bert_model
self.model_dir = model_dir
self.do_lower_case = do_lower_case
self.max_seq_length = max_seq_length
self.train_batch_size = train_batch_size
self.local_rank = local_rank
self.eval_batch_size = eval_batch_size
self.learning_rate = learning_rate
self.num_train_epochs = num_train_epochs
self.warm_up_proportion = warm_up_proportion
self.no_cuda = no_cuda
self.seed = seed
self.gradient_accumulation_steps = gradient_accumulation_steps
self.output_mode = output_mode
self.fp16 = fp16
self.discriminate = discriminate
self.gradual_unfreeze = gradual_unfreeze
self.encoder_no = encoder_no
self.base_model = base_model
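# A minimal usage sketch (hypothetical paths, shown for illustration only):
#
#   config = Config(
#       data_dir='data/sentiment',
#       bert_model=BertForSequenceClassification.from_pretrained(
#           'bert-base-uncased', num_labels=3),
#       model_dir='models/finbert')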
class FinBert(object):
"""
The main class for FinBERT.
"""
def __init__(self,
config):
self.config = config
def prepare_model(self, label_list):
"""
Sets some of the components of the model: Dataset processor, number of labels, usage of gpu and distributed
training, gradient accumulation steps and tokenizer.
Parameters
----------
label_list: list
The list of labels values in the dataset. For example: ['positive','negative','neutral']
"""
self.processors = {
"finsent": FinSentProcessor
}
self.num_labels_task = {
'finsent': 2
}
if self.config.local_rank == -1 or self.config.no_cuda:
self.device = torch.device("cuda" if torch.cuda.is_available() and not self.config.no_cuda else "cpu")
self.n_gpu = torch.cuda.device_count()
else:
torch.cuda.set_device(self.config.local_rank)
self.device = torch.device("cuda", self.config.local_rank)
self.n_gpu = 1
            # Initializes the distributed backend which will take care of synchronizing nodes/GPUs
torch.distributed.init_process_group(backend='nccl')
logger.info("device: {} n_gpu: {}, distributed training: {}, 16-bits training: {}".format(
self.device, self.n_gpu, bool(self.config.local_rank != -1), self.config.fp16))
if self.config.gradient_accumulation_steps < 1:
raise ValueError("Invalid gradient_accumulation_steps parameter: {}, should be >= 1".format(
self.config.gradient_accumulation_steps))
self.config.train_batch_size = self.config.train_batch_size // self.config.gradient_accumulation_steps
random.seed(self.config.seed)
np.random.seed(self.config.seed)
torch.manual_seed(self.config.seed)
if self.n_gpu > 0:
torch.cuda.manual_seed_all(self.config.seed)
if os.path.exists(self.config.model_dir) and os.listdir(self.config.model_dir):
raise ValueError("Output directory ({}) already exists and is not empty.".format(self.config.model_dir))
if not os.path.exists(self.config.model_dir):
os.makedirs(self.config.model_dir)
self.processor = self.processors['finsent']()
self.num_labels = len(label_list)
self.label_list = label_list
        self.tokenizer = AutoTokenizer.from_pretrained(self.config.base_model, do_lower_case=self.config.do_lower_case)
def get_data(self, phase):
"""
Gets the data for training or evaluation. It returns the data in the format that pytorch will process. In the
data directory, there should be a .csv file with the name <phase>.csv
Parameters
----------
phase: str
Name of the dataset that will be used in that phase. For example if there is a 'train.csv' in the data
folder, it should be set to 'train'.
Returns
-------
examples: list
A list of InputExample's. Each InputExample is an object that includes the information for each example;
text, id, label...
"""
self.num_train_optimization_steps = None
examples = None
examples = self.processor.get_examples(self.config.data_dir, phase)
self.num_train_optimization_steps = int(
len(
examples) / self.config.train_batch_size / self.config.gradient_accumulation_steps) * self.config.num_train_epochs
if phase == 'train':
train = pd.read_csv(os.path.join(self.config.data_dir, 'train.csv'), sep='\t', index_col=False)
weights = list()
labels = self.label_list
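            # inverse-frequency class weights: rarer labels receive a
            # proportionally larger weight in the loss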
class_weights = [train.shape[0] / train[train.label == label].shape[0] for label in labels]
self.class_weights = torch.tensor(class_weights)
return examples
def create_the_model(self):
"""
Creates the model. Sets the model to be trained and the optimizer.
"""
model = self.config.bert_model
model.to(self.device)
# Prepare optimizer
no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']
lr = self.config.learning_rate
dft_rate = 1.2
if self.config.discriminate:
# apply the discriminative fine-tuning. discrimination rate is governed by dft_rate.
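            # encoder layer i (0 = lowest) trains with lr / dft_rate**(12 - i),
            # so lower layers get geometrically smaller learning rates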
encoder_params = []
for i in range(12):
encoder_decay = {
'params': [p for n, p in list(model.bert.encoder.layer[i].named_parameters()) if
not any(nd in n for nd in no_decay)],
'weight_decay': 0.01,
'lr': lr / (dft_rate ** (12 - i))}
encoder_nodecay = {
'params': [p for n, p in list(model.bert.encoder.layer[i].named_parameters()) if
any(nd in n for nd in no_decay)],
'weight_decay': 0.0,
'lr': lr / (dft_rate ** (12 - i))}
encoder_params.append(encoder_decay)
encoder_params.append(encoder_nodecay)
optimizer_grouped_parameters = [
{'params': [p for n, p in list(model.bert.embeddings.named_parameters()) if
not any(nd in n for nd in no_decay)],
'weight_decay': 0.01,
'lr': lr / (dft_rate ** 13)},
{'params': [p for n, p in list(model.bert.embeddings.named_parameters()) if
any(nd in n for nd in no_decay)],
'weight_decay': 0.0,
'lr': lr / (dft_rate ** 13)},
{'params': [p for n, p in list(model.bert.pooler.named_parameters()) if
not any(nd in n for nd in no_decay)],
'weight_decay': 0.01,
'lr': lr},
{'params': [p for n, p in list(model.bert.pooler.named_parameters()) if
any(nd in n for nd in no_decay)],
'weight_decay': 0.0,
'lr': lr},
{'params': [p for n, p in list(model.classifier.named_parameters()) if
not any(nd in n for nd in no_decay)],
'weight_decay': 0.01,
'lr': lr},
{'params': [p for n, p in list(model.classifier.named_parameters()) if any(nd in n for nd in no_decay)],
'weight_decay': 0.0,
'lr': lr}]
optimizer_grouped_parameters.extend(encoder_params)
else:
param_optimizer = list(model.named_parameters())
optimizer_grouped_parameters = [
{'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)],
'weight_decay': 0.01},
{'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}
]
schedule = "warmup_linear"
self.num_warmup_steps = int(float(self.num_train_optimization_steps) * self.config.warm_up_proportion)
self.optimizer = AdamW(optimizer_grouped_parameters,
lr=self.config.learning_rate,
correct_bias=False)
self.scheduler = get_linear_schedule_with_warmup(self.optimizer,
num_warmup_steps=self.num_warmup_steps,
num_training_steps=self.num_train_optimization_steps)
return model
def get_loader(self, examples, phase):
"""
Creates a data loader object for a dataset.
Parameters
----------
examples: list
The list of InputExample's.
phase: 'train' or 'eval'
Determines whether to use random sampling or sequential sampling depending on the phase.
Returns
-------
dataloader: DataLoader
The data loader object.
"""
features = convert_examples_to_features(examples, self.label_list,
self.config.max_seq_length,
self.tokenizer,
self.config.output_mode)
        # Log the necessary information
logger.info("***** Loading data *****")
logger.info(" Num examples = %d", len(examples))
logger.info(" Batch size = %d", self.config.train_batch_size)
logger.info(" Num steps = %d", self.num_train_optimization_steps)
# Load the data, make it into TensorDataset
all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
all_attention_mask = torch.tensor([f.attention_mask for f in features], dtype=torch.long)
all_token_type_ids = torch.tensor([f.token_type_ids for f in features], dtype=torch.long)
if self.config.output_mode == "classification":
all_label_ids = torch.tensor([f.label_id for f in features], dtype=torch.long)
elif self.config.output_mode == "regression":
all_label_ids = torch.tensor([f.label_id for f in features], dtype=torch.float)
try:
all_agree_ids = torch.tensor([f.agree for f in features], dtype=torch.long)
except:
all_agree_ids = torch.tensor([0.0 for f in features], dtype=torch.long)
data = TensorDataset(all_input_ids, all_attention_mask, all_token_type_ids, all_label_ids, all_agree_ids)
# Distributed, if necessary
if phase == 'train':
my_sampler = RandomSampler(data)
elif phase == 'eval':
my_sampler = SequentialSampler(data)
dataloader = DataLoader(data, sampler=my_sampler, batch_size=self.config.train_batch_size)
return dataloader
def train(self, train_examples, model):
"""
Trains the model.
Parameters
----------
        train_examples: list
Contains the data as a list of InputExample's
model: BertModel
The Bert model to be trained.
Returns
-------
model: BertModel
The trained model.
"""
validation_examples = self.get_data('validation')
global_step = 0
self.validation_losses = []
# Training
train_dataloader = self.get_loader(train_examples, 'train')
model.train()
step_number = len(train_dataloader)
i = 0
for _ in trange(int(self.config.num_train_epochs), desc="Epoch"):
model.train()
tr_loss = 0
nb_tr_examples, nb_tr_steps = 0, 0
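            # gradual unfreezing: all of BERT starts frozen; roughly every
            # third of an epoch one more encoder layer is unfrozen top-down,
            # with the embeddings unfrozen last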
for step, batch in enumerate(tqdm(train_dataloader, desc='Iteration')):
if (self.config.gradual_unfreeze and i == 0):
for param in model.bert.parameters():
param.requires_grad = False
if (step % (step_number // 3)) == 0:
i += 1
if (self.config.gradual_unfreeze and i > 1 and i < self.config.encoder_no):
for k in range(i - 1):
try:
for param in model.bert.encoder.layer[self.config.encoder_no - 1 - k].parameters():
param.requires_grad = True
except:
pass
if (self.config.gradual_unfreeze and i > self.config.encoder_no + 1):
for param in model.bert.embeddings.parameters():
param.requires_grad = True
batch = tuple(t.to(self.device) for t in batch)
input_ids, attention_mask, token_type_ids, label_ids, agree_ids = batch
logits = model(input_ids, attention_mask, token_type_ids)[0]
weights = self.class_weights.to(self.device)
if self.config.output_mode == "classification":
loss_fct = CrossEntropyLoss(weight=weights)
loss = loss_fct(logits.view(-1, self.num_labels), label_ids.view(-1))
elif self.config.output_mode == "regression":
loss_fct = MSELoss()
loss = loss_fct(logits.view(-1), label_ids.view(-1))
                if self.config.gradient_accumulation_steps > 1:
                    loss = loss / self.config.gradient_accumulation_steps
                loss.backward()
tr_loss += loss.item()
nb_tr_examples += input_ids.size(0)
nb_tr_steps += 1
if (step + 1) % self.config.gradient_accumulation_steps == 0:
if self.config.fp16:
lr_this_step = self.config.learning_rate * warmup_linear(
global_step / self.num_train_optimization_steps, self.config.warm_up_proportion)
for param_group in self.optimizer.param_groups:
param_group['lr'] = lr_this_step
torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)
self.optimizer.step()
self.scheduler.step()
self.optimizer.zero_grad()
global_step += 1
# Validation
validation_loader = self.get_loader(validation_examples, phase='eval')
model.eval()
valid_loss, valid_accuracy = 0, 0
nb_valid_steps, nb_valid_examples = 0, 0
for input_ids, attention_mask, token_type_ids, label_ids, agree_ids in tqdm(validation_loader, desc="Validating"):
input_ids = input_ids.to(self.device)
attention_mask = attention_mask.to(self.device)
token_type_ids = token_type_ids.to(self.device)
label_ids = label_ids.to(self.device)
agree_ids = agree_ids.to(self.device)
with torch.no_grad():
logits = model(input_ids, attention_mask, token_type_ids)[0]
if self.config.output_mode == "classification":
loss_fct = CrossEntropyLoss(weight=weights)
tmp_valid_loss = loss_fct(logits.view(-1, self.num_labels), label_ids.view(-1))
elif self.config.output_mode == "regression":
loss_fct = MSELoss()
tmp_valid_loss = loss_fct(logits.view(-1), label_ids.view(-1))
valid_loss += tmp_valid_loss.mean().item()
nb_valid_steps += 1
valid_loss = valid_loss / nb_valid_steps
self.validation_losses.append(valid_loss)
print("Validation losses: {}".format(self.validation_losses))
if valid_loss == min(self.validation_losses):
try:
os.remove(self.config.model_dir / ('temporary' + str(best_model)))
except:
print('No best model found')
torch.save({'epoch': str(i), 'state_dict': model.state_dict()},
self.config.model_dir / ('temporary' + str(i)))
best_model = i
# Save a trained model and the associated configuration
checkpoint = torch.load(self.config.model_dir / ('temporary' + str(best_model)))
model.load_state_dict(checkpoint['state_dict'])
        model_to_save = model.module if hasattr(model, 'module') else model  # Only save the model itself
output_model_file = os.path.join(self.config.model_dir, WEIGHTS_NAME)
torch.save(model_to_save.state_dict(), output_model_file)
output_config_file = os.path.join(self.config.model_dir, CONFIG_NAME)
with open(output_config_file, 'w') as f:
f.write(model_to_save.config.to_json_string())
os.remove(self.config.model_dir / ('temporary' + str(best_model)))
return model
def evaluate(self, model, examples):
"""
Evaluate the model.
Parameters
----------
model: BertModel
The model to be evaluated.
examples: list
            Evaluation data as a list of InputExample's.
Returns
-------
evaluation_df: pd.DataFrame
A dataframe that includes for each example predicted probability and labels.
"""
eval_loader = self.get_loader(examples, phase='eval')
logger.info("***** Running evaluation ***** ")
logger.info(" Num examples = %d", len(examples))
logger.info(" Batch size = %d", self.config.eval_batch_size)
model.eval()
eval_loss, eval_accuracy = 0, 0
nb_eval_steps, nb_eval_examples = 0, 0
predictions = []
labels = []
agree_levels = []
text_ids = []
for input_ids, attention_mask, token_type_ids, label_ids, agree_ids in tqdm(eval_loader, desc="Testing"):
input_ids = input_ids.to(self.device)
attention_mask = attention_mask.to(self.device)
token_type_ids = token_type_ids.to(self.device)
label_ids = label_ids.to(self.device)
agree_ids = agree_ids.to(self.device)
with torch.no_grad():
logits = model(input_ids, attention_mask, token_type_ids)[0]
if self.config.output_mode == "classification":
loss_fct = CrossEntropyLoss()
tmp_eval_loss = loss_fct(logits.view(-1, self.num_labels), label_ids.view(-1))
elif self.config.output_mode == "regression":
loss_fct = MSELoss()
tmp_eval_loss = loss_fct(logits.view(-1), label_ids.view(-1))
np_logits = logits.cpu().numpy()
if self.config.output_mode == 'classification':
prediction = np.array(np_logits)
elif self.config.output_mode == "regression":
prediction = np.array(np_logits)
for agree_id in agree_ids:
agree_levels.append(agree_id.item())
for label_id in label_ids:
labels.append(label_id.item())
for pred in prediction:
predictions.append(pred)
text_ids.append(input_ids)
# tmp_eval_loss = loss_fct(logits.view(-1, self.num_labels), label_ids.view(-1))
# tmp_eval_loss = model(input_ids, token_type_ids, attention_mask, label_ids)
eval_loss += tmp_eval_loss.mean().item()
nb_eval_steps += 1
# logits = logits.detach().cpu().numpy()
# label_ids = label_ids.to('cpu').numpy()
# tmp_eval_accuracy = accuracy(logits, label_ids)
# eval_loss += tmp_eval_loss.mean().item()
# eval_accuracy += tmp_eval_accuracy
evaluation_df = pd.DataFrame({'predictions': predictions, 'labels': labels, "agree_levels": agree_levels})
return evaluation_df
def predict(text, model, write_to_csv=False, path=None):
"""
Predict sentiments of sentences in a given text. The function first tokenizes sentences, make predictions and write
results.
Parameters
----------
text: string
text to be analyzed
model: BertForSequenceClassification
path to the classifier model
write_to_csv (optional): bool
path (optional): string
path to write the string
"""
model.eval()
tokenizer = AutoTokenizer.from_pretrained('bert-base-uncased')
sentences = sent_tokenize(text)
label_list = ['positive', 'negative', 'neutral']
label_dict = {0: 'positive', 1: 'negative', 2: 'neutral'}
result = pd.DataFrame(columns=['sentence', 'logit', 'prediction', 'sentiment_score'])
for batch in chunks(sentences, 5):
examples = [InputExample(str(i), sentence) for i, sentence in enumerate(batch)]
features = convert_examples_to_features(examples, label_list, 64, tokenizer)
all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
all_attention_mask = torch.tensor([f.attention_mask for f in features], dtype=torch.long)
all_token_type_ids = torch.tensor([f.token_type_ids for f in features], dtype=torch.long)
with torch.no_grad():
logits = model(all_input_ids, all_attention_mask, all_token_type_ids)[0]
logging.info(logits)
logits = softmax(np.array(logits))
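        # sentiment score = P(positive) - P(negative), a value in [-1, 1]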
sentiment_score = pd.Series(logits[:, 0] - logits[:, 1])
predictions = np.squeeze(np.argmax(logits, axis=1))
batch_result = {'sentence': batch,
'logit': list(logits),
'prediction': predictions,
'sentiment_score': sentiment_score}
batch_result = pd.DataFrame(batch_result)
result = pd.concat([result, batch_result], ignore_index=True)
result['prediction'] = result.prediction.apply(lambda x: label_dict[x])
if write_to_csv:
result.to_csv(path, sep=',', index=False)
return result
|
py | 7df9800240d89d5cd042296c76b2223a72639826 | from mindsdb.libs.helpers.general_helpers import evaluate_accuracy, get_value_bucket
from mindsdb.libs.phases.stats_generator.stats_generator import StatsGenerator
from mindsdb.libs.data_types.transaction_data import TransactionData
from mindsdb.libs.constants.mindsdb import *
from mindsdb.libs.helpers.general_helpers import disable_console_output
import pandas as pd
class ColumnEvaluator():
"""
    The ColumnEvaluator is responsible for testing out various scenarios
    regarding the model, in order to determine things such as the importance
    of input variables or the variability of output values.
"""
def __init__(self, transaction):
self.transaction = transaction
def get_column_importance(self, model, output_columns, input_columns, full_dataset, stats):
columnless_prediction_distribution = {}
all_columns_prediction_distribution = {}
normal_predictions = model.predict('validate')
normal_accuracy = evaluate_accuracy(normal_predictions, full_dataset, stats, output_columns)
column_importance_dict = {}
buckets_stats = {}
# Histogram for when all columns are present, in order to plot the force vectors
for output_column in output_columns:
# @TODO: Running stats generator just to get the histogram is very inefficient, change this
validation_set_output_column_histogram, _ = StatsGenerator.get_histogram(normal_predictions[output_column], data_type=stats[output_column]['data_type'],data_subtype=stats[output_column]['data_subtype'])
if validation_set_output_column_histogram is not None:
all_columns_prediction_distribution[output_column] = validation_set_output_column_histogram
ignorable_input_columns = []
for input_column in input_columns:
if stats[input_column]['data_type'] != DATA_TYPES.FILE_PATH and input_column not in [x[0] for x in self.transaction.lmd['model_order_by']]:
ignorable_input_columns.append(input_column)
for input_column in ignorable_input_columns:
# See what happens with the accuracy of the outputs if only this column is present
ignore_columns = [col for col in ignorable_input_columns if col != input_column]
col_only_predictions = model.predict('validate', ignore_columns)
col_only_accuracy = evaluate_accuracy(col_only_predictions, full_dataset, stats, output_columns)
# See what happens with the accuracy if all columns but this one are present
ignore_columns = [input_column]
col_missing_predictions = model.predict('validate', ignore_columns)
col_missing_accuracy = evaluate_accuracy(col_missing_predictions, full_dataset, stats, output_columns)
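            # importance blends the accuracy lost when this column is removed
            # with the accuracy achieved when only this column is kept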
combined_column_accuracy = ((normal_accuracy - col_missing_accuracy) + col_only_accuracy)/2
if combined_column_accuracy < 0:
combined_column_accuracy = 0
column_importance = 10*(1 - (normal_accuracy - combined_column_accuracy)/normal_accuracy)
if column_importance < 1:
column_importance = 1
column_importance_dict[input_column] = column_importance
# Histogram for when the column is missing, in order to plot the force vectors
for output_column in output_columns:
if output_column not in columnless_prediction_distribution:
columnless_prediction_distribution[output_column] = {}
# @TODO: Running stats generator just to get the histogram is very inefficient, change this
col_missing_output_histogram, _ = StatsGenerator.get_histogram(col_missing_predictions[output_column], data_type=stats[output_column]['data_type'],data_subtype=stats[output_column]['data_subtype'])
if col_missing_output_histogram is not None:
columnless_prediction_distribution[output_column][input_column] = col_missing_output_histogram
# @TODO should be go back to generating this information based on the buckets of the input columns ? Or just keep doing the stats generation for the input columns based on the indexes of the buckets for the output column
for output_column in output_columns:
buckets_stats[output_column] = {}
bucket_indexes = {}
for index,row in full_dataset.iterrows():
value = row[output_column]
if 'percentage_buckets' in stats[output_column]:
percentage_buckets = stats[output_column]['percentage_buckets']
else:
percentage_buckets = None
value_bucket = get_value_bucket(value, percentage_buckets, stats[output_column], self.transaction.hmd)
if value_bucket not in bucket_indexes:
bucket_indexes[value_bucket] = []
bucket_indexes[value_bucket].append(index)
for bucket in bucket_indexes:
buckets_stats[output_column][bucket] = {}
input_data = TransactionData()
input_data.data_frame = full_dataset.loc[bucket_indexes[bucket]]
input_data.columns = input_data.data_frame.columns
stats_generator = StatsGenerator(session=None, transaction=self.transaction)
try:
with disable_console_output():
col_buckets_stats = stats_generator.run(input_data=input_data, modify_light_metadata=False, print_logs=False)
buckets_stats[output_column][bucket].update(col_buckets_stats)
except:
pass
# @TODO Is this worth informing the user about ?
                #print('Could not generate bucket stats for sub-bucket: {}'.format(bucket))
return column_importance_dict, buckets_stats, columnless_prediction_distribution, all_columns_prediction_distribution
def get_column_influence(self):
pass
#
|
py | 7df980840cfefa9bdb3e400cf2994226653f97ff | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'PictureItem'
db.create_table(u'contentitem_contentplugins_pictureitem', (
(u'contentitem_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['fluent_contents.ContentItem'], unique=True, primary_key=True)),
('image', self.gf('sorl.thumbnail.fields.ImageField')(max_length=100)),
('align', self.gf('django.db.models.fields.CharField')(max_length=50)),
))
db.send_create_signal(u'contentplugins', ['PictureItem'])
def backwards(self, orm):
# Deleting model 'PictureItem'
db.delete_table(u'contentitem_contentplugins_pictureitem')
models = {
u'contentplugins.pictureitem': {
'Meta': {'ordering': "('placeholder', 'sort_order')", 'object_name': 'PictureItem', 'db_table': "u'contentitem_contentplugins_pictureitem'", '_ormbases': ['fluent_contents.ContentItem']},
'align': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
u'contentitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['fluent_contents.ContentItem']", 'unique': 'True', 'primary_key': 'True'}),
'image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'fluent_contents.contentitem': {
'Meta': {'ordering': "('placeholder', 'sort_order')", 'object_name': 'ContentItem'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'parent_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'contentitems'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['fluent_contents.Placeholder']"}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_fluent_contents.contentitem_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'default': '1', 'db_index': 'True'})
},
'fluent_contents.placeholder': {
'Meta': {'unique_together': "(('parent_type', 'parent_id', 'slot'),)", 'object_name': 'Placeholder'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'parent_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'default': "'m'", 'max_length': '1'}),
'slot': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
}
}
complete_apps = ['contentplugins'] |
py | 7df980a7af65ab77a8ba55fe0430103295875202 | """
Question 78 :
Write a program to print the running time of executing "1 + 1"
100 times.
Hints : Use the timeit() function to measure the running time.
"""
# Solution :
from timeit import Timer
t = Timer("for i in range(100) : 1 + 1")
print(t.timeit())
"""
timeit :
This module provides a simple way to find the execution time
of small bits of Python code. Note that Timer.timeit() runs the
statement 1,000,000 times by default, so the reported time covers
one million executions of the 100-iteration loop.
"""
"""
Output :
1.6696240000000002
""" |
py | 7df980cf46cba342b5daf1638b4ebc4ea78c9065 | w=input()
w=w.replace("a","")
w=w.replace("i","")
w=w.replace("u","")
w=w.replace("e","")
w=w.replace("o","")
print(w) |
py | 7df982732e8a0852f43cb8993ad7b98038d6875a | #!/usr/bin/env python
'''
Generate valid and invalid base58 address and private key test vectors.
Usage:
gen_base58_test_vectors.py valid 50 > ../../src/test/data/base58_keys_valid.json
gen_base58_test_vectors.py invalid 50 > ../../src/test/data/base58_keys_invalid.json
'''
# 2012 Wladimir J. van der Laan
# Released under MIT License
import os
from itertools import islice
from base58 import b58encode, b58decode, b58encode_chk, b58decode_chk, b58chars
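# b58encode_chk/b58decode_chk implement Base58Check: the payload is suffixed
# with a 4-byte checksum taken from a double-SHA256 of the data.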
import random
from binascii import b2a_hex
# key types
PUBKEY_ADDRESS = 71
SCRIPT_ADDRESS = 5
PUBKEY_ADDRESS_TEST = 74
SCRIPT_ADDRESS_TEST = 196
PRIVKEY = 199
PRIVKEY_TEST = 202
metadata_keys = ['isPrivkey', 'isTestnet', 'addrType', 'isCompressed']
# templates for valid sequences
templates = [
# prefix, payload_size, suffix, metadata
# None = N/A
((PUBKEY_ADDRESS,), 20, (), (False, False, 'pubkey', None)),
((SCRIPT_ADDRESS,), 20, (), (False, False, 'script', None)),
((PUBKEY_ADDRESS_TEST,), 20, (), (False, True, 'pubkey', None)),
((SCRIPT_ADDRESS_TEST,), 20, (), (False, True, 'script', None)),
((PRIVKEY,), 32, (), (True, False, None, False)),
((PRIVKEY,), 32, (1,), (True, False, None, True)),
((PRIVKEY_TEST,), 32, (), (True, True, None, False)),
((PRIVKEY_TEST,), 32, (1,), (True, True, None, True))
]
def is_valid(v):
'''Check vector v for validity'''
result = b58decode_chk(v)
if result is None:
return False
valid = False
for template in templates:
prefix = str(bytearray(template[0]))
suffix = str(bytearray(template[2]))
if result.startswith(prefix) and result.endswith(suffix):
if (len(result) - len(prefix) - len(suffix)) == template[1]:
return True
return False
def gen_valid_vectors():
'''Generate valid test vectors'''
while True:
for template in templates:
prefix = str(bytearray(template[0]))
payload = os.urandom(template[1])
suffix = str(bytearray(template[2]))
rv = b58encode_chk(prefix + payload + suffix)
assert is_valid(rv)
metadata = dict([(x,y) for (x,y) in zip(metadata_keys,template[3]) if y is not None])
yield (rv, b2a_hex(payload), metadata)
def gen_invalid_vector(template, corrupt_prefix, randomize_payload_size, corrupt_suffix):
'''Generate possibly invalid vector'''
if corrupt_prefix:
prefix = os.urandom(1)
else:
prefix = str(bytearray(template[0]))
if randomize_payload_size:
payload = os.urandom(max(int(random.expovariate(0.5)), 50))
else:
payload = os.urandom(template[1])
if corrupt_suffix:
suffix = os.urandom(len(template[2]))
else:
suffix = str(bytearray(template[2]))
return b58encode_chk(prefix + payload + suffix)
def randbool(p = 0.5):
'''Return True with P(p)'''
return random.random() < p
def gen_invalid_vectors():
'''Generate invalid test vectors'''
# start with some manual edge-cases
yield "",
yield "x",
while True:
# kinds of invalid vectors:
# invalid prefix
# invalid payload length
# invalid (randomized) suffix (add random data)
# corrupt checksum
for template in templates:
val = gen_invalid_vector(template, randbool(0.2), randbool(0.2), randbool(0.2))
if random.randint(0,10)<1: # line corruption
if randbool(): # add random character to end
val += random.choice(b58chars)
else: # replace random character in the middle
n = random.randint(0, len(val))
val = val[0:n] + random.choice(b58chars) + val[n+1:]
if not is_valid(val):
yield val,
if __name__ == '__main__':
import sys, json
iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
try:
uiter = iters[sys.argv[1]]
except IndexError:
uiter = gen_valid_vectors
try:
count = int(sys.argv[2])
except IndexError:
count = 0
data = list(islice(uiter(), count))
json.dump(data, sys.stdout, sort_keys=True, indent=4)
sys.stdout.write('\n')
|
py | 7df982c7296b615b673c796696f040d837aa821a | #-*- coding: utf-8 -*-
import re
from markdown.extensions import Extension
from markdown.preprocessors import Preprocessor
class ImagifyExtension(Extension):
def extendMarkdown(self, md, md_globals):
md.registerExtension(self)
md.preprocessors.add('imagify',
ImagifyPreprocessor(md),
'_end')
class ImagifyPreprocessor(Preprocessor):
def run(self, lines):
new_lines = []
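        # a line consisting solely of an image URL gets replaced with an
        # inline image tag by the substitution below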
def imagify(match):
            # emit an inline image tag for the bare URL (the exact markup
            # emitted is an assumption; the original string was empty/lost)
            return u'<img src="%s" />' % match.group(0)
for line in lines:
if line.strip():
line = re.sub(ur'^https?://[^\s]+/(?P<image_name>[^\s]+)\.'
ur'(?P<extension>png|jpg|jpeg|gif|bmp|tif|tiff)'
ur'(\?[^\s]+)?$', imagify, line, flags=re.UNICODE)
new_lines.append(line)
return new_lines
def makeExtension(configs=None):
return ImagifyExtension(configs=configs) |
py | 7df982de10fd800506552182ac24149bf37bdc28 | """Defines the URLs for the RESTful ingest and Strike services"""
from django.conf.urls import url
import ingest.views as views
urlpatterns = [
# Ingest views
url(r'^ingests/$', views.IngestsView.as_view(), name='ingests_view'),
url(r'^ingests/status/$', views.IngestsStatusView.as_view(), name='ingests_status_view'),
url(r'^ingests/(?P<ingest_id>\d+)/$', views.IngestDetailsView.as_view(), name='ingest_details_view'),
url(r'^ingests/(?P<file_name>[\w.-]{0,250})/$', views.IngestDetailsView.as_view(), name='ingest_details_view'),
# Scan views
url(r'^scans/$', views.ScansView.as_view(), name='scans_view'),
url(r'^scans/(\d+)/$', views.ScansDetailsView.as_view(), name='scans_details_view'),
url(r'^scans/(\d+)/process/$', views.ScansProcessView.as_view(), name='scans_process_view'),
url(r'^scans/validation/$', views.ScansValidationView.as_view(), name='scans_validation_view'),
url(r'^scans/cancel/(\d+)/$', views.CancelScansView.as_view(), name='cancel_scans_view'),
# Strike views
url(r'^strikes/$', views.StrikesView.as_view(), name='strikes_view'),
url(r'^strikes/(\d+)/$', views.StrikeDetailsView.as_view(), name='strike_details_view'),
url(r'^strikes/validation/$', views.StrikesValidationView.as_view(), name='strikes_validation_view'),
]
|
py | 7df9835d805111ecf5919f7961bb5604842128ee | """Helper methods to raise responses for the various HTTP status codes.
The motivation for these methods is to be able to easily document the HTTP
response headers which are highly recommended or required. For example the
``Location`` header which should be set for 201 responses.
The following status codes don't have a method here:
"""
from six.moves.urllib.parse import urljoin
from wsgiservice.exceptions import ResponseException
def raise_200(instance):
"""Abort the current request with a 200 (OK) response code.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 200
"""
instance.response.status = 200
raise ResponseException(instance.response)
def raise_201(instance, location):
"""Abort the current request with a 201 (Created) response code. Sets the
Location header correctly. If the location does not start with a slash,
the path of the current request is prepended.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 201
"""
_set_location(instance, location)
instance.response.status = 201
raise ResponseException(instance.response)
def raise_202(instance):
"""Abort the current request with a 202 (Accepted) response code.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 202
"""
instance.response.status = 202
raise ResponseException(instance.response)
def raise_204(instance):
"""Abort the current request with a 204 (No Content) response code. Clears
out the body of the response.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 204
"""
instance.response.status = 204
instance.response.body = b''
instance.response.body_raw = None
raise ResponseException(instance.response)
def raise_205(instance):
"""Abort the current request with a 205 (Reset Content) response code.
Clears out the body of the response.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 205
"""
instance.response.status = 205
instance.response.body = b''
instance.response.body_raw = None
raise ResponseException(instance.response)
def raise_300(instance):
"""Abort the current request with a 300 (Multiple Choices) response code.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 300
"""
instance.response.status = 300
raise ResponseException(instance.response)
def raise_301(instance, location):
"""Abort the current request with a 301 (Moved Permanently) response code.
Sets the Location header correctly. If the location does not start with a
slash, the path of the current request is prepended.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 301
"""
_set_location(instance, location)
instance.response.status = 301
raise ResponseException(instance.response)
def raise_302(instance, location):
"""Abort the current request with a 302 (Found) response code. Sets the
Location header correctly. If the location does not start with a slash,
the path of the current request is prepended.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 302
"""
_set_location(instance, location)
instance.response.status = 302
raise ResponseException(instance.response)
def raise_303(instance, location):
"""Abort the current request with a 303 (See Other) response code. Sets
the Location header correctly. If the location does not start with a
slash, the path of the current request is prepended.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 303
"""
_set_location(instance, location)
instance.response.status = 303
raise ResponseException(instance.response)
def raise_304(instance):
"""Abort the current request with a 304 (Not Modified) response code.
Clears out the body of the response.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 304
.. todo: The following headers MUST be output: Date, ETag and/or
Content-Location, Expires, Cache-Control, Vary. See :rfc:`2616`
section 10.3.5.
"""
instance.response.status = 304
instance.response.body = b''
instance.response.body_raw = None
raise ResponseException(instance.response)
def raise_305(instance, location):
"""Abort the current request with a 305 (Use Proxy) response code. Sets
the Location header correctly. If the location does not start with a
slash, the path of the current request is prepended.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 305
"""
_set_location(instance, location)
instance.response.status = 305
raise ResponseException(instance.response)
def raise_307(instance, location):
"""Abort the current request with a 307 (Temporary Redirect) response
code. Sets the Location header correctly. If the location does not start
with a slash, the path of the current request is prepended.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 307
"""
_set_location(instance, location)
instance.response.status = 307
raise ResponseException(instance.response)
def raise_400(instance, msg=None):
"""Abort the current request with a 400 (Bad Request) response code. If
the message is given it's output as an error message in the response body
(correctly converted to the requested MIME type).
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 400
"""
instance.response.status = 400
if msg:
instance.response.body_raw = {'error': msg}
raise ResponseException(instance.response)
def raise_401(instance, authenticate, msg=None):
"""Abort the current request with a 401 (Unauthorized) response code. If
the message is given it's output as an error message in the response body
(correctly converted to the requested MIME type). Outputs the
WWW-Authenticate header as given by the authenticate parameter.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 401
"""
instance.response.status = 401
instance.response.headers['WWW-Authenticate'] = authenticate
if msg:
instance.response.body_raw = {'error': msg}
raise ResponseException(instance.response)
def raise_402(instance, msg=None):
"""Abort the current request with a 402 (Payment Required) response code.
If the message is given it's output as an error message in the response
body (correctly converted to the requested MIME type).
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 402
"""
instance.response.status = 402
if msg:
instance.response.body_raw = {'error': msg}
raise ResponseException(instance.response)
def raise_403(instance, msg=None):
"""Abort the current request with a 403 (Forbidden) response code. If the
message is given it's output as an error message in the response body
(correctly converted to the requested MIME type).
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 403
"""
instance.response.status = 403
if msg:
instance.response.body_raw = {'error': msg}
raise ResponseException(instance.response)
def raise_404(instance):
"""Abort the current request with a 404 (Not Found) response code.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 404
"""
instance.response.status = 404
raise ResponseException(instance.response)
def raise_405(instance):
"""Abort the current request with a 405 (Method Not Allowed) response
code. Sets the ``Allow`` response header to the return value of the
:func:`Resource.get_allowed_methods` function.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 405
"""
instance.response.status = 405
instance.response.headers['Allow'] = instance.get_allowed_methods()
raise ResponseException(instance.response)
def raise_406(instance):
"""Abort the current request with a 406 (Not Acceptable) response code.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 406
"""
instance.response.status = 406
raise ResponseException(instance.response)
def raise_409(instance):
"""Abort the current request with a 409 (Conflict) response code.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 409
"""
instance.response.status = 409
raise ResponseException(instance.response)
def raise_410(instance):
"""Abort the current request with a 410 (Gone) response code.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 410
"""
instance.response.status = 410
raise ResponseException(instance.response)
def raise_412(instance, msg=None):
"""Abort the current request with a 412 (Precondition Failed) response
code. If the message is given, it is output as an error message in the
response body (correctly converted to the requested MIME type).
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 412
"""
instance.response.status = 412
if msg:
instance.response.body_raw = {'error': msg}
raise ResponseException(instance.response)
def raise_415(instance, msg=None):
"""Abort the current request with a 415 (Unsupported Media Type) response
code. If the message is given, it is output as an error message in the
response body (correctly converted to the requested MIME type).
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 415
"""
instance.response.status = 415
if msg:
instance.response.body_raw = {'error': msg}
raise ResponseException(instance.response)
def raise_500(instance, msg=None):
"""Abort the current request with a 500 (Internal Server Error) response
code. If the message is given, it is output as an error message in the
response body (correctly converted to the requested MIME type).
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 500
"""
instance.response.status = 500
if msg:
instance.response.body_raw = {'error': msg}
raise ResponseException(instance.response)
def raise_501(instance):
"""Abort the current request with a 501 (Not Implemented) response code.
Sets the ``Allow`` response header to the return value of the
:func:`Resource.get_allowed_methods` function.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 501
"""
instance.response.status = 501
instance.response.headers['Allow'] = instance.get_allowed_methods()
raise ResponseException(instance.response)
def raise_503(instance):
"""Abort the current request with a 503 (Service Unavailable) response
code.
:param instance: Resource instance (used to access the response)
:type instance: :class:`webob.resource.Resource`
:raises: :class:`webob.exceptions.ResponseException` of status 503
"""
instance.response.status = 503
raise ResponseException(instance.response)
def _set_location(instance, location):
"""Sets a ``Location`` response header. If the location does not start with
a slash, the path of the current request is prepended.
:param instance: Resource instance (used to access the request and
response)
:type instance: :class:`webob.resource.Resource`
"""
location = str(location)
if not location.startswith('/'):
location = urljoin(instance.request_path.rstrip('/') + '/', location)
instance.response.location = location
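# Usage sketch (hypothetical handler; ``TodoResource`` and its attributes are
# illustrative, not part of this module):
#
#     class TodoResource(Resource):
#         def get(self):
#             if not self.request.headers.get('Authorization'):
#                 raise_401(self, 'Basic realm="api"', msg='login required')
#             if self.todo is None:
#                 raise_404(self)            # bare status code, no body
#             _set_location(self, 'latest')  # relative -> <request path>/latest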
|
py | 7df984301f2c65d353d2dd17507a869a628a339f | """
Test all of the Django views used by deckr.
"""
import tempfile
from django.core.urlresolvers import reverse
from django.test import Client, TestCase
from django.test.utils import override_settings
import deckr.views
from deckr.models import GameDefinition, GameRoom, Player
from mock import MagicMock
MOCK_GAME = "engine/tests/mock_game"
class IndexTestCase(TestCase):
"""
Test the index page to make sure it's working as intended.
"""
def setUp(self):
self.client = Client()
def test_can_access(self):
"""
Make sure we can access the page.
"""
response = self.client.get(reverse('deckr.index'))
self.assertEqual(response.status_code, 200)
class CreateGameTestCase(TestCase):
"""
Test the create game page to make sure it's working as intended.
"""
def setUp(self):
self.client = Client()
# Make sure we have a game definition
self.game_def = GameDefinition.objects.create(name="test",
path=MOCK_GAME)
def test_can_access(self):
"""
Make sure we can access the page.
"""
response = self.client.get(reverse('deckr.create_game_room'))
self.assertEqual(response.status_code, 200)
def test_create_game_form(self):
"""
Make sure that the form submits, and that it will reject invalid
input.
"""
form_data = {'game_id': self.game_def.pk}
response = self.client.post(reverse('deckr.create_game_room'),
form_data)
self.assertTrue(GameRoom.objects.all().count() > 0)
game = list(GameRoom.objects.all())[-1]
self.assertRedirects(response,
reverse('deckr.game_room_staging_area',
args=(game.id,)))
# Test invalid form
response = self.client.post(reverse('deckr.create_game_room'),
{})
self.assertFormError(response, 'form', 'game_id',
'This field is required.')
response = self.client.post(reverse('deckr.create_game_room'),
{'game_id': 0})
self.assertFormError(response, 'form', 'game_id',
"Select a valid choice. That choice is not one" +
" of the available choices.")
class CreatePlayerTestCase(TestCase):
"""
Test the create player form to join a game room
"""
def setUp(self):
self.client = Client()
# Make sure we have a game definition
self.game_def = GameDefinition.objects.create(name="test",
path=MOCK_GAME)
self.game_room = GameRoom.objects.create(game_definition=self.game_def,
room_id=1,
max_players=1)
# Mock out the add player function
deckr.views.game_runner = MagicMock()
def test_can_access(self):
"""
Make sure we can access the form.
"""
response = self.client.get(reverse('deckr.game_room_staging_area',
args=(self.game_room.pk,)))
self.assertEqual(response.status_code, 200)
def test_out_of_sync_engine(self):
"""
Make sure that even if the engine throws an internal error
while creating a player that we catch it and return a valid
page instead of throwing a stacktrace.
"""
error_msg = "Failed to join"
deckr.views.game_runner.add_player.side_effect = ValueError(error_msg)
form_data = {'nickname': "Player 1"}
response = self.client.post(reverse('deckr.game_room_staging_area',
args=(self.game_room.pk,)),
form_data)
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'nickname', error_msg)
def test_create_player_form(self):
"""
Check form validations and player creation
"""
deckr.views.game_runner.add_player.return_value = 1
form_data = {'nickname': "Player 1"}
response = self.client.post(reverse('deckr.game_room_staging_area',
args=(self.game_room.pk,)),
form_data)
player = list(Player.objects.all())[-1]
self.assertTrue(Player.objects.all().count() > 0)
self.assertRedirects(response,
reverse('deckr.game_room',
args=(self.game_room.id,)) +
"?player_id=" + str(player.pk))
old_count = Player.objects.all().count()
response = self.client.post(reverse('deckr.game_room_staging_area',
args=(self.game_room.pk,)),
form_data)
self.assertEqual(Player.objects.all().count(), old_count)
self.assertFormError(response, 'form', 'nickname',
'Nickname is already in use')
response = self.client.post(reverse('deckr.game_room_staging_area',
args=(self.game_room.pk,)),
{})
self.assertFormError(response, 'form', 'nickname',
'This field is required.')
self.assertEqual(Player.objects.all().count(), old_count)
old_count = Player.objects.all().count()
form_data = {'nickname': "Player 2"}
response = self.client.post(reverse('deckr.game_room_staging_area',
args=(self.game_room.pk,)),
form_data)
self.assertEqual(Player.objects.all().count(), old_count)
self.assertFormError(response, 'form', 'nickname',
'Cannot join full room')
class GamePageTestCase(TestCase):
"""
Test the game page to make sure that it's working as intended.
NOTE: This only tests the view functionality; a lot of this page
is in websockets.
"""
def setUp(self):
self.client = Client()
self.game_def = GameDefinition.objects.create(name="test",
path=MOCK_GAME)
self.game_room = GameRoom.objects.create(room_id=1,
game_definition=self.game_def)
self.player = Player.objects.create(game_room=self.game_room,
player_id=1,
nickname="Player 1")
def test_can_access(self):
"""
Make sure we can access the page.
"""
response = self.client.get(reverse('deckr.game_room',
args=(self.game_room.pk,)),
{'player_id': self.player.id})
self.assertEqual(response.status_code, 200)
class UploadGameDefTestCase(TestCase):
"""
Test the upload game page to submit a zipped file and create a game
definition
"""
def setUp(self):
self.client = Client()
def test_can_access(self):
"""
Make sure we can access the page.
"""
response = self.client.get(reverse('deckr.upload_game_definition'))
self.assertEqual(response.status_code, 200)
@override_settings(GAME_DEFINITION_PATH=tempfile.mkdtemp())
def test_can_upload(self):
"""
Make sure the form submits or displays validation
"""
old_count = GameDefinition.objects.all().count()
form_data = {'game_name': 'Solitaire',
'file': open('deckr/tests/solitaire.zip', 'rb')}
response = self.client.post(reverse('deckr.upload_game_definition',),
form_data)
self.assertEqual(GameDefinition.objects.all().count(), old_count + 1)
# Test a game name must be unique
form_data = {'game_name': 'Solitaire',
'file': open('deckr/tests/solitaire.zip', 'rb')}
response = self.client.post(reverse('deckr.upload_game_definition',),
form_data)
self.assertFormError(response, 'form', 'game_name',
'Game Definition already exists')
old_count = GameDefinition.objects.all().count()
response = self.client.post(reverse('deckr.upload_game_definition',),
{})
self.assertFormError(response, 'form', 'file',
'This field is required.')
self.assertEqual(GameDefinition.objects.all().count(), old_count)
# Test a zipped file that is missing the layout.html
form_data = {'game_name': 'Solitaire2',
'file': open('deckr/tests/failing_solitaire.zip', 'rb')}
response = self.client.post(reverse('deckr.upload_game_definition',),
form_data)
self.assertEqual(GameDefinition.objects.all().count(), old_count)
# Test a non-zipped file
form_data = {'game_name': 'Solitaire2',
'file': open('deckr/tests/test_models.py', 'rb')}
response = self.client.post(reverse('deckr.upload_game_definition',),
form_data)
self.assertEqual(GameDefinition.objects.all().count(), old_count)
self.assertFormError(response, 'form', 'file',
'File is not a zip file')
|
py | 7df985af5d38fec95b2bd369374dae294fb137e7 | """
Offset Mirror Classes
This module contains all the classes relating to the offset mirrors used in the
FEE and XRT. Each offset mirror contains a stepper motor and piezo motor to
control the pitch, and two pairs of motors to control the horizontal and
vertical gantries.
"""
import logging
import numpy as np
from ophyd import (Device, EpicsSignal, EpicsSignalRO, Component as Cpt,
PVPositioner, FormattedComponent as FCpt)
from .doc_stubs import basic_positioner_init
from .inout import InOutRecordPositioner
from .mv_interface import FltMvInterface
logger = logging.getLogger(__name__)
class OMMotor(FltMvInterface, PVPositioner):
"""
Base class for each motor in the LCLS offset mirror system.
"""
__doc__ += basic_positioner_init
# position
readback = Cpt(EpicsSignalRO, ':RBV', auto_monitor=True, kind='hinted')
setpoint = Cpt(EpicsSignal, ':VAL', limits=True, kind='normal')
done = Cpt(EpicsSignalRO, ':DMOV', auto_monitor=True, kind='omitted')
motor_egu = Cpt(EpicsSignal, ':RBV.EGU', kind='omitted')
# status
interlock = Cpt(EpicsSignalRO, ':INTERLOCK', kind='omitted')
enabled = Cpt(EpicsSignalRO, ':ENABLED', kind='omitted')
# limit switches
low_limit_switch = Cpt(EpicsSignalRO, ":LLS", kind='omitted')
high_limit_switch = Cpt(EpicsSignalRO, ":HLS", kind='omitted')
@property
def egu(self):
"""
Engineering units of the readback PV, as reported by EPICS.
Returns
-------
egu: ``str``
"""
return self.motor_egu.get()
def check_value(self, position):
"""
Checks to make sure the inputted value is both valid and within the
soft limits of the motor.
Parameters
----------
position: ``float``
Position to check for validity
Raises
------
``ValueError``
If position is ``None``, ``NaN`` or ``Inf``
``LimitError``
If the position is outside the soft limits
"""
# Check that we do not have a NaN or an Inf, as those
# will make the PLC very unhappy ...
if position is None or np.isnan(position) or np.isinf(position):
raise ValueError("Invalid value inputted: '{0}'".format(position))
# Use the built-in PVPositioner check_value
super().check_value(position)
class Pitch(OMMotor):
"""
HOMS Pitch Mechanism
The axis is actually a piezo actuator and a stepper motor in series, and
this is reflected in the PV naming
"""
__doc__ += basic_positioner_init
piezo_volts = FCpt(EpicsSignalRO, "{self._piezo}:VRBV", kind='normal')
stop_signal = FCpt(EpicsSignal, "{self._piezo}:STOP", kind='omitted')
# TODO: Limits will be added soon, but not present yet
def __init__(self, prefix, **kwargs):
# Predict the prefix of all piezo pvs
self._piezo = prefix.replace('MIRR', 'PIEZO')
super().__init__(prefix, **kwargs)
class Gantry(OMMotor):
"""
Gantry Axis
The horizontal and vertical motion of the OffsetMirror are controlled by
two coupled stepper motors. Instructions are sent to both by simply
requesting a move on the primary, so they are represented here as a single
motor with additional diagnostics and interlock.
Parameters
----------
prefix : str
Base prefix for both stepper motors, e.g. XRT:M1H. Do not include the "P"
or "S" to indicate primary or secondary steppers
gantry_prefix : str, optional
Prefix for the shared gantry diagnostics if it is different than the
stepper motor prefix
"""
# Readbacks for gantry information
gantry_difference = FCpt(EpicsSignalRO, "{self.gantry_prefix}:GDIF",
kind='normal')
decoupled = FCpt(EpicsSignalRO, "{self.gantry_prefix}:DECOUPLE",
kind='config')
# Readbacks for the secondary motor
follower_readback = FCpt(EpicsSignalRO, "{self.follow_prefix}:RBV",
kind='normal')
follower_low_limit_switch = FCpt(EpicsSignalRO, "{self.follow_prefix}:LLS",
kind='omitted')
follower_high_limit_switch = FCpt(EpicsSignalRO,
"{self.follow_prefix}:HLS",
kind='omitted')
def __init__(self, prefix, *, gantry_prefix=None, **kwargs):
self.gantry_prefix = gantry_prefix or 'GANTRY:' + prefix
self.follow_prefix = prefix + ':S'
super().__init__(prefix + ':P', **kwargs)
def check_value(self, pos):
"""
Add additional check for the gantry coupling
This is not a safety measure, but instead just here largely
for bookkeeping and to give the operator further feedback on why the
requested move is not completed.
"""
# Check that the gantry is not decoupled
if self.decoupled.get():
raise PermissionError("The gantry is not currently coupled")
# Allow OMMotor to check the value
super().check_value(pos)
class OffsetMirror(Device):
"""
X-Ray offset mirror class.
This is for each individual mirror system used in the FEE
and XRT. Controls for the pitch, and primary gantry x and y motors are
included.
When controlling the pitch motor, if the piezo is set to 'PID' mode, then
the pitch mechanism is set up to first move the stepper as close to the
desired position as it can; the piezo then kicks in to continuously
correct any remaining positional drift.
Parameters
----------
prefix : str
The EPICS base PV of the pitch motor
prefix_xy : str
The EPICS base PV of the gantry x and y gantry motors
xgantry_prefix : str
The name of the horizontal gantry if not identical to the prefix
name : str
The name of the offset mirror
"""
# Pitch Motor
pitch = FCpt(Pitch, "MIRR:{self.prefix}", kind='hinted')
# Gantry motors
xgantry = FCpt(Gantry, "{self._prefix_xy}:X",
gantry_prefix="{self._xgantry}",
add_prefix=['suffix', 'gantry_prefix'],
kind='normal')
ygantry = FCpt(Gantry, "{self._prefix_xy}:Y",
gantry_prefix='GANTRY:{self.prefix}:Y',
add_prefix=['suffix', 'gantry_prefix'],
kind='config')
# Transmission for Lightpath Interface
transmission = 1.0
# QIcon for UX
_icon = 'fa.minus-square'
# Subscription types
SUB_STATE = 'state'
def __init__(self, prefix, *, prefix_xy=None,
xgantry_prefix=None, **kwargs):
# Handle prefix mangling
self._prefix_xy = prefix_xy or prefix
self._xgantry = xgantry_prefix or 'GANTRY:' + prefix + ':X'
super().__init__(prefix, **kwargs)
@property
def inserted(self):
"""
Treat OffsetMirror as always inserted
"""
return True
@property
def removed(self):
"""
Treat OffsetMirror as never removed
"""
return False
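# Usage sketch (PV prefixes below are illustrative, not real beamline PVs):
#
#     m1h = OffsetMirror('XRT:M1H', prefix_xy='STEP:XRT:M1H', name='m1h')
#     m1h.pitch.mv(21.5)     # stepper + piezo pitch move
#     m1h.xgantry.mv(0.1)    # raises PermissionError if the gantry is decoupled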
class PointingMirror(InOutRecordPositioner, OffsetMirror):
"""
Retractable `OffsetMirror`
Both XRT M1H and XRT M2H can be completely removed from the beam depending
on the beam destination. In this case, the X gantry can be controlled via
the standard PCDS states record. This class has all the functionality of
`OffsetMirror` with the addition of the records that control the
overall state.
Parameters
----------
in_lines: ``list``, optional
List of beamlines that are delivered beam when the mirror is in
out_lines: ``list``, optional
List of beamlines that are delivered beam when the mirror is out
"""
# Reverse state order as PointingMirror is non-standard
states_list = ['OUT', 'IN']
def __init__(self, prefix, *, out_lines=None, in_lines=None, **kwargs):
# Branching pattern
self.in_lines = in_lines or list()
self.out_lines = out_lines or list()
super().__init__(prefix, **kwargs)
@property
def destination(self):
"""
Current list of destinations the mirror currently supports
"""
# Inserted
if self.inserted and not self.removed:
return self.in_lines
# Removed
elif self.removed and not self.inserted:
return self.out_lines
# Unknown
else:
return []
@property
def branches(self):
"""
Return all possible beamlines for mirror destinations
"""
return self.in_lines + self.out_lines
def check_value(self, pos):
"""
Check that our gantry is coupled before state moves
"""
# Check the X gantry
if self.xgantry.decoupled.get():
raise PermissionError("Can not move the horizontal gantry is "
"uncoupled")
# Allow StatePositioner to check the state
return super().check_value(pos)
|
py | 7df985e6852d4ba80fc56bc53f3baa5dbd973df0 | import os
from os import path
from os import listdir
from os.path import join, isfile
import datetime
import re
import time
from enum import Enum
import subprocess
import signal
import numpy as np
from _thread import start_new_thread
from threading import Lock
import tailer
from system_hotkey import SystemHotkey
import pyautogui
from hslog import LogParser
from hslog.export import FriendlyPlayerExporter
from libs.hslog_exporters import LastTurnExporter
from random import randrange
from libs import screen
from libs.template import create_battlefield
class Position(Enum):
FATAL_ERROR = -1
UNKNOWN = 0
HUB = 1
ADVENTURE = 2
DRAFT = 3
TAVERN_BRAWL = 4
COLLECTIONMANAGER = 5
PACKOPENING = 5  # duplicate value: Enum makes this an alias of COLLECTIONMANAGER
GAMEPLAY = 6
ARENA = 7
TOURNAMENT = 8
class Game:
position = Position.UNKNOWN
path = None
regex_loading = re.compile(r"LoadingScreen.OnSceneUnloaded\(\) - prevMode=.* nextMode=(.*) ")
def __init__(self, log_path):
self.thread = None
self.path = log_path
self.collector = BaseCollector(self)
self.lock = Lock()
self.p = None
self.running = True
self.hk = SystemHotkey()
self.hk.register(('control', 'alt', 'z'), callback=self.delete_last_img)
self.images = []
def delete_last_img(self, evnt):
if self.images:
os.remove(self.images[-1])
os.remove(self.images[-1].replace(".png", ".xml"))
if self.p:
self.p.terminate()
self.p.wait()
self.images.pop(-1)
if self.images and self.images[-1]:
self.p = subprocess.Popen(["python3", "labelImg/labelImg.py", self.images[-1]])
def get_position(self, lines):
""" set self.position and returns true if the position is new"""
for line in reversed(lines):
match = self.regex_loading.search(line)
if(match and self.position != Position[match.group(1)]):
self.position = Position[match.group(1)]
return True ## new position true
return False ## not a new position
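# Example LoadingScreen.log line this regex matches (hypothetical content):
#   LoadingScreen.OnSceneUnloaded() - prevMode=HUB nextMode=GAMEPLAY 
# match.group(1) would be "GAMEPLAY", which is then looked up in Position.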
def set_collector(self):
if(self.position is Position.ARENA):
self.collector = ArenaCollector(self)
elif(self.position is Position.GAMEPLAY):
self.collector = BattlefieldCollector(self)
elif(self.position is Position.COLLECTIONMANAGER):
self.collector = CollectionCollertor(self)
else:
self.collector = BaseCollector(self)
def show_img(self, path):
self.images.append(path)
if(len(self.images) >= 5):
self.images.remove(self.images[0])
if(self.p):
self.p.terminate()
self.p.wait()
self.p = subprocess.Popen(["python3", "labelImg/labelImg.py", path])
def run(self):
## get current position
with open(path.join(self.path, "LoadingScreen.log")) as f:
if self.get_position(f.readlines()):
self.set_collector()
start_new_thread(self.run_collector, ())
# watch file
for line in tailer.follow(open(path.join(self.path, "LoadingScreen.log"))):
if self.get_position([line]):
self.lock.acquire()
self.set_collector()
self.lock.release()
def terminate(self, signal, frame):
    self.lock.acquire()
    self.running = False
    if self.p:  # guard: self.p may still be None here
        self.p.terminate()
        self.p.wait()
    self.lock.release()
    exit()
def run_collector(self):
while(self.running):
self.lock.acquire()
self.collector.run()
self.lock.release()
class BaseCollector:
def __init__(self, game):
self.game = game
def run(self):
pass
class CollectionCollertor(BaseCollector):
def __init__(self, game):
super().__init__(game)
game.hk.register(('control', 'alt', 'g'), callback=self.start)
game.hk.register(('control', 'alt', 's'), callback=self.stop)
print("CollectionCollertor: Start process with ctrl+alt+g")
print("CollectionCollertor: Stop process with ctrl+alt+s")
self.last_img = None
self.idx = 0
self.running = False
def start(self, evnt):
self.idx = 0
print("Start CollectionCollertor ...")
self.running = True
def stop(self, evnt):
print("Stop CollectionCollertor ...")
self.running = False
def run(self):
if(self.running):
img = screen.shot()
if(self.last_img and screen.simple_compare(np.array(img), np.array(self.last_img))):
self.running = False
print("Finish!")
else:
self.last_img = img
screen.save(img, "images/collection_{}.png".format(self.idx))
self.idx += 1
pyautogui.click(1223, 521)
time.sleep(1.2)
## build xml
## click next
class ArenaCollector(BaseCollector):
pass
class BattlefieldCollector(BaseCollector):
def __init__(self, game):
super().__init__(game)
self.path = path.join(game.path, "Power.log")
print(game.path)
self.last_ts = datetime.time(0)
self.last_turn = None
self.parser = LogParser()
def run(self):
with open(self.path) as f:
self.parser.read(f)
self.parser.flush()
tps = self.parser.games
fp_id = FriendlyPlayerExporter(tps[-1]).export()
turn = LastTurnExporter(tps[-1], self.last_ts, fp_id).export()
if not self.last_turn:
self.last_turn = turn
if(turn.ts > self.last_turn.ts and turn.player):
## GET MINIONS / HAND_CARDS / HERO_POWERS
time.sleep(1.2)
img = screen.shot()
print("EMinions {} - PMinions {} - HandCards {}".format(turn.enemy_minions, turn.player_minions, turn.hand_cards))
base_name = "em{}_pm{}_hc{}".format(turn.enemy_minions, turn.player_minions, turn.hand_cards)
count = len([f for f in listdir("images") if isfile(join("images", f)) and base_name in f and ".png" in f])
if(count > 2):
count = randrange(0, 2)
base_name = "images/{}_{}".format(count, base_name)
img_name = base_name + ".png"
xml_name = base_name + ".xml"
temp = create_battlefield(img, turn.hand_cards, turn.enemy_minions, turn.player_minions, turn.enemy_power, turn.player_power)
temp.save(xml_name)
screen.save(img, img_name)
self.game.show_img(img_name)
self.last_turn = turn
game = Game("/home/dee/.PlayOnLinux/wineprefix/hs/drive_c/Program Files/Hearthstone/Logs/")
# signal.signal(signal.SIGINT, game.terminate)
game.run()
|
py | 7df9864fc41dc85403092cc57dc7c097a33b716a | m = float(input("Enter a value in meters to be converted to yards: "))
j = m / 0.9144  # 1 yard = 0.9144 m (the original used the rougher factor 0.91)
print("The amount in meters ({:.2f}) converted to yards is: {:.2f}".format(m, j)) |
py | 7df9876b29fa8105d00a70d3bc2d9a7506357c0b | from django.http import JsonResponse
from ..models.Index_model import model_list
def handle(request):
    """Get the list of inference models.
    Returns:
        JsonResponse: JSON containing the list of inference models.
    """
    if request.method == 'GET':
        models = model_list  # avoid shadowing the built-in ``list``
        # safe=False lets JsonResponse serialize a list as well as a dict
        return JsonResponse(models, safe=False)
    return JsonResponse({'error': 'method not allowed'}, status=405) |
py | 7df988f18dad60c430e458d81bd4b130f4fd7dfa | class AuthenticationError(Exception):
def __init__(self, *args):
if args:
self.message = args[0]
else:
self.message = None
def __str__(self):
if self.message:
return 'AuthenticationError : {0} '.format(self.message)
else:
return ("AuthenticationError - Username or Password is incorrect. "
"To try again execute "
"FitnessFirstSG.auth(username, password)")
class MissingCredentialsError(Exception):
def __init__(self, *args):
if args:
self.message = args[0]
else:
self.message = None
def __str__(self):
if self.message:
return 'MissingCredentialsError : {0} '.format(self.message)
else:
return ("MissingCredentialsError - "
"Please input username and password."
"To try again execute "
"FitnessFirstSG.auth(username, password)")
class RequestsError(Exception):
def __init__(self, *args):
if args:
self.message = args[0]
else:
self.message = None
def __str__(self):
if self.message:
return ('RequestsError : Request returned with status code {0} '
).format(self.message)
else:
return ('RequestsError - '
'Requests returned with status code not equal to 200')
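# Usage sketch (hypothetical call site; ``client`` stands in for whatever
# object exposes the ``auth`` method these messages refer to):
#
#     try:
#         client.auth(username, password)
#     except (MissingCredentialsError, AuthenticationError) as exc:
#         print(exc)  # __str__ falls back to a default hint when no message was given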
|
py | 7df98914d6b58e802690c16d5d1a2fc8b7d30619 | # Within-snp gene changes
import matplotlib
matplotlib.use('Agg')
import config
###
#
# For today while the new data processes
#
import os
#parse_midas_data.data_directory = os.path.expanduser("~/ben_nandita_hmp_data_062517/")
#########################################
import pylab
import sys
import numpy
from utils import diversity_utils, gene_diversity_utils, stats_utils
from parsers import parse_HMP_data, parse_midas_data
import matplotlib.colors as colors
import matplotlib.cm as cmx
from math import log10,ceil
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from numpy.random import randint, choice
mpl.rcParams['font.size'] = 5
mpl.rcParams['lines.linewidth'] = 0.5
mpl.rcParams['legend.frameon'] = False
mpl.rcParams['legend.fontsize'] = 'small'
species_name = "Bacteroides_vulgatus_57955"
################################################################################
#
# Standard header to read in argument information
#
################################################################################
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--debug", help="Loads only a subset of SNPs for speed", action="store_true")
parser.add_argument("--chunk-size", type=int, help="max number of records to load", default=1000000000)
parser.add_argument('--other-species', type=str, help='Run the script for a different species')
args = parser.parse_args()
debug = args.debug
chunk_size = args.chunk_size
other_species = args.other_species
if other_species:
species_name = other_species
other_species_str = "_%s" % species_name
else:
other_species_str = ""
debug=True  # NOTE: hard-coded override of the --debug flag (temporary, per the note above)
################################################################################
min_coverage = config.min_median_coverage
alpha = 0.5 # Confidence interval range for rate estimates
low_pi_threshold = 1e-03
clade_divergence_threshold = 1e-02
modification_divergence_threshold = 1e-03
min_change = 0.8
include_high_copynum = False
#include_high_copynum = True
# Load subject and sample metadata
sys.stderr.write("Loading sample metadata...\n")
subject_sample_map = parse_HMP_data.parse_subject_sample_map()
sample_country_map = parse_HMP_data.parse_sample_country_map()
sample_order_map = parse_HMP_data.parse_sample_order_map()
sys.stderr.write("Done!\n")
# Only plot samples above a certain depth threshold that are "haploids"
snp_samples = diversity_utils.calculate_haploid_samples(species_name, debug=debug)
# Only use the subset from North America
# The only temporal samples are from here, best not contaminate the between-subject
# comparisons with out of sample effects
#snp_samples = snp_samples[parse_HMP_data.calculate_country_samples(sample_country_map, sample_list=snp_samples, allowed_countries=set(["United States"]))]
####################################################
#
# Set up Figure (4 panels, arranged in 2x2 grid)
#
####################################################
pylab.figure(1,figsize=(3.42,2))
# Load SNP information for species_name
sys.stderr.write("Loading SNPs for %s...\n" % species_name)
sys.stderr.write("(not just core genes...)\n")
pi_matrix_syn = numpy.array([])
avg_pi_matrix_syn = numpy.array([])
snp_difference_matrix = numpy.array([])
snp_difference_matrix_mutation = numpy.array([])
snp_difference_matrix_reversion = numpy.array([])
snp_opportunity_matrix = numpy.array([])
final_line_number = 0
while final_line_number >= 0:
sys.stderr.write("Loading chunk starting @ %d...\n" % final_line_number)
dummy_samples, allele_counts_map, passed_sites_map, final_line_number = parse_midas_data.parse_snps(species_name, debug=debug, allowed_samples=snp_samples, chunk_size=chunk_size, initial_line_number=final_line_number)
sys.stderr.write("Done! Loaded %d genes\n" % len(allele_counts_map.keys()))
print len(dummy_samples), "dummy samples!"
# Calculate fixation matrix
sys.stderr.write("Calculating matrix of snp differences...\n")
chunk_snp_difference_matrix_mutation, chunk_snp_difference_matrix_reversion, chunk_snp_opportunity_matrix = diversity_utils.calculate_fixation_matrix_mutation_reversion(allele_counts_map, passed_sites_map, min_change=min_change) #
sys.stderr.write("Done!\n")
if snp_difference_matrix.shape[0]==0:
snp_difference_matrix = numpy.zeros_like(chunk_snp_difference_matrix_mutation)*1.0
snp_difference_matrix_mutation = numpy.zeros_like(snp_difference_matrix)*1.0
snp_difference_matrix_reversion = numpy.zeros_like(snp_difference_matrix)*1.0
snp_opportunity_matrix = numpy.zeros_like(snp_difference_matrix)*1.0
snp_difference_matrix += chunk_snp_difference_matrix_mutation+chunk_snp_difference_matrix_reversion
snp_difference_matrix_mutation += chunk_snp_difference_matrix_mutation
snp_difference_matrix_reversion += chunk_snp_difference_matrix_reversion
snp_opportunity_matrix += chunk_snp_opportunity_matrix
snp_samples = dummy_samples
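# Adding (opportunity == 0) to the denominator below avoids division by zero
# at entries with no opportunities; the numerator is 0 there anyway.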
snp_substitution_rate = snp_difference_matrix*1.0/(snp_opportunity_matrix+(snp_opportunity_matrix==0))
sys.stderr.write("Done!\n")
# Load gene coverage information for species_name
sys.stderr.write("Loading pangenome data for %s...\n" % species_name)
gene_samples, gene_names, gene_presence_matrix, gene_depth_matrix, marker_coverages, gene_reads_matrix = parse_midas_data.parse_pangenome_data(species_name, allowed_samples=snp_samples)
sys.stderr.write("Done!\n")
sys.stderr.write("Loaded gene info for %d samples\n" % len(gene_samples))
gene_copynum_matrix = gene_depth_matrix*1.0/(marker_coverages+(marker_coverages==0))
prevalence_idxs = (parse_midas_data.calculate_unique_samples(subject_sample_map, gene_samples)) * (marker_coverages >= min_coverage)
prevalences = gene_diversity_utils.calculate_fractional_gene_prevalences(gene_depth_matrix[:, prevalence_idxs], marker_coverages[prevalence_idxs])
pangenome_prevalences = numpy.array(prevalences,copy=True)
pangenome_prevalences.sort()
# Now need to make the gene samples and snp samples match up
desired_samples = gene_samples
num_haploids = len(desired_samples)
# Calculate which pairs of idxs belong to the same sample, which to the same subject
# and which to different subjects
#desired_same_sample_idxs, desired_same_subject_idxs, desired_diff_subject_idxs = parse_midas_data.calculate_subject_pairs(subject_sample_map, desired_samples)
# Calculate which pairs of idxs belong to the same sample, which to the same subject
# and which to different subjects
desired_same_sample_idxs, desired_same_subject_idxs, desired_diff_subject_idxs = parse_midas_data.calculate_ordered_subject_pairs(sample_order_map, desired_samples)
sys.stderr.write("%d temporal samples\n" % len(desired_same_subject_idxs[0]))
snp_sample_idx_map = parse_midas_data.calculate_sample_idx_map(desired_samples, snp_samples)
gene_sample_idx_map = parse_midas_data.calculate_sample_idx_map(desired_samples, gene_samples)
same_sample_snp_idxs = parse_midas_data.apply_sample_index_map_to_indices(snp_sample_idx_map, desired_same_sample_idxs)
same_sample_gene_idxs = parse_midas_data.apply_sample_index_map_to_indices(gene_sample_idx_map, desired_same_sample_idxs)
same_subject_snp_idxs = parse_midas_data.apply_sample_index_map_to_indices(snp_sample_idx_map, desired_same_subject_idxs)
same_subject_gene_idxs = parse_midas_data.apply_sample_index_map_to_indices(gene_sample_idx_map, desired_same_subject_idxs)
diff_subject_snp_idxs = parse_midas_data.apply_sample_index_map_to_indices(snp_sample_idx_map, desired_diff_subject_idxs)
diff_subject_gene_idxs = parse_midas_data.apply_sample_index_map_to_indices(gene_sample_idx_map, desired_diff_subject_idxs)
absent_thresholds = numpy.array([0, 0.01,0.05,0.1,0.25,0.49])
fold_changes = numpy.array([2,3,4,5,6,7,8,9,10,15,20])  # NOTE: defined but never used below
total_changes = []
for absent_threshold in absent_thresholds:
# Calculate matrix of number of genes that differ
sys.stderr.write("Calculating matrix of gene differences for threshold %g...\n" % absent_threshold)
gene_gain_matrix, gene_loss_matrix, gene_opportunity_matrix = gene_diversity_utils.calculate_coverage_based_gene_hamming_matrix_gain_loss(gene_depth_matrix, marker_coverages, absent_threshold=absent_threshold)
gene_difference_matrix = gene_gain_matrix + gene_loss_matrix
changes = 0
for sample_pair_idx in xrange(0,len(same_subject_snp_idxs[0])):
snp_i = same_subject_snp_idxs[0][sample_pair_idx]
snp_j = same_subject_snp_idxs[1][sample_pair_idx]
i = same_subject_gene_idxs[0][sample_pair_idx]
j = same_subject_gene_idxs[1][sample_pair_idx]
if marker_coverages[i]<min_coverage or marker_coverages[j]<min_coverage:
# can't look at gene changes!
continue
else:
if snp_substitution_rate[snp_i,snp_j] < modification_divergence_threshold:
changes += gene_difference_matrix[i,j]
else:
continue
total_changes.append(changes)
total_changes = numpy.array(total_changes)
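# Clip zero counts up to 0.3 so they remain visible on the log-scaled y-axis.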
total_changes = numpy.clip(total_changes, 3e-01,1e09)
pylab.semilogy(absent_thresholds, numpy.ones_like(absent_thresholds)*1,'k:')
pylab.semilogy(absent_thresholds, total_changes,'b.-')
pylab.xlabel('Copynum threshold for absence')
pylab.ylabel('Total number of gene modifications')
sys.stderr.write("Saving figure...\t")
pylab.savefig('%s/supplemental_gene_foldchange%s.pdf' % (parse_midas_data.analysis_directory, other_species_str), bbox_inches='tight')
pylab.savefig('%s/supplemental_gene_foldchange%s.png' % (parse_midas_data.analysis_directory, other_species_str), bbox_inches='tight', dpi=300)
sys.stderr.write("Done!\n")
|
py | 7df98a39c6e5c3c659ce986e2548eb8130518839 | #!/usr/bin/env python
# coding: utf-8
# In[19]:
get_ipython().run_line_magic('matplotlib', 'inline')
get_ipython().run_line_magic('config', "InlineBackend.figure_format = 'retina'")
import matplotlib.pyplot as plt
# In[20]:
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression, Lasso
from sklearn.model_selection import train_test_split
# In[21]:
n = 100
np.random.seed(0)
X = np.random.randn( n, 1)
X.shape
# In[22]:
eps = np.random.normal(0.0, 0.1, n)
# In[23]:
y = np.sin(X[:,0]) + eps
plt.plot(X, y, '.');
# In[24]:
model = LinearRegression()
# In[25]:
model.fit(X, y)
# In[26]:
y_pred = model.predict(X)
# In[27]:
plt.plot(X, y_pred, 'r-', X, y, 'bo');
# In[28]:
n = 100
dim = 10
np.random.seed(0)
X = np.random.randn(n, dim)
X.shape
# In[29]:
eps = np.random.normal(0.0, 0.1, n)
y = np.sin(X[:,0]) + eps
y.shape
# In[30]:
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5, random_state=42)
# In[31]:
X_train.shape, y_train.shape
# In[32]:
model = Lasso(alpha=0.2)
model.fit(X_train, y_train)
# In[33]:
y_pred = model.predict(X_test)
y_pred.shape
# In[34]:
plt.scatter(X_test[:,0], y_test, color='b')
plt.plot(X_test[:,0], y_pred, color='r', linewidth=2)
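# In[35]:
# Added sketch (not in the original notebook): with the L1 penalty, Lasso
# should shrink most of the 9 irrelevant coefficients to (near) zero,
# keeping mainly the first feature, which is the only one driving y.
print(model.coef_)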
|
py | 7df98aec10ccae086fb2c2ce263d05777205daa4 | # -*- encoding: utf-8 -*-
"""
Copyright (c) 2019 - present GlastHeim.pe
"""
from django.urls import path
from apps.control import views
urlpatterns = [
path('api/ecoact/search/', views.EconomicActivitieList.as_view(), name='ecoact.search'),
] |
py | 7df98b7946a93bafb21e7f2e8a9e8584f493d630 | # -*- coding: utf-8 -*-
from sympy import (
Add, And, Basic, Derivative, Dict, Eq, Equivalent, FF,
FiniteSet, Function, Ge, Gt, I, Implies, Integral, SingularityFunction,
Lambda, Le, Limit, Lt, Matrix, Mul, Nand, Ne, Nor, Not, O, Or,
Pow, Product, QQ, RR, Rational, Ray, rootof, RootSum, S,
Segment, Subs, Sum, Symbol, Tuple, Trace, Xor, ZZ, conjugate,
groebner, oo, pi, symbols, ilex, grlex, Range, Contains,
SeqPer, SeqFormula, SeqAdd, SeqMul, fourier_series, fps, ITE,
Complement, Interval, Intersection, Union, EulerGamma, GoldenRatio,
LambertW, airyai, airybi, airyaiprime, airybiprime, fresnelc, fresnels,
Heaviside, dirichlet_eta, diag, MatrixSlice)
from sympy.codegen.ast import (Assignment, AddAugmentedAssignment,
SubAugmentedAssignment, MulAugmentedAssignment, DivAugmentedAssignment, ModAugmentedAssignment)
from sympy.core.compatibility import u_decode as u
from sympy.core.expr import UnevaluatedExpr
from sympy.core.trace import Tr
from sympy.functions import (Abs, Chi, Ci, Ei, KroneckerDelta,
Piecewise, Shi, Si, atan2, beta, binomial, catalan, ceiling, cos,
euler, exp, expint, factorial, factorial2, floor, gamma, hyper, log,
meijerg, sin, sqrt, subfactorial, tan, uppergamma, lerchphi,
elliptic_k, elliptic_f, elliptic_e, elliptic_pi, DiracDelta, bell,
bernoulli, fibonacci, tribonacci, lucas, stieltjes, mathieuc, mathieus,
mathieusprime, mathieucprime)
from sympy.matrices import Adjoint, Inverse, MatrixSymbol, Transpose, KroneckerProduct
from sympy.matrices.expressions import hadamard_power
from sympy.physics import mechanics
from sympy.physics.control.lti import TransferFunction, Series, Parallel, Feedback
from sympy.physics.units import joule, degree
from sympy.printing.pretty import pprint, pretty as xpretty
from sympy.printing.pretty.pretty_symbology import center_accent, is_combining
from sympy import ConditionSet
from sympy.sets import ImageSet, ProductSet
from sympy.sets.setexpr import SetExpr
from sympy.tensor.array import (ImmutableDenseNDimArray, ImmutableSparseNDimArray,
MutableDenseNDimArray, MutableSparseNDimArray, tensorproduct)
from sympy.tensor.functions import TensorProduct
from sympy.tensor.tensor import (TensorIndexType, tensor_indices, TensorHead,
TensorElement, tensor_heads)
from sympy.testing.pytest import raises
from sympy.vector import CoordSys3D, Gradient, Curl, Divergence, Dot, Cross, Laplacian
import sympy as sym
class lowergamma(sym.lowergamma):
pass # testing notation inheritance by a subclass with same name
a, b, c, d, x, y, z, k, n, s, p = symbols('a,b,c,d,x,y,z,k,n,s,p')
f = Function("f")
th = Symbol('theta')
ph = Symbol('phi')
"""
Expressions whose pretty-printing is tested here:
(A '#' to the right of an expression indicates that its various acceptable
orderings are accounted for by the tests.)
BASIC EXPRESSIONS:
oo
(x**2)
1/x
y*x**-2
x**Rational(-5,2)
(-2)**x
Pow(3, 1, evaluate=False)
(x**2 + x + 1) #
1-x #
1-2*x #
x/y
-x/y
(x+2)/y #
(1+x)*y #3
-5*x/(x+10) # correct placement of negative sign
1 - Rational(3,2)*(x+1)
-(-x + 5)*(-x - 2*sqrt(2) + 5) - (-y + 5)*(-y + 5) # issue 5524
ORDERING:
x**2 + x + 1
1 - x
1 - 2*x
2*x**4 + y**2 - x**2 + y**3
RELATIONAL:
Eq(x, y)
Lt(x, y)
Gt(x, y)
Le(x, y)
Ge(x, y)
Ne(x/(y+1), y**2) #
RATIONAL NUMBERS:
y*x**-2
y**Rational(3,2) * x**Rational(-5,2)
sin(x)**3/tan(x)**2
FUNCTIONS (ABS, CONJ, EXP, FUNCTION BRACES, FACTORIAL, FLOOR, CEILING):
(2*x + exp(x)) #
Abs(x)
Abs(x/(x**2+1)) #
Abs(1 / (y - Abs(x)))
factorial(n)
factorial(2*n)
subfactorial(n)
subfactorial(2*n)
factorial(factorial(factorial(n)))
factorial(n+1) #
conjugate(x)
conjugate(f(x+1)) #
f(x)
f(x, y)
f(x/(y+1), y) #
f(x**x**x**x**x**x)
sin(x)**2
conjugate(a+b*I)
conjugate(exp(a+b*I))
conjugate( f(1 + conjugate(f(x))) ) #
f(x/(y+1), y) # denom of first arg
floor(1 / (y - floor(x)))
ceiling(1 / (y - ceiling(x)))
SQRT:
sqrt(2)
2**Rational(1,3)
2**Rational(1,1000)
sqrt(x**2 + 1)
(1 + sqrt(5))**Rational(1,3)
2**(1/x)
sqrt(2+pi)
(2+(1+x**2)/(2+x))**Rational(1,4)+(1+x**Rational(1,1000))/sqrt(3+x**2)
DERIVATIVES:
Derivative(log(x), x, evaluate=False)
Derivative(log(x), x, evaluate=False) + x #
Derivative(log(x) + x**2, x, y, evaluate=False)
Derivative(2*x*y, y, x, evaluate=False) + x**2 #
beta(alpha).diff(alpha)
INTEGRALS:
Integral(log(x), x)
Integral(x**2, x)
Integral((sin(x))**2 / (tan(x))**2)
Integral(x**(2**x), x)
Integral(x**2, (x,1,2))
Integral(x**2, (x,Rational(1,2),10))
Integral(x**2*y**2, x,y)
Integral(x**2, (x, None, 1))
Integral(x**2, (x, 1, None))
Integral(sin(th)/cos(ph), (th,0,pi), (ph, 0, 2*pi))
MATRICES:
Matrix([[x**2+1, 1], [y, x+y]]) #
Matrix([[x/y, y, th], [0, exp(I*k*ph), 1]])
PIECEWISE:
Piecewise((x,x<1),(x**2,True))
ITE:
ITE(x, y, z)
SEQUENCES (TUPLES, LISTS, DICTIONARIES):
()
[]
{}
(1/x,)
[x**2, 1/x, x, y, sin(th)**2/cos(ph)**2]
(x**2, 1/x, x, y, sin(th)**2/cos(ph)**2)
{x: sin(x)}
{1/x: 1/y, x: sin(x)**2} #
[x**2]
(x**2,)
{x**2: 1}
LIMITS:
Limit(x, x, oo)
Limit(x**2, x, 0)
Limit(1/x, x, 0)
Limit(sin(x)/x, x, 0)
UNITS:
joule => kg*m**2/s
SUBS:
Subs(f(x), x, ph**2)
Subs(f(x).diff(x), x, 0)
Subs(f(x).diff(x)/y, (x, y), (0, Rational(1, 2)))
ORDER:
O(1)
O(1/x)
O(x**2 + y**2)
"""
def pretty(expr, order=None):
"""ASCII pretty-printing"""
return xpretty(expr, order=order, use_unicode=False, wrap_line=False)
def upretty(expr, order=None):
"""Unicode pretty-printing"""
return xpretty(expr, order=order, use_unicode=True, wrap_line=False)
def test_pretty_ascii_str():
assert pretty( 'xxx' ) == 'xxx'
assert pretty( "xxx" ) == 'xxx'
assert pretty( 'xxx\'xxx' ) == 'xxx\'xxx'
assert pretty( 'xxx"xxx' ) == 'xxx\"xxx'
assert pretty( 'xxx\"xxx' ) == 'xxx\"xxx'
assert pretty( "xxx'xxx" ) == 'xxx\'xxx'
assert pretty( "xxx\'xxx" ) == 'xxx\'xxx'
assert pretty( "xxx\"xxx" ) == 'xxx\"xxx'
assert pretty( "xxx\"xxx\'xxx" ) == 'xxx"xxx\'xxx'
assert pretty( "xxx\nxxx" ) == 'xxx\nxxx'
def test_pretty_unicode_str():
assert pretty( u'xxx' ) == u'xxx'
assert pretty( u'xxx' ) == u'xxx'
assert pretty( u'xxx\'xxx' ) == u'xxx\'xxx'
assert pretty( u'xxx"xxx' ) == u'xxx\"xxx'
assert pretty( u'xxx\"xxx' ) == u'xxx\"xxx'
assert pretty( u"xxx'xxx" ) == u'xxx\'xxx'
assert pretty( u"xxx\'xxx" ) == u'xxx\'xxx'
assert pretty( u"xxx\"xxx" ) == u'xxx\"xxx'
assert pretty( u"xxx\"xxx\'xxx" ) == u'xxx"xxx\'xxx'
assert pretty( u"xxx\nxxx" ) == u'xxx\nxxx'
def test_upretty_greek():
assert upretty( oo ) == u'∞'
assert upretty( Symbol('alpha^+_1') ) == u'α⁺₁'
assert upretty( Symbol('beta') ) == u'β'
assert upretty(Symbol('lambda')) == u'λ'
def test_upretty_multiindex():
assert upretty( Symbol('beta12') ) == u'β₁₂'
assert upretty( Symbol('Y00') ) == u'Y₀₀'
assert upretty( Symbol('Y_00') ) == u'Y₀₀'
assert upretty( Symbol('F^+-') ) == u'F⁺⁻'
def test_upretty_sub_super():
assert upretty( Symbol('beta_1_2') ) == u'β₁ ₂'
assert upretty( Symbol('beta^1^2') ) == u'β¹ ²'
assert upretty( Symbol('beta_1^2') ) == u'β²₁'
assert upretty( Symbol('beta_10_20') ) == u'β₁₀ ₂₀'
assert upretty( Symbol('beta_ax_gamma^i') ) == u'βⁱₐₓ ᵧ'
assert upretty( Symbol("F^1^2_3_4") ) == u'F¹ ²₃ ₄'
assert upretty( Symbol("F_1_2^3^4") ) == u'F³ ⁴₁ ₂'
assert upretty( Symbol("F_1_2_3_4") ) == u'F₁ ₂ ₃ ₄'
assert upretty( Symbol("F^1^2^3^4") ) == u'F¹ ² ³ ⁴'
def test_upretty_subs_missing_in_24():
assert upretty( Symbol('F_beta') ) == u'Fᵦ'
assert upretty( Symbol('F_gamma') ) == u'Fᵧ'
assert upretty( Symbol('F_rho') ) == u'Fᵨ'
assert upretty( Symbol('F_phi') ) == u'Fᵩ'
assert upretty( Symbol('F_chi') ) == u'Fᵪ'
assert upretty( Symbol('F_a') ) == u'Fₐ'
assert upretty( Symbol('F_e') ) == u'Fₑ'
assert upretty( Symbol('F_i') ) == u'Fᵢ'
assert upretty( Symbol('F_o') ) == u'Fₒ'
assert upretty( Symbol('F_u') ) == u'Fᵤ'
assert upretty( Symbol('F_r') ) == u'Fᵣ'
assert upretty( Symbol('F_v') ) == u'Fᵥ'
assert upretty( Symbol('F_x') ) == u'Fₓ'
def test_missing_in_2X_issue_9047():
assert upretty( Symbol('F_h') ) == u'Fₕ'
assert upretty( Symbol('F_k') ) == u'Fₖ'
assert upretty( Symbol('F_l') ) == u'Fₗ'
assert upretty( Symbol('F_m') ) == u'Fₘ'
assert upretty( Symbol('F_n') ) == u'Fₙ'
assert upretty( Symbol('F_p') ) == u'Fₚ'
assert upretty( Symbol('F_s') ) == u'Fₛ'
assert upretty( Symbol('F_t') ) == u'Fₜ'
def test_upretty_modifiers():
# Accents
assert upretty( Symbol('Fmathring') ) == u'F̊'
assert upretty( Symbol('Fddddot') ) == u'F⃜'
assert upretty( Symbol('Fdddot') ) == u'F⃛'
assert upretty( Symbol('Fddot') ) == u'F̈'
assert upretty( Symbol('Fdot') ) == u'Ḟ'
assert upretty( Symbol('Fcheck') ) == u'F̌'
assert upretty( Symbol('Fbreve') ) == u'F̆'
assert upretty( Symbol('Facute') ) == u'F́'
assert upretty( Symbol('Fgrave') ) == u'F̀'
assert upretty( Symbol('Ftilde') ) == u'F̃'
assert upretty( Symbol('Fhat') ) == u'F̂'
assert upretty( Symbol('Fbar') ) == u'F̅'
assert upretty( Symbol('Fvec') ) == u'F⃗'
assert upretty( Symbol('Fprime') ) == u'F′'
assert upretty( Symbol('Fprm') ) == u'F′'
# No faces are actually implemented, but test to make sure the modifiers are stripped
assert upretty( Symbol('Fbold') ) == u'Fbold'
assert upretty( Symbol('Fbm') ) == u'Fbm'
assert upretty( Symbol('Fcal') ) == u'Fcal'
assert upretty( Symbol('Fscr') ) == u'Fscr'
assert upretty( Symbol('Ffrak') ) == u'Ffrak'
# Brackets
assert upretty( Symbol('Fnorm') ) == u'‖F‖'
assert upretty( Symbol('Favg') ) == u'⟨F⟩'
assert upretty( Symbol('Fabs') ) == u'|F|'
assert upretty( Symbol('Fmag') ) == u'|F|'
# Combinations
assert upretty( Symbol('xvecdot') ) == u'x⃗̇'
assert upretty( Symbol('xDotVec') ) == u'ẋ⃗'
assert upretty( Symbol('xHATNorm') ) == u'‖x̂‖'
assert upretty( Symbol('xMathring_yCheckPRM__zbreveAbs') ) == u'x̊_y̌′__|z̆|'
assert upretty( Symbol('alphadothat_nVECDOT__tTildePrime') ) == u'α̇̂_n⃗̇__t̃′'
assert upretty( Symbol('x_dot') ) == u'x_dot'
assert upretty( Symbol('x__dot') ) == u'x__dot'
def test_pretty_Cycle():
from sympy.combinatorics.permutations import Cycle
assert pretty(Cycle(1, 2)) == '(1 2)'
assert pretty(Cycle(2)) == '(2)'
assert pretty(Cycle(1, 3)(4, 5)) == '(1 3)(4 5)'
assert pretty(Cycle()) == '()'
def test_pretty_Permutation():
from sympy.combinatorics.permutations import Permutation
p1 = Permutation(1, 2)(3, 4)
assert xpretty(p1, perm_cyclic=True, use_unicode=True) == "(1 2)(3 4)"
assert xpretty(p1, perm_cyclic=True, use_unicode=False) == "(1 2)(3 4)"
assert xpretty(p1, perm_cyclic=False, use_unicode=True) == \
u'⎛0 1 2 3 4⎞\n'\
u'⎝0 2 1 4 3⎠'
assert xpretty(p1, perm_cyclic=False, use_unicode=False) == \
"/0 1 2 3 4\\\n"\
"\\0 2 1 4 3/"
def test_pretty_basic():
assert pretty( -Rational(1)/2 ) == '-1/2'
assert pretty( -Rational(13)/22 ) == \
"""\
-13 \n\
----\n\
22 \
"""
expr = oo
ascii_str = \
"""\
oo\
"""
ucode_str = \
u("""\
∞\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = (x**2)
ascii_str = \
"""\
2\n\
x \
"""
ucode_str = \
u("""\
2\n\
x \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = 1/x
ascii_str = \
"""\
1\n\
-\n\
x\
"""
ucode_str = \
u("""\
1\n\
─\n\
x\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
# not the same as 1/x
expr = x**-1.0
ascii_str = \
"""\
-1.0\n\
x \
"""
ucode_str = \
("""\
-1.0\n\
x \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
# see issue #2860
expr = Pow(S(2), -1.0, evaluate=False)
ascii_str = \
"""\
-1.0\n\
2 \
"""
ucode_str = \
("""\
-1.0\n\
2 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = y*x**-2
ascii_str = \
"""\
y \n\
--\n\
2\n\
x \
"""
ucode_str = \
u("""\
y \n\
──\n\
 2\n\
x \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
#see issue #14033
expr = x**Rational(1, 3)
ascii_str = \
"""\
1/3\n\
x \
"""
ucode_str = \
u("""\
1/3\n\
x \
""")
assert xpretty(expr, use_unicode=False, wrap_line=False,\
root_notation = False) == ascii_str
assert xpretty(expr, use_unicode=True, wrap_line=False,\
root_notation = False) == ucode_str
expr = x**Rational(-5, 2)
ascii_str = \
"""\
1 \n\
----\n\
5/2\n\
x \
"""
ucode_str = \
u("""\
 1  \n\
────\n\
 5/2\n\
x   \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = (-2)**x
ascii_str = \
"""\
x\n\
(-2) \
"""
ucode_str = \
u("""\
x\n\
(-2) \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
# See issue 4923
expr = Pow(3, 1, evaluate=False)
ascii_str = \
"""\
1\n\
3 \
"""
ucode_str = \
u("""\
1\n\
3 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = (x**2 + x + 1)
ascii_str_1 = \
"""\
2\n\
1 + x + x \
"""
ascii_str_2 = \
"""\
2 \n\
x + x + 1\
"""
ascii_str_3 = \
"""\
2 \n\
x + 1 + x\
"""
ucode_str_1 = \
u("""\
2\n\
1 + x + x \
""")
ucode_str_2 = \
u("""\
2 \n\
x + x + 1\
""")
ucode_str_3 = \
u("""\
2 \n\
x + 1 + x\
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2, ascii_str_3]
assert upretty(expr) in [ucode_str_1, ucode_str_2, ucode_str_3]
expr = 1 - x
ascii_str_1 = \
"""\
1 - x\
"""
ascii_str_2 = \
"""\
-x + 1\
"""
ucode_str_1 = \
u("""\
1 - x\
""")
ucode_str_2 = \
u("""\
-x + 1\
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
expr = 1 - 2*x
ascii_str_1 = \
"""\
1 - 2*x\
"""
ascii_str_2 = \
"""\
-2*x + 1\
"""
ucode_str_1 = \
u("""\
1 - 2⋅x\
""")
ucode_str_2 = \
u("""\
-2⋅x + 1\
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
expr = x/y
ascii_str = \
"""\
x\n\
-\n\
y\
"""
ucode_str = \
u("""\
x\n\
─\n\
y\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = -x/y
ascii_str = \
"""\
-x \n\
---\n\
y \
"""
ucode_str = \
u("""\
-x \n\
───\n\
 y \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = (x + 2)/y
ascii_str_1 = \
"""\
2 + x\n\
-----\n\
y \
"""
ascii_str_2 = \
"""\
x + 2\n\
-----\n\
y \
"""
ucode_str_1 = \
u("""\
2 + x\n\
─────\n\
  y  \
""")
ucode_str_2 = \
u("""\
x + 2\n\
─────\n\
  y  \
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
expr = (1 + x)*y
ascii_str_1 = \
"""\
y*(1 + x)\
"""
ascii_str_2 = \
"""\
(1 + x)*y\
"""
ascii_str_3 = \
"""\
y*(x + 1)\
"""
ucode_str_1 = \
u("""\
y⋅(1 + x)\
""")
ucode_str_2 = \
u("""\
(1 + x)⋅y\
""")
ucode_str_3 = \
u("""\
y⋅(x + 1)\
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2, ascii_str_3]
assert upretty(expr) in [ucode_str_1, ucode_str_2, ucode_str_3]
# Test for correct placement of the negative sign
expr = -5*x/(x + 10)
ascii_str_1 = \
"""\
-5*x \n\
------\n\
10 + x\
"""
ascii_str_2 = \
"""\
-5*x \n\
------\n\
x + 10\
"""
ucode_str_1 = \
u("""\
 -5⋅x \n\
──────\n\
10 + x\
""")
ucode_str_2 = \
u("""\
 -5⋅x \n\
──────\n\
x + 10\
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
expr = -S.Half - 3*x
ascii_str = \
"""\
-3*x - 1/2\
"""
ucode_str = \
u("""\
-3⋅x - 1/2\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = S.Half - 3*x
ascii_str = \
"""\
1/2 - 3*x\
"""
ucode_str = \
u("""\
1/2 - 3⋅x\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = -S.Half - 3*x/2
ascii_str = \
"""\
3*x 1\n\
- --- - -\n\
2 2\
"""
ucode_str = \
u("""\
  3⋅x   1\n\
- ─── - ─\n\
   2    2\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = S.Half - 3*x/2
ascii_str = \
"""\
1 3*x\n\
- - ---\n\
2 2 \
"""
ucode_str = \
u("""\
1   3⋅x\n\
─ - ───\n\
2    2 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_negative_fractions():
expr = -x/y
ascii_str =\
"""\
-x \n\
---\n\
y \
"""
ucode_str =\
u("""\
-x \n\
───\n\
 y \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = -x*z/y
ascii_str =\
"""\
-x*z \n\
-----\n\
y \
"""
ucode_str =\
u("""\
-x⋅z \n\
─────\n\
  y  \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = x**2/y
ascii_str =\
"""\
2\n\
x \n\
--\n\
y \
"""
ucode_str =\
u("""\
 2\n\
x \n\
──\n\
y \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = -x**2/y
ascii_str =\
"""\
2 \n\
-x \n\
----\n\
y \
"""
ucode_str =\
u("""\
  2 \n\
-x  \n\
────\n\
 y  \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = -x/(y*z)
ascii_str =\
"""\
-x \n\
---\n\
y*z\
"""
ucode_str =\
u("""\
-x \n\
───\n\
y⋅z\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = -a/y**2
ascii_str =\
"""\
-a \n\
---\n\
2\n\
y \
"""
ucode_str =\
u("""\
-a \n\
───\n\
  2\n\
 y \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = y**(-a/b)
ascii_str =\
"""\
-a \n\
---\n\
b \n\
y \
"""
ucode_str =\
u("""\
 -a \n\
 ───\n\
  b \n\
y   \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = -1/y**2
ascii_str =\
"""\
-1 \n\
---\n\
2\n\
y \
"""
ucode_str =\
u("""\
-1 \n\
───\n\
  2\n\
 y \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = -10/b**2
ascii_str =\
"""\
-10 \n\
----\n\
2 \n\
b \
"""
ucode_str =\
u("""\
-10 \n\
────\n\
  2 \n\
 b  \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Rational(-200, 37)
ascii_str =\
"""\
-200 \n\
-----\n\
37 \
"""
ucode_str =\
u("""\
-200 \n\
─────\n\
  37 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Mul(0, 1, evaluate=False)
assert pretty(expr) == "0*1"
assert upretty(expr) == "0โ
1"
expr = Mul(1, 0, evaluate=False)
assert pretty(expr) == "1*0"
assert upretty(expr) == "1โ
0"
expr = Mul(1, 1, evaluate=False)
assert pretty(expr) == "1*1"
assert upretty(expr) == "1โ
1"
expr = Mul(1, 1, 1, evaluate=False)
assert pretty(expr) == "1*1*1"
assert upretty(expr) == "1โ
1โ
1"
expr = Mul(1, 2, evaluate=False)
assert pretty(expr) == "1*2"
assert upretty(expr) == "1โ
2"
expr = Add(0, 1, evaluate=False)
assert pretty(expr) == "0 + 1"
assert upretty(expr) == "0 + 1"
expr = Mul(1, 1, 2, evaluate=False)
assert pretty(expr) == "1*1*2"
assert upretty(expr) == "1โ
1โ
2"
expr = Add(0, 0, 1, evaluate=False)
assert pretty(expr) == "0 + 0 + 1"
assert upretty(expr) == "0 + 0 + 1"
expr = Mul(1, -1, evaluate=False)
assert pretty(expr) == "1*(-1)"
assert upretty(expr) == "1โ
(-1)"
expr = Mul(1.0, x, evaluate=False)
assert pretty(expr) == "1.0*x"
assert upretty(expr) == "1.0โ
x"
expr = Mul(1, 1, 2, 3, x, evaluate=False)
assert pretty(expr) == "1*1*2*3*x"
assert upretty(expr) == "1โ
1โ
2โ
3โ
x"
expr = Mul(-1, 1, evaluate=False)
assert pretty(expr) == "-1*1"
assert upretty(expr) == "-1โ
1"
expr = Mul(4, 3, 2, 1, 0, y, x, evaluate=False)
assert pretty(expr) == "4*3*2*1*0*y*x"
assert upretty(expr) == "4โ
3โ
2โ
1โ
0โ
yโ
x"
expr = Mul(4, 3, 2, 1+z, 0, y, x, evaluate=False)
assert pretty(expr) == "4*3*2*(z + 1)*0*y*x"
assert upretty(expr) == "4โ
3โ
2โ
(z + 1)โ
0โ
yโ
x"
expr = Mul(Rational(2, 3), Rational(5, 7), evaluate=False)
assert pretty(expr) == "2/3*5/7"
assert upretty(expr) == "2/3โ
5/7"
def test_issue_5524():
assert pretty(-(-x + 5)*(-x - 2*sqrt(2) + 5) - (-y + 5)*(-y + 5)) == \
"""\
2 / ___ \\\n\
- (5 - y) + (x - 5)*\\-x - 2*\\/ 2 + 5/\
"""
assert upretty(-(-x + 5)*(-x - 2*sqrt(2) + 5) - (-y + 5)*(-y + 5)) == \
u("""\
         2                          \n\
- (5 - y)  + (x - 5)⋅(-x - 2⋅√2 + 5)\
""")
def test_pretty_ordering():
assert pretty(x**2 + x + 1, order='lex') == \
"""\
2 \n\
x + x + 1\
"""
assert pretty(x**2 + x + 1, order='rev-lex') == \
"""\
2\n\
1 + x + x \
"""
assert pretty(1 - x, order='lex') == '-x + 1'
assert pretty(1 - x, order='rev-lex') == '1 - x'
assert pretty(1 - 2*x, order='lex') == '-2*x + 1'
assert pretty(1 - 2*x, order='rev-lex') == '1 - 2*x'
f = 2*x**4 + y**2 - x**2 + y**3
assert pretty(f, order=None) == \
"""\
4 2 3 2\n\
2*x - x + y + y \
"""
assert pretty(f, order='lex') == \
"""\
4 2 3 2\n\
2*x - x + y + y \
"""
assert pretty(f, order='rev-lex') == \
"""\
2 3 2 4\n\
y + y - x + 2*x \
"""
expr = x - x**3/6 + x**5/120 + O(x**6)
ascii_str = \
"""\
3 5 \n\
x x / 6\\\n\
x - -- + --- + O\\x /\n\
6 120 \
"""
ucode_str = \
u("""\
     3     5        \n\
    x     x     ⎛ 6⎞\n\
x - ── + ─── + O⎝x ⎠\n\
    6    120        \
""")
assert pretty(expr, order=None) == ascii_str
assert upretty(expr, order=None) == ucode_str
assert pretty(expr, order='lex') == ascii_str
assert upretty(expr, order='lex') == ucode_str
assert pretty(expr, order='rev-lex') == ascii_str
assert upretty(expr, order='rev-lex') == ucode_str
def test_EulerGamma():
assert pretty(EulerGamma) == str(EulerGamma) == "EulerGamma"
assert upretty(EulerGamma) == u"γ"
def test_GoldenRatio():
assert pretty(GoldenRatio) == str(GoldenRatio) == "GoldenRatio"
assert upretty(GoldenRatio) == u"φ"
def test_pretty_relational():
expr = Eq(x, y)
ascii_str = \
"""\
x = y\
"""
ucode_str = \
u("""\
x = y\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Lt(x, y)
ascii_str = \
"""\
x < y\
"""
ucode_str = \
u("""\
x < y\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Gt(x, y)
ascii_str = \
"""\
x > y\
"""
ucode_str = \
u("""\
x > y\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Le(x, y)
ascii_str = \
"""\
x <= y\
"""
ucode_str = \
u("""\
x ≤ y\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Ge(x, y)
ascii_str = \
"""\
x >= y\
"""
ucode_str = \
u("""\
x ≥ y\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Ne(x/(y + 1), y**2)
ascii_str_1 = \
"""\
x 2\n\
----- != y \n\
1 + y \
"""
ascii_str_2 = \
"""\
x 2\n\
----- != y \n\
y + 1 \
"""
ucode_str_1 = \
u("""\
  x      2\n\
───── ≠ y \n\
1 + y     \
""")
ucode_str_2 = \
u("""\
  x      2\n\
───── ≠ y \n\
y + 1     \
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
def test_Assignment():
expr = Assignment(x, y)
ascii_str = \
"""\
x := y\
"""
ucode_str = \
u("""\
x := y\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_AugmentedAssignment():
expr = AddAugmentedAssignment(x, y)
ascii_str = \
"""\
x += y\
"""
ucode_str = \
u("""\
x += y\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = SubAugmentedAssignment(x, y)
ascii_str = \
"""\
x -= y\
"""
ucode_str = \
u("""\
x -= y\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = MulAugmentedAssignment(x, y)
ascii_str = \
"""\
x *= y\
"""
ucode_str = \
u("""\
x *= y\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = DivAugmentedAssignment(x, y)
ascii_str = \
"""\
x /= y\
"""
ucode_str = \
u("""\
x /= y\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = ModAugmentedAssignment(x, y)
ascii_str = \
"""\
x %= y\
"""
ucode_str = \
u("""\
x %= y\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_rational():
expr = y*x**-2
ascii_str = \
"""\
y \n\
--\n\
2\n\
x \
"""
ucode_str = \
u("""\
y \n\
──\n\
 2\n\
x \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = y**Rational(3, 2) * x**Rational(-5, 2)
ascii_str = \
"""\
3/2\n\
y \n\
----\n\
5/2\n\
x \
"""
ucode_str = \
u("""\
 3/2\n\
y   \n\
────\n\
 5/2\n\
x   \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = sin(x)**3/tan(x)**2
ascii_str = \
"""\
3 \n\
sin (x)\n\
-------\n\
2 \n\
tan (x)\
"""
ucode_str = \
u("""\
   3   \n\
sin (x)\n\
───────\n\
   2   \n\
tan (x)\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_functions():
"""Tests for Abs, conjugate, exp, function braces, and factorial."""
expr = (2*x + exp(x))
ascii_str_1 = \
"""\
x\n\
2*x + e \
"""
ascii_str_2 = \
"""\
x \n\
e + 2*x\
"""
ucode_str_1 = \
u("""\
       x\n\
2⋅x + ℯ \
""")
ucode_str_2 = \
u("""\
 x      \n\
ℯ  + 2⋅x\
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
expr = Abs(x)
ascii_str = \
"""\
|x|\
"""
ucode_str = \
u("""\
│x│\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Abs(x/(x**2 + 1))
ascii_str_1 = \
"""\
| x |\n\
|------|\n\
| 2|\n\
|1 + x |\
"""
ascii_str_2 = \
"""\
| x |\n\
|------|\n\
| 2 |\n\
|x + 1|\
"""
ucode_str_1 = \
u("""\
│  x   │\n\
│──────│\n\
│     2│\n\
│1 + x │\
""")
ucode_str_2 = \
u("""\
│  x   │\n\
│──────│\n\
│ 2    │\n\
│x  + 1│\
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
expr = Abs(1 / (y - Abs(x)))
ascii_str = \
"""\
1 \n\
---------\n\
|y - |x||\
"""
ucode_str = \
u("""\
    1    \n\
─────────\n\
│y - │x││\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
n = Symbol('n', integer=True)
expr = factorial(n)
ascii_str = \
"""\
n!\
"""
ucode_str = \
u("""\
n!\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = factorial(2*n)
ascii_str = \
"""\
(2*n)!\
"""
ucode_str = \
u("""\
(2⋅n)!\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = factorial(factorial(factorial(n)))
ascii_str = \
"""\
((n!)!)!\
"""
ucode_str = \
u("""\
((n!)!)!\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = factorial(n + 1)
ascii_str_1 = \
"""\
(1 + n)!\
"""
ascii_str_2 = \
"""\
(n + 1)!\
"""
ucode_str_1 = \
u("""\
(1 + n)!\
""")
ucode_str_2 = \
u("""\
(n + 1)!\
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
expr = subfactorial(n)
ascii_str = \
"""\
!n\
"""
ucode_str = \
u("""\
!n\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = subfactorial(2*n)
ascii_str = \
"""\
!(2*n)\
"""
ucode_str = \
u("""\
!(2⋅n)\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
n = Symbol('n', integer=True)
expr = factorial2(n)
ascii_str = \
"""\
n!!\
"""
ucode_str = \
u("""\
n!!\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = factorial2(2*n)
ascii_str = \
"""\
(2*n)!!\
"""
ucode_str = \
u("""\
(2⋅n)!!\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = factorial2(factorial2(factorial2(n)))
ascii_str = \
"""\
((n!!)!!)!!\
"""
ucode_str = \
u("""\
((n!!)!!)!!\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = factorial2(n + 1)
ascii_str_1 = \
"""\
(1 + n)!!\
"""
ascii_str_2 = \
"""\
(n + 1)!!\
"""
ucode_str_1 = \
u("""\
(1 + n)!!\
""")
ucode_str_2 = \
u("""\
(n + 1)!!\
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
expr = 2*binomial(n, k)
ascii_str = \
"""\
/n\\\n\
2*| |\n\
\\k/\
"""
ucode_str = \
u("""\
  ⎛n⎞\n\
2⋅⎜ ⎟\n\
  ⎝k⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = 2*binomial(2*n, k)
ascii_str = \
"""\
/2*n\\\n\
2*| |\n\
\\ k /\
"""
ucode_str = \
u("""\
  ⎛2⋅n⎞\n\
2⋅⎜   ⎟\n\
  ⎝ k ⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = 2*binomial(n**2, k)
ascii_str = \
"""\
/ 2\\\n\
|n |\n\
2*| |\n\
\\k /\
"""
ucode_str = \
u("""\
  ⎛ 2⎞\n\
  ⎜n ⎟\n\
2⋅⎜  ⎟\n\
  ⎝k ⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = catalan(n)
ascii_str = \
"""\
C \n\
n\
"""
ucode_str = \
u("""\
C \n\
n\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = bell(n)
ascii_str = \
"""\
B \n\
n\
"""
ucode_str = \
u("""\
B \n\
n\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = bernoulli(n)
ascii_str = \
"""\
B \n\
n\
"""
ucode_str = \
u("""\
B \n\
n\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = bernoulli(n, x)
ascii_str = \
"""\
B (x)\n\
n \
"""
ucode_str = \
u("""\
B (x)\n\
n \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = fibonacci(n)
ascii_str = \
"""\
F \n\
n\
"""
ucode_str = \
u("""\
F \n\
n\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = lucas(n)
ascii_str = \
"""\
L \n\
n\
"""
ucode_str = \
u("""\
L \n\
n\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = tribonacci(n)
ascii_str = \
"""\
T \n\
n\
"""
ucode_str = \
u("""\
T \n\
n\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = stieltjes(n)
ascii_str = \
"""\
stieltjes \n\
n\
"""
ucode_str = \
u("""\
ฮณ \n\
n\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = stieltjes(n, x)
ascii_str = \
"""\
stieltjes (x)\n\
n \
"""
ucode_str = \
u("""\
ฮณ (x)\n\
n \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = mathieuc(x, y, z)
ascii_str = 'C(x, y, z)'
ucode_str = u('C(x, y, z)')
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = mathieus(x, y, z)
ascii_str = 'S(x, y, z)'
ucode_str = u('S(x, y, z)')
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = mathieucprime(x, y, z)
ascii_str = "C'(x, y, z)"
ucode_str = u("C'(x, y, z)")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = mathieusprime(x, y, z)
ascii_str = "S'(x, y, z)"
ucode_str = u("S'(x, y, z)")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = conjugate(x)
ascii_str = \
"""\
_\n\
x\
"""
ucode_str = \
u("""\
_\n\
x\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
f = Function('f')
expr = conjugate(f(x + 1))
ascii_str_1 = \
"""\
________\n\
f(1 + x)\
"""
ascii_str_2 = \
"""\
________\n\
f(x + 1)\
"""
ucode_str_1 = \
u("""\
________\n\
f(1 + x)\
""")
ucode_str_2 = \
u("""\
________\n\
f(x + 1)\
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
expr = f(x)
ascii_str = \
"""\
f(x)\
"""
ucode_str = \
u("""\
f(x)\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = f(x, y)
ascii_str = \
"""\
f(x, y)\
"""
ucode_str = \
u("""\
f(x, y)\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = f(x/(y + 1), y)
ascii_str_1 = \
"""\
/ x \\\n\
f|-----, y|\n\
\\1 + y /\
"""
ascii_str_2 = \
"""\
/ x \\\n\
f|-----, y|\n\
\\y + 1 /\
"""
ucode_str_1 = \
u("""\
 ⎛  x     ⎞\n\
f⎜─────, y⎟\n\
 ⎝1 + y   ⎠\
""")
ucode_str_2 = \
u("""\
 ⎛  x     ⎞\n\
f⎜─────, y⎟\n\
 ⎝y + 1   ⎠\
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
expr = f(x**x**x**x**x**x)
ascii_str = \
"""\
/ / / / / x\\\\\\\\\\
| | | | \\x /||||
| | | \\x /|||
| | \\x /||
| \\x /|
f\\x /\
"""
ucode_str = \
u("""\
โ โ โ โ โ xโโโโโ
โ โ โ โ โx โ โโโโ
โ โ โ โx โ โโโ
โ โ โx โ โโ
โ โx โ โ
fโx โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = sin(x)**2
ascii_str = \
"""\
2 \n\
sin (x)\
"""
ucode_str = \
u("""\
   2   \n\
sin (x)\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = conjugate(a + b*I)
ascii_str = \
"""\
_ _\n\
a - I*b\
"""
ucode_str = \
u("""\
_     _\n\
a - ⅈ⋅b\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = conjugate(exp(a + b*I))
ascii_str = \
"""\
_ _\n\
a - I*b\n\
e \
"""
ucode_str = \
u("""\
 _     _\n\
 a - ⅈ⋅b\n\
ℯ       \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = conjugate( f(1 + conjugate(f(x))) )
ascii_str_1 = \
"""\
___________\n\
/ ____\\\n\
f\\1 + f(x)/\
"""
ascii_str_2 = \
"""\
___________\n\
/____ \\\n\
f\\f(x) + 1/\
"""
ucode_str_1 = \
u("""\
___________\n\
 ⎛    ____⎞\n\
f⎝1 + f(x)⎠\
""")
ucode_str_2 = \
u("""\
___________\n\
 ⎛____    ⎞\n\
f⎝f(x) + 1⎠\
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
expr = f(x/(y + 1), y)
ascii_str_1 = \
"""\
/ x \\\n\
f|-----, y|\n\
\\1 + y /\
"""
ascii_str_2 = \
"""\
/ x \\\n\
f|-----, y|\n\
\\y + 1 /\
"""
ucode_str_1 = \
u("""\
 ⎛  x     ⎞\n\
f⎜─────, y⎟\n\
 ⎝1 + y   ⎠\
""")
ucode_str_2 = \
u("""\
 ⎛  x     ⎞\n\
f⎜─────, y⎟\n\
 ⎝y + 1   ⎠\
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
expr = floor(1 / (y - floor(x)))
ascii_str = \
"""\
/ 1 \\\n\
floor|------------|\n\
\\y - floor(x)/\
"""
ucode_str = \
u("""\
⎢   1   ⎥\n\
⎢───────⎥\n\
⎣y - ⌊x⌋⎦\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = ceiling(1 / (y - ceiling(x)))
ascii_str = \
"""\
/ 1 \\\n\
ceiling|--------------|\n\
\\y - ceiling(x)/\
"""
ucode_str = \
u("""\
⎡   1   ⎤\n\
⎢───────⎥\n\
⎢y - ⌈x⌉⎥\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = euler(n)
ascii_str = \
"""\
E \n\
n\
"""
ucode_str = \
u("""\
E \n\
n\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = euler(1/(1 + 1/(1 + 1/n)))
ascii_str = \
"""\
E \n\
1 \n\
---------\n\
1 \n\
1 + -----\n\
1\n\
1 + -\n\
n\
"""
ucode_str = \
u("""\
E         \n\
     1    \n\
 ─────────\n\
       1  \n\
 1 + ─────\n\
         1\n\
     1 + ─\n\
         n\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = euler(n, x)
ascii_str = \
"""\
E (x)\n\
n \
"""
ucode_str = \
u("""\
E (x)\n\
n \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = euler(n, x/2)
ascii_str = \
"""\
/x\\\n\
E |-|\n\
n\\2/\
"""
ucode_str = \
u("""\
  ⎛x⎞\n\
E ⎜─⎟\n\
 n⎝2⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_sqrt():
expr = sqrt(2)
ascii_str = \
"""\
___\n\
\\/ 2 \
"""
ucode_str = \
u"โ2"
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = 2**Rational(1, 3)
ascii_str = \
"""\
3 ___\n\
\\/ 2 \
"""
ucode_str = \
u("""\
3 ___\n\
╲╱ 2 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = 2**Rational(1, 1000)
ascii_str = \
"""\
1000___\n\
\\/ 2 \
"""
ucode_str = \
u("""\
1000___\n\
  ╲╱ 2 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = sqrt(x**2 + 1)
ascii_str = \
"""\
________\n\
/ 2 \n\
\\/ x + 1 \
"""
ucode_str = \
u("""\
   ________\n\
  ╱  2     \n\
╲╱  x  + 1 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = (1 + sqrt(5))**Rational(1, 3)
ascii_str = \
"""\
___________\n\
3 / ___ \n\
\\/ 1 + \\/ 5 \
"""
ucode_str = \
u("""\
3 ________\n\
╲╱ 1 + √5 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = 2**(1/x)
ascii_str = \
"""\
x ___\n\
\\/ 2 \
"""
ucode_str = \
u("""\
x ___\n\
╲╱ 2 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = sqrt(2 + pi)
ascii_str = \
"""\
________\n\
\\/ 2 + pi \
"""
ucode_str = \
u("""\
  _______\n\
╲╱ 2 + π \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = (2 + (
1 + x**2)/(2 + x))**Rational(1, 4) + (1 + x**Rational(1, 1000))/sqrt(3 + x**2)
ascii_str = \
"""\
____________ \n\
/ 2 1000___ \n\
/ x + 1 \\/ x + 1\n\
4 / 2 + ------ + -----------\n\
\\/ x + 2 ________\n\
/ 2 \n\
\\/ x + 3 \
"""
ucode_str = \
u("""\
____________ \n\
โฑ 2 1000___ \n\
โฑ x + 1 โฒโฑ x + 1\n\
4 โฑ 2 + โโโโโโ + โโโโโโโโโโโ\n\
โฒโฑ x + 2 ________\n\
โฑ 2 \n\
โฒโฑ x + 3 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_sqrt_char_knob():
# See PR #9234.
expr = sqrt(2)
ucode_str1 = \
u("""\
___\n\
โฒโฑ 2 \
""")
ucode_str2 = \
u"โ2"
assert xpretty(expr, use_unicode=True,
use_unicode_sqrt_char=False) == ucode_str1
assert xpretty(expr, use_unicode=True,
use_unicode_sqrt_char=True) == ucode_str2
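# Illustrative only (not part of the original suite): the knob can be
# compared by hand with something like
#     for flag in (False, True):
#         print(xpretty(sqrt(2), use_unicode=True, use_unicode_sqrt_char=flag))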
def test_pretty_sqrt_longsymbol_no_sqrt_char():
# Do not use unicode sqrt char for long symbols (see PR #9234).
expr = sqrt(Symbol('C1'))
ucode_str = \
u("""\
  ____\n\
╲╱ C₁ \
""")
assert upretty(expr) == ucode_str
def test_pretty_KroneckerDelta():
x, y = symbols("x, y")
expr = KroneckerDelta(x, y)
ascii_str = \
"""\
d \n\
x,y\
"""
ucode_str = \
u("""\
δ   \n\
 x,y\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_product():
n, m, k, l = symbols('n m k l')
f = symbols('f', cls=Function)
expr = Product(f((n/3)**2), (n, k**2, l))
unicode_str = \
u("""\
l \n\
โโฌโโโโโโโฌโ \n\
โ โ โ 2โ\n\
โ โ โn โ\n\
โ โ fโโโโ\n\
โ โ โ9 โ \n\
โ โ \n\
2 \n\
n = k """)
ascii_str = \
"""\
l \n\
__________ \n\
| | / 2\\\n\
| | |n |\n\
| | f|--|\n\
| | \\9 /\n\
| | \n\
2 \n\
n = k """
expr = Product(f((n/3)**2), (n, k**2, l), (l, 1, m))
unicode_str = \
u("""\
m l \n\
โโฌโโโโโโโฌโ โโฌโโโโโโโฌโ \n\
โ โ โ โ โ 2โ\n\
โ โ โ โ โn โ\n\
โ โ โ โ fโโโโ\n\
โ โ โ โ โ9 โ \n\
โ โ โ โ \n\
l = 1 2 \n\
n = k """)
ascii_str = \
"""\
m l \n\
__________ __________ \n\
| | | | / 2\\\n\
| | | | |n |\n\
| | | | f|--|\n\
| | | | \\9 /\n\
| | | | \n\
l = 1 2 \n\
n = k """
assert pretty(expr) == ascii_str
assert upretty(expr) == unicode_str
def test_pretty_Lambda():
# S.IdentityFunction is a special case
expr = Lambda(y, y)
assert pretty(expr) == "x -> x"
assert upretty(expr) == u"x โฆ x"
expr = Lambda(x, x+1)
assert pretty(expr) == "x -> x + 1"
assert upretty(expr) == u"x ↦ x + 1"
expr = Lambda(x, x**2)
ascii_str = \
"""\
2\n\
x -> x \
"""
ucode_str = \
u("""\
2\n\
x ↦ x \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Lambda(x, x**2)**2
ascii_str = \
"""\
2
/ 2\\ \n\
\\x -> x / \
"""
ucode_str = \
u("""\
2
โ 2โ \n\
โx โฆ x โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Lambda((x, y), x)
ascii_str = "(x, y) -> x"
ucode_str = u"(x, y) ↦ x"
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Lambda((x, y), x**2)
ascii_str = \
"""\
2\n\
(x, y) -> x \
"""
ucode_str = \
u("""\
2\n\
(x, y) ↦ x \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Lambda(((x, y),), x**2)
ascii_str = \
"""\
2\n\
((x, y),) -> x \
"""
ucode_str = \
u("""\
2\n\
((x, y),) ↦ x \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_TransferFunction():
tf1 = TransferFunction(s - 1, s + 1, s)
assert upretty(tf1) == u"s - 1\n─────\ns + 1"
tf2 = TransferFunction(2*s + 1, 3 - p, s)
assert upretty(tf2) == u"2⋅s + 1\n───────\n 3 - p "
tf3 = TransferFunction(p, p + 1, p)
assert upretty(tf3) == u"  p  \n─────\np + 1"
def test_pretty_Series():
tf1 = TransferFunction(x + y, x - 2*y, y)
tf2 = TransferFunction(x - y, x + y, y)
tf3 = TransferFunction(x**2 + y, y - x, y)
expected1 = \
u("""\
          ⎛ 2    ⎞\n\
⎛ x + y ⎞ ⎜x  + y⎟\n\
⎜───────⎟⋅⎜──────⎟\n\
⎝x - 2⋅y⎠ ⎝-x + y⎠\
""")
expected2 = \
u("""\
⎛-x + y⎞ ⎛ -x - y⎞\n\
⎜──────⎟⋅⎜───────⎟\n\
⎝x + y ⎠ ⎝x - 2⋅y⎠\
""")
expected3 = \
u("""\
⎛ 2    ⎞                            \n\
⎜x  + y⎟ ⎛ x + y ⎞ ⎛ -x - y   x - y⎞\n\
⎜──────⎟⋅⎜───────⎟⋅⎜─────── + ─────⎟\n\
⎝-x + y⎠ ⎝x - 2⋅y⎠ ⎝x - 2⋅y   x + y⎠\
""")
expected4 = \
u("""\
                  ⎛         2    ⎞\n\
⎛ x + y    x - y⎞ ⎜x - y   x  + y⎟\n\
⎜─────── + ─────⎟⋅⎜───── + ──────⎟\n\
⎝x - 2⋅y   x + y⎠ ⎝x + y   -x + y⎠\
""")
assert upretty(Series(tf1, tf3)) == expected1
assert upretty(Series(-tf2, -tf1)) == expected2
assert upretty(Series(tf3, tf1, Parallel(-tf1, tf2))) == expected3
assert upretty(Series(Parallel(tf1, tf2), Parallel(tf2, tf3))) == expected4
def test_pretty_Parallel():
tf1 = TransferFunction(x + y, x - 2*y, y)
tf2 = TransferFunction(x - y, x + y, y)
tf3 = TransferFunction(x**2 + y, y - x, y)
expected1 = \
u("""\
 x + y    x - y\n\
─────── + ─────\n\
x - 2⋅y   x + y\
""")
expected2 = \
u("""\
-x + y    -x - y\n\
────── + ───────\n\
x + y    x - 2⋅y\
""")
expected3 = \
u("""\
 2                                  \n\
x  + y    x + y    ⎛ -x - y⎞ ⎛x - y⎞\n\
────── + ─────── + ⎜───────⎟⋅⎜─────⎟\n\
-x + y   x - 2⋅y   ⎝x - 2⋅y⎠ ⎝x + y⎠\
""")
expected4 = \
u("""\
                            ⎛ 2    ⎞\n\
⎛ x + y ⎞ ⎛x - y⎞   ⎛x - y⎞ ⎜x  + y⎟\n\
⎜───────⎟⋅⎜─────⎟ + ⎜─────⎟⋅⎜──────⎟\n\
⎝x - 2⋅y⎠ ⎝x + y⎠   ⎝x + y⎠ ⎝-x + y⎠\
""")
assert upretty(Parallel(tf1, tf2)) == expected1
assert upretty(Parallel(-tf2, -tf1)) == expected2
assert upretty(Parallel(tf3, tf1, Series(-tf1, tf2))) == expected3
assert upretty(Parallel(Series(tf1, tf2), Series(tf2, tf3))) == expected4
def test_pretty_Feedback():
tf = TransferFunction(1, 1, y)
tf1 = TransferFunction(x + y, x - 2*y, y)
tf2 = TransferFunction(x - y, x + y, y)
tf3 = TransferFunction(y**2 - 2*y + 1, y + 5, y)
tf4 = TransferFunction(x - 2*y**3, x + y, x)
tf5 = TransferFunction(1 - x, x - y, y)
tf6 = TransferFunction(2, 2, x)
expected1 = \
u("""\
    ⎛1⎞    \n\
    ⎜─⎟    \n\
    ⎝1⎠    \n\
───────────\n\
1    x + y \n\
─ + ───────\n\
1   x - 2⋅y\
""")
expected2 = \
u("""\
                ⎛1⎞                 \n\
                ⎜─⎟                 \n\
                ⎝1⎠                 \n\
────────────────────────────────────\n\
                      ⎛ 2          ⎞\n\
1   ⎛x - y⎞ ⎛ x + y ⎞ ⎜y  - 2⋅y + 1⎟\n\
─ + ⎜─────⎟⋅⎜───────⎟⋅⎜────────────⎟\n\
1   ⎝x + y⎠ ⎝x - 2⋅y⎠ ⎝   y + 5    ⎠\
""")
expected3 = \
u("""\
                 ⎛ x + y ⎞                  \n\
                 ⎜───────⎟                  \n\
                 ⎝x - 2⋅y⎠                  \n\
────────────────────────────────────────────\n\
                      ⎛ 2          ⎞        \n\
1   ⎛ x + y ⎞ ⎛x - y⎞ ⎜y  - 2⋅y + 1⎟ ⎛1 - x⎞\n\
─ + ⎜───────⎟⋅⎜─────⎟⋅⎜────────────⎟⋅⎜─────⎟\n\
1   ⎝x - 2⋅y⎠ ⎝x + y⎠ ⎝   y + 5    ⎠ ⎝x - y⎠\
""")
expected4 = \
u("""\
  ⎛ x + y ⎞ ⎛x - y⎞  \n\
  ⎜───────⎟⋅⎜─────⎟  \n\
  ⎝x - 2⋅y⎠ ⎝x + y⎠  \n\
─────────────────────\n\
1   ⎛ x + y ⎞ ⎛x - y⎞\n\
─ + ⎜───────⎟⋅⎜─────⎟\n\
1   ⎝x - 2⋅y⎠ ⎝x + y⎠\
""")
expected5 = \
u("""\
      ⎛ x + y ⎞ ⎛x - y⎞      \n\
      ⎜───────⎟⋅⎜─────⎟      \n\
      ⎝x - 2⋅y⎠ ⎝x + y⎠      \n\
─────────────────────────────\n\
1   ⎛ x + y ⎞ ⎛x - y⎞ ⎛1 - x⎞\n\
─ + ⎜───────⎟⋅⎜─────⎟⋅⎜─────⎟\n\
1   ⎝x - 2⋅y⎠ ⎝x + y⎠ ⎝x - y⎠\
""")
expected6 = \
u("""\
           ⎛ 2          ⎞                   \n\
           ⎜y  - 2⋅y + 1⎟ ⎛1 - x⎞           \n\
           ⎜────────────⎟⋅⎜─────⎟           \n\
           ⎝   y + 5    ⎠ ⎝x - y⎠           \n\
────────────────────────────────────────────\n\
    ⎛ 2          ⎞                          \n\
1   ⎜y  - 2⋅y + 1⎟ ⎛1 - x⎞ ⎛x - y⎞ ⎛ x + y ⎞\n\
─ + ⎜────────────⎟⋅⎜─────⎟⋅⎜─────⎟⋅⎜───────⎟\n\
1   ⎝   y + 5    ⎠ ⎝x - y⎠ ⎝x + y⎠ ⎝x - 2⋅y⎠\
""")
expected7 = \
u("""\
    ⎛       3⎞    \n\
    ⎜x - 2⋅y ⎟    \n\
    ⎜────────⎟    \n\
    ⎝ x + y  ⎠    \n\
──────────────────\n\
    ⎛       3⎞    \n\
1   ⎜x - 2⋅y ⎟ ⎛2⎞\n\
─ + ⎜────────⎟⋅⎜─⎟\n\
1   ⎝ x + y  ⎠ ⎝2⎠\
""")
expected8 = \
u("""\
 ⎛1 - x⎞ \n\
 ⎜─────⎟ \n\
 ⎝x - y⎠ \n\
─────────\n\
1   1 - x\n\
─ + ─────\n\
1   x - y\
""")
assert upretty(Feedback(tf, tf1)) == expected1
assert upretty(Feedback(tf, tf2*tf1*tf3)) == expected2
assert upretty(Feedback(tf1, tf2*tf3*tf5)) == expected3
assert upretty(Feedback(tf1*tf2, tf)) == expected4
assert upretty(Feedback(tf1*tf2, tf5)) == expected5
assert upretty(Feedback(tf3*tf5, tf2*tf1)) == expected6
assert upretty(Feedback(tf4, tf6)) == expected7
assert upretty(Feedback(tf5, tf)) == expected8
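# Feedback(G, H) appears above in unevaluated closed-loop form: G in the
# numerator over 1/1 + G⋅H in the denominator, with Series/Parallel
# arguments rendered by their own rules.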
def test_pretty_order():
expr = O(1)
ascii_str = \
"""\
O(1)\
"""
ucode_str = \
u("""\
O(1)\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = O(1/x)
ascii_str = \
"""\
/1\\\n\
O|-|\n\
\\x/\
"""
ucode_str = \
u("""\
โ1โ\n\
Oโโโ\n\
โxโ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = O(x**2 + y**2)
ascii_str = \
"""\
/ 2 2 \\\n\
O\\x + y ; (x, y) -> (0, 0)/\
"""
ucode_str = \
u("""\
โ 2 2 โ\n\
Oโx + y ; (x, y) โ (0, 0)โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = O(1, (x, oo))
ascii_str = \
"""\
O(1; x -> oo)\
"""
ucode_str = \
u("""\
O(1; x โ โ)\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = O(1/x, (x, oo))
ascii_str = \
"""\
/1 \\\n\
O|-; x -> oo|\n\
\\x /\
"""
ucode_str = \
u("""\
โ1 โ\n\
Oโโ; x โ โโ\n\
โx โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = O(x**2 + y**2, (x, oo), (y, oo))
ascii_str = \
"""\
/ 2 2 \\\n\
O\\x + y ; (x, y) -> (oo, oo)/\
"""
ucode_str = \
u("""\
โ 2 2 โ\n\
Oโx + y ; (x, y) โ (โ, โ)โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_derivatives():
# Simple
expr = Derivative(log(x), x, evaluate=False)
ascii_str = \
"""\
d \n\
--(log(x))\n\
dx \
"""
ucode_str = \
u("""\
d         \n\
──(log(x))\n\
dx        \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Derivative(log(x), x, evaluate=False) + x
ascii_str_1 = \
"""\
d \n\
x + --(log(x))\n\
dx \
"""
ascii_str_2 = \
"""\
d \n\
--(log(x)) + x\n\
dx \
"""
ucode_str_1 = \
u("""\
    d         \n\
x + ──(log(x))\n\
    dx        \
""")
ucode_str_2 = \
u("""\
d             \n\
──(log(x)) + x\n\
dx            \
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
# basic partial derivatives
expr = Derivative(log(x + y) + x, x)
ascii_str_1 = \
"""\
d \n\
--(log(x + y) + x)\n\
dx \
"""
ascii_str_2 = \
"""\
d \n\
--(x + log(x + y))\n\
dx \
"""
ucode_str_1 = \
u("""\
∂                 \n\
──(log(x + y) + x)\n\
∂x                \
""")
ucode_str_2 = \
u("""\
∂                 \n\
──(x + log(x + y))\n\
∂x                \
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2], upretty(expr)
# Multiple symbols
expr = Derivative(log(x) + x**2, x, y)
ascii_str_1 = \
"""\
2 \n\
d / 2\\\n\
-----\\log(x) + x /\n\
dy dx \
"""
ascii_str_2 = \
"""\
2 \n\
d / 2 \\\n\
-----\\x + log(x)/\n\
dy dx \
"""
ucode_str_1 = \
u("""\
   2              \n\
  d  ⎛          2⎞\n\
─────⎝log(x) + x ⎠\n\
dy dx             \
""")
ucode_str_2 = \
u("""\
   2              \n\
  d  ⎛ 2         ⎞\n\
─────⎝x  + log(x)⎠\n\
dy dx             \
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
expr = Derivative(2*x*y, y, x) + x**2
ascii_str_1 = \
"""\
2 \n\
d 2\n\
-----(2*x*y) + x \n\
dx dy \
"""
ascii_str_2 = \
"""\
2 \n\
2 d \n\
x + -----(2*x*y)\n\
dx dy \
"""
ucode_str_1 = \
u("""\
   2              \n\
  ∂              2\n\
─────(2⋅x⋅y) + x  \n\
∂x ∂y             \
""")
ucode_str_2 = \
u("""\
      2           \n\
 2   ∂            \n\
x  + ─────(2⋅x⋅y) \n\
     ∂x ∂y        \
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
expr = Derivative(2*x*y, x, x)
ascii_str = \
"""\
2 \n\
d \n\
---(2*x*y)\n\
2 \n\
dx \
"""
ucode_str = \
u("""\
  2       \n\
 ∂        \n\
───(2⋅x⋅y)\n\
  2       \n\
∂x        \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Derivative(2*x*y, x, 17)
ascii_str = \
"""\
17 \n\
d \n\
----(2*x*y)\n\
17 \n\
dx \
"""
ucode_str = \
u("""\
  17        \n\
 ∂          \n\
────(2⋅x⋅y) \n\
  17        \n\
∂x          \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Derivative(2*x*y, x, x, y)
ascii_str = \
"""\
3 \n\
d \n\
------(2*x*y)\n\
2 \n\
dy dx \
"""
ucode_str = \
u("""\
   3         \n\
  ∂          \n\
──────(2⋅x⋅y)\n\
     2       \n\
∂y ∂x        \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
# Greek letters
alpha = Symbol('alpha')
beta = Function('beta')
expr = beta(alpha).diff(alpha)
ascii_str = \
"""\
d \n\
------(beta(alpha))\n\
dalpha \
"""
ucode_str = \
u("""\
d       \n\
──(β(α))\n\
dα      \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Derivative(f(x), (x, n))
ascii_str = \
"""\
n \n\
d \n\
---(f(x))\n\
n \n\
dx \
"""
ucode_str = \
u("""\
  n      \n\
 d       \n\
───(f(x))\n\
  n      \n\
dx       \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_integrals():
expr = Integral(log(x), x)
ascii_str = \
"""\
/ \n\
| \n\
| log(x) dx\n\
| \n\
/ \
"""
ucode_str = \
u("""\
⌠          \n\
⎮ log(x) dx\n\
⌡          \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Integral(x**2, x)
ascii_str = \
"""\
/ \n\
| \n\
| 2 \n\
| x dx\n\
| \n\
/ \
"""
ucode_str = \
u("""\
⌠      \n\
⎮  2   \n\
⎮ x  dx\n\
⌡      \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Integral((sin(x))**2 / (tan(x))**2)
ascii_str = \
"""\
/ \n\
| \n\
| 2 \n\
| sin (x) \n\
| ------- dx\n\
| 2 \n\
| tan (x) \n\
| \n\
/ \
"""
ucode_str = \
u("""\
⌠           \n\
⎮    2      \n\
⎮ sin (x)   \n\
⎮ ─────── dx\n\
⎮    2      \n\
⎮ tan (x)   \n\
⌡           \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Integral(x**(2**x), x)
ascii_str = \
"""\
/ \n\
| \n\
| / x\\ \n\
| \\2 / \n\
| x dx\n\
| \n\
/ \
"""
ucode_str = \
u("""\
⌠        \n\
⎮  ⎛ x⎞  \n\
⎮  ⎝2 ⎠  \n\
⎮ x    dx\n\
⌡        \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Integral(x**2, (x, 1, 2))
ascii_str = \
"""\
2 \n\
/ \n\
| \n\
| 2 \n\
| x dx\n\
| \n\
/ \n\
1 \
"""
ucode_str = \
u("""\
2      \n\
⌠      \n\
⎮  2   \n\
⎮ x  dx\n\
⌡      \n\
1      \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Integral(x**2, (x, Rational(1, 2), 10))
ascii_str = \
"""\
10 \n\
/ \n\
| \n\
| 2 \n\
| x dx\n\
| \n\
/ \n\
1/2 \
"""
ucode_str = \
u("""\
 10      \n\
 ⌠       \n\
 ⎮   2   \n\
 ⎮  x  dx\n\
 ⌡       \n\
1/2      \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Integral(x**2*y**2, x, y)
ascii_str = \
"""\
/ / \n\
| | \n\
| | 2 2 \n\
| | x *y dx dy\n\
| | \n\
/ / \
"""
ucode_str = \
u("""\
⌠ ⌠            \n\
⎮ ⎮  2  2      \n\
⎮ ⎮ x ⋅y  dx dy\n\
⌡ ⌡            \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Integral(sin(th)/cos(ph), (th, 0, pi), (ph, 0, 2*pi))
ascii_str = \
"""\
2*pi pi \n\
/ / \n\
| | \n\
| | sin(theta) \n\
| | ---------- d(theta) d(phi)\n\
| | cos(phi) \n\
| | \n\
/ / \n\
0 0 \
"""
ucode_str = \
u("""\
2⋅π  π             \n\
 ⌠   ⌠             \n\
 ⎮   ⎮ sin(θ)      \n\
 ⎮   ⎮ ────── dθ dφ\n\
 ⎮   ⎮ cos(φ)      \n\
 ⌡   ⌡             \n\
 0   0             \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_matrix():
# Empty Matrix
expr = Matrix()
ascii_str = "[]"
unicode_str = "[]"
assert pretty(expr) == ascii_str
assert upretty(expr) == unicode_str
expr = Matrix(2, 0, lambda i, j: 0)
ascii_str = "[]"
unicode_str = "[]"
assert pretty(expr) == ascii_str
assert upretty(expr) == unicode_str
expr = Matrix(0, 2, lambda i, j: 0)
ascii_str = "[]"
unicode_str = "[]"
assert pretty(expr) == ascii_str
assert upretty(expr) == unicode_str
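# Every zero-dimension shape above (0x0, 2x0, 0x2) collapses to "[]" in
# both the ASCII and the unicode printer.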
expr = Matrix([[x**2 + 1, 1], [y, x + y]])
ascii_str_1 = \
"""\
[ 2 ]
[1 + x 1 ]
[ ]
[ y x + y]\
"""
ascii_str_2 = \
"""\
[ 2 ]
[x + 1 1 ]
[ ]
[ y x + y]\
"""
ucode_str_1 = \
u("""\
⎡     2       ⎤
⎢1 + x     1  ⎥
⎢             ⎥
⎣  y     x + y⎦\
""")
ucode_str_2 = \
u("""\
⎡ 2           ⎤
⎢x  + 1    1  ⎥
⎢             ⎥
⎣  y     x + y⎦\
""")
assert pretty(expr) in [ascii_str_1, ascii_str_2]
assert upretty(expr) in [ucode_str_1, ucode_str_2]
expr = Matrix([[x/y, y, th], [0, exp(I*k*ph), 1]])
ascii_str = \
"""\
[x ]
[- y theta]
[y ]
[ ]
[ I*k*phi ]
[0 e 1 ]\
"""
ucode_str = \
u("""\
⎡x           ⎤
⎢─  y     θ  ⎥
⎢y           ⎥
⎢            ⎥
⎢    ⅈ⋅k⋅φ   ⎥
⎣0  ℯ      1 ⎦\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
unicode_str = \
u("""\
⎡v̇_msc_00     0         0    ⎤
⎢                            ⎥
⎢    0     v̇_msc_01     0    ⎥
⎢                            ⎥
⎣    0         0     v̇_msc_02⎦\
""")
expr = diag(*MatrixSymbol('vdot_msc',1,3))
assert upretty(expr) == unicode_str
def test_pretty_ndim_arrays():
x, y, z, w = symbols("x y z w")
for ArrayType in (ImmutableDenseNDimArray, ImmutableSparseNDimArray, MutableDenseNDimArray, MutableSparseNDimArray):
# Basic: scalar array
M = ArrayType(x)
assert pretty(M) == "x"
assert upretty(M) == "x"
M = ArrayType([[1/x, y], [z, w]])
M1 = ArrayType([1/x, y, z])
M2 = tensorproduct(M1, M)
M3 = tensorproduct(M, M)
ascii_str = \
"""\
[1 ]\n\
[- y]\n\
[x ]\n\
[ ]\n\
[z w]\
"""
ucode_str = \
u("""\
⎡1   ⎤\n\
⎢─  y⎥\n\
⎢x   ⎥\n\
⎢    ⎥\n\
⎣z  w⎦\
""")
assert pretty(M) == ascii_str
assert upretty(M) == ucode_str
ascii_str = \
"""\
[1 ]\n\
[- y z]\n\
[x ]\
"""
ucode_str = \
u("""\
⎡1      ⎤\n\
⎢─  y  z⎥\n\
⎣x      ⎦\
""")
assert pretty(M1) == ascii_str
assert upretty(M1) == ucode_str
ascii_str = \
"""\
[[1 y] ]\n\
[[-- -] [z ]]\n\
[[ 2 x] [ y 2 ] [- y*z]]\n\
[[x ] [ - y ] [x ]]\n\
[[ ] [ x ] [ ]]\n\
[[z w] [ ] [ 2 ]]\n\
[[- -] [y*z w*y] [z w*z]]\n\
[[x x] ]\
"""
ucode_str = \
u("""\
โกโก1 yโค โค\n\
โขโขโโ โโฅ โกz โคโฅ\n\
โขโข 2 xโฅ โก y 2 โค โขโ yโ
zโฅโฅ\n\
โขโขx โฅ โข โ y โฅ โขx โฅโฅ\n\
โขโข โฅ โข x โฅ โข โฅโฅ\n\
โขโขz wโฅ โข โฅ โข 2 โฅโฅ\n\
โขโขโ โโฅ โฃyโ
z wโ
yโฆ โฃz wโ
zโฆโฅ\n\
โฃโฃx xโฆ โฆ\
""")
assert pretty(M2) == ascii_str
assert upretty(M2) == ucode_str
ascii_str = \
"""\
[ [1 y] ]\n\
[ [-- -] ]\n\
[ [ 2 x] [ y 2 ]]\n\
[ [x ] [ - y ]]\n\
[ [ ] [ x ]]\n\
[ [z w] [ ]]\n\
[ [- -] [y*z w*y]]\n\
[ [x x] ]\n\
[ ]\n\
[[z ] [ w ]]\n\
[[- y*z] [ - w*y]]\n\
[[x ] [ x ]]\n\
[[ ] [ ]]\n\
[[ 2 ] [ 2 ]]\n\
[[z w*z] [w*z w ]]\
"""
ucode_str = \
u("""\
โก โก1 yโค โค\n\
โข โขโโ โโฅ โฅ\n\
โข โข 2 xโฅ โก y 2 โคโฅ\n\
โข โขx โฅ โข โ y โฅโฅ\n\
โข โข โฅ โข x โฅโฅ\n\
โข โขz wโฅ โข โฅโฅ\n\
โข โขโ โโฅ โฃyโ
z wโ
yโฆโฅ\n\
โข โฃx xโฆ โฅ\n\
โข โฅ\n\
โขโกz โค โก w โคโฅ\n\
โขโขโ yโ
zโฅ โข โ wโ
yโฅโฅ\n\
โขโขx โฅ โข x โฅโฅ\n\
โขโข โฅ โข โฅโฅ\n\
โขโข 2 โฅ โข 2 โฅโฅ\n\
โฃโฃz wโ
zโฆ โฃwโ
z w โฆโฆ\
""")
assert pretty(M3) == ascii_str
assert upretty(M3) == ucode_str
Mrow = ArrayType([[x, y, 1 / z]])
Mcolumn = ArrayType([[x], [y], [1 / z]])
Mcol2 = ArrayType([Mcolumn.tolist()])
ascii_str = \
"""\
[[ 1]]\n\
[[x y -]]\n\
[[ z]]\
"""
ucode_str = \
u("""\
⎡⎡      1⎤⎤\n\
⎢⎢x  y  ─⎥⎥\n\
⎣⎣      z⎦⎦\
""")
assert pretty(Mrow) == ascii_str
assert upretty(Mrow) == ucode_str
ascii_str = \
"""\
[x]\n\
[ ]\n\
[y]\n\
[ ]\n\
[1]\n\
[-]\n\
[z]\
"""
ucode_str = \
u("""\
⎡x⎤\n\
⎢ ⎥\n\
⎢y⎥\n\
⎢ ⎥\n\
⎢1⎥\n\
⎢─⎥\n\
⎣z⎦\
""")
assert pretty(Mcolumn) == ascii_str
assert upretty(Mcolumn) == ucode_str
ascii_str = \
"""\
[[x]]\n\
[[ ]]\n\
[[y]]\n\
[[ ]]\n\
[[1]]\n\
[[-]]\n\
[[z]]\
"""
ucode_str = \
u("""\
⎡⎡x⎤⎤\n\
⎢⎢ ⎥⎥\n\
⎢⎢y⎥⎥\n\
⎢⎢ ⎥⎥\n\
⎢⎢1⎥⎥\n\
⎢⎢─⎥⎥\n\
⎣⎣z⎦⎦\
""")
assert pretty(Mcol2) == ascii_str
assert upretty(Mcol2) == ucode_str
def test_tensor_TensorProduct():
A = MatrixSymbol("A", 3, 3)
B = MatrixSymbol("B", 3, 3)
assert upretty(TensorProduct(A, B)) == "A\u2297B"
assert upretty(TensorProduct(A, B, A)) == "A\u2297B\u2297A"
def test_diffgeom_print_WedgeProduct():
from sympy.diffgeom.rn import R2
from sympy.diffgeom import WedgeProduct
wp = WedgeProduct(R2.dx, R2.dy)
assert upretty(wp) == u("ⅆ x∧ⅆ y")
def test_Adjoint():
X = MatrixSymbol('X', 2, 2)
Y = MatrixSymbol('Y', 2, 2)
assert pretty(Adjoint(X)) == " +\nX "
assert pretty(Adjoint(X + Y)) == " +\n(X + Y) "
assert pretty(Adjoint(X) + Adjoint(Y)) == " + +\nX + Y "
assert pretty(Adjoint(X*Y)) == " +\n(X*Y) "
assert pretty(Adjoint(Y)*Adjoint(X)) == " + +\nY *X "
assert pretty(Adjoint(X**2)) == " +\n/ 2\\ \n\\X / "
assert pretty(Adjoint(X)**2) == " 2\n/ +\\ \n\\X / "
assert pretty(Adjoint(Inverse(X))) == " +\n/ -1\\ \n\\X / "
assert pretty(Inverse(Adjoint(X))) == " -1\n/ +\\ \n\\X / "
assert pretty(Adjoint(Transpose(X))) == " +\n/ T\\ \n\\X / "
assert pretty(Transpose(Adjoint(X))) == " T\n/ +\\ \n\\X / "
assert upretty(Adjoint(X)) == u" †\nX "
assert upretty(Adjoint(X + Y)) == u"       †\n(X + Y) "
assert upretty(Adjoint(X) + Adjoint(Y)) == u" †    †\nX  + Y "
assert upretty(Adjoint(X*Y)) == u"     †\n(X⋅Y) "
assert upretty(Adjoint(Y)*Adjoint(X)) == u" †  †\nY ⋅X "
assert upretty(Adjoint(X**2)) == \
u"    †\n⎛ 2⎞ \n⎝X ⎠ "
assert upretty(Adjoint(X)**2) == \
u"   2\n⎛ †⎞ \n⎝X ⎠ "
assert upretty(Adjoint(Inverse(X))) == \
u"     †\n⎛ -1⎞ \n⎝X  ⎠ "
assert upretty(Inverse(Adjoint(X))) == \
u"   -1\n⎛ †⎞  \n⎝X ⎠  "
assert upretty(Adjoint(Transpose(X))) == \
u"    †\n⎛ T⎞ \n⎝X ⎠ "
assert upretty(Transpose(Adjoint(X))) == \
u"    T\n⎛ †⎞ \n⎝X ⎠ "
def test_pretty_Trace_issue_9044():
X = Matrix([[1, 2], [3, 4]])
Y = Matrix([[2, 4], [6, 8]])
ascii_str_1 = \
"""\
/[1 2]\\
tr|[ ]|
\\[3 4]/\
"""
ucode_str_1 = \
u("""\
  ⎛⎡1  2⎤⎞
tr⎜⎢    ⎥⎟
  ⎝⎣3  4⎦⎠\
""")
ascii_str_2 = \
"""\
/[1 2]\\ /[2 4]\\
tr|[ ]| + tr|[ ]|
\\[3 4]/ \\[6 8]/\
"""
ucode_str_2 = \
u("""\
  ⎛⎡1  2⎤⎞     ⎛⎡2  4⎤⎞
tr⎜⎢    ⎥⎟ + tr⎜⎢    ⎥⎟
  ⎝⎣3  4⎦⎠     ⎝⎣6  8⎦⎠\
""")
assert pretty(Trace(X)) == ascii_str_1
assert upretty(Trace(X)) == ucode_str_1
assert pretty(Trace(X) + Trace(Y)) == ascii_str_2
assert upretty(Trace(X) + Trace(Y)) == ucode_str_2
def test_MatrixSlice():
n = Symbol('n', integer=True)
x, y, z, w, t, = symbols('x y z w t')
X = MatrixSymbol('X', n, n)
Y = MatrixSymbol('Y', 10, 10)
Z = MatrixSymbol('Z', 10, 10)
expr = MatrixSlice(X, (None, None, None), (None, None, None))
assert pretty(expr) == upretty(expr) == 'X[:, :]'
expr = X[x:x + 1, y:y + 1]
assert pretty(expr) == upretty(expr) == 'X[x:x + 1, y:y + 1]'
expr = X[x:x + 1:2, y:y + 1:2]
assert pretty(expr) == upretty(expr) == 'X[x:x + 1:2, y:y + 1:2]'
expr = X[:x, y:]
assert pretty(expr) == upretty(expr) == 'X[:x, y:]'
expr = X[x:, :y]
assert pretty(expr) == upretty(expr) == 'X[x:, :y]'
expr = X[x:y, z:w]
assert pretty(expr) == upretty(expr) == 'X[x:y, z:w]'
expr = X[x:y:t, w:t:x]
assert pretty(expr) == upretty(expr) == 'X[x:y:t, w:t:x]'
expr = X[x::y, t::w]
assert pretty(expr) == upretty(expr) == 'X[x::y, t::w]'
expr = X[:x:y, :t:w]
assert pretty(expr) == upretty(expr) == 'X[:x:y, :t:w]'
expr = X[::x, ::y]
assert pretty(expr) == upretty(expr) == 'X[::x, ::y]'
expr = MatrixSlice(X, (0, None, None), (0, None, None))
assert pretty(expr) == upretty(expr) == 'X[:, :]'
expr = MatrixSlice(X, (None, n, None), (None, n, None))
assert pretty(expr) == upretty(expr) == 'X[:, :]'
expr = MatrixSlice(X, (0, n, None), (0, n, None))
assert pretty(expr) == upretty(expr) == 'X[:, :]'
expr = MatrixSlice(X, (0, n, 2), (0, n, 2))
assert pretty(expr) == upretty(expr) == 'X[::2, ::2]'
expr = X[1:2:3, 4:5:6]
assert pretty(expr) == upretty(expr) == 'X[1:2:3, 4:5:6]'
expr = X[1:3:5, 4:6:8]
assert pretty(expr) == upretty(expr) == 'X[1:3:5, 4:6:8]'
expr = X[1:10:2]
assert pretty(expr) == upretty(expr) == 'X[1:10:2, :]'
expr = Y[:5, 1:9:2]
assert pretty(expr) == upretty(expr) == 'Y[:5, 1:9:2]'
expr = Y[:5, 1:10:2]
assert pretty(expr) == upretty(expr) == 'Y[:5, 1::2]'
expr = Y[5, :5:2]
assert pretty(expr) == upretty(expr) == 'Y[5:6, :5:2]'
expr = X[0:1, 0:1]
assert pretty(expr) == upretty(expr) == 'X[:1, :1]'
expr = X[0:1:2, 0:1:2]
assert pretty(expr) == upretty(expr) == 'X[:1:2, :1:2]'
expr = (Y + Z)[2:, 2:]
assert pretty(expr) == upretty(expr) == '(Y + Z)[2:, 2:]'
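# Note the normalizations exercised above: full slices print as ':', a
# single row index n prints as the slice 'n:n + 1', and redundant 0/n
# bounds are dropped.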
def test_MatrixExpressions():
n = Symbol('n', integer=True)
X = MatrixSymbol('X', n, n)
assert pretty(X) == upretty(X) == "X"
# Apply function elementwise (`ElementwiseApplyFunc`):
expr = (X.T*X).applyfunc(sin)
ascii_str = """\
/ T \\\n\
(d -> sin(d)).\\X *X/\
"""
ucode_str = u("""\
             ⎛ T  ⎞\n\
(d ↦ sin(d))˳⎝X ⋅X⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
lamda = Lambda(x, 1/x)
expr = (n*X).applyfunc(lamda)
ascii_str = """\
/ 1\\ \n\
|x -> -|.(n*X)\n\
\\ x/ \
"""
ucode_str = u("""\
⎛    1⎞      \n\
⎜x ↦ ─⎟˳(n⋅X)\n\
⎝    x⎠      \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_dotproduct():
from sympy.matrices import Matrix, MatrixSymbol
from sympy.matrices.expressions.dotproduct import DotProduct
n = symbols("n", integer=True)
A = MatrixSymbol('A', n, 1)
B = MatrixSymbol('B', n, 1)
C = Matrix(1, 3, [1, 2, 3])
D = Matrix(1, 3, [1, 3, 4])
assert pretty(DotProduct(A, B)) == u"A*B"
assert pretty(DotProduct(C, D)) == u"[1  2  3]*[1  3  4]"
assert upretty(DotProduct(A, B)) == u"A⋅B"
assert upretty(DotProduct(C, D)) == u"[1  2  3]⋅[1  3  4]"
def test_pretty_piecewise():
expr = Piecewise((x, x < 1), (x**2, True))
ascii_str = \
"""\
/x for x < 1\n\
| \n\
< 2 \n\
|x otherwise\n\
\\ \
"""
ucode_str = \
u("""\
⎧x   for x < 1\n\
⎪             \n\
⎨ 2           \n\
⎪x   otherwise\n\
⎩             \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = -Piecewise((x, x < 1), (x**2, True))
ascii_str = \
"""\
//x for x < 1\\\n\
|| |\n\
-|< 2 |\n\
||x otherwise|\n\
\\\\ /\
"""
ucode_str = \
u("""\
 ⎛⎧x   for x < 1⎞\n\
 ⎜⎪             ⎟\n\
-⎜⎨ 2           ⎟\n\
 ⎜⎪x   otherwise⎟\n\
 ⎝⎩             ⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = x + Piecewise((x, x > 0), (y, True)) + Piecewise((x/y, x < 2),
(y**2, x > 2), (1, True)) + 1
ascii_str = \
"""\
//x \\ \n\
||- for x < 2| \n\
||y | \n\
//x for x > 0\\ || | \n\
x + |< | + |< 2 | + 1\n\
\\\\y otherwise/ ||y for x > 2| \n\
|| | \n\
||1 otherwise| \n\
\\\\ / \
"""
ucode_str = \
u("""\
โโงx โ \n\
โโชโ for x < 2โ \n\
โโชy โ \n\
โโงx for x > 0โ โโช โ \n\
x + โโจ โ + โโจ 2 โ + 1\n\
โโฉy otherwiseโ โโชy for x > 2โ \n\
โโช โ \n\
โโช1 otherwiseโ \n\
โโฉ โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = x - Piecewise((x, x > 0), (y, True)) + Piecewise((x/y, x < 2),
(y**2, x > 2), (1, True)) + 1
ascii_str = \
"""\
//x \\ \n\
||- for x < 2| \n\
||y | \n\
//x for x > 0\\ || | \n\
x - |< | + |< 2 | + 1\n\
\\\\y otherwise/ ||y for x > 2| \n\
|| | \n\
||1 otherwise| \n\
\\\\ / \
"""
ucode_str = \
u("""\
โโงx โ \n\
โโชโ for x < 2โ \n\
โโชy โ \n\
โโงx for x > 0โ โโช โ \n\
x - โโจ โ + โโจ 2 โ + 1\n\
โโฉy otherwiseโ โโชy for x > 2โ \n\
โโช โ \n\
โโช1 otherwiseโ \n\
โโฉ โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = x*Piecewise((x, x > 0), (y, True))
ascii_str = \
"""\
//x for x > 0\\\n\
x*|< |\n\
\\\\y otherwise/\
"""
ucode_str = \
u("""\
  ⎛⎧x  for x > 0⎞\n\
x⋅⎜⎨            ⎟\n\
  ⎝⎩y  otherwise⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Piecewise((x, x > 0), (y, True))*Piecewise((x/y, x < 2), (y**2, x >
2), (1, True))
ascii_str = \
"""\
//x \\\n\
||- for x < 2|\n\
||y |\n\
//x for x > 0\\ || |\n\
|< |*|< 2 |\n\
\\\\y otherwise/ ||y for x > 2|\n\
|| |\n\
||1 otherwise|\n\
\\\\ /\
"""
ucode_str = \
u("""\
โโงx โ\n\
โโชโ for x < 2โ\n\
โโชy โ\n\
โโงx for x > 0โ โโช โ\n\
โโจ โโ
โโจ 2 โ\n\
โโฉy otherwiseโ โโชy for x > 2โ\n\
โโช โ\n\
โโช1 otherwiseโ\n\
โโฉ โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = -Piecewise((x, x > 0), (y, True))*Piecewise((x/y, x < 2), (y**2, x
> 2), (1, True))
ascii_str = \
"""\
//x \\\n\
||- for x < 2|\n\
||y |\n\
//x for x > 0\\ || |\n\
-|< |*|< 2 |\n\
\\\\y otherwise/ ||y for x > 2|\n\
|| |\n\
||1 otherwise|\n\
\\\\ /\
"""
ucode_str = \
u("""\
โโงx โ\n\
โโชโ for x < 2โ\n\
โโชy โ\n\
โโงx for x > 0โ โโช โ\n\
-โโจ โโ
โโจ 2 โ\n\
โโฉy otherwiseโ โโชy for x > 2โ\n\
โโช โ\n\
โโช1 otherwiseโ\n\
โโฉ โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Piecewise((0, Abs(1/y) < 1), (1, Abs(y) < 1), (y*meijerg(((2, 1),
()), ((), (1, 0)), 1/y), True))
ascii_str = \
"""\
/ 1 \n\
| 0 for --- < 1\n\
| |y| \n\
| \n\
< 1 for |y| < 1\n\
| \n\
| __0, 2 /2, 1 | 1\\ \n\
|y*/__ | | -| otherwise \n\
\\ \\_|2, 2 \\ 1, 0 | y/ \
"""
ucode_str = \
u("""\
โง 1 \n\
โช 0 for โโโ < 1\n\
โช โyโ \n\
โช \n\
โจ 1 for โyโ < 1\n\
โช \n\
โช โญโโฎ0, 2 โ2, 1 โ 1โ \n\
โชyโ
โโถโ โ โ โโ otherwise \n\
โฉ โฐโโฏ2, 2 โ 1, 0 โ yโ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
# XXX: We have to use evaluate=False here because Piecewise._eval_power
# denests the power.
expr = Pow(Piecewise((x, x > 0), (y, True)), 2, evaluate=False)
ascii_str = \
"""\
2\n\
//x for x > 0\\ \n\
|< | \n\
\\\\y otherwise/ \
"""
ucode_str = \
u("""\
                2\n\
⎛⎧x  for x > 0⎞ \n\
⎜⎨            ⎟ \n\
⎝⎩y  otherwise⎠ \
""")
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_ITE():
expr = ITE(x, y, z)
assert pretty(expr) == (
'/y for x \n'
'< \n'
'\\z otherwise'
)
assert upretty(expr) == u("""\
⎧y    for x  \n\
⎨            \n\
⎩z  otherwise\
""")
def test_pretty_seq():
expr = ()
ascii_str = \
"""\
()\
"""
ucode_str = \
u("""\
()\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = []
ascii_str = \
"""\
[]\
"""
ucode_str = \
u("""\
[]\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = {}
expr_2 = {}
ascii_str = \
"""\
{}\
"""
ucode_str = \
u("""\
{}\
""")
assert pretty(expr) == ascii_str
assert pretty(expr_2) == ascii_str
assert upretty(expr) == ucode_str
assert upretty(expr_2) == ucode_str
expr = (1/x,)
ascii_str = \
"""\
1 \n\
(-,)\n\
x \
"""
ucode_str = \
u("""\
โ1 โ\n\
โโ,โ\n\
โx โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = [x**2, 1/x, x, y, sin(th)**2/cos(ph)**2]
ascii_str = \
"""\
2 \n\
2 1 sin (theta) \n\
[x , -, x, y, -----------]\n\
x 2 \n\
cos (phi) \
"""
ucode_str = \
u("""\
⎡                2   ⎤\n\
⎢ 2  1        sin (θ)⎥\n\
⎢x , ─, x, y, ───────⎥\n\
⎢    x           2   ⎥\n\
⎣             cos (φ)⎦\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = (x**2, 1/x, x, y, sin(th)**2/cos(ph)**2)
ascii_str = \
"""\
2 \n\
2 1 sin (theta) \n\
(x , -, x, y, -----------)\n\
x 2 \n\
cos (phi) \
"""
ucode_str = \
u("""\
⎛                2   ⎞\n\
⎜ 2  1        sin (θ)⎟\n\
⎜x , ─, x, y, ───────⎟\n\
⎜    x           2   ⎟\n\
⎝             cos (φ)⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Tuple(x**2, 1/x, x, y, sin(th)**2/cos(ph)**2)
ascii_str = \
"""\
2 \n\
2 1 sin (theta) \n\
(x , -, x, y, -----------)\n\
x 2 \n\
cos (phi) \
"""
ucode_str = \
u("""\
⎛                2   ⎞\n\
⎜ 2  1        sin (θ)⎟\n\
⎜x , ─, x, y, ───────⎟\n\
⎜    x           2   ⎟\n\
⎝             cos (φ)⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = {x: sin(x)}
expr_2 = Dict({x: sin(x)})
ascii_str = \
"""\
{x: sin(x)}\
"""
ucode_str = \
u("""\
{x: sin(x)}\
""")
assert pretty(expr) == ascii_str
assert pretty(expr_2) == ascii_str
assert upretty(expr) == ucode_str
assert upretty(expr_2) == ucode_str
expr = {1/x: 1/y, x: sin(x)**2}
expr_2 = Dict({1/x: 1/y, x: sin(x)**2})
ascii_str = \
"""\
1 1 2 \n\
{-: -, x: sin (x)}\n\
x y \
"""
ucode_str = \
u("""\
⎧1  1        2   ⎫\n\
⎨─: ─, x: sin (x)⎬\n\
⎩x  y            ⎭\
""")
assert pretty(expr) == ascii_str
assert pretty(expr_2) == ascii_str
assert upretty(expr) == ucode_str
assert upretty(expr_2) == ucode_str
# There used to be a bug with pretty-printing sequences of even height.
expr = [x**2]
ascii_str = \
"""\
2 \n\
[x ]\
"""
ucode_str = \
u("""\
⎡ 2⎤\n\
⎣x ⎦\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = (x**2,)
ascii_str = \
"""\
2 \n\
(x ,)\
"""
ucode_str = \
u("""\
⎛ 2 ⎞\n\
⎝x ,⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Tuple(x**2)
ascii_str = \
"""\
2 \n\
(x ,)\
"""
ucode_str = \
u("""\
⎛ 2 ⎞\n\
⎝x ,⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = {x**2: 1}
expr_2 = Dict({x**2: 1})
ascii_str = \
"""\
2 \n\
{x : 1}\
"""
ucode_str = \
u("""\
⎧ 2   ⎫\n\
⎨x : 1⎬\n\
⎩     ⎭\
""")
assert pretty(expr) == ascii_str
assert pretty(expr_2) == ascii_str
assert upretty(expr) == ucode_str
assert upretty(expr_2) == ucode_str
def test_any_object_in_sequence():
# Cf. issue 5306
b1 = Basic()
b2 = Basic(Basic())
expr = [b2, b1]
assert pretty(expr) == "[Basic(Basic()), Basic()]"
assert upretty(expr) == u"[Basic(Basic()), Basic()]"
expr = {b2, b1}
assert pretty(expr) == "{Basic(), Basic(Basic())}"
assert upretty(expr) == u"{Basic(), Basic(Basic())}"
expr = {b2: b1, b1: b2}
expr2 = Dict({b2: b1, b1: b2})
assert pretty(expr) == "{Basic(): Basic(Basic()), Basic(Basic()): Basic()}"
assert pretty(
expr2) == "{Basic(): Basic(Basic()), Basic(Basic()): Basic()}"
assert upretty(
expr) == u"{Basic(): Basic(Basic()), Basic(Basic()): Basic()}"
assert upretty(
expr2) == u"{Basic(): Basic(Basic()), Basic(Basic()): Basic()}"
def test_print_builtin_set():
assert pretty(set()) == 'set()'
assert upretty(set()) == u'set()'
assert pretty(frozenset()) == 'frozenset()'
assert upretty(frozenset()) == u'frozenset()'
s1 = {1/x, x}
s2 = frozenset(s1)
assert pretty(s1) == \
"""\
 1    \n\
{-, x}
 x    \
"""
assert upretty(s1) == \
u"""\
⎧1   ⎫
⎨─, x⎬
⎩x   ⎭\
"""
assert pretty(s2) == \
"""\
           1     \n\
frozenset({-, x})
           x     \
"""
assert upretty(s2) == \
u"""\
         ⎛⎧1   ⎫⎞
frozenset⎜⎨─, x⎬⎟
         ⎝⎩x   ⎭⎠\
"""
def test_pretty_sets():
s = FiniteSet
assert pretty(s(*[x*y, x**2])) == \
"""\
2 \n\
{x , x*y}\
"""
assert pretty(s(*range(1, 6))) == "{1, 2, 3, 4, 5}"
assert pretty(s(*range(1, 13))) == "{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}"
assert pretty(set([x*y, x**2])) == \
"""\
2 \n\
{x , x*y}\
"""
assert pretty(set(range(1, 6))) == "{1, 2, 3, 4, 5}"
assert pretty(set(range(1, 13))) == \
"{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}"
assert pretty(frozenset([x*y, x**2])) == \
"""\
2 \n\
frozenset({x , x*y})\
"""
assert pretty(frozenset(range(1, 6))) == "frozenset({1, 2, 3, 4, 5})"
assert pretty(frozenset(range(1, 13))) == \
"frozenset({1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12})"
assert pretty(Range(0, 3, 1)) == '{0, 1, 2}'
ascii_str = '{0, 1, ..., 29}'
ucode_str = u'{0, 1, …, 29}'
assert pretty(Range(0, 30, 1)) == ascii_str
assert upretty(Range(0, 30, 1)) == ucode_str
ascii_str = '{30, 29, ..., 2}'
ucode_str = u('{30, 29, …, 2}')
assert pretty(Range(30, 1, -1)) == ascii_str
assert upretty(Range(30, 1, -1)) == ucode_str
ascii_str = '{0, 2, ...}'
ucode_str = u'{0, 2, …}'
assert pretty(Range(0, oo, 2)) == ascii_str
assert upretty(Range(0, oo, 2)) == ucode_str
ascii_str = '{..., 2, 0}'
ucode_str = u('{…, 2, 0}')
assert pretty(Range(oo, -2, -2)) == ascii_str
assert upretty(Range(oo, -2, -2)) == ucode_str
ascii_str = '{-2, -3, ...}'
ucode_str = u('{-2, -3, …}')
assert pretty(Range(-2, -oo, -1)) == ascii_str
assert upretty(Range(-2, -oo, -1)) == ucode_str
def test_pretty_SetExpr():
iv = Interval(1, 3)
se = SetExpr(iv)
ascii_str = "SetExpr([1, 3])"
ucode_str = u("SetExpr([1, 3])")
assert pretty(se) == ascii_str
assert upretty(se) == ucode_str
def test_pretty_ImageSet():
imgset = ImageSet(Lambda((x, y), x + y), {1, 2, 3}, {3, 4})
ascii_str = '{x + y | x in {1, 2, 3} , y in {3, 4}}'
ucode_str = u('{x + y | x ∊ {1, 2, 3} , y ∊ {3, 4}}')
assert pretty(imgset) == ascii_str
assert upretty(imgset) == ucode_str
imgset = ImageSet(Lambda(((x, y),), x + y), ProductSet({1, 2, 3}, {3, 4}))
ascii_str = '{x + y | (x, y) in {1, 2, 3} x {3, 4}}'
ucode_str = u('{x + y | (x, y) ∊ {1, 2, 3} × {3, 4}}')
assert pretty(imgset) == ascii_str
assert upretty(imgset) == ucode_str
imgset = ImageSet(Lambda(x, x**2), S.Naturals)
ascii_str = \
' 2 \n'\
'{x | x in Naturals}'
ucode_str = u('''\
⎧ 2        ⎫\n\
⎨x  | x ∊ ℕ⎬\n\
⎩          ⎭''')
assert pretty(imgset) == ascii_str
assert upretty(imgset) == ucode_str
def test_pretty_ConditionSet():
from sympy import ConditionSet
ascii_str = '{x | x in (-oo, oo) and sin(x) = 0}'
ucode_str = u'{x | x ∊ ℝ ∧ (sin(x) = 0)}'
assert pretty(ConditionSet(x, Eq(sin(x), 0), S.Reals)) == ascii_str
assert upretty(ConditionSet(x, Eq(sin(x), 0), S.Reals)) == ucode_str
assert pretty(ConditionSet(x, Contains(x, S.Reals, evaluate=False), FiniteSet(1))) == '{1}'
assert upretty(ConditionSet(x, Contains(x, S.Reals, evaluate=False), FiniteSet(1))) == u'{1}'
assert pretty(ConditionSet(x, And(x > 1, x < -1), FiniteSet(1, 2, 3))) == "EmptySet"
assert upretty(ConditionSet(x, And(x > 1, x < -1), FiniteSet(1, 2, 3))) == u"∅"
assert pretty(ConditionSet(x, Or(x > 1, x < -1), FiniteSet(1, 2))) == '{2}'
assert upretty(ConditionSet(x, Or(x > 1, x < -1), FiniteSet(1, 2))) == u'{2}'
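# ConditionSet evaluates trivial cases before printing: an unsatisfiable
# condition prints as the empty set and a fully determined one as a plain
# FiniteSet.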
def test_pretty_ComplexRegion():
from sympy import ComplexRegion
ucode_str = u'{x + y⋅ⅈ | x, y ∊ [3, 5] × [4, 6]}'
assert upretty(ComplexRegion(Interval(3, 5)*Interval(4, 6))) == ucode_str
ucode_str = u'{r⋅(ⅈ⋅sin(θ) + cos(θ)) | r, θ ∊ [0, 1] × [0, 2⋅π)}'
assert upretty(ComplexRegion(Interval(0, 1)*Interval(0, 2*pi), polar=True)) == ucode_str
def test_pretty_Union_issue_10414():
a, b = Interval(2, 3), Interval(4, 7)
ucode_str = u'[2, 3] ∪ [4, 7]'
ascii_str = '[2, 3] U [4, 7]'
assert upretty(Union(a, b)) == ucode_str
assert pretty(Union(a, b)) == ascii_str
def test_pretty_Intersection_issue_10414():
x, y, z, w = symbols('x, y, z, w')
a, b = Interval(x, y), Interval(z, w)
ucode_str = u'[x, y] ∩ [z, w]'
ascii_str = '[x, y] n [z, w]'
assert upretty(Intersection(a, b)) == ucode_str
assert pretty(Intersection(a, b)) == ascii_str
def test_ProductSet_exponent():
ucode_str = '      1\n[0, 1] '
assert upretty(Interval(0, 1)**1) == ucode_str
ucode_str = '      2\n[0, 1] '
assert upretty(Interval(0, 1)**2) == ucode_str
def test_ProductSet_parenthesis():
ucode_str = u'([4, 7] × {1, 2}) ∪ ([2, 3] × [4, 7])'
a, b = Interval(2, 3), Interval(4, 7)
assert upretty(Union(a*b, b*FiniteSet(1, 2))) == ucode_str
def test_ProductSet_prod_char_issue_10413():
ascii_str = '[2, 3] x [4, 7]'
ucode_str = u'[2, 3] × [4, 7]'
a, b = Interval(2, 3), Interval(4, 7)
assert pretty(a*b) == ascii_str
assert upretty(a*b) == ucode_str
def test_pretty_sequences():
s1 = SeqFormula(a**2, (0, oo))
s2 = SeqPer((1, 2))
ascii_str = '[0, 1, 4, 9, ...]'
ucode_str = u'[0, 1, 4, 9, …]'
assert pretty(s1) == ascii_str
assert upretty(s1) == ucode_str
ascii_str = '[1, 2, 1, 2, ...]'
ucode_str = u'[1, 2, 1, 2, …]'
assert pretty(s2) == ascii_str
assert upretty(s2) == ucode_str
s3 = SeqFormula(a**2, (0, 2))
s4 = SeqPer((1, 2), (0, 2))
ascii_str = '[0, 1, 4]'
ucode_str = u'[0, 1, 4]'
assert pretty(s3) == ascii_str
assert upretty(s3) == ucode_str
ascii_str = '[1, 2, 1]'
ucode_str = u'[1, 2, 1]'
assert pretty(s4) == ascii_str
assert upretty(s4) == ucode_str
s5 = SeqFormula(a**2, (-oo, 0))
s6 = SeqPer((1, 2), (-oo, 0))
ascii_str = '[..., 9, 4, 1, 0]'
ucode_str = u'[…, 9, 4, 1, 0]'
assert pretty(s5) == ascii_str
assert upretty(s5) == ucode_str
ascii_str = '[..., 2, 1, 2, 1]'
ucode_str = u'[…, 2, 1, 2, 1]'
assert pretty(s6) == ascii_str
assert upretty(s6) == ucode_str
ascii_str = '[1, 3, 5, 11, ...]'
ucode_str = u'[1, 3, 5, 11, …]'
assert pretty(SeqAdd(s1, s2)) == ascii_str
assert upretty(SeqAdd(s1, s2)) == ucode_str
ascii_str = '[1, 3, 5]'
ucode_str = u'[1, 3, 5]'
assert pretty(SeqAdd(s3, s4)) == ascii_str
assert upretty(SeqAdd(s3, s4)) == ucode_str
ascii_str = '[..., 11, 5, 3, 1]'
ucode_str = u'[…, 11, 5, 3, 1]'
assert pretty(SeqAdd(s5, s6)) == ascii_str
assert upretty(SeqAdd(s5, s6)) == ucode_str
ascii_str = '[0, 2, 4, 18, ...]'
ucode_str = u'[0, 2, 4, 18, …]'
assert pretty(SeqMul(s1, s2)) == ascii_str
assert upretty(SeqMul(s1, s2)) == ucode_str
ascii_str = '[0, 2, 4]'
ucode_str = u'[0, 2, 4]'
assert pretty(SeqMul(s3, s4)) == ascii_str
assert upretty(SeqMul(s3, s4)) == ucode_str
ascii_str = '[..., 18, 4, 2, 0]'
ucode_str = u'[…, 18, 4, 2, 0]'
assert pretty(SeqMul(s5, s6)) == ascii_str
assert upretty(SeqMul(s5, s6)) == ucode_str
# Sequences with symbolic limits, issue 12629
s7 = SeqFormula(a**2, (a, 0, x))
raises(NotImplementedError, lambda: pretty(s7))
raises(NotImplementedError, lambda: upretty(s7))
b = Symbol('b')
s8 = SeqFormula(b*a**2, (a, 0, 2))
ascii_str = u'[0, b, 4*b]'
ucode_str = u'[0, b, 4⋅b]'
assert pretty(s8) == ascii_str
assert upretty(s8) == ucode_str
def test_pretty_FourierSeries():
f = fourier_series(x, (x, -pi, pi))
ascii_str = \
"""\
2*sin(3*x) \n\
2*sin(x) - sin(2*x) + ---------- + ...\n\
3 \
"""
ucode_str = \
u("""\
                      2⋅sin(3⋅x)    \n\
2⋅sin(x) - sin(2⋅x) + ────────── + …\n\
                          3         \
""")
assert pretty(f) == ascii_str
assert upretty(f) == ucode_str
def test_pretty_FormalPowerSeries():
f = fps(log(1 + x))
ascii_str = \
"""\
oo \n\
____ \n\
\\ ` \n\
\\ -k k \n\
\\ -(-1) *x \n\
/ -----------\n\
/ k \n\
/___, \n\
k = 1 \
"""
ucode_str = \
u("""\
  ∞              \n\
 ____            \n\
 ╲               \n\
  ╲       -k  k  \n\
   ╲  -(-1)  ⋅x  \n\
   ╱  ───────────\n\
  ╱        k     \n\
 ╱               \n\
 ‾‾‾‾            \n\
k = 1            \
""")
assert pretty(f) == ascii_str
assert upretty(f) == ucode_str
def test_pretty_limits():
expr = Limit(x, x, oo)
ascii_str = \
"""\
lim x\n\
x->oo \
"""
ucode_str = \
u("""\
 lim x\n\
x─→∞  \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Limit(x**2, x, 0)
ascii_str = \
"""\
2\n\
lim x \n\
x->0+ \
"""
ucode_str = \
u("""\
2\n\
lim x \n\
xโโ0โบ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Limit(1/x, x, 0)
ascii_str = \
"""\
1\n\
lim -\n\
x->0+x\
"""
ucode_str = \
u("""\
1\n\
lim โ\n\
xโโ0โบx\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Limit(sin(x)/x, x, 0)
ascii_str = \
"""\
/sin(x)\\\n\
lim |------|\n\
x->0+\\ x /\
"""
ucode_str = \
u("""\
โsin(x)โ\n\
lim โโโโโโโโ\n\
xโโ0โบโ x โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Limit(sin(x)/x, x, 0, "-")
ascii_str = \
"""\
/sin(x)\\\n\
lim |------|\n\
x->0-\\ x /\
"""
ucode_str = \
u("""\
โsin(x)โ\n\
lim โโโโโโโโ\n\
xโโ0โปโ x โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Limit(x + sin(x), x, 0)
ascii_str = \
"""\
lim (x + sin(x))\n\
x->0+ \
"""
ucode_str = \
u("""\
lim (x + sin(x))\n\
xโโ0โบ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Limit(x, x, 0)**2
ascii_str = \
"""\
2\n\
/ lim x\\ \n\
\\x->0+ / \
"""
ucode_str = \
u("""\
2\n\
โ lim xโ \n\
โxโโ0โบ โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Limit(x*Limit(y/2,y,0), x, 0)
ascii_str = \
"""\
/ /y\\\\\n\
lim |x* lim |-||\n\
x->0+\\ y->0+\\2//\
"""
ucode_str = \
u("""\
โ โyโโ\n\
lim โxโ
lim โโโโ\n\
xโโ0โบโ yโโ0โบโ2โ โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = 2*Limit(x*Limit(y/2,y,0), x, 0)
ascii_str = \
"""\
/ /y\\\\\n\
2* lim |x* lim |-||\n\
x->0+\\ y->0+\\2//\
"""
ucode_str = \
u("""\
โ โyโโ\n\
2โ
lim โxโ
lim โโโโ\n\
xโโ0โบโ yโโ0โบโ2โ โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Limit(sin(x), x, 0, dir='+-')
ascii_str = \
"""\
lim sin(x)\n\
x->0 \
"""
ucode_str = \
u("""\
lim sin(x)\n\
xโโ0 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_ComplexRootOf():
expr = rootof(x**5 + 11*x - 2, 0)
ascii_str = \
"""\
/ 5 \\\n\
CRootOf\\x + 11*x - 2, 0/\
"""
ucode_str = \
u("""\
       ⎛ 5              ⎞\n\
CRootOf⎝x  + 11⋅x - 2, 0⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_RootSum():
expr = RootSum(x**5 + 11*x - 2, auto=False)
ascii_str = \
"""\
/ 5 \\\n\
RootSum\\x + 11*x - 2/\
"""
ucode_str = \
u("""\
       ⎛ 5           ⎞\n\
RootSum⎝x  + 11⋅x - 2⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = RootSum(x**5 + 11*x - 2, Lambda(z, exp(z)))
ascii_str = \
"""\
/ 5 z\\\n\
RootSum\\x + 11*x - 2, z -> e /\
"""
ucode_str = \
u("""\
       ⎛ 5                  z⎞\n\
RootSum⎝x  + 11⋅x - 2, z ↦ ℯ ⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_GroebnerBasis():
expr = groebner([], x, y)
ascii_str = \
"""\
GroebnerBasis([], x, y, domain=ZZ, order=lex)\
"""
ucode_str = \
u("""\
GroebnerBasis([], x, y, domain=ℤ, order=lex)\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
F = [x**2 - 3*y - x + 1, y**2 - 2*x + y - 1]
expr = groebner(F, x, y, order='grlex')
ascii_str = \
"""\
/[ 2 2 ] \\\n\
GroebnerBasis\\[x - x - 3*y + 1, y - 2*x + y - 1], x, y, domain=ZZ, order=grlex/\
"""
ucode_str = \
u("""\
             ⎛⎡ 2                 2              ⎤                             ⎞\n\
GroebnerBasis⎝⎣x  - x - 3⋅y + 1, y  - 2⋅x + y - 1⎦, x, y, domain=ℤ, order=grlex⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = expr.fglm('lex')
ascii_str = \
"""\
/[ 2 4 3 2 ] \\\n\
GroebnerBasis\\[2*x - y - y + 1, y + 2*y - 3*y - 16*y + 7], x, y, domain=ZZ, order=lex/\
"""
ucode_str = \
u("""\
             ⎛⎡       2           4      3      2              ⎤                           ⎞\n\
GroebnerBasis⎝⎣2⋅x - y  - y + 1, y  + 2⋅y  - 3⋅y  - 16⋅y + 7⎦, x, y, domain=ℤ, order=lex⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_UniversalSet():
assert pretty(S.UniversalSet) == "UniversalSet"
assert upretty(S.UniversalSet) == u'𝕌'
def test_pretty_Boolean():
expr = Not(x, evaluate=False)
assert pretty(expr) == "Not(x)"
assert upretty(expr) == u"ยฌx"
expr = And(x, y)
assert pretty(expr) == "And(x, y)"
assert upretty(expr) == u"x โง y"
expr = Or(x, y)
assert pretty(expr) == "Or(x, y)"
assert upretty(expr) == u"x โจ y"
syms = symbols('a:f')
expr = And(*syms)
assert pretty(expr) == "And(a, b, c, d, e, f)"
assert upretty(expr) == u"a โง b โง c โง d โง e โง f"
expr = Or(*syms)
assert pretty(expr) == "Or(a, b, c, d, e, f)"
assert upretty(expr) == u"a โจ b โจ c โจ d โจ e โจ f"
expr = Xor(x, y, evaluate=False)
assert pretty(expr) == "Xor(x, y)"
assert upretty(expr) == u"x โป y"
expr = Nand(x, y, evaluate=False)
assert pretty(expr) == "Nand(x, y)"
assert upretty(expr) == u"x โผ y"
expr = Nor(x, y, evaluate=False)
assert pretty(expr) == "Nor(x, y)"
assert upretty(expr) == u"x โฝ y"
expr = Implies(x, y, evaluate=False)
assert pretty(expr) == "Implies(x, y)"
assert upretty(expr) == u"x โ y"
# don't sort args
expr = Implies(y, x, evaluate=False)
assert pretty(expr) == "Implies(y, x)"
assert upretty(expr) == u"y โ x"
expr = Equivalent(x, y, evaluate=False)
assert pretty(expr) == "Equivalent(x, y)"
assert upretty(expr) == u"x โ y"
expr = Equivalent(y, x, evaluate=False)
assert pretty(expr) == "Equivalent(x, y)"
assert upretty(expr) == u"x โ y"
def test_pretty_Domain():
expr = FF(23)
assert pretty(expr) == "GF(23)"
assert upretty(expr) == u"โคโโ"
expr = ZZ
assert pretty(expr) == "ZZ"
assert upretty(expr) == u"โค"
expr = QQ
assert pretty(expr) == "QQ"
assert upretty(expr) == u"โ"
expr = RR
assert pretty(expr) == "RR"
assert upretty(expr) == u"โ"
expr = QQ[x]
assert pretty(expr) == "QQ[x]"
assert upretty(expr) == u"โ[x]"
expr = QQ[x, y]
assert pretty(expr) == "QQ[x, y]"
assert upretty(expr) == u"โ[x, y]"
expr = ZZ.frac_field(x)
assert pretty(expr) == "ZZ(x)"
assert upretty(expr) == u"โค(x)"
expr = ZZ.frac_field(x, y)
assert pretty(expr) == "ZZ(x, y)"
assert upretty(expr) == u"โค(x, y)"
expr = QQ.poly_ring(x, y, order=grlex)
assert pretty(expr) == "QQ[x, y, order=grlex]"
assert upretty(expr) == u"โ[x, y, order=grlex]"
expr = QQ.poly_ring(x, y, order=ilex)
assert pretty(expr) == "QQ[x, y, order=ilex]"
assert upretty(expr) == u"โ[x, y, order=ilex]"
def test_pretty_prec():
assert xpretty(S("0.3"), full_prec=True, wrap_line=False) == "0.300000000000000"
assert xpretty(S("0.3"), full_prec="auto", wrap_line=False) == "0.300000000000000"
assert xpretty(S("0.3"), full_prec=False, wrap_line=False) == "0.3"
assert xpretty(S("0.3")*x, full_prec=True, use_unicode=False, wrap_line=False) in [
"0.300000000000000*x",
"x*0.300000000000000"
]
assert xpretty(S("0.3")*x, full_prec="auto", use_unicode=False, wrap_line=False) in [
"0.3*x",
"x*0.3"
]
assert xpretty(S("0.3")*x, full_prec=False, use_unicode=False, wrap_line=False) in [
"0.3*x",
"x*0.3"
]
def test_pprint():
import sys
from sympy.core.compatibility import StringIO
fd = StringIO()
sso = sys.stdout
sys.stdout = fd
try:
pprint(pi, use_unicode=False, wrap_line=False)
finally:
sys.stdout = sso
assert fd.getvalue() == 'pi\n'
def test_pretty_class():
"""Test that the printer dispatcher correctly handles classes."""
class C:
pass # C has no .__class__ and this was causing problems
class D(object):
pass
assert pretty( C ) == str( C )
assert pretty( D ) == str( D )
def test_pretty_no_wrap_line():
huge_expr = 0
for i in range(20):
huge_expr += i*sin(i + x)
assert xpretty(huge_expr ).find('\n') != -1
assert xpretty(huge_expr, wrap_line=False).find('\n') == -1
def test_settings():
raises(TypeError, lambda: pretty(S(4), method="garbage"))
def test_pretty_sum():
from sympy.abc import x, a, b, k, m, n
expr = Sum(k**k, (k, 0, n))
ascii_str = \
"""\
n \n\
___ \n\
\\ ` \n\
\\ k\n\
/ k \n\
/__, \n\
k = 0 \
"""
ucode_str = \
u("""\
n \n\
___ \n\
โฒ \n\
โฒ k\n\
โฑ k \n\
โฑ \n\
โพโพโพ \n\
k = 0 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Sum(k**k, (k, oo, n))
ascii_str = \
"""\
n \n\
___ \n\
\\ ` \n\
\\ k\n\
/ k \n\
/__, \n\
k = oo \
"""
ucode_str = \
u("""\
n \n\
___ \n\
โฒ \n\
โฒ k\n\
โฑ k \n\
โฑ \n\
โพโพโพ \n\
k = โ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Sum(k**(Integral(x**n, (x, -oo, oo))), (k, 0, n**n))
ascii_str = \
"""\
n \n\
n \n\
______ \n\
\\ ` \n\
\\ oo \n\
\\ / \n\
\\ | \n\
\\ | n \n\
) | x dx\n\
/ | \n\
/ / \n\
/ -oo \n\
/ k \n\
/_____, \n\
k = 0 \
"""
ucode_str = \
u("""\
n \n\
n \n\
______ \n\
โฒ \n\
โฒ \n\
โฒ โ \n\
โฒ โ \n\
โฒ โฎ n \n\
โฑ โฎ x dx\n\
โฑ โก \n\
โฑ -โ \n\
โฑ k \n\
โฑ \n\
โพโพโพโพโพโพ \n\
k = 0 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Sum(k**(
Integral(x**n, (x, -oo, oo))), (k, 0, Integral(x**x, (x, -oo, oo))))
ascii_str = \
"""\
oo \n\
/ \n\
| \n\
| x \n\
| x dx \n\
| \n\
/ \n\
-oo \n\
______ \n\
\\ ` \n\
\\ oo \n\
\\ / \n\
\\ | \n\
\\ | n \n\
) | x dx\n\
/ | \n\
/ / \n\
/ -oo \n\
/ k \n\
/_____, \n\
k = 0 \
"""
ucode_str = \
u("""\
โ \n\
โ \n\
โฎ x \n\
โฎ x dx \n\
โก \n\
-โ \n\
______ \n\
โฒ \n\
โฒ \n\
โฒ โ \n\
โฒ โ \n\
โฒ โฎ n \n\
โฑ โฎ x dx\n\
โฑ โก \n\
โฑ -โ \n\
โฑ k \n\
โฑ \n\
โพโพโพโพโพโพ \n\
k = 0 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Sum(k**(Integral(x**n, (x, -oo, oo))), (
k, x + n + x**2 + n**2 + (x/n) + (1/x), Integral(x**x, (x, -oo, oo))))
ascii_str = \
"""\
oo \n\
/ \n\
| \n\
| x \n\
| x dx \n\
| \n\
/ \n\
-oo \n\
______ \n\
\\ ` \n\
\\ oo \n\
\\ / \n\
\\ | \n\
\\ | n \n\
) | x dx\n\
/ | \n\
/ / \n\
/ -oo \n\
/ k \n\
/_____, \n\
2 2 1 x \n\
k = n + n + x + x + - + - \n\
x n \
"""
ucode_str = \
u("""\
โ \n\
โ \n\
โฎ x \n\
โฎ x dx \n\
โก \n\
-โ \n\
______ \n\
โฒ \n\
โฒ \n\
โฒ โ \n\
โฒ โ \n\
โฒ โฎ n \n\
โฑ โฎ x dx\n\
โฑ โก \n\
โฑ -โ \n\
โฑ k \n\
โฑ \n\
โพโพโพโพโพโพ \n\
2 2 1 x \n\
k = n + n + x + x + โ + โ \n\
x n \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Sum(k**(
Integral(x**n, (x, -oo, oo))), (k, 0, x + n + x**2 + n**2 + (x/n) + (1/x)))
ascii_str = \
"""\
2 2 1 x \n\
n + n + x + x + - + - \n\
x n \n\
______ \n\
\\ ` \n\
\\ oo \n\
\\ / \n\
\\ | \n\
\\ | n \n\
) | x dx\n\
/ | \n\
/ / \n\
/ -oo \n\
/ k \n\
/_____, \n\
k = 0 \
"""
ucode_str = \
u("""\
2 2 1 x \n\
n + n + x + x + โ + โ \n\
x n \n\
______ \n\
โฒ \n\
โฒ \n\
โฒ โ \n\
โฒ โ \n\
โฒ โฎ n \n\
โฑ โฎ x dx\n\
โฑ โก \n\
โฑ -โ \n\
โฑ k \n\
โฑ \n\
โพโพโพโพโพโพ \n\
k = 0 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Sum(x, (x, 0, oo))
ascii_str = \
"""\
oo \n\
__ \n\
\\ ` \n\
) x\n\
/_, \n\
x = 0 \
"""
ucode_str = \
u("""\
โ \n\
___ \n\
โฒ \n\
โฒ \n\
โฑ x\n\
โฑ \n\
โพโพโพ \n\
x = 0 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Sum(x**2, (x, 0, oo))
ascii_str = \
u("""\
oo \n\
___ \n\
\\ ` \n\
\\ 2\n\
/ x \n\
/__, \n\
x = 0 \
""")
ucode_str = \
u("""\
โ \n\
___ \n\
โฒ \n\
โฒ 2\n\
โฑ x \n\
โฑ \n\
โพโพโพ \n\
x = 0 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Sum(x/2, (x, 0, oo))
ascii_str = \
"""\
oo \n\
___ \n\
\\ ` \n\
\\ x\n\
) -\n\
/ 2\n\
/__, \n\
x = 0 \
"""
ucode_str = \
u("""\
โ \n\
____ \n\
โฒ \n\
โฒ \n\
โฒ x\n\
โฑ โ\n\
โฑ 2\n\
โฑ \n\
โพโพโพโพ \n\
x = 0 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Sum(x**3/2, (x, 0, oo))
ascii_str = \
"""\
oo \n\
____ \n\
\\ ` \n\
\\ 3\n\
\\ x \n\
/ --\n\
/ 2 \n\
/___, \n\
x = 0 \
"""
ucode_str = \
u("""\
โ \n\
____ \n\
โฒ \n\
โฒ 3\n\
โฒ x \n\
โฑ โโ\n\
โฑ 2 \n\
โฑ \n\
โพโพโพโพ \n\
x = 0 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Sum((x**3*y**(x/2))**n, (x, 0, oo))
ascii_str = \
"""\
oo \n\
____ \n\
\\ ` \n\
\\ n\n\
\\ / x\\ \n\
) | -| \n\
/ | 3 2| \n\
/ \\x *y / \n\
/___, \n\
x = 0 \
"""
ucode_str = \
u("""\
โ \n\
_____ \n\
โฒ \n\
โฒ \n\
โฒ n\n\
โฒ โ xโ \n\
โฑ โ โโ \n\
โฑ โ 3 2โ \n\
  ╱    ⎝x ⋅y ⎠ \n\
โฑ \n\
โพโพโพโพโพ \n\
x = 0 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Sum(1/x**2, (x, 0, oo))
ascii_str = \
"""\
oo \n\
____ \n\
\\ ` \n\
\\ 1 \n\
\\ --\n\
/ 2\n\
/ x \n\
/___, \n\
x = 0 \
"""
ucode_str = \
u("""\
โ \n\
____ \n\
โฒ \n\
โฒ 1 \n\
โฒ โโ\n\
โฑ 2\n\
โฑ x \n\
โฑ \n\
โพโพโพโพ \n\
x = 0 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Sum(1/y**(a/b), (x, 0, oo))
ascii_str = \
"""\
oo \n\
____ \n\
\\ ` \n\
\\ -a \n\
\\ ---\n\
/ b \n\
/ y \n\
/___, \n\
x = 0 \
"""
ucode_str = \
u("""\
โ \n\
____ \n\
โฒ \n\
โฒ -a \n\
โฒ โโโ\n\
โฑ b \n\
โฑ y \n\
โฑ \n\
โพโพโพโพ \n\
x = 0 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Sum(1/y**(a/b), (x, 0, oo), (y, 1, 2))
ascii_str = \
"""\
2 oo \n\
____ ____ \n\
\\ ` \\ ` \n\
\\ \\ -a\n\
\\ \\ --\n\
/ / b \n\
/ / y \n\
/___, /___, \n\
y = 1 x = 0 \
"""
ucode_str = \
u("""\
2 โ \n\
____ ____ \n\
โฒ โฒ \n\
โฒ โฒ -a\n\
โฒ โฒ โโ\n\
โฑ โฑ b \n\
โฑ โฑ y \n\
โฑ โฑ \n\
โพโพโพโพ โพโพโพโพ \n\
y = 1 x = 0 \
""")
expr = Sum(1/(1 + 1/(
1 + 1/k)) + 1, (k, 111, 1 + 1/n), (k, 1/(1 + m), oo)) + 1/(1 + 1/k)
ascii_str = \
"""\
1 \n\
1 + - \n\
oo n \n\
_____ _____ \n\
\\ ` \\ ` \n\
\\ \\ / 1 \\ \n\
\\ \\ |1 + ---------| \n\
\\ \\ | 1 | 1 \n\
) ) | 1 + -----| + -----\n\
/ / | 1| 1\n\
/ / | 1 + -| 1 + -\n\
/ / \\ k/ k\n\
/____, /____, \n\
1 k = 111 \n\
k = ----- \n\
m + 1 \
"""
ucode_str = \
u("""\
1 \n\
1 + โ \n\
โ n \n\
______ ______ \n\
โฒ โฒ \n\
โฒ โฒ \n\
โฒ โฒ โ 1 โ \n\
โฒ โฒ โ1 + โโโโโโโโโโ \n\
โฒ โฒ โ 1 โ 1 \n\
โฑ โฑ โ 1 + โโโโโโ + โโโโโ\n\
โฑ โฑ โ 1โ 1\n\
โฑ โฑ โ 1 + โโ 1 + โ\n\
โฑ โฑ โ kโ k\n\
โฑ โฑ \n\
โพโพโพโพโพโพ โพโพโพโพโพโพ \n\
1 k = 111 \n\
k = โโโโโ \n\
m + 1 \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_units():
expr = joule
ascii_str1 = \
"""\
2\n\
kilogram*meter \n\
---------------\n\
2 \n\
second \
"""
unicode_str1 = \
u("""\
2\n\
kilogram⋅meter \n\
───────────────\n\
2 \n\
second \
""")
ascii_str2 = \
"""\
2\n\
3*x*y*kilogram*meter \n\
---------------------\n\
2 \n\
second \
"""
unicode_str2 = \
u("""\
2\n\
3⋅x⋅y⋅kilogram⋅meter \n\
─────────────────────\n\
2 \n\
second \
""")
from sympy.physics.units import kg, m, s
assert upretty(expr) == u("joule")
assert pretty(expr) == "joule"
assert upretty(expr.convert_to(kg*m**2/s**2)) == unicode_str1
assert pretty(expr.convert_to(kg*m**2/s**2)) == ascii_str1
assert upretty(3*kg*x*m**2*y/s**2) == unicode_str2
assert pretty(3*kg*x*m**2*y/s**2) == ascii_str2
def test_pretty_Subs():
f = Function('f')
expr = Subs(f(x), x, ph**2)
ascii_str = \
"""\
(f(x))| 2\n\
|x=phi \
"""
unicode_str = \
u("""\
(f(x))โ 2\n\
โx=ฯ \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == unicode_str
expr = Subs(f(x).diff(x), x, 0)
ascii_str = \
"""\
/d \\| \n\
|--(f(x))|| \n\
\\dx /|x=0\
"""
unicode_str = \
u("""\
โd โโ \n\
โโโ(f(x))โโ \n\
โdx โ โx=0\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == unicode_str
expr = Subs(f(x).diff(x)/y, (x, y), (0, Rational(1, 2)))
ascii_str = \
"""\
/d \\| \n\
|--(f(x))|| \n\
|dx || \n\
|--------|| \n\
\\ y /|x=0, y=1/2\
"""
unicode_str = \
u("""\
โd โโ \n\
โโโ(f(x))โโ \n\
โdx โโ \n\
โโโโโโโโโโโ \n\
โ y โ โx=0, y=1/2\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == unicode_str
def test_gammas():
assert upretty(lowergamma(x, y)) == u"ฮณ(x, y)"
assert upretty(uppergamma(x, y)) == u"ฮ(x, y)"
assert xpretty(gamma(x), use_unicode=True) == u'ฮ(x)'
assert xpretty(gamma, use_unicode=True) == u'ฮ'
assert xpretty(symbols('gamma', cls=Function)(x), use_unicode=True) == u'ฮณ(x)'
assert xpretty(symbols('gamma', cls=Function), use_unicode=True) == u'ฮณ'
def test_beta():
assert xpretty(beta(x,y), use_unicode=True) == u'ฮ(x, y)'
assert xpretty(beta(x,y), use_unicode=False) == u'B(x, y)'
assert xpretty(beta, use_unicode=True) == u'ฮ'
assert xpretty(beta, use_unicode=False) == u'B'
mybeta = Function('beta')
assert xpretty(mybeta(x), use_unicode=True) == u'ฮฒ(x)'
assert xpretty(mybeta(x, y, z), use_unicode=False) == u'beta(x, y, z)'
assert xpretty(mybeta, use_unicode=True) == u'ฮฒ'
# test that notation passes to subclasses of the same name only
def test_function_subclass_different_name():
class mygamma(gamma):
pass
assert xpretty(mygamma, use_unicode=True) == r"mygamma"
assert xpretty(mygamma(x), use_unicode=True) == r"mygamma(x)"
def test_SingularityFunction():
assert xpretty(SingularityFunction(x, 0, n), use_unicode=True) == (
"""\
n\n\
<x> \
""")
assert xpretty(SingularityFunction(x, 1, n), use_unicode=True) == (
"""\
n\n\
<x - 1> \
""")
assert xpretty(SingularityFunction(x, -1, n), use_unicode=True) == (
"""\
n\n\
<x + 1> \
""")
assert xpretty(SingularityFunction(x, a, n), use_unicode=True) == (
"""\
n\n\
<-a + x> \
""")
assert xpretty(SingularityFunction(x, y, n), use_unicode=True) == (
"""\
n\n\
<x - y> \
""")
assert xpretty(SingularityFunction(x, 0, n), use_unicode=False) == (
"""\
n\n\
<x> \
""")
assert xpretty(SingularityFunction(x, 1, n), use_unicode=False) == (
"""\
n\n\
<x - 1> \
""")
assert xpretty(SingularityFunction(x, -1, n), use_unicode=False) == (
"""\
n\n\
<x + 1> \
""")
assert xpretty(SingularityFunction(x, a, n), use_unicode=False) == (
"""\
n\n\
<-a + x> \
""")
assert xpretty(SingularityFunction(x, y, n), use_unicode=False) == (
"""\
n\n\
<x - y> \
""")
def test_deltas():
assert xpretty(DiracDelta(x), use_unicode=True) == u'ฮด(x)'
assert xpretty(DiracDelta(x, 1), use_unicode=True) == \
u("""\
(1) \n\
ฮด (x)\
""")
assert xpretty(x*DiracDelta(x, 1), use_unicode=True) == \
u("""\
(1) \n\
x⋅δ (x)\
""")
def test_hyper():
expr = hyper((), (), z)
ucode_str = \
u("""\
โโ โ โ โ\n\
โโ โ โ zโ\n\
0โต 0 โ โ โ \
""")
ascii_str = \
"""\
_ \n\
|_ / | \\\n\
| | | z|\n\
0 0 \\ | /\
"""
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = hyper((), (1,), x)
ucode_str = \
u("""\
โโ โ โ โ\n\
โโ โ โ xโ\n\
0โต 1 โ1 โ โ \
""")
ascii_str = \
"""\
_ \n\
|_ / | \\\n\
| | | x|\n\
0 1 \\1 | /\
"""
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = hyper([2], [1], x)
ucode_str = \
u("""\
โโ โ2 โ โ\n\
โโ โ โ xโ\n\
1โต 1 โ1 โ โ \
""")
ascii_str = \
"""\
_ \n\
|_ /2 | \\\n\
| | | x|\n\
1 1 \\1 | /\
"""
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = hyper((pi/3, -2*k), (3, 4, 5, -3), x)
ucode_str = \
u("""\
โ ฯ โ โ\n\
 ┌─  ⎜  ─, -2⋅k   │  ⎟\n\
โโ โ 3 โ xโ\n\
2โต 4 โ โ โ\n\
โ3, 4, 5, -3 โ โ \
""")
ascii_str = \
"""\
\n\
_ / pi | \\\n\
|_ | --, -2*k | |\n\
| | 3 | x|\n\
2 4 | | |\n\
\\3, 4, 5, -3 | /\
"""
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = hyper((pi, S('2/3'), -2*k), (3, 4, 5, -3), x**2)
ucode_str = \
u("""\
 ┌─  ⎛π, 2/3, -2⋅k │  2⎞\n\
โโ โ โ x โ\n\
3โต 4 โ3, 4, 5, -3 โ โ \
""")
ascii_str = \
"""\
_ \n\
|_ /pi, 2/3, -2*k | 2\\\n\
| | | x |\n\
3 4 \\ 3, 4, 5, -3 | /\
"""
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = hyper([1, 2], [3, 4], 1/(1/(1/(1/x + 1) + 1) + 1))
ucode_str = \
u("""\
โ โ 1 โ\n\
โ โ โโโโโโโโโโโโโโ\n\
โ โ 1 โ\n\
โโ โ1, 2 โ 1 + โโโโโโโโโโ\n\
โโ โ โ 1 โ\n\
2โต 2 โ3, 4 โ 1 + โโโโโโ\n\
โ โ 1โ\n\
โ โ 1 + โโ\n\
โ โ xโ \
""")
ascii_str = \
"""\
\n\
/ | 1 \\\n\
| | -------------|\n\
_ | | 1 |\n\
|_ |1, 2 | 1 + ---------|\n\
| | | 1 |\n\
2 2 |3, 4 | 1 + -----|\n\
| | 1|\n\
| | 1 + -|\n\
\\ | x/\
"""
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_meijerg():
expr = meijerg([pi, pi, x], [1], [0, 1], [1, 2, 3], z)
ucode_str = \
u("""\
โญโโฎ2, 3 โฯ, ฯ, x 1 โ โ\n\
โโถโ โ โ zโ\n\
โฐโโฏ4, 5 โ 0, 1 1, 2, 3 โ โ \
""")
ascii_str = \
"""\
__2, 3 /pi, pi, x 1 | \\\n\
/__ | | z|\n\
\\_|4, 5 \\ 0, 1 1, 2, 3 | /\
"""
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = meijerg([1, pi/7], [2, pi, 5], [], [], z**2)
ucode_str = \
u("""\
โ ฯ โ โ\n\
โญโโฎ0, 2 โ1, โ 2, ฯ, 5 โ 2โ\n\
โโถโ โ 7 โ z โ\n\
โฐโโฏ5, 0 โ โ โ\n\
โ โ โ \
""")
ascii_str = \
"""\
/ pi | \\\n\
__0, 2 |1, -- 2, pi, 5 | 2|\n\
/__ | 7 | z |\n\
\\_|5, 0 | | |\n\
\\ | /\
"""
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
ucode_str = \
u("""\
โญโโฎ 1, 10 โ1, 1, 1, 1, 1, 1, 1, 1, 1, 1 1 โ โ\n\
โโถโ โ โ zโ\n\
โฐโโฏ11, 2 โ 1 1 โ โ \
""")
ascii_str = \
"""\
__ 1, 10 /1, 1, 1, 1, 1, 1, 1, 1, 1, 1 1 | \\\n\
/__ | | z|\n\
\\_|11, 2 \\ 1 1 | /\
"""
expr = meijerg([1]*10, [1], [1], [1], z)
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = meijerg([1, 2, ], [4, 3], [3], [4, 5], 1/(1/(1/(1/x + 1) + 1) + 1))
ucode_str = \
u("""\
โ โ 1 โ\n\
โ โ โโโโโโโโโโโโโโ\n\
โ โ 1 โ\n\
โญโโฎ1, 2 โ1, 2 4, 3 โ 1 + โโโโโโโโโโ\n\
โโถโ โ โ 1 โ\n\
โฐโโฏ4, 3 โ 3 4, 5 โ 1 + โโโโโโ\n\
โ โ 1โ\n\
โ โ 1 + โโ\n\
โ โ xโ \
""")
ascii_str = \
"""\
/ | 1 \\\n\
| | -------------|\n\
| | 1 |\n\
__1, 2 |1, 2 4, 3 | 1 + ---------|\n\
/__ | | 1 |\n\
\\_|4, 3 | 3 4, 5 | 1 + -----|\n\
| | 1|\n\
| | 1 + -|\n\
\\ | x/\
"""
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = Integral(expr, x)
ucode_str = \
u("""\
โ \n\
โฎ โ โ 1 โ \n\
โฎ โ โ โโโโโโโโโโโโโโ \n\
โฎ โ โ 1 โ \n\
โฎ โญโโฎ1, 2 โ1, 2 4, 3 โ 1 + โโโโโโโโโโ \n\
โฎ โโถโ โ โ 1 โ dx\n\
โฎ โฐโโฏ4, 3 โ 3 4, 5 โ 1 + โโโโโโ \n\
โฎ โ โ 1โ \n\
โฎ โ โ 1 + โโ \n\
โฎ โ โ xโ \n\
โก \
""")
ascii_str = \
"""\
/ \n\
| \n\
| / | 1 \\ \n\
| | | -------------| \n\
| | | 1 | \n\
| __1, 2 |1, 2 4, 3 | 1 + ---------| \n\
| /__ | | 1 | dx\n\
| \\_|4, 3 | 3 4, 5 | 1 + -----| \n\
| | | 1| \n\
| | | 1 + -| \n\
| \\ | x/ \n\
| \n\
/ \
"""
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_noncommutative():
A, B, C = symbols('A,B,C', commutative=False)
expr = A*B*C**-1
ascii_str = \
"""\
-1\n\
A*B*C \
"""
ucode_str = \
u("""\
     -1\n\
A⋅B⋅C  \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = C**-1*A*B
ascii_str = \
"""\
-1 \n\
C *A*B\
"""
ucode_str = \
u("""\
 -1    \n\
C  ⋅A⋅B\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = A*C**-1*B
ascii_str = \
"""\
-1 \n\
A*C *B\
"""
ucode_str = \
u("""\
   -1  \n\
A⋅C  ⋅B\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = A*C**-1*B/x
ascii_str = \
"""\
-1 \n\
A*C *B\n\
-------\n\
x \
"""
ucode_str = \
u("""\
   -1  \n\
A⋅C  ⋅B\n\
───────\n\
   x   \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_special_functions():
x, y = symbols("x y")
# atan2
expr = atan2(y/sqrt(200), sqrt(x))
ascii_str = \
"""\
/ ___ \\\n\
|\\/ 2 *y ___|\n\
atan2|-------, \\/ x |\n\
\\ 20 /\
"""
ucode_str = \
u("""\
     ⎛√2⋅y     ⎞\n\
atan2⎜─────, √x⎟\n\
     ⎝  20     ⎠\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_geometry():
e = Segment((0, 1), (0, 2))
assert pretty(e) == 'Segment2D(Point2D(0, 1), Point2D(0, 2))'
e = Ray((1, 1), angle=4.02*pi)
assert pretty(e) == 'Ray2D(Point2D(1, 1), Point2D(2, tan(pi/50) + 1))'
def test_expint():
expr = Ei(x)
string = 'Ei(x)'
assert pretty(expr) == string
assert upretty(expr) == string
expr = expint(1, z)
ucode_str = u"Eโ(z)"
ascii_str = "expint(1, z)"
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
assert pretty(Shi(x)) == 'Shi(x)'
assert pretty(Si(x)) == 'Si(x)'
assert pretty(Ci(x)) == 'Ci(x)'
assert pretty(Chi(x)) == 'Chi(x)'
assert upretty(Shi(x)) == 'Shi(x)'
assert upretty(Si(x)) == 'Si(x)'
assert upretty(Ci(x)) == 'Ci(x)'
assert upretty(Chi(x)) == 'Chi(x)'
def test_elliptic_functions():
ascii_str = \
"""\
/ 1 \\\n\
K|-----|\n\
\\z + 1/\
"""
ucode_str = \
u("""\
โ 1 โ\n\
Kโโโโโโโ\n\
โz + 1โ \
""")
expr = elliptic_k(1/(z + 1))
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
ascii_str = \
"""\
/ | 1 \\\n\
F|1|-----|\n\
\\ |z + 1/\
"""
ucode_str = \
u("""\
โ โ 1 โ\n\
Fโ1โโโโโโโ\n\
โ โz + 1โ \
""")
expr = elliptic_f(1, 1/(1 + z))
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
ascii_str = \
"""\
/ 1 \\\n\
E|-----|\n\
\\z + 1/\
"""
ucode_str = \
u("""\
โ 1 โ\n\
Eโโโโโโโ\n\
โz + 1โ \
""")
expr = elliptic_e(1/(z + 1))
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
ascii_str = \
"""\
/ | 1 \\\n\
E|1|-----|\n\
\\ |z + 1/\
"""
ucode_str = \
u("""\
โ โ 1 โ\n\
Eโ1โโโโโโโ\n\
โ โz + 1โ \
""")
expr = elliptic_e(1, 1/(1 + z))
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
ascii_str = \
"""\
/ |4\\\n\
Pi|3|-|\n\
\\ |x/\
"""
ucode_str = \
u("""\
โ โ4โ\n\
ฮ โ3โโโ\n\
โ โxโ \
""")
expr = elliptic_pi(3, 4/x)
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
ascii_str = \
"""\
/ 4| \\\n\
Pi|3; -|6|\n\
\\ x| /\
"""
ucode_str = \
u("""\
โ 4โ โ\n\
ฮ โ3; โโ6โ\n\
โ xโ โ \
""")
expr = elliptic_pi(3, 4/x, 6)
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_RandomDomain():
from sympy.stats import Normal, Die, Exponential, pspace, where
X = Normal('x1', 0, 1)
assert upretty(where(X > 0)) == u"Domain: 0 < xโ โง xโ < โ"
D = Die('d1', 6)
assert upretty(where(D > 4)) == u'Domain: dโ = 5 โจ dโ = 6'
A = Exponential('a', 1)
B = Exponential('b', 1)
assert upretty(pspace(Tuple(A, B)).domain) == \
u'Domain: 0 โค a โง 0 โค b โง a < โ โง b < โ'
def test_PrettyPoly():
F = QQ.frac_field(x, y)
R = QQ.poly_ring(x, y)
expr = F.convert(x/(x + y))
assert pretty(expr) == "x/(x + y)"
assert upretty(expr) == u"x/(x + y)"
expr = R.convert(x + y)
assert pretty(expr) == "x + y"
assert upretty(expr) == u"x + y"
def test_issue_6285():
assert pretty(Pow(2, -5, evaluate=False)) == '1 \n--\n 5\n2 '
assert pretty(Pow(x, (1/pi))) == 'pi___\n\\/ x '
def test_issue_6359():
assert pretty(Integral(x**2, x)**2) == \
"""\
2
/ / \\ \n\
| | | \n\
| | 2 | \n\
| | x dx| \n\
| | | \n\
\\/ / \
"""
assert upretty(Integral(x**2, x)**2) == \
u("""\
2
โโ โ \n\
โโฎ 2 โ \n\
โโฎ x dxโ \n\
โโก โ \
""")
assert pretty(Sum(x**2, (x, 0, 1))**2) == \
"""\
2
/ 1 \\ \n\
| ___ | \n\
| \\ ` | \n\
| \\ 2| \n\
| / x | \n\
| /__, | \n\
\\x = 0 / \
"""
assert upretty(Sum(x**2, (x, 0, 1))**2) == \
u("""\
2
โ 1 โ \n\
โ ___ โ \n\
โ โฒ โ \n\
โ โฒ 2โ \n\
โ โฑ x โ \n\
โ โฑ โ \n\
โ โพโพโพ โ \n\
โx = 0 โ \
""")
assert pretty(Product(x**2, (x, 1, 2))**2) == \
"""\
2
/ 2 \\ \n\
|______ | \n\
| | | 2| \n\
| | | x | \n\
| | | | \n\
\\x = 1 / \
"""
assert upretty(Product(x**2, (x, 1, 2))**2) == \
u("""\
2
โ 2 โ \n\
โโโฌโโโฌโ โ \n\
โ โ โ 2โ \n\
โ โ โ x โ \n\
โ โ โ โ \n\
โx = 1 โ \
""")
f = Function('f')
assert pretty(Derivative(f(x), x)**2) == \
"""\
2
/d \\ \n\
|--(f(x))| \n\
\\dx / \
"""
assert upretty(Derivative(f(x), x)**2) == \
u("""\
2
โd โ \n\
โโโ(f(x))โ \n\
โdx โ \
""")
def test_issue_6739():
ascii_str = \
"""\
1 \n\
-----\n\
___\n\
\\/ x \
"""
ucode_str = \
u("""\
1 \n\
โโ\n\
โx\
""")
assert pretty(1/sqrt(x)) == ascii_str
assert upretty(1/sqrt(x)) == ucode_str
def test_complicated_symbol_unchanged():
for symb_name in ["dexpr2_d1tau", "dexpr2^d1tau"]:
assert pretty(Symbol(symb_name)) == symb_name
def test_categories():
from sympy.categories import (Object, IdentityMorphism,
NamedMorphism, Category, Diagram, DiagramGrid)
A1 = Object("A1")
A2 = Object("A2")
A3 = Object("A3")
f1 = NamedMorphism(A1, A2, "f1")
f2 = NamedMorphism(A2, A3, "f2")
id_A1 = IdentityMorphism(A1)
K1 = Category("K1")
assert pretty(A1) == "A1"
assert upretty(A1) == u"Aโ"
assert pretty(f1) == "f1:A1-->A2"
assert upretty(f1) == u"fโ:AโโโโถAโ"
assert pretty(id_A1) == "id:A1-->A1"
assert upretty(id_A1) == u"id:AโโโโถAโ"
assert pretty(f2*f1) == "f2*f1:A1-->A3"
assert upretty(f2*f1) == u"fโโfโ:AโโโโถAโ"
assert pretty(K1) == "K1"
assert upretty(K1) == u"Kโ"
# Test how diagrams are printed.
d = Diagram()
assert pretty(d) == "EmptySet"
assert upretty(d) == u"โ
"
d = Diagram({f1: "unique", f2: S.EmptySet})
assert pretty(d) == "{f2*f1:A1-->A3: EmptySet, id:A1-->A1: " \
"EmptySet, id:A2-->A2: EmptySet, id:A3-->A3: " \
"EmptySet, f1:A1-->A2: {unique}, f2:A2-->A3: EmptySet}"
assert upretty(d) == u("{fโโfโ:AโโโโถAโ: โ
, id:AโโโโถAโ: โ
, " \
"id:AโโโโถAโ: โ
, id:AโโโโถAโ: โ
, fโ:AโโโโถAโ: {unique}, fโ:AโโโโถAโ: โ
}")
d = Diagram({f1: "unique", f2: S.EmptySet}, {f2 * f1: "unique"})
assert pretty(d) == "{f2*f1:A1-->A3: EmptySet, id:A1-->A1: " \
"EmptySet, id:A2-->A2: EmptySet, id:A3-->A3: " \
"EmptySet, f1:A1-->A2: {unique}, f2:A2-->A3: EmptySet}" \
" ==> {f2*f1:A1-->A3: {unique}}"
assert upretty(d) == u("{fโโfโ:AโโโโถAโ: โ
, id:AโโโโถAโ: โ
, id:AโโโโถAโ: " \
"โ
, id:AโโโโถAโ: โ
, fโ:AโโโโถAโ: {unique}, fโ:AโโโโถAโ: โ
}" \
" โโโถ {fโโfโ:AโโโโถAโ: {unique}}")
grid = DiagramGrid(d)
assert pretty(grid) == "A1 A2\n \nA3 "
assert upretty(grid) == u"Aโ Aโ\n \nAโ "
def test_PrettyModules():
R = QQ.old_poly_ring(x, y)
F = R.free_module(2)
M = F.submodule([x, y], [1, x**2])
ucode_str = \
u("""\
2\n\
โ[x, y] \
""")
ascii_str = \
"""\
2\n\
QQ[x, y] \
"""
assert upretty(F) == ucode_str
assert pretty(F) == ascii_str
ucode_str = \
u("""\
โฑ โก 2โคโฒ\n\
โฒ[x, y], โฃ1, x โฆโฑ\
""")
ascii_str = \
"""\
2 \n\
<[x, y], [1, x ]>\
"""
assert upretty(M) == ucode_str
assert pretty(M) == ascii_str
I = R.ideal(x**2, y)
ucode_str = \
u("""\
โฑ 2 โฒ\n\
โฒx , yโฑ\
""")
ascii_str = \
"""\
2 \n\
<x , y>\
"""
assert upretty(I) == ucode_str
assert pretty(I) == ascii_str
Q = F / M
ucode_str = \
u("""\
2 \n\
โ[x, y] \n\
โโโโโโโโโโโโโโโโโ\n\
โฑ โก 2โคโฒ\n\
โฒ[x, y], โฃ1, x โฆโฑ\
""")
ascii_str = \
"""\
2 \n\
QQ[x, y] \n\
-----------------\n\
2 \n\
<[x, y], [1, x ]>\
"""
assert upretty(Q) == ucode_str
assert pretty(Q) == ascii_str
ucode_str = \
u("""\
โฑโก 3โค โฒ\n\
โโข x โฅ โฑ โก 2โคโฒ โฑ โก 2โคโฒโ\n\
โโข1, โโโฅ + โฒ[x, y], โฃ1, x โฆโฑ, [2, y] + โฒ[x, y], โฃ1, x โฆโฑโ\n\
โฒโฃ 2 โฆ โฑ\
""")
ascii_str = \
"""\
3 \n\
x 2 2 \n\
<[1, --] + <[x, y], [1, x ]>, [2, y] + <[x, y], [1, x ]>>\n\
2 \
"""
def test_QuotientRing():
R = QQ.old_poly_ring(x)/[x**2 + 1]
ucode_str = \
u("""\
โ[x] \n\
โโโโโโโโ\n\
โฑ 2 โฒ\n\
โฒx + 1โฑ\
""")
ascii_str = \
"""\
QQ[x] \n\
--------\n\
2 \n\
<x + 1>\
"""
assert upretty(R) == ucode_str
assert pretty(R) == ascii_str
ucode_str = \
u("""\
โฑ 2 โฒ\n\
1 + โฒx + 1โฑ\
""")
ascii_str = \
"""\
2 \n\
1 + <x + 1>\
"""
assert upretty(R.one) == ucode_str
assert pretty(R.one) == ascii_str
def test_Homomorphism():
from sympy.polys.agca import homomorphism
R = QQ.old_poly_ring(x)
expr = homomorphism(R.free_module(1), R.free_module(1), [0])
ucode_str = \
u("""\
1 1\n\
[0] : โ[x] โโ> โ[x] \
""")
ascii_str = \
"""\
1 1\n\
[0] : QQ[x] --> QQ[x] \
"""
assert upretty(expr) == ucode_str
assert pretty(expr) == ascii_str
expr = homomorphism(R.free_module(2), R.free_module(2), [0, 0])
ucode_str = \
u("""\
โก0 0โค 2 2\n\
โข โฅ : โ[x] โโ> โ[x] \n\
โฃ0 0โฆ \
""")
ascii_str = \
"""\
[0 0] 2 2\n\
[ ] : QQ[x] --> QQ[x] \n\
[0 0] \
"""
assert upretty(expr) == ucode_str
assert pretty(expr) == ascii_str
expr = homomorphism(R.free_module(1), R.free_module(1) / [[x]], [0])
ucode_str = \
u("""\
1\n\
1 โ[x] \n\
[0] : โ[x] โโ> โโโโโ\n\
<[x]>\
""")
ascii_str = \
"""\
1\n\
1 QQ[x] \n\
[0] : QQ[x] --> ------\n\
<[x]> \
"""
assert upretty(expr) == ucode_str
assert pretty(expr) == ascii_str
def test_Tr():
A, B = symbols('A B', commutative=False)
t = Tr(A*B)
assert pretty(t) == r'Tr(A*B)'
    assert upretty(t) == u'Tr(A⋅B)'
def test_pretty_Add():
eq = Mul(-2, x - 2, evaluate=False) + 5
assert pretty(eq) == '5 - 2*(x - 2)'
def test_issue_7179():
assert upretty(Not(Equivalent(x, y))) == u'x โ y'
assert upretty(Not(Implies(x, y))) == u'x โ y'
def test_issue_7180():
assert upretty(Equivalent(x, y)) == u'x โ y'
def test_pretty_Complement():
assert pretty(S.Reals - S.Naturals) == '(-oo, oo) \\ Naturals'
assert upretty(S.Reals - S.Naturals) == u'โ \\ โ'
assert pretty(S.Reals - S.Naturals0) == '(-oo, oo) \\ Naturals0'
assert upretty(S.Reals - S.Naturals0) == u'โ \\ โโ'
def test_pretty_SymmetricDifference():
from sympy import SymmetricDifference, Interval
from sympy.testing.pytest import raises
assert upretty(SymmetricDifference(Interval(2,3), Interval(3,5), \
evaluate = False)) == u'[2, 3] โ [3, 5]'
with raises(NotImplementedError):
pretty(SymmetricDifference(Interval(2,3), Interval(3,5), evaluate = False))
def test_pretty_Contains():
assert pretty(Contains(x, S.Integers)) == 'Contains(x, Integers)'
assert upretty(Contains(x, S.Integers)) == u'x โ โค'
def test_issue_8292():
from sympy.core import sympify
e = sympify('((x+x**4)/(x-1))-(2*(x-1)**4/(x-1)**4)', evaluate=False)
ucode_str = \
u("""\
4 4 \n\
  2⋅(x - 1)    x  + x\n\
- ────────── + ──────\n\
4 x - 1 \n\
(x - 1) \
""")
ascii_str = \
"""\
4 4 \n\
2*(x - 1) x + x\n\
- ---------- + ------\n\
4 x - 1 \n\
(x - 1) \
"""
assert pretty(e) == ascii_str
assert upretty(e) == ucode_str
def test_issue_4335():
y = Function('y')
expr = -y(x).diff(x)
ucode_str = \
u("""\
d \n\
-โโ(y(x))\n\
dx \
""")
ascii_str = \
"""\
d \n\
- --(y(x))\n\
dx \
"""
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_issue_8344():
from sympy.core import sympify
e = sympify('2*x*y**2/1**2 + 1', evaluate=False)
ucode_str = \
u("""\
2 \n\
2⋅x⋅y     \n\
────── + 1\n\
2 \n\
1 \
""")
assert upretty(e) == ucode_str
def test_issue_6324():
x = Pow(2, 3, evaluate=False)
y = Pow(10, -2, evaluate=False)
e = Mul(x, y, evaluate=False)
ucode_str = \
u("""\
3\n\
2 \n\
โโโ\n\
2\n\
10 \
""")
assert upretty(e) == ucode_str
def test_issue_7927():
e = sin(x/2)**cos(x/2)
ucode_str = \
u("""\
โxโ\n\
cosโโโ\n\
โ2โ \n\
โ โxโโ \n\
โsinโโโโ \n\
โ โ2โ โ \
""")
assert upretty(e) == ucode_str
e = sin(x)**(S(11)/13)
ucode_str = \
u("""\
11\n\
โโ\n\
13\n\
(sin(x)) \
""")
assert upretty(e) == ucode_str
def test_issue_6134():
from sympy.abc import lamda, t
phi = Function('phi')
e = lamda*x*Integral(phi(t)*pi*sin(pi*t), (t, 0, 1)) + lamda*x**2*Integral(phi(t)*2*pi*sin(2*pi*t), (t, 0, 1))
ucode_str = \
u("""\
1 1 \n\
 2   ⌠                              ⌠                       \n\
λ⋅x ⋅⎮ 2⋅π⋅φ(t)⋅sin(2⋅π⋅t) dt + λ⋅x⋅⎮ π⋅φ(t)⋅sin(π⋅t) dt\n\
     ⌡                              ⌡                       \n\
0 0 \
""")
assert upretty(e) == ucode_str
def test_issue_9877():
ucode_str1 = u'(2, 3) โช ([1, 2] \\ {x})'
a, b, c = Interval(2, 3, True, True), Interval(1, 2), FiniteSet(x)
assert upretty(Union(a, Complement(b, c))) == ucode_str1
ucode_str2 = u'{x} โฉ {y} โฉ ({z} \\ [1, 2])'
d, e, f, g = FiniteSet(x), FiniteSet(y), FiniteSet(z), Interval(1, 2)
assert upretty(Intersection(d, e, Complement(f, g))) == ucode_str2
def test_issue_13651():
expr1 = c + Mul(-1, a + b, evaluate=False)
assert pretty(expr1) == 'c - (a + b)'
expr2 = c + Mul(-1, a - b + d, evaluate=False)
assert pretty(expr2) == 'c - (a - b + d)'
def test_pretty_primenu():
from sympy.ntheory.factor_ import primenu
ascii_str1 = "nu(n)"
ucode_str1 = u("ฮฝ(n)")
n = symbols('n', integer=True)
assert pretty(primenu(n)) == ascii_str1
assert upretty(primenu(n)) == ucode_str1
def test_pretty_primeomega():
from sympy.ntheory.factor_ import primeomega
ascii_str1 = "Omega(n)"
ucode_str1 = u("ฮฉ(n)")
n = symbols('n', integer=True)
assert pretty(primeomega(n)) == ascii_str1
assert upretty(primeomega(n)) == ucode_str1
def test_pretty_Mod():
from sympy.core import Mod
ascii_str1 = "x mod 7"
ucode_str1 = u("x mod 7")
ascii_str2 = "(x + 1) mod 7"
ucode_str2 = u("(x + 1) mod 7")
ascii_str3 = "2*x mod 7"
ucode_str3 = u("2โ
x mod 7")
ascii_str4 = "(x mod 7) + 1"
ucode_str4 = u("(x mod 7) + 1")
ascii_str5 = "2*(x mod 7)"
ucode_str5 = u("2โ
(x mod 7)")
x = symbols('x', integer=True)
assert pretty(Mod(x, 7)) == ascii_str1
assert upretty(Mod(x, 7)) == ucode_str1
assert pretty(Mod(x + 1, 7)) == ascii_str2
assert upretty(Mod(x + 1, 7)) == ucode_str2
assert pretty(Mod(2 * x, 7)) == ascii_str3
assert upretty(Mod(2 * x, 7)) == ucode_str3
assert pretty(Mod(x, 7) + 1) == ascii_str4
assert upretty(Mod(x, 7) + 1) == ucode_str4
assert pretty(2 * Mod(x, 7)) == ascii_str5
assert upretty(2 * Mod(x, 7)) == ucode_str5
def test_issue_11801():
assert pretty(Symbol("")) == ""
assert upretty(Symbol("")) == ""
def test_pretty_UnevaluatedExpr():
x = symbols('x')
he = UnevaluatedExpr(1/x)
ucode_str = \
u("""\
1\n\
โ\n\
x\
""")
assert upretty(he) == ucode_str
ucode_str = \
u("""\
2\n\
โ1โ \n\
โโโ \n\
โxโ \
""")
assert upretty(he**2) == ucode_str
ucode_str = \
u("""\
1\n\
1 + โ\n\
x\
""")
assert upretty(he + 1) == ucode_str
ucode_str = \
u('''\
  1\n\
x⋅─\n\
  x\
''')
assert upretty(x*he) == ucode_str
def test_issue_10472():
M = (Matrix([[0, 0], [0, 0]]), Matrix([0, 0]))
ucode_str = \
u("""\
โโก0 0โค โก0โคโ
โโข โฅ, โข โฅโ
โโฃ0 0โฆ โฃ0โฆโ \
""")
assert upretty(M) == ucode_str
def test_MatrixElement_printing():
# test cases for issue #11821
A = MatrixSymbol("A", 1, 3)
B = MatrixSymbol("B", 1, 3)
C = MatrixSymbol("C", 1, 3)
ascii_str1 = "A_00"
ucode_str1 = u("Aโโ")
assert pretty(A[0, 0]) == ascii_str1
assert upretty(A[0, 0]) == ucode_str1
ascii_str1 = "3*A_00"
ucode_str1 = u("3โ
Aโโ")
assert pretty(3*A[0, 0]) == ascii_str1
assert upretty(3*A[0, 0]) == ucode_str1
ascii_str1 = "(-B + A)[0, 0]"
ucode_str1 = u("(-B + A)[0, 0]")
F = C[0, 0].subs(C, A - B)
assert pretty(F) == ascii_str1
assert upretty(F) == ucode_str1
def test_issue_12675():
from sympy.vector import CoordSys3D
x, y, t, j = symbols('x y t j')
e = CoordSys3D('e')
ucode_str = \
u("""\
โ tโ \n\
โโxโ โ j_e\n\
โโโโ โ \n\
โโyโ โ \
""")
assert upretty((x/y)**t*e.j) == ucode_str
ucode_str = \
u("""\
โ1โ \n\
โโโ j_e\n\
โyโ \
""")
assert upretty((1/y)*e.j) == ucode_str
def test_MatrixSymbol_printing():
# test cases for issue #14237
A = MatrixSymbol("A", 3, 3)
B = MatrixSymbol("B", 3, 3)
C = MatrixSymbol("C", 3, 3)
assert pretty(-A*B*C) == "-A*B*C"
assert pretty(A - B) == "-B + A"
assert pretty(A*B*C - A*B - B*C) == "-A*B -B*C + A*B*C"
# issue #14814
x = MatrixSymbol('x', n, n)
y = MatrixSymbol('y*', n, n)
assert pretty(x + y) == "x + y*"
ascii_str = \
"""\
2 \n\
-2*y* -a*x\
"""
assert pretty(-a*x + -2*y*y) == ascii_str
def test_degree_printing():
expr1 = 90*degree
assert pretty(expr1) == u'90ยฐ'
expr2 = x*degree
assert pretty(expr2) == u'xยฐ'
expr3 = cos(x*degree + 90*degree)
assert pretty(expr3) == u'cos(xยฐ + 90ยฐ)'
def test_vector_expr_pretty_printing():
A = CoordSys3D('A')
    assert upretty(Cross(A.i, A.x*A.i+3*A.y*A.j)) == u("(i_A)×((x_A) i_A + (3⋅y_A) j_A)")
    assert upretty(x*Cross(A.i, A.j)) == u('x⋅(i_A)×(j_A)')
    assert upretty(Curl(A.x*A.i + 3*A.y*A.j)) == u("∇×((x_A) i_A + (3⋅y_A) j_A)")
    assert upretty(Divergence(A.x*A.i + 3*A.y*A.j)) == u("∇⋅((x_A) i_A + (3⋅y_A) j_A)")
    assert upretty(Dot(A.i, A.x*A.i+3*A.y*A.j)) == u("(i_A)⋅((x_A) i_A + (3⋅y_A) j_A)")
    assert upretty(Gradient(A.x+3*A.y)) == u("∇(x_A + 3⋅y_A)")
    assert upretty(Laplacian(A.x+3*A.y)) == u("∆(x_A + 3⋅y_A)")
# TODO: add support for ASCII pretty.
def test_pretty_print_tensor_expr():
L = TensorIndexType("L")
i, j, k = tensor_indices("i j k", L)
i0 = tensor_indices("i_0", L)
A, B, C, D = tensor_heads("A B C D", [L])
H = TensorHead("H", [L, L])
expr = -i
ascii_str = \
"""\
-i\
"""
ucode_str = \
u("""\
-i\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = A(i)
ascii_str = \
"""\
i\n\
A \n\
\
"""
ucode_str = \
u("""\
i\n\
A \n\
\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = A(i0)
ascii_str = \
"""\
i_0\n\
A \n\
\
"""
ucode_str = \
u("""\
iโ\n\
A \n\
\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = A(-i)
ascii_str = \
"""\
\n\
A \n\
i\
"""
ucode_str = \
u("""\
\n\
A \n\
i\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = -3*A(-i)
ascii_str = \
"""\
\n\
-3*A \n\
i\
"""
ucode_str = \
u("""\
     \n\
-3⋅A \n\
    i\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = H(i, -j)
ascii_str = \
"""\
i \n\
H \n\
j\
"""
ucode_str = \
u("""\
i \n\
H \n\
j\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = H(i, -i)
ascii_str = \
"""\
L_0 \n\
H \n\
L_0\
"""
ucode_str = \
u("""\
Lโ \n\
H \n\
Lโ\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = H(i, -j)*A(j)*B(k)
ascii_str = \
"""\
i L_0 k\n\
H *A *B \n\
L_0 \
"""
ucode_str = \
u("""\
 i      L₀  k\n\
H    ⋅A   ⋅B \n\
  L₀         \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = (1+x)*A(i)
ascii_str = \
"""\
i\n\
(x + 1)*A \n\
\
"""
ucode_str = \
u("""\
          i\n\
(x + 1)⋅A  \n\
           \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = A(i) + 3*B(i)
ascii_str = \
"""\
i i\n\
3*B + A \n\
\
"""
ucode_str = \
u("""\
   i    i\n\
3⋅B  + A \n\
         \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_pretty_print_tensor_partial_deriv():
from sympy.tensor.toperators import PartialDerivative
from sympy.tensor.tensor import TensorIndexType, tensor_indices, TensorHead, tensor_heads
L = TensorIndexType("L")
i, j, k = tensor_indices("i j k", L)
A, B, C, D = tensor_heads("A B C D", [L])
H = TensorHead("H", [L, L])
expr = PartialDerivative(A(i), A(j))
ascii_str = \
"""\
d / i\\\n\
---|A |\n\
j\\ /\n\
dA \n\
\
"""
ucode_str = \
u("""\
โ โ iโ\n\
โโโโA โ\n\
jโ โ \n\
โA \n\
\
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = A(i)*PartialDerivative(H(k, -i), A(j))
ascii_str = \
"""\
L_0 d / k \\\n\
A *---|H |\n\
j\\ L_0/\n\
dA \n\
\
"""
ucode_str = \
u("""\
 L₀   ∂ ⎛ k   ⎞\n\
A   ⋅───⎜H    ⎟\n\
      j ⎝   L₀⎠\n\
    ∂A         \n\
               \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = A(i)*PartialDerivative(B(k)*C(-i) + 3*H(k, -i), A(j))
ascii_str = \
"""\
L_0 d / k k \\\n\
A *---|3*H + B *C |\n\
j\\ L_0 L_0/\n\
dA \n\
\
"""
ucode_str = \
u("""\
 L₀   ∂ ⎛   k       k    ⎞\n\
A   ⋅───⎜3⋅H     + B ⋅C  ⎟\n\
      j ⎝    L₀        L₀⎠\n\
    ∂A                    \n\
                          \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = (A(i) + B(i))*PartialDerivative(C(j), D(j))
ascii_str = \
"""\
/ i i\\ d / L_0\\\n\
|A + B |*-----|C |\n\
\\ / L_0\\ /\n\
dD \n\
\
"""
ucode_str = \
u("""\
⎛ i    i⎞   ∂  ⎛ L₀⎞\n\
⎜A  + B ⎟⋅─────⎜C  ⎟\n\
⎝       ⎠    L₀⎝   ⎠\n\
           ∂D       \n\
                    \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = (A(i) + B(i))*PartialDerivative(C(-i), D(j))
ascii_str = \
"""\
/ L_0 L_0\\ d / \\\n\
|A + B |*---|C |\n\
\\ / j\\ L_0/\n\
dD \n\
\
"""
ucode_str = \
u("""\
⎛ L₀    L₀⎞  ∂ ⎛    ⎞\n\
⎜A   + B  ⎟⋅───⎜C   ⎟\n\
⎝         ⎠   j⎝  L₀⎠\n\
            ∂D       \n\
                     \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = PartialDerivative(B(-i) + A(-i), A(-j), A(-n))
ucode_str = u("""\
2 \n\
โ โ โ\n\
โโโโโโโโA + B โ\n\
โ i iโ \n\
โA โA \n\
n j \
""")
assert upretty(expr) == ucode_str
expr = PartialDerivative(3*A(-i), A(-j), A(-n))
ucode_str = u("""\
2 \n\
โ โ โ\n\
───────⎜3⋅A ⎟\n\
โ iโ \n\
โA โA \n\
n j \
""")
assert upretty(expr) == ucode_str
expr = TensorElement(H(i, j), {i:1})
ascii_str = \
"""\
i=1,j\n\
H \n\
\
"""
ucode_str = ascii_str
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = TensorElement(H(i, j), {i: 1, j: 1})
ascii_str = \
"""\
i=1,j=1\n\
H \n\
\
"""
ucode_str = ascii_str
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = TensorElement(H(i, j), {j: 1})
ascii_str = \
"""\
i,j=1\n\
H \n\
\
"""
ucode_str = ascii_str
expr = TensorElement(H(-i, j), {-i: 1})
ascii_str = \
"""\
j\n\
H \n\
i=1 \
"""
ucode_str = ascii_str
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_issue_15560():
a = MatrixSymbol('a', 1, 1)
e = pretty(a*(KroneckerProduct(a, a)))
result = 'a*(a x a)'
assert e == result
def test_print_lerchphi():
# Part of issue 6013
a = Symbol('a')
pretty(lerchphi(a, 1, 2))
uresult = u'ฮฆ(a, 1, 2)'
aresult = 'lerchphi(a, 1, 2)'
assert pretty(lerchphi(a, 1, 2)) == aresult
assert upretty(lerchphi(a, 1, 2)) == uresult
def test_issue_15583():
N = mechanics.ReferenceFrame('N')
result = '(n_x, n_y, n_z)'
e = pretty((N.x, N.y, N.z))
assert e == result
def test_matrixSymbolBold():
# Issue 15871
def boldpretty(expr):
return xpretty(expr, use_unicode=True, wrap_line=False, mat_symbol_style="bold")
from sympy import trace
A = MatrixSymbol("A", 2, 2)
assert boldpretty(trace(A)) == u'tr(๐)'
A = MatrixSymbol("A", 3, 3)
B = MatrixSymbol("B", 3, 3)
C = MatrixSymbol("C", 3, 3)
assert boldpretty(-A) == u'-๐'
    assert boldpretty(A - A*B - B) == u'-𝐁 -𝐀⋅𝐁 + 𝐀'
    assert boldpretty(-A*B - A*B*C - B) == u'-𝐁 -𝐀⋅𝐁 -𝐀⋅𝐁⋅𝐂'
A = MatrixSymbol("Addot", 3, 3)
assert boldpretty(A) == u'๐ฬ'
omega = MatrixSymbol("omega", 3, 3)
assert boldpretty(omega) == u'ฯ'
omega = MatrixSymbol("omeganorm", 3, 3)
assert boldpretty(omega) == u'โฯโ'
a = Symbol('alpha')
b = Symbol('b')
c = MatrixSymbol("c", 3, 1)
d = MatrixSymbol("d", 3, 1)
    assert boldpretty(a*B*c+b*d) == u'b⋅𝐝 + α⋅𝐁⋅𝐜'
d = MatrixSymbol("delta", 3, 1)
B = MatrixSymbol("Beta", 3, 3)
    assert boldpretty(a*B*c+b*d) == u'b⋅δ + α⋅Β⋅𝐜'
A = MatrixSymbol("A_2", 3, 3)
assert boldpretty(A) == u'๐โ'
def test_center_accent():
assert center_accent('a', u'\N{COMBINING TILDE}') == u'aฬ'
assert center_accent('aa', u'\N{COMBINING TILDE}') == u'aaฬ'
assert center_accent('aaa', u'\N{COMBINING TILDE}') == u'aaฬa'
assert center_accent('aaaa', u'\N{COMBINING TILDE}') == u'aaaฬa'
assert center_accent('aaaaa', u'\N{COMBINING TILDE}') == u'aaaฬaa'
assert center_accent('abcdefg', u'\N{COMBINING FOUR DOTS ABOVE}') == u'abcdโefg'
def test_imaginary_unit():
from sympy import pretty # As it is redefined above
assert pretty(1 + I, use_unicode=False) == '1 + I'
    assert pretty(1 + I, use_unicode=True) == u'1 + ⅈ'
assert pretty(1 + I, use_unicode=False, imaginary_unit='j') == '1 + I'
    assert pretty(1 + I, use_unicode=True, imaginary_unit='j') == u'1 + ⅉ'
raises(TypeError, lambda: pretty(I, imaginary_unit=I))
raises(ValueError, lambda: pretty(I, imaginary_unit="kkk"))
def test_str_special_matrices():
from sympy.matrices import Identity, ZeroMatrix, OneMatrix
assert pretty(Identity(4)) == 'I'
assert upretty(Identity(4)) == u'๐'
assert pretty(ZeroMatrix(2, 2)) == '0'
assert upretty(ZeroMatrix(2, 2)) == u'๐'
assert pretty(OneMatrix(2, 2)) == '1'
assert upretty(OneMatrix(2, 2)) == u'๐'
def test_pretty_misc_functions():
assert pretty(LambertW(x)) == 'W(x)'
assert upretty(LambertW(x)) == u'W(x)'
assert pretty(LambertW(x, y)) == 'W(x, y)'
assert upretty(LambertW(x, y)) == u'W(x, y)'
assert pretty(airyai(x)) == 'Ai(x)'
assert upretty(airyai(x)) == u'Ai(x)'
assert pretty(airybi(x)) == 'Bi(x)'
assert upretty(airybi(x)) == u'Bi(x)'
assert pretty(airyaiprime(x)) == "Ai'(x)"
assert upretty(airyaiprime(x)) == u"Ai'(x)"
assert pretty(airybiprime(x)) == "Bi'(x)"
assert upretty(airybiprime(x)) == u"Bi'(x)"
assert pretty(fresnelc(x)) == 'C(x)'
assert upretty(fresnelc(x)) == u'C(x)'
assert pretty(fresnels(x)) == 'S(x)'
assert upretty(fresnels(x)) == u'S(x)'
assert pretty(Heaviside(x)) == 'Heaviside(x)'
assert upretty(Heaviside(x)) == u'ฮธ(x)'
assert pretty(Heaviside(x, y)) == 'Heaviside(x, y)'
assert upretty(Heaviside(x, y)) == u'ฮธ(x, y)'
assert pretty(dirichlet_eta(x)) == 'dirichlet_eta(x)'
assert upretty(dirichlet_eta(x)) == u'ฮท(x)'
def test_hadamard_power():
m, n, p = symbols('m, n, p', integer=True)
A = MatrixSymbol('A', m, n)
B = MatrixSymbol('B', m, n)
# Testing printer:
expr = hadamard_power(A, n)
ascii_str = \
"""\
.n\n\
A \
"""
ucode_str = \
u("""\
โn\n\
A \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = hadamard_power(A, 1+n)
ascii_str = \
"""\
.(n + 1)\n\
A \
"""
ucode_str = \
u("""\
โ(n + 1)\n\
A \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
expr = hadamard_power(A*B.T, 1+n)
ascii_str = \
"""\
.(n + 1)\n\
/ T\\ \n\
\\A*B / \
"""
ucode_str = \
u("""\
โ(n + 1)\n\
⎛   T⎞        \n\
⎝A⋅B ⎠        \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
def test_issue_17258():
n = Symbol('n', integer=True)
assert pretty(Sum(n, (n, -oo, 1))) == \
' 1 \n'\
' __ \n'\
' \\ ` \n'\
' ) n\n'\
' /_, \n'\
'n = -oo '
assert upretty(Sum(n, (n, -oo, 1))) == \
u("""\
1 \n\
___ \n\
โฒ \n\
โฒ \n\
โฑ n\n\
โฑ \n\
โพโพโพ \n\
n = -โ \
""")
def test_is_combining():
line = u("vฬ_m")
assert [is_combining(sym) for sym in line] == \
[False, True, False, False]
def test_issue_17857():
assert pretty(Range(-oo, oo)) == '{..., -1, 0, 1, ...}'
assert pretty(Range(oo, -oo, -1)) == '{..., 1, 0, -1, ...}'
def test_issue_18272():
x = Symbol('x')
n = Symbol('n')
assert upretty(ConditionSet(x, Eq(-x + exp(x), 0), S.Complexes)) == \
'โง โ x โโซ\n'\
'โจx | x โ โ โง โ-x + โฏ = 0โ โฌ\n'\
'โฉ โญ'
assert upretty(ConditionSet(x, Contains(n/2, Interval(0, oo)), FiniteSet(-n/2, n/2))) == \
'โง โง-n nโซ โn โโซ\n'\
'โจx | x โ โจโโโ, โโฌ โง โโ โ [0, โ)โโฌ\n'\
'โฉ โฉ 2 2โญ โ2 โ โญ'
assert upretty(ConditionSet(x, Eq(Piecewise((1, x >= 3), (x/2 - 1/2, x >= 2), (1/2, x >= 1),
(x/2, True)) - 1/2, 0), Interval(0, 3))) == \
'โง โโโง 1 for x โฅ 3โ โโซ\n'\
'โช โโโช โ โโช\n'\
'โช โโโชx โ โโช\n'\
'โช โโโชโ - 0.5 for x โฅ 2โ โโช\n'\
'โช โโโช2 โ โโช\n'\
'โจx | x โ [0, 3] โง โโโจ โ - 0.5 = 0โโฌ\n'\
'โช โโโช 0.5 for x โฅ 1โ โโช\n'\
'โช โโโช โ โโช\n'\
'โช โโโช x โ โโช\n'\
'โช โโโช โ otherwiseโ โโช\n'\
'โฉ โโโฉ 2 โ โ โญ'
def test_diffgeom():
from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField
x,y = symbols('x y', real=True)
m = Manifold('M', 2)
assert pretty(m) == 'M'
p = Patch('P', m)
assert pretty(p) == "P"
rect = CoordSystem('rect', p, [x, y])
assert pretty(rect) == "rect"
b = BaseScalarField(rect, 0)
assert pretty(b) == "x"
|
py | 7df98b7e828a6b3340008016005cc24e2954e64f | # This code is part of Qiskit.
#
# (C) Copyright IBM 2018, 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
""" Test of the Adaptive VQE implementation with the adaptive UCCSD variational form """
import unittest
from test.chemistry import QiskitChemistryTestCase
from qiskit.aqua import aqua_globals
from qiskit.aqua.components.optimizers import L_BFGS_B
from qiskit.aqua.operators.legacy.op_converter import to_weighted_pauli_operator
from qiskit.aqua.operators.legacy.weighted_pauli_operator import Z2Symmetries
from qiskit.chemistry import FermionicOperator
from qiskit.chemistry.algorithms import VQEAdapt
from qiskit.chemistry.components.initial_states import HartreeFock
from qiskit.chemistry.components.variational_forms import UCCSD
from qiskit.chemistry.drivers import PySCFDriver, UnitsType
from qiskit.chemistry import QiskitChemistryError
class TestVQEAdaptUCCSD(QiskitChemistryTestCase):
""" Test Adaptive VQE with UCCSD"""
def setUp(self):
super().setUp()
# np.random.seed(50)
self.seed = 50
aqua_globals.random_seed = self.seed
try:
driver = PySCFDriver(atom='H .0 .0 .0; H .0 .0 0.735',
unit=UnitsType.ANGSTROM,
basis='sto3g')
except QiskitChemistryError:
self.skipTest('PYSCF driver does not appear to be installed')
return
molecule = driver.run()
self.num_particles = molecule.num_alpha + molecule.num_beta
self.num_spin_orbitals = molecule.num_orbitals * 2
fer_op = FermionicOperator(h1=molecule.one_body_integrals, h2=molecule.two_body_integrals)
map_type = 'PARITY'
qubit_op = fer_op.mapping(map_type)
self.qubit_op = Z2Symmetries.two_qubit_reduction(to_weighted_pauli_operator(qubit_op),
self.num_particles)
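        # With the parity mapping, the known particle-number (Z2) symmetries
        # allow two qubits to be tapered off, so H2/STO-3G drops from 4 to 2
        # qubits here.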
self.num_qubits = self.qubit_op.num_qubits
self.init_state = HartreeFock(self.num_spin_orbitals, self.num_particles)
self.var_form_base = None
def test_uccsd_adapt(self):
""" UCCSD test for adaptive features """
self.var_form_base = UCCSD(self.num_spin_orbitals,
self.num_particles, initial_state=self.init_state)
self.var_form_base.manage_hopping_operators()
# assert that the excitation pool exists
self.assertIsNotNone(self.var_form_base.excitation_pool)
# assert that the hopping ops list has been reset to be empty
self.assertEqual(self.var_form_base._hopping_ops, [])
def test_vqe_adapt(self):
""" VQEAdapt test """
try:
# pylint: disable=import-outside-toplevel
from qiskit import Aer
except Exception as ex: # pylint: disable=broad-except
self.skipTest("Aer doesn't appear to be installed. Error: '{}'".format(str(ex)))
return
self.var_form_base = UCCSD(self.num_spin_orbitals,
self.num_particles, initial_state=self.init_state)
backend = Aer.get_backend('statevector_simulator')
optimizer = L_BFGS_B()
algorithm = VQEAdapt(self.qubit_op, self.var_form_base, optimizer,
threshold=0.00001, delta=0.1, max_iterations=1)
result = algorithm.run(backend)
self.assertEqual(result.num_iterations, 1)
self.assertEqual(result.finishing_criterion, 'Maximum number of iterations reached')
algorithm = VQEAdapt(self.qubit_op, self.var_form_base, optimizer,
threshold=0.00001, delta=0.1)
result = algorithm.run(backend)
self.assertAlmostEqual(result.eigenvalue.real, -1.85727503, places=2)
self.assertEqual(result.num_iterations, 2)
self.assertAlmostEqual(result.final_max_gradient, 0.0, places=5)
self.assertEqual(result.finishing_criterion, 'Threshold converged')
def test_vqe_adapt_check_cyclicity(self):
""" VQEAdapt index cycle detection """
param_list = [
([1, 1], True),
([1, 11], False),
([11, 1], False),
([1, 12], False),
([12, 2], False),
([1, 1, 1], True),
([1, 2, 1], False),
([1, 2, 2], True),
([1, 2, 21], False),
([1, 12, 2], False),
([11, 1, 2], False),
([1, 2, 1, 1], True),
([1, 2, 1, 2], True),
([1, 2, 1, 21], False),
([11, 2, 1, 2], False),
([1, 11, 1, 111], False),
([11, 1, 111, 1], False),
([1, 2, 3, 1, 2, 3], True),
([1, 2, 3, 4, 1, 2, 3], False),
([11, 2, 3, 1, 2, 3], False),
([1, 2, 3, 1, 2, 31], False),
([1, 2, 3, 4, 1, 2, 3, 4], True),
([11, 2, 3, 4, 1, 2, 3, 4], False),
([1, 2, 3, 4, 1, 2, 3, 41], False),
([1, 2, 3, 4, 5, 1, 2, 3, 4], False),
]
for seq, is_cycle in param_list:
with self.subTest(msg="Checking index cyclicity in:", seq=seq):
self.assertEqual(is_cycle, VQEAdapt._check_cyclicity(seq))
if __name__ == '__main__':
unittest.main()
|
py | 7df98c3ce4e7764f8e5cbf1e600fc4916b319869 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: release-1.16
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class V1Affinity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'node_affinity': 'V1NodeAffinity',
'pod_affinity': 'V1PodAffinity',
'pod_anti_affinity': 'V1PodAntiAffinity'
}
attribute_map = {
'node_affinity': 'nodeAffinity',
'pod_affinity': 'podAffinity',
'pod_anti_affinity': 'podAntiAffinity'
}
def __init__(self, node_affinity=None, pod_affinity=None, pod_anti_affinity=None): # noqa: E501
"""V1Affinity - a model defined in OpenAPI""" # noqa: E501
self._node_affinity = None
self._pod_affinity = None
self._pod_anti_affinity = None
self.discriminator = None
if node_affinity is not None:
self.node_affinity = node_affinity
if pod_affinity is not None:
self.pod_affinity = pod_affinity
if pod_anti_affinity is not None:
self.pod_anti_affinity = pod_anti_affinity
@property
def node_affinity(self):
"""Gets the node_affinity of this V1Affinity. # noqa: E501
:return: The node_affinity of this V1Affinity. # noqa: E501
:rtype: V1NodeAffinity
"""
return self._node_affinity
@node_affinity.setter
def node_affinity(self, node_affinity):
"""Sets the node_affinity of this V1Affinity.
:param node_affinity: The node_affinity of this V1Affinity. # noqa: E501
:type: V1NodeAffinity
"""
self._node_affinity = node_affinity
@property
def pod_affinity(self):
"""Gets the pod_affinity of this V1Affinity. # noqa: E501
:return: The pod_affinity of this V1Affinity. # noqa: E501
:rtype: V1PodAffinity
"""
return self._pod_affinity
@pod_affinity.setter
def pod_affinity(self, pod_affinity):
"""Sets the pod_affinity of this V1Affinity.
:param pod_affinity: The pod_affinity of this V1Affinity. # noqa: E501
:type: V1PodAffinity
"""
self._pod_affinity = pod_affinity
@property
def pod_anti_affinity(self):
"""Gets the pod_anti_affinity of this V1Affinity. # noqa: E501
:return: The pod_anti_affinity of this V1Affinity. # noqa: E501
:rtype: V1PodAntiAffinity
"""
return self._pod_anti_affinity
@pod_anti_affinity.setter
def pod_anti_affinity(self, pod_anti_affinity):
"""Sets the pod_anti_affinity of this V1Affinity.
:param pod_anti_affinity: The pod_anti_affinity of this V1Affinity. # noqa: E501
:type: V1PodAntiAffinity
"""
self._pod_anti_affinity = pod_anti_affinity
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1Affinity):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
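

# A minimal usage sketch (illustrative only, not part of the generated file);
# `V1NodeAffinity` is assumed to come from the same generated package:
#
#     affinity = V1Affinity(node_affinity=V1NodeAffinity())
#     print(affinity.to_dict())  # unset fields serialize as None
#     assert affinity == V1Affinity(node_affinity=V1NodeAffinity())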
|
py | 7df98d32607607f637bc5b6462bbdcc19fb517f4 | # -*- coding: utf-8 -*-
from collections.abc import Iterable
from itertools import chain
from flair.parser.utils.alg import kmeans
import torch
from torch.nn.utils.rnn import pad_sequence
from torch.utils.data import DataLoader, Dataset, Sampler
class TextDataLoader(DataLoader):
def __init__(self, *args, **kwargs):
super(TextDataLoader, self).__init__(*args, **kwargs)
self.fields = self.dataset.fields
def __iter__(self):
for raw_batch in super(TextDataLoader, self).__iter__():
batch, device = [], 'cuda' if torch.cuda.is_available() else 'cpu'
for data, field in zip(raw_batch, self.fields):
if isinstance(data[0], torch.Tensor):
data = pad_sequence(data, True, field.pad_index).to(device)
elif isinstance(data[0], Iterable):
data = [pad_sequence(f, True, field.pad_index).to(device)
for f in zip(*data)]
batch.append(data)
yield batch
class TextDataset(Dataset):
def __init__(self, corpus, fields, n_buckets=1):
super(TextDataset, self).__init__()
self.corpus = corpus
self.fields = list(chain(*[
field if isinstance(field, Iterable) else [field]
for field in fields if field is not None
]))
for field in self.fields:
value = field.numericalize(getattr(corpus, field.name))
setattr(self, field.name, value)
# NOTE: the final bucket count is roughly equal to n_buckets
        self.lengths = [len(i) + sum([bool(field.bos), bool(field.eos)])
                        for i in corpus]
self.buckets = dict(zip(*kmeans(self.lengths, n_buckets)))
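        # `kmeans` is assumed to return (sizes, clusters), so `buckets` maps
        # each average length to the sentence indices assigned to it; this way
        # similarly-sized sentences get padded and batched together.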
def __getitem__(self, index):
for field in self.fields:
yield getattr(self, field.name)[index]
def __len__(self):
return len(self.corpus)
@property
def loader(self):
if hasattr(self, 'data_loader'):
return self.data_loader
else:
raise AttributeError
@loader.setter
def loader(self, data_loader):
self.data_loader = data_loader
@classmethod
def collate_fn(cls, batch):
return (field for field in zip(*batch))
class TextSampler(Sampler):
def __init__(self, buckets, batch_size, shuffle=False):
self.batch_size = batch_size
self.shuffle = shuffle
self.sizes, self.buckets = zip(*[
(size, bucket) for size, bucket in buckets.items()
])
# the number of chunks in each bucket, which is clipped by
# range [1, len(bucket)]
self.chunks = [
min(len(bucket), max(round(size * len(bucket) / batch_size), 1))
for size, bucket in zip(self.sizes, self.buckets)
]
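        # e.g. a bucket of 50 sentences of average length 16 with batch_size
        # 400 gives round(16*50/400) = 2 chunks, i.e. ~25 sentences (~400
        # tokens) per batch (illustrative numbers, not from the source).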
def __iter__(self):
# if shuffle, shuffle both the buckets and samples in each bucket
range_fn = torch.randperm if self.shuffle else torch.arange
for i in range_fn(len(self.buckets)).tolist():
split_sizes = [(len(self.buckets[i]) - j - 1) // self.chunks[i] + 1
for j in range(self.chunks[i])]
# DON'T use `torch.chunk` which may return wrong number of chunks
for batch in range_fn(len(self.buckets[i])).split(split_sizes):
yield [self.buckets[i][j] for j in batch.tolist()]
def __len__(self):
return sum(self.chunks)
def batchify(dataset, batch_size, shuffle=False):
batch_sampler = TextSampler(buckets=dataset.buckets,
batch_size=batch_size,
shuffle=shuffle)
loader = TextDataLoader(dataset=dataset,
batch_sampler=batch_sampler,
collate_fn=dataset.collate_fn)
return loader
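

# A minimal end-to-end sketch (hypothetical corpus/field objects; the concrete
# Corpus and Field classes are assumed to live elsewhere in flair.parser):
#
#     dataset = TextDataset(corpus, fields=[WORD, FEAT, ARC, REL], n_buckets=32)
#     loader = batchify(dataset, batch_size=5000, shuffle=True)
#     for words, feats, arcs, rels in loader:
#         ...  # tensors are padded per batch and moved to GPU when available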
|