repo_name: stringlengths 5-100 | path: stringlengths 4-299 | copies: stringclasses (990 values) | size: stringlengths 4-7 | content: stringlengths 666-1.03M | license: stringclasses (15 values) | hash: int64, -9,223,351,895,964,839,000 to 9,223,297,778B | line_mean: float64, 3.17-100 | line_max: int64, 7-1k | alpha_frac: float64, 0.25-0.98 | autogenerated: bool (1 class) |
---|---|---|---|---|---|---|---|---|---|---|
fergalbyrne/nupic | examples/opf/experiments/multistep/hotgym/permutations.py | 38 | 3713 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by ExpGenerator to generate the actual
permutations.py file by replacing $XXXXXXXX tokens with desired values.
This permutations.py file was generated by:
'~/nupic/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.py'
"""
from nupic.swarming.permutationhelpers import *
# The name of the field being predicted. Any allowed permutation MUST contain
# the prediction field.
# (generated from PREDICTION_FIELD)
predictedField = 'consumption'
permutations = {
'modelParams': {
'inferenceType': PermuteChoices(['NontemporalMultiStep', 'TemporalMultiStep']),
'sensorParams': {
'encoders': {
'timestamp_dayOfWeek': PermuteEncoder(fieldName='timestamp', encoderClass='DateEncoder.dayOfWeek', radius=PermuteFloat(1.000000, 6.000000), w=21),
'timestamp_timeOfDay': PermuteEncoder(fieldName='timestamp', encoderClass='DateEncoder.timeOfDay', radius=PermuteFloat(0.500000, 12.000000), w=21),
'consumption': PermuteEncoder(fieldName='consumption', encoderClass='AdaptiveScalarEncoder', n=PermuteInt(28, 521), w=21, clipInput=True),
'timestamp_weekend': PermuteEncoder(fieldName='timestamp', encoderClass='DateEncoder.weekend', radius=PermuteChoices([1]), w=21),
},
},
'tpParams': {
'minThreshold': PermuteInt(9, 12),
'activationThreshold': PermuteInt(12, 16),
'pamLength': PermuteInt(1, 5),
},
}
}
# Fields selected for final hypersearch report;
# NOTE: These values are used as regular expressions by RunPermutations.py's
# report generator
# (fieldname values generated from PERM_PREDICTED_FIELD_NAME)
report = [
'.*consumption.*',
]
# Permutation optimization setting: either minimize or maximize metric
# used by RunPermutations.
# NOTE: The value is used as a regular expressions by RunPermutations.py's
# report generator
# (generated from minimize = 'prediction:aae:window=1000:field=consumption')
minimize = "multiStepBestPredictions:multiStep:errorMetric='aae':steps=1:window=1000:field=consumption"
def permutationFilter(perm):
""" This function can be used to selectively filter out specific permutation
combinations. It is called by RunPermutations for every possible permutation
  of the variables in the permutations dict. It should return True for a valid
combination of permutation values and False for an invalid one.
Parameters:
---------------------------------------------------------
perm: dict of one possible combination of name:value
pairs chosen from permutations.
"""
# An example of how to use this
#if perm['__consumption_encoder']['maxval'] > 300:
# return False;
#
return True
| agpl-3.0 | -8,808,456,688,313,874,000 | 37.278351 | 155 | 0.695125 | false |
ncrocfer/weevely3 | testsuite/test_file_download.py | 14 | 4127 | from testsuite.base_test import BaseTest
from testfixtures import log_capture
from testsuite import config
from core.sessions import SessionURL
from core import modules
from core import messages
import subprocess
import tempfile
import datetime
import logging
import os
class FileDOwnload(BaseTest):
def setUp(self):
session = SessionURL(self.url, self.password, volatile = True)
modules.load_modules(session)
self.file_ok = os.path.join(config.script_folder, 'ok.test')
self.check_call(
config.cmd_env_content_s_to_s % ('OK', self.file_ok),
shell=True)
self.file_ko = os.path.join(config.script_folder, 'ko.test')
self.check_call(
config.cmd_env_content_s_to_s % ('KO', self.file_ko),
shell=True)
# Set ko.test to ---x--x--x 0111 execute, should be no readable
self.check_call(
config.cmd_env_chmod_s_s % ('0111', self.file_ko),
shell=True)
self.run_argv = modules.loaded['file_download'].run_argv
def tearDown(self):
self.check_call(
config.cmd_env_chmod_s_s % ('0777', '%s %s' % (self.file_ok, self.file_ko)),
shell=True)
self.check_call(
config.cmd_env_remove_s % ('%s %s' % (self.file_ok, self.file_ko)),
shell=True)
def test_download_php(self):
temp_file = tempfile.NamedTemporaryFile()
# Simple download
self.assertEqual(self.run_argv(['ok.test', temp_file.name]), 'OK')
self.assertEqual(open(temp_file.name,'r').read(), 'OK')
temp_file.truncate()
        # Download binary. Skip check because I don't know the remote content, and
# the md5 check is already done inside file_download.
self.assertTrue(self.run_argv(['/bin/ls', temp_file.name]))
self.assertTrue(open(temp_file.name,'r').read(), 'OK')
temp_file.truncate()
# Download of an unreadable file
self.assertEqual(self.run_argv(['ko.test', temp_file.name]), None)
self.assertEqual(open(temp_file.name,'r').read(), '')
        # Download of a remote nonexistent file
self.assertEqual(self.run_argv(['bogus', temp_file.name]), None)
self.assertEqual(open(temp_file.name,'r').read(), '')
        # Download to a local nonexistent folder
self.assertEqual(self.run_argv(['ok.test', '/tmp/bogus/bogus']), None)
self.assertEqual(open(temp_file.name,'r').read(), '')
# Download to a directory
self.assertEqual(self.run_argv(['ok.test', '/tmp/']), None)
self.assertEqual(open(temp_file.name,'r').read(), '')
temp_file.close()
def test_download_sh(self):
temp_file = tempfile.NamedTemporaryFile()
# Simple download
self.assertEqual(self.run_argv(['-vector', 'base64', 'ok.test', temp_file.name]), 'OK')
self.assertEqual(open(temp_file.name,'r').read(), 'OK')
temp_file.truncate()
        # Download binary. Skip check because I don't know the remote content, and
# the md5 check is already done inside file_download.
self.assertTrue(self.run_argv(['-vector', 'base64', '/bin/ls', temp_file.name]))
self.assertTrue(open(temp_file.name,'r').read(), 'OK')
temp_file.truncate()
# Download of an unreadable file
self.assertEqual(self.run_argv(['-vector', 'base64', 'ko.test', temp_file.name]), None)
self.assertEqual(open(temp_file.name,'r').read(), '')
        # Download of a remote nonexistent file
self.assertEqual(self.run_argv(['-vector', 'base64', 'bogus', temp_file.name]), None)
self.assertEqual(open(temp_file.name,'r').read(), '')
        # Download to a local nonexistent folder
self.assertEqual(self.run_argv(['-vector', 'base64', 'ok.test', '/tmp/bogus/bogus']), None)
self.assertEqual(open(temp_file.name,'r').read(), '')
# Download to a directory
self.assertEqual(self.run_argv(['-vector', 'base64', 'ok.test', '/tmp/']), None)
self.assertEqual(open(temp_file.name,'r').read(), '')
temp_file.close()
| gpl-3.0 | -1,280,130,105,265,299,200 | 36.862385 | 99 | 0.611825 | false |
CERNDocumentServer/invenio | modules/bibformat/lib/elements/bfe_record_stats.py | 1 | 10296 | # This file is part of Invenio.
# Copyright (C) 2009, 2010, 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints record statistics
"""
__revision__ = "$Id$"
from invenio.dbquery import run_sql
ELASTICSEARCH_ENABLED = False
try:
from elasticsearch import Elasticsearch
from invenio.config import \
CFG_ELASTICSEARCH_LOGGING, \
CFG_ELASTICSEARCH_SEARCH_HOST, \
CFG_ELASTICSEARCH_INDEX_PREFIX
# if we were able to import all modules and ES logging is enabled, then use
# elasticsearch instead of normal db queries
if CFG_ELASTICSEARCH_LOGGING:
ELASTICSEARCH_ENABLED = True
except ImportError:
pass
# elasticsearch not supported
def format_element(bfo, display='day_distinct_ip_nb_views'):
'''
Prints record statistics
    @param display: the type of statistics displayed. Can be 'total_nb_views', 'day_nb_views', 'total_distinct_ip_nb_views', 'day_distinct_ip_nb_views', 'total_distinct_ip_per_day_nb_views'
'''
if ELASTICSEARCH_ENABLED:
page_views = 0
ES_INDEX = CFG_ELASTICSEARCH_INDEX_PREFIX + "*"
recID = bfo.recID
query = ""
es = Elasticsearch(CFG_ELASTICSEARCH_SEARCH_HOST)
if display == 'total_nb_views':
query = {
"query": {
"bool": {
"must": [
{
"match": {
"id_bibrec": recID
}
},
{
"match": {
"_type": "events.pageviews"
}
}
]
}
}
}
results = es.count(index=ES_INDEX, body=query)
if results:
page_views = results.get('count', 0)
elif display == 'day_nb_views':
query = {
"query": {
"filtered": {
"query": {
"bool": {
"must": [
{
"match": {
"id_bibrec": recID
}
},
{
"match": {
"_type": "events.pageviews"
}
}
]
}
},
"filter": {
"range": {
"@timestamp": {
"gt": "now-1d"
}
}
}
}
}
}
results = es.count(index=ES_INDEX, body=query)
if results:
page_views = results.get('count', 0)
elif display == 'total_distinct_ip_nb_views':
search_type = "count"
# TODO this search query with aggregation is slow, maybe there is a way to make it faster ?
query = {
"query": {
"bool": {
"must": [
{
"match": {
"id_bibrec": recID
}
},
{
"match": {
"_type": "events.pageviews"
}
}
]
}
},
"aggregations": {
"distinct_ips": {
"cardinality": {
"field": "client_host"
}
}
}
}
results = es.search(index=ES_INDEX, body=query, search_type=search_type)
if results:
page_views = results.get('aggregations', {}).get('distinct_ips', {}).get('value', 0)
elif display == 'day_distinct_ip_nb_views':
search_type = "count"
# TODO aggregation is slow, maybe there is a way to make a faster query
query = {
"query": {
"filtered": {
"query": {
"bool": {
"must": [
{
"match": {
"id_bibrec": recID
}
},
{
"match": {
"_type": "events.pageviews"
}
}
]
}
},
"filter": {
"range": {
"@timestamp": {
"gt": "now-1d"
}
}
}
}
},
"aggregations": {
"distinct_ips": {
"cardinality": {
"field": "client_host"
}
}
}
}
results = es.search(index=ES_INDEX, body=query, search_type=search_type)
if results:
page_views = results.get('aggregations', {}).get('distinct_ips', {}).get('value', 0)
elif display == 'total_distinct_ip_per_day_nb_views':
search_type = "count"
# TODO aggregation is slow, maybe there is a way to make a faster query
query = {
"query": {
"filtered": {
"query": {
"bool": {
"must": [
{
"match": {
"id_bibrec": recID
}
},
{
"match": {
"_type": "events.pageviews"
}
}
]
}
}
}
},
"aggregations": {
"daily_stats": {
"date_histogram": {
"field": "@timestamp",
"interval": "day"
},
"aggregations": {
"distinct_ips": {
"cardinality": {
"field": "client_host"
}
}
}
}
}
}
results = es.search(index=ES_INDEX, body=query, search_type=search_type)
if results:
buckets = results.get("aggregations", {}).get("daily_stats", {}).get("buckets", {})
page_views = sum([int(bucket.get("distinct_ips", {}).get('value', '0')) for bucket in buckets])
return page_views
else:
if display == 'total_nb_views':
return run_sql("""SELECT COUNT(client_host) FROM rnkPAGEVIEWS
WHERE id_bibrec=%s""",
(bfo.recID,))[0][0]
elif display == 'day_nb_views':
return run_sql("""SELECT COUNT(client_host) FROM rnkPAGEVIEWS
WHERE id_bibrec=%s AND DATE(view_time)=CURDATE()""",
(bfo.recID,))[0][0]
elif display == 'total_distinct_ip_nb_views':
return run_sql("""SELECT COUNT(DISTINCT client_host) FROM rnkPAGEVIEWS
WHERE id_bibrec=%s""",
(bfo.recID,))[0][0]
elif display == 'day_distinct_ip_nb_views':
return run_sql("""SELECT COUNT(DISTINCT client_host) FROM rnkPAGEVIEWS
WHERE id_bibrec=%s AND DATE(view_time)=CURDATE()""",
(bfo.recID,))[0][0]
elif display == 'total_distinct_ip_per_day_nb_views':
# Count the number of distinct IP addresses for every day Then
# sum up. Similar to total_distinct_users_nb_views but assume
# that several different users can be behind a single IP
# (which could change every day)
res = run_sql("""SELECT COUNT(DISTINCT client_host)
FROM rnkPAGEVIEWS
WHERE id_bibrec=%s GROUP BY DATE(view_time)""",
(bfo.recID,))
return sum([row[0] for row in res])
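# Usage sketch (bfo is the BibFormatObject handed in by BibFormat at render
# time; the calls are shown only to illustrate the accepted `display` values):
#
#   format_element(bfo, display='total_nb_views')
#   format_element(bfo, display='total_distinct_ip_per_day_nb_views')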
| gpl-2.0 | 2,485,560,308,210,374,000 | 39.535433 | 189 | 0.355769 | false |
Netflix/security_monkey | security_monkey/auditors/openstack/openstack_security_group.py | 1 | 1657 | # Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module: security_monkey.openstack.auditors.security_group
:platform: Unix
.. version:: $$VERSION$$
.. moduleauthor:: Michael Stair <[email protected]>
"""
from security_monkey.auditors.security_group import SecurityGroupAuditor
from security_monkey.watchers.openstack.network.openstack_security_group import OpenStackSecurityGroup
class OpenStackSecurityGroupAuditor(SecurityGroupAuditor):
index = OpenStackSecurityGroup.index
i_am_singular = OpenStackSecurityGroup.i_am_singular
i_am_plural = OpenStackSecurityGroup.i_am_plural
network_whitelist = []
def __init__(self, accounts=None, debug=False):
super(OpenStackSecurityGroupAuditor, self).__init__(accounts=accounts, debug=debug)
def check_securitygroup_ec2_rfc1918(self, sg_item):
pass
def _check_internet_cidr(self, cidr):
''' some public clouds default to none for any source '''
return not cidr or super(OpenStackSecurityGroupAuditor, self)._check_internet_cidr(cidr)
| apache-2.0 | 8,831,156,913,663,788,000 | 40.425 | 102 | 0.732649 | false |
dd00/commandergenius | project/jni/python/src/Lib/test/test_popen2.py | 51 | 3151 | #! /usr/bin/env python
"""Test script for popen2.py"""
import warnings
warnings.filterwarnings("ignore", ".*popen2 module is deprecated.*",
DeprecationWarning)
warnings.filterwarnings("ignore", "os\.popen. is deprecated.*",
DeprecationWarning)
import os
import sys
import unittest
import popen2
from test.test_support import TestSkipped, run_unittest, reap_children
if sys.platform[:4] == 'beos' or sys.platform[:6] == 'atheos':
# Locks get messed up or something. Generally we're supposed
# to avoid mixing "posix" fork & exec with native threads, and
# they may be right about that after all.
raise TestSkipped("popen2() doesn't work on " + sys.platform)
# if we don't have os.popen, check that
# we have os.fork. if not, skip the test
# (by raising an ImportError)
try:
from os import popen
del popen
except ImportError:
from os import fork
del fork
class Popen2Test(unittest.TestCase):
cmd = "cat"
if os.name == "nt":
cmd = "more"
teststr = "ab cd\n"
# "more" doesn't act the same way across Windows flavors,
# sometimes adding an extra newline at the start or the
# end. So we strip whitespace off both ends for comparison.
expected = teststr.strip()
def setUp(self):
popen2._cleanup()
# When the test runs, there shouldn't be any open pipes
self.assertFalse(popen2._active, "Active pipes when test starts" +
repr([c.cmd for c in popen2._active]))
def tearDown(self):
for inst in popen2._active:
inst.wait()
popen2._cleanup()
self.assertFalse(popen2._active, "_active not empty")
reap_children()
def validate_output(self, teststr, expected_out, r, w, e=None):
w.write(teststr)
w.close()
got = r.read()
self.assertEquals(expected_out, got.strip(), "wrote %r read %r" %
(teststr, got))
if e is not None:
got = e.read()
self.assertFalse(got, "unexpected %r on stderr" % got)
def test_popen2(self):
r, w = popen2.popen2(self.cmd)
self.validate_output(self.teststr, self.expected, r, w)
def test_popen3(self):
if os.name == 'posix':
r, w, e = popen2.popen3([self.cmd])
self.validate_output(self.teststr, self.expected, r, w, e)
r, w, e = popen2.popen3(self.cmd)
self.validate_output(self.teststr, self.expected, r, w, e)
def test_os_popen2(self):
# same test as test_popen2(), but using the os.popen*() API
w, r = os.popen2(self.cmd)
self.validate_output(self.teststr, self.expected, r, w)
def test_os_popen3(self):
# same test as test_popen3(), but using the os.popen*() API
if os.name == 'posix':
w, r, e = os.popen3([self.cmd])
self.validate_output(self.teststr, self.expected, r, w, e)
w, r, e = os.popen3(self.cmd)
self.validate_output(self.teststr, self.expected, r, w, e)
def test_main():
run_unittest(Popen2Test)
if __name__ == "__main__":
test_main()
| lgpl-2.1 | -7,763,450,345,820,361,000 | 31.153061 | 74 | 0.607426 | false |
dudepare/django | django/middleware/gzip.py | 478 | 1831 | import re
from django.utils.cache import patch_vary_headers
from django.utils.text import compress_sequence, compress_string
re_accepts_gzip = re.compile(r'\bgzip\b')
class GZipMiddleware(object):
"""
This middleware compresses content if the browser allows gzip compression.
It sets the Vary header accordingly, so that caches will base their storage
on the Accept-Encoding header.
"""
def process_response(self, request, response):
# It's not worth attempting to compress really short responses.
if not response.streaming and len(response.content) < 200:
return response
# Avoid gzipping if we've already got a content-encoding.
if response.has_header('Content-Encoding'):
return response
patch_vary_headers(response, ('Accept-Encoding',))
ae = request.META.get('HTTP_ACCEPT_ENCODING', '')
if not re_accepts_gzip.search(ae):
return response
if response.streaming:
# Delete the `Content-Length` header for streaming content, because
# we won't know the compressed size until we stream it.
response.streaming_content = compress_sequence(response.streaming_content)
del response['Content-Length']
else:
# Return the compressed content only if it's actually shorter.
compressed_content = compress_string(response.content)
if len(compressed_content) >= len(response.content):
return response
response.content = compressed_content
response['Content-Length'] = str(len(response.content))
if response.has_header('ETag'):
response['ETag'] = re.sub('"$', ';gzip"', response['ETag'])
response['Content-Encoding'] = 'gzip'
return response
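# Usage sketch (assumption: a Django 1.x-style settings module). The middleware
# is enabled by adding it to MIDDLEWARE_CLASSES, usually near the top of the
# list so that it runs last on the response and compresses the final output:
#
#   MIDDLEWARE_CLASSES = (
#       'django.middleware.gzip.GZipMiddleware',
#       # ... remaining middleware ...
#   )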
| bsd-3-clause | -2,015,083,589,667,243,300 | 37.957447 | 86 | 0.650464 | false |
prculley/gramps | gramps/gui/widgets/selectionwidget.py | 3 | 31444 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2013 Artem Glebov <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# GTK/Gnome modules
#
#-------------------------------------------------------------------------
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GdkPixbuf
from gi.repository import GObject
#-------------------------------------------------------------------------
#
# gramps modules
#
#-------------------------------------------------------------------------
from gramps.gen.display.name import displayer as name_displayer
#-------------------------------------------------------------------------
#
# grabbers constants and routines
#
#-------------------------------------------------------------------------
from .grabbers import (grabber_generators, can_grab, grabber_position,
switch_grabber, CURSORS, GRABBER_INSIDE, INSIDE,
INNER_GRABBERS, OUTER_GRABBERS, MOTION_FUNCTIONS)
#-------------------------------------------------------------------------
#
# PhotoTaggingGramplet
#
#-------------------------------------------------------------------------
RESIZE_RATIO = 1.5
MAX_ZOOM = 10
MIN_ZOOM = 0.05
MAX_SIZE = 2000
MIN_SIZE = 50
SHADING_OPACITY = 0.7
MIN_SELECTION_SIZE = 10
def scale_to_fit(orig_x, orig_y, target_x, target_y):
"""
    Calculates the scale factor needed to fit the rectangle
    orig_x * orig_y into target_x * target_y while keeping the aspect ratio.
"""
orig_aspect = orig_x / orig_y
target_aspect = target_x / target_y
if orig_aspect > target_aspect:
return target_x / orig_x
else:
return target_y / orig_y
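# Worked example (illustrative, relies on the Python 3 true division used in
# this module): scale_to_fit(400, 200, 200, 200) returns 0.5, i.e. a 400x200
# image must be halved to fit inside a 200x200 viewport.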
def resize_keep_aspect(orig_x, orig_y, target_x, target_y):
"""
Calculates the dimensions of the rectangle obtained from
the rectangle orig_x * orig_y by scaling to fit
target_x * target_y keeping the aspect ratio.
"""
orig_aspect = orig_x / orig_y
target_aspect = target_x / target_y
if orig_aspect > target_aspect:
return (target_x, target_x * orig_y // orig_x)
else:
return (target_y * orig_x // orig_y, target_y)
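# Worked example (illustrative): resize_keep_aspect(400, 200, 200, 200) returns
# (200, 100) - the width is capped at 200 and the height keeps the 2:1 ratio.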
def order_coordinates(point1, point2):
"""
Returns the rectangle (x1, y1, x2, y2) based on point1 and point2,
such that x1 <= x2 and y1 <= y2.
"""
x1 = min(point1[0], point2[0])
x2 = max(point1[0], point2[0])
y1 = min(point1[1], point2[1])
y2 = max(point1[1], point2[1])
return (x1, y1, x2, y2)
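# Worked example (illustrative): order_coordinates((30, 5), (10, 20)) returns
# (10, 5, 30, 20), so any two corner points can be treated as a normalized
# (x1, y1, x2, y2) rectangle with x1 <= x2 and y1 <= y2.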
def minimum_region(point1, point2):
"""
Returns whether the rectangle defined by the corner points point1
and point2 exceeds the minimum dimensions.
"""
return (abs(point1[0] - point2[0]) >= MIN_SELECTION_SIZE and
abs(point1[1] - point2[1]) >= MIN_SELECTION_SIZE)
class Region:
"""
Representation of a region of image that can be associated with
a person.
"""
def __init__(self, x1, y1, x2, y2):
"""
Creates a new region with the specified coordinates.
"""
self.x1 = x1
self.y1 = y1
self.x2 = x2
self.y2 = y2
self.person = None
self.mediaref = None
def coords(self):
"""
Returns the coordinates of the region as a 4-tuple in the
format (x1, y1, x2, y2).
"""
return (self.x1, self.y1, self.x2, self.y2)
def set_coords(self, x1, y1, x2, y2):
"""
Sets the coordinates of this region.
"""
self.x1 = x1
self.y1 = y1
self.x2 = x2
self.y2 = y2
def contains(self, x, y):
"""
        Returns whether the point with coordinates (x, y) lies inside
this region.
"""
return self.x1 <= x <= self.x2 and self.y1 <= y <= self.y2
def contains_rect(self, other):
"""
Returns whether this region fully contains the region other.
"""
return (self.contains(other.x1, other.y1) and
self.contains(other.x2, other.y2))
def area(self):
"""
Returns the area of this region.
"""
return abs(self.x1 - self.x2) * abs(self.y1 - self.y2)
def intersects(self, other):
"""
Returns whether the current region intersects other.
"""
# assumes that x1 <= x2 and y1 <= y2
return not (self.x2 < other.x1 or self.x1 > other.x2 or
self.y2 < other.y1 or self.y1 > other.y2)
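# Usage sketch (illustrative coordinates): a region covering the top-left
# quarter of a 200x100 image could be handled as
#
#   r = Region(0, 0, 100, 50)
#   r.contains(10, 10)                        # True
#   r.area()                                  # 5000
#   r.intersects(Region(90, 40, 120, 80))     # True, the rectangles overlap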
class SelectionWidget(Gtk.ScrolledWindow):
"""
A widget that displays an image and permits GIMP-like selection of regions
within the image. The widget derives from gtk.ScrolledWindow.
"""
__gsignals__ = {
"region-modified": (GObject.SignalFlags.RUN_FIRST, None, ()),
"region-created": (GObject.SignalFlags.RUN_FIRST, None, ()),
"region-selected": (GObject.SignalFlags.RUN_FIRST, None, ()),
"selection-cleared": (GObject.SignalFlags.RUN_FIRST, None, ()),
"right-button-clicked": (GObject.SignalFlags.RUN_FIRST, None, ()),
"zoomed-in": (GObject.SignalFlags.RUN_FIRST, None, ()),
"zoomed-out": (GObject.SignalFlags.RUN_FIRST, None, ())
}
def __init__(self):
"""
Creates a new selection widget.
"""
self.multiple_selection = True
self.loaded = False
self.start_point_screen = None
self.selection = None
self.current = None
self.in_region = None
self.grabber = None
self.regions = []
self.translation = None
self.pixbuf = None
self.scaled_pixbuf = None
self.scale = 1.0
self.old_viewport_size = None
Gtk.ScrolledWindow.__init__(self)
self.add(self._build_gui())
self.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
def _build_gui(self):
"""
Builds and lays out the GUI of the widget.
"""
self.image = Gtk.Image()
self.image.set_has_tooltip(True)
self.image.connect_after("draw", self._expose_handler)
self.image.connect("query-tooltip", self._show_tooltip)
self.event_box = Gtk.EventBox()
self.event_box.connect('button-press-event',
self._button_press_event)
self.event_box.connect('button-release-event',
self._button_release_event)
self.connect('motion-notify-event',
self._motion_notify_event)
self.connect('scroll-event',
self._motion_scroll_event)
self.event_box.add_events(Gdk.EventMask.BUTTON_PRESS_MASK)
self.event_box.add_events(Gdk.EventMask.BUTTON_RELEASE_MASK)
self.event_box.add_events(Gdk.EventMask.POINTER_MOTION_MASK)
self.event_box.add(self.image)
self.viewport = Gtk.Viewport()
self.connect("size-allocate", self._resize)
self.viewport.add(self.event_box)
return self.viewport
# ======================================================
# public field accessors
# ======================================================
def get_multiple_selection(self):
"""
Return whether multiple selection is enabled.
"""
return self.multiple_selection
def set_multiple_selection(self, enable):
"""
Enables or disables multiple selection.
"""
self.multiple_selection = enable
def is_image_loaded(self):
"""
Returns whether an image has been loaded into this selection widget.
"""
return self.loaded
def set_regions(self, regions):
"""
Sets the list of regions to be displayed in the widget.
"""
self.regions = regions
def get_current(self):
"""
Returns the currently active region.
"""
return self.current
def set_current(self, region):
"""
Activates the given region in the widget.
"""
self.current = region
def get_selection(self):
"""
Returns the coordinates of the current selection.
"""
return self.selection
# ======================================================
# loading the image
# ======================================================
def load_image(self, image_path):
"""
Loads an image from a given path into this selection widget.
"""
self.start_point_screen = None
self.selection = None
self.in_region = None
self.grabber_position = None
self.grabber_to_draw = None
try:
self.pixbuf = GdkPixbuf.Pixbuf.new_from_file(image_path)
self.original_image_size = (self.pixbuf.get_width(),
self.pixbuf.get_height())
viewport_size = self.viewport.get_allocation()
self.old_viewport_size = viewport_size
self.scale = scale_to_fit(self.pixbuf.get_width(),
self.pixbuf.get_height(),
viewport_size.width,
viewport_size.height)
self._rescale()
self.loaded = True
except (GObject.GError, OSError):
self.show_missing()
def show_missing(self):
"""
Displays a 'missing image' icon in the widget.
"""
self.pixbuf = None
self.image.set_from_icon_name('image-missing', Gtk.IconSize.DIALOG)
self.image.queue_draw()
def _resize(self, *dummy):
"""
        Handles 'size-allocate' events from Gtk.
"""
if self.pixbuf:
viewport_size = self.viewport.get_allocation()
if viewport_size.height != self.old_viewport_size.height or \
viewport_size.width != self.old_viewport_size.width or \
not self.image.get_pixbuf():
self.scale = scale_to_fit(self.pixbuf.get_width(),
self.pixbuf.get_height(),
viewport_size.width,
viewport_size.height)
self._rescale()
self.old_viewport_size = viewport_size
return False
def expander(self, *dummy):
""" Handler for expander in caller; needed because Gtk doesn't handle
        vertical expansion right
"""
self.image.clear()
self.image.set_size_request(2, 2)
self.event_box.set_size_request(2, 2)
return False
# ======================================================
# coordinate transformations (public methods)
# ======================================================
def proportional_to_real_rect(self, rect):
"""
Translates proportional (ranging from 0 to 100) coordinates to image
coordinates (in pixels).
"""
x1, y1, x2, y2 = rect
return (self._proportional_to_real((x1, y1)) +
self._proportional_to_real((x2, y2)))
def real_to_proportional_rect(self, rect):
"""
Translates image coordinates (in pixels) to proportional (ranging
from 0 to 100).
"""
x1, y1, x2, y2 = rect
return (self._real_to_proportional((x1, y1)) +
self._real_to_proportional((x2, y2)))
# ======================================================
# widget manipulation
# ======================================================
def refresh(self):
"""
Schedules a redraw of the image.
"""
self.image.queue_draw()
def can_zoom_in(self):
"""
Returns whether it is possible to zoom in the image.
"""
if self.original_image_size:
scaled_size = (self.original_image_size[0] * self.scale * RESIZE_RATIO,
self.original_image_size[1] * self.scale * RESIZE_RATIO)
return scaled_size[0] < MAX_SIZE and scaled_size[1] < MAX_SIZE
return False
def can_zoom_out(self):
"""
Returns whether it is possible to zoom out the image.
"""
if self.original_image_size:
scaled_size = (self.original_image_size[0] * self.scale * RESIZE_RATIO,
self.original_image_size[1] * self.scale * RESIZE_RATIO)
return scaled_size[0] >= MIN_SIZE and scaled_size[1] >= MIN_SIZE
return False
def zoom_in(self):
"""
Zooms in the image. The zoom factor is defined by RESIZE_RATIO.
"""
if self.can_zoom_in():
self.scale *= RESIZE_RATIO
self._rescale()
self.emit("zoomed-in")
def zoom_out(self):
"""
Zooms out the image. The zoom factor is defined by RESIZE_RATIO.
"""
if self.can_zoom_out():
self.scale /= RESIZE_RATIO
self._rescale()
self.emit("zoomed-out")
def select(self, region):
"""
Highlights the given region in the image.
"""
self.current = region
if self.current is not None:
self.selection = self.current.coords()
self.image.queue_draw()
def clear_selection(self):
"""
Clears the selection.
"""
self.current = None
self.selection = None
self.image.queue_draw()
    def find_region(self, x, y):
"""
Given screen coordinates, find where that point is in the image.
"""
return self._find_region(*self._screen_to_image((x, y)))
# ======================================================
# thumbnails
# ======================================================
def get_thumbnail(self, region, thumbnail_size):
"""
Returns the thumbnail of the given region.
"""
w = region.x2 - region.x1
h = region.y2 - region.y1
if w >= 1 and h >= 1 and self.pixbuf:
subpixbuf = self.pixbuf.new_subpixbuf(region.x1, region.y1, w, h)
size = resize_keep_aspect(w, h, *thumbnail_size)
return subpixbuf.scale_simple(size[0], size[1],
GdkPixbuf.InterpType.BILINEAR)
else:
return None
# ======================================================
# utility functions for retrieving properties
# ======================================================
def _get_original_image_size(self):
"""
Returns the size of the image before scaling.
"""
return self.original_image_size
def _get_scaled_image_size(self):
"""
Returns the size of images scaled with the current scaled.
"""
unscaled_size = self._get_original_image_size()
return (unscaled_size[0] * self.scale, unscaled_size[1] * self.scale)
# ======================================================
# coordinate transformations
# ======================================================
def _proportional_to_real(self, coord):
"""
Translates proportional (ranging from 0 to 100) coordinates to image
coordinates (in pixels).
"""
w, h = self.original_image_size
return (int(round(coord[0] * w / 100)), int(round(coord[1] * h / 100)))
def _real_to_proportional(self, coord):
"""
Translates image coordinates (in pixels) to proportional (ranging
from 0 to 100).
"""
w, h = self.original_image_size
return (int(round(coord[0] * 100 / w)), int(round(coord[1] * 100 / h)))
def _image_to_screen(self, coords):
"""
Translates image coordinates to viewport coordinates using the current
scale and viewport size.
"""
viewport_rect = self.viewport.get_allocation()
image_rect = self.scaled_size
if image_rect[0] < viewport_rect.width:
offset_x = (image_rect[0] - viewport_rect.width) / 2
else:
offset_x = 0.0
if image_rect[1] < viewport_rect.height:
offset_y = (image_rect[1] - viewport_rect.height) / 2
else:
offset_y = 0.0
return (int(coords[0] * self.scale - offset_x),
int(coords[1] * self.scale - offset_y))
def _screen_to_image(self, coords):
"""
Translates viewport coordinates to original (unscaled) image coordinates
using the current scale and viewport size.
"""
viewport_rect = self.viewport.get_allocation()
image_rect = self.scaled_size
if image_rect[0] < viewport_rect.width:
offset_x = (image_rect[0] - viewport_rect.width) / 2
else:
offset_x = 0.0
if image_rect[1] < viewport_rect.height:
offset_y = (image_rect[1] - viewport_rect.height) / 2
else:
offset_y = 0.0
return (int((coords[0] + offset_x) / self.scale),
int((coords[1] + offset_y) / self.scale))
def _truncate_to_image_size(self, coords):
"""
Modifies the coordinates of the given point to ensure that it lies
within the image. Negative values are replaced with 0, positive values
exceeding the image dimensions - with those corresponding dimensions.
"""
x, y = coords
(image_width, image_height) = self._get_original_image_size()
x = max(x, 0)
x = min(x, image_width)
y = max(y, 0)
y = min(y, image_height)
return self._proportional_to_real(self._real_to_proportional((x, y)))
def _screen_to_truncated(self, coords):
"""
Transforms the screen coordinates to image coordinates and truncate to
the image size.
"""
return self._truncate_to_image_size(self._screen_to_image(coords))
def _rect_image_to_screen(self, rect):
"""
Translates the coordinates of the rectangle from image to screen.
"""
x1, y1, x2, y2 = rect
x1, y1 = self._image_to_screen((x1, y1))
x2, y2 = self._image_to_screen((x2, y2))
return (x1, y1, x2, y2)
# ======================================================
# drawing and scaling the image
# ======================================================
def _expose_handler(self, widget, cr):
"""
Handles the expose-event signal of the underlying widget.
"""
if self.pixbuf:
self._draw_selection(widget, cr)
def _draw_selection(self, widget, cr):
"""
Draws the image, the selection boxes and does the necessary
shading.
"""
if not self.scaled_size:
return
w, h = self.scaled_size
offset_x, offset_y = self._image_to_screen((0, 0))
offset_x -= 1
offset_y -= 1
if self.selection:
x1, y1, x2, y2 = self._rect_image_to_screen(self.selection)
# transparent shading
self._draw_transparent_shading(cr, x1, y1, x2, y2, w, h,
offset_x, offset_y)
# selection frame
self._draw_selection_frame(cr, x1, y1, x2, y2)
# draw grabber
self._draw_grabber(cr)
else:
# selection frame
for region in self.regions:
x1, y1, x2, y2 = self._rect_image_to_screen(region.coords())
self._draw_region_frame(cr, x1, y1, x2, y2)
def _draw_transparent_shading(self, cr, x1, y1, x2, y2, w, h,
offset_x, offset_y):
"""
Draws the shading for a selection box.
"""
cr.set_source_rgba(1.0, 1.0, 1.0, SHADING_OPACITY)
cr.rectangle(offset_x, offset_y, x1 - offset_x, y1 - offset_y)
cr.rectangle(offset_x, y1, x1 - offset_x, y2 - y1)
cr.rectangle(offset_x, y2, x1 - offset_x, h - y2 + offset_y)
cr.rectangle(x1, y2 + 1, x2 - x1 + 1, h - y2 + offset_y)
cr.rectangle(x2 + 1, y2 + 1, w - x2 + offset_x, h - y2 + offset_y)
cr.rectangle(x2 + 1, y1, w - x2 + offset_x, y2 - y1 + 1)
cr.rectangle(x2 + 1, offset_y, w - x2 + offset_x, y2 - offset_y)
cr.rectangle(x1, offset_y, x2 - x1 + 1, y1 - offset_y)
cr.fill()
def _draw_selection_frame(self, cr, x1, y1, x2, y2):
"""
Draws the frame during selection.
"""
self._draw_region_frame(cr, x1, y1, x2, y2)
def _draw_region_frame(self, cr, x1, y1, x2, y2):
"""
Draws a region frame.
"""
cr.set_source_rgb(1.0, 1.0, 1.0) # white
cr.rectangle(x1, y1, x2 - x1, y2 - y1)
cr.stroke()
cr.set_source_rgb(0.0, 0.0, 1.0) # blue
cr.rectangle(x1 - 2, y1 - 2, x2 - x1 + 4, y2 - y1 + 4)
cr.stroke()
def _draw_grabber(self, cr):
"""
Draws a grabber.
"""
if self.selection is not None and self.grabber is not None:
selection_rect = self._rect_image_to_screen(self.selection)
cr.set_source_rgb(1.0, 0, 0)
if self.grabber_position is None:
generators = grabber_generators(selection_rect)
elif self.grabber_position == GRABBER_INSIDE:
generators = INNER_GRABBERS
else:
generators = OUTER_GRABBERS
if self.grabber_to_draw is not None:
generator = generators[self.grabber_to_draw]
else:
generator = generators[self.grabber]
if generator is not None:
x1, y1, x2, y2 = generator(*selection_rect)
cr.rectangle(x1, y1, x2 - x1, y2 - y1)
cr.stroke()
def _rescale(self):
"""
Recalculates the sizes using the current scale and updates
the buffers.
"""
self.scaled_size = (int(self.original_image_size[0] * self.scale),
int(self.original_image_size[1] * self.scale))
self.scaled_image = self.pixbuf.scale_simple(self.scaled_size[0],
self.scaled_size[1],
GdkPixbuf.InterpType.BILINEAR)
self.image.set_from_pixbuf(self.scaled_image)
self.image.set_size_request(*self.scaled_size)
self.event_box.set_size_request(*self.scaled_size)
# ======================================================
# managing regions
# ======================================================
def _find_region(self, x, y):
"""
Finds the smallest region containing point (x, y).
"""
result = None
for region in self.regions:
if region.contains(x, y):
if result is None or result.area() > region.area():
result = region
return result
# ======================================================
# mouse event handlers
# ======================================================
def _button_press_event(self, obj, event):
"""
Handles the button-press-event signal.
"""
if not self.is_image_loaded():
return
if event.button == 1: # left button
self.start_point_screen = (event.x, event.y)
if self.current is not None and self.grabber is None and \
self.multiple_selection:
self.current = None
self.selection = None
self.refresh()
self.emit("selection-cleared")
elif event.button == 3: # right button
# select a region, if clicked inside one
click_point = self._screen_to_image((event.x, event.y))
self.current = self._find_region(*click_point)
self.selection = \
self.current.coords() if self.current is not None else None
self.start_point_screen = None
self.refresh()
if self.current is not None:
self.emit("region-selected")
self.emit("right-button-clicked")
else:
self.emit("selection-cleared")
return True # don't propagate the event further
def _button_release_event(self, obj, event):
"""
Handles the button-release-event signal.
"""
if not self.is_image_loaded():
return
if event.button == 1:
if self.start_point_screen:
if self.current is not None:
# a box is currently selected
if self.grabber and self.grabber != INSIDE:
# clicked on one of the grabbers
dx, dy = (event.x - self.start_point_screen[0],
event.y - self.start_point_screen[1])
self.grabber_to_draw = self._modify_selection(dx, dy)
self.current.set_coords(*self.selection)
self.emit("region-modified")
elif self.grabber is None and self.multiple_selection:
# clicked outside of the grabbing area
self.current = None
self.selection = None
self.emit("selection-cleared")
else:
# update current selection
self.current.set_coords(*self.selection)
self.region = self.current
self.emit("region-modified")
else:
# nothing is currently selected
if (minimum_region(self.start_point_screen,
(event.x, event.y)) and
self._can_select()):
# region selection
region = Region(*self.selection)
self.regions.append(region)
self.current = region
self.emit("region-created")
else:
# nothing selected, just a click
click_point = \
self._screen_to_image(self.start_point_screen)
self.current = self._find_region(*click_point)
self.selection = \
self.current.coords() if self.current is not None \
else None
self.emit("region-selected")
self.start_point_screen = None
self.refresh()
def _motion_notify_event(self, widget, event):
"""
Handles the motion-notify-event signal.
"""
if not self.is_image_loaded():
return
end_point_orig = self._screen_to_image((event.x, event.y))
end_point = self._truncate_to_image_size(end_point_orig)
if self.start_point_screen:
# selection or dragging (mouse button pressed)
if self.grabber is not None and self.grabber != INSIDE:
# dragging the grabber
dx, dy = (event.x - self.start_point_screen[0],
event.y - self.start_point_screen[1])
self.grabber_to_draw = self._modify_selection(dx, dy)
else:
# making new selection
start_point = self._screen_to_truncated(self.start_point_screen)
self.selection = order_coordinates(start_point, end_point)
else:
# motion (mouse button is not pressed)
self.in_region = self._find_region(*end_point_orig)
if self.current is not None:
# a box is active, so check if the pointer is inside a grabber
rect = self._rect_image_to_screen(self.current.coords())
self.grabber = can_grab(rect, event.x, event.y)
if self.grabber is not None:
self.grabber_to_draw = self.grabber
self.grabber_position = grabber_position(rect)
self.event_box.get_window().set_cursor(CURSORS[self.grabber])
else:
self.grabber_to_draw = None
self.grabber_position = None
self.event_box.get_window().set_cursor(None)
else:
# nothing is active
self.grabber = None
self.grabber_to_draw = None
self.grabber_position = None
self.event_box.get_window().set_cursor(None)
self.image.queue_draw()
def _motion_scroll_event(self, widget, event):
"""
Handles the motion-scroll-event signal.
"""
if not self.is_image_loaded():
return
if event.direction == Gdk.ScrollDirection.UP:
self.zoom_in()
elif event.direction == Gdk.ScrollDirection.DOWN:
self.zoom_out()
# ======================================================
# helpers for mouse event handlers
# ======================================================
def _can_select(self):
"""
Returns whether selection is currently possible, which is when
multiple selection is enabled or otherwise when no region is
currently selected.
"""
return self.multiple_selection or len(self.regions) < 1
def _modify_selection(self, dx, dy):
"""
Changes the selection when a grabber is dragged, returns the new
grabber if a grabber switch has happened, and the current grabber
otherwise.
"""
x1, y1, x2, y2 = self._rect_image_to_screen(self.current.coords())
x1, y1, x2, y2 = MOTION_FUNCTIONS[self.grabber](x1, y1, x2, y2, dx, dy)
(x1, y1) = self._screen_to_truncated((x1, y1))
(x2, y2) = self._screen_to_truncated((x2, y2))
grabber = switch_grabber(self.grabber, x1, y1, x2, y2)
self.selection = order_coordinates((x1, y1), (x2, y2))
return grabber
# ======================================================
# tooltips
# ======================================================
def _show_tooltip(self, widget, x, y, keyboard_mode, tooltip):
"""
Handles the query-tooltip signal.
"""
if self.in_region:
person = self.in_region.person
if person:
name = name_displayer.display(person)
else:
return False
tooltip.set_text(name)
return True
else:
return False
| gpl-2.0 | -2,122,692,044,978,758,400 | 35.351445 | 83 | 0.51393 | false |
596acres/livinglots-philly | livinglotsphilly/survey/forms.py | 2 | 1197 | from django import forms
from django.contrib.contenttypes.models import ContentType
from forms_builder.forms.forms import FormForForm
from .models import SurveyFieldEntry, SurveyFormEntry
class SurveyFormForForm(FormForForm):
field_entry_model = SurveyFieldEntry
content_type = forms.ModelChoiceField(
queryset=ContentType.objects.all(),
widget=forms.HiddenInput,
)
class Meta(FormForForm.Meta):
model = SurveyFormEntry
widgets = {
'object_id': forms.HiddenInput,
'survey_form': forms.HiddenInput,
}
def __init__(self, *args, **kwargs):
# Get the model instance that the resulting entry will be tied to
initial = kwargs.pop('initial', {})
content_object = initial.pop('content_object', None)
survey_form = initial.pop('survey_form', None)
super(SurveyFormForForm, self).__init__(*args, initial=initial, **kwargs)
if content_object:
self.initial.update({
'content_type': ContentType.objects.get_for_model(content_object),
'object_id': content_object.pk,
'survey_form': survey_form,
})
| gpl-3.0 | -8,244,202,802,462,454,000 | 31.351351 | 82 | 0.637427 | false |
xiangel/hue | desktop/core/ext-py/Django-1.6.10/tests/utils_tests/test_ipv6.py | 117 | 2847 | from __future__ import unicode_literals
from django.utils import unittest
from django.utils.ipv6 import is_valid_ipv6_address, clean_ipv6_address
class TestUtilsIPv6(unittest.TestCase):
def test_validates_correct_plain_address(self):
self.assertTrue(is_valid_ipv6_address('fe80::223:6cff:fe8a:2e8a'))
self.assertTrue(is_valid_ipv6_address('2a02::223:6cff:fe8a:2e8a'))
self.assertTrue(is_valid_ipv6_address('1::2:3:4:5:6:7'))
self.assertTrue(is_valid_ipv6_address('::'))
self.assertTrue(is_valid_ipv6_address('::a'))
self.assertTrue(is_valid_ipv6_address('2::'))
def test_validates_correct_with_v4mapping(self):
self.assertTrue(is_valid_ipv6_address('::ffff:254.42.16.14'))
self.assertTrue(is_valid_ipv6_address('::ffff:0a0a:0a0a'))
def test_validates_incorrect_plain_address(self):
self.assertFalse(is_valid_ipv6_address('foo'))
self.assertFalse(is_valid_ipv6_address('127.0.0.1'))
self.assertFalse(is_valid_ipv6_address('12345::'))
self.assertFalse(is_valid_ipv6_address('1::2:3::4'))
self.assertFalse(is_valid_ipv6_address('1::zzz'))
self.assertFalse(is_valid_ipv6_address('1::2:3:4:5:6:7:8'))
self.assertFalse(is_valid_ipv6_address('1:2'))
self.assertFalse(is_valid_ipv6_address('1:::2'))
def test_validates_incorrect_with_v4mapping(self):
self.assertFalse(is_valid_ipv6_address('::ffff:999.42.16.14'))
self.assertFalse(is_valid_ipv6_address('::ffff:zzzz:0a0a'))
# The ::1.2.3.4 format used to be valid but was deprecated
# in rfc4291 section 2.5.5.1
self.assertTrue(is_valid_ipv6_address('::254.42.16.14'))
self.assertTrue(is_valid_ipv6_address('::0a0a:0a0a'))
self.assertFalse(is_valid_ipv6_address('::999.42.16.14'))
self.assertFalse(is_valid_ipv6_address('::zzzz:0a0a'))
def test_cleanes_plain_address(self):
self.assertEqual(clean_ipv6_address('DEAD::0:BEEF'), 'dead::beef')
self.assertEqual(clean_ipv6_address('2001:000:a:0000:0:fe:fe:beef'), '2001:0:a::fe:fe:beef')
self.assertEqual(clean_ipv6_address('2001::a:0000:0:fe:fe:beef'), '2001:0:a::fe:fe:beef')
def test_cleanes_with_v4_mapping(self):
self.assertEqual(clean_ipv6_address('::ffff:0a0a:0a0a'), '::ffff:10.10.10.10')
self.assertEqual(clean_ipv6_address('::ffff:1234:1234'), '::ffff:18.52.18.52')
self.assertEqual(clean_ipv6_address('::ffff:18.52.18.52'), '::ffff:18.52.18.52')
def test_unpacks_ipv4(self):
self.assertEqual(clean_ipv6_address('::ffff:0a0a:0a0a', unpack_ipv4=True), '10.10.10.10')
self.assertEqual(clean_ipv6_address('::ffff:1234:1234', unpack_ipv4=True), '18.52.18.52')
self.assertEqual(clean_ipv6_address('::ffff:18.52.18.52', unpack_ipv4=True), '18.52.18.52')
| apache-2.0 | 1,071,698,958,201,193,100 | 52.716981 | 100 | 0.659642 | false |
nishigori/boto | boto/s3/bucketlogging.py | 153 | 3183 | # Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import xml.sax.saxutils
from boto.s3.acl import Grant
class BucketLogging(object):
def __init__(self, target=None, prefix=None, grants=None):
self.target = target
self.prefix = prefix
if grants is None:
self.grants = []
else:
self.grants = grants
def __repr__(self):
if self.target is None:
return "<BucketLoggingStatus: Disabled>"
grants = []
for g in self.grants:
if g.type == 'CanonicalUser':
u = g.display_name
elif g.type == 'Group':
u = g.uri
else:
u = g.email_address
grants.append("%s = %s" % (u, g.permission))
return "<BucketLoggingStatus: %s/%s (%s)>" % (self.target, self.prefix, ", ".join(grants))
def add_grant(self, grant):
self.grants.append(grant)
def startElement(self, name, attrs, connection):
if name == 'Grant':
self.grants.append(Grant())
return self.grants[-1]
else:
return None
def endElement(self, name, value, connection):
if name == 'TargetBucket':
self.target = value
elif name == 'TargetPrefix':
self.prefix = value
else:
setattr(self, name, value)
def to_xml(self):
# caller is responsible to encode to utf-8
s = u'<?xml version="1.0" encoding="UTF-8"?>'
s += u'<BucketLoggingStatus xmlns="http://doc.s3.amazonaws.com/2006-03-01">'
if self.target is not None:
s += u'<LoggingEnabled>'
s += u'<TargetBucket>%s</TargetBucket>' % self.target
prefix = self.prefix or ''
s += u'<TargetPrefix>%s</TargetPrefix>' % xml.sax.saxutils.escape(prefix)
if self.grants:
s += '<TargetGrants>'
for grant in self.grants:
s += grant.to_xml()
s += '</TargetGrants>'
s += u'</LoggingEnabled>'
s += u'</BucketLoggingStatus>'
return s
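# Usage sketch (illustrative bucket and prefix names): the class can be used on
# its own to build the XML body of a bucket logging configuration request:
#
#   logging_status = BucketLogging(target='my-log-bucket', prefix='access-logs/')
#   xml_body = logging_status.to_xml()   # utf-8 encoding is the caller's job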
| mit | -8,182,228,788,610,597,000 | 37.349398 | 98 | 0.605718 | false |
jbfavre/exaproxy | lib/exaproxy/util/cache.py | 4 | 1214 | # encoding: utf-8
try:
from collections import OrderedDict
except ImportError:
# support installable ordereddict module in older python versions
from ordereddict import OrderedDict
from time import time
class TimeCache (dict):
__default = object()
def __init__ (self,timeout):
self.timeout = timeout
self.last = None
self.time = OrderedDict()
dict.__init__(self)
def __setitem__ (self,key,value):
dict.__setitem__(self,key,value)
if self.timeout > 0:
self.time[key] = time()
def __delitem__ (self,key):
if key in self.time:
del self.time[key]
dict.__delitem__(self,key)
# Cpython implementation of dict.pop does not call __delitem__ - sigh !
def pop (self,key,default=__default):
if key in self.time:
del self.time[key]
if default is self.__default:
return dict.pop(self,key)
return dict.pop(self,key,default)
def expired (self,maximum):
expire = time() - self.timeout
if self.last:
k,t = self.last
if t > expire:
return
if k in self:
maximum -= 1
yield k
self.last = None
while self.time and maximum:
k,t = self.time.popitem(False)
if t > expire:
self.last = k,t
break
if k in self:
maximum -= 1
yield k
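# Usage sketch (illustrative names only): expired() yields keys whose entries
# are older than `timeout` seconds, and the caller is expected to evict them:
#
#   cache = TimeCache(timeout=60)
#   cache['peer'] = connection            # hypothetical cached value
#   for key in cache.expired(maximum=100):
#       del cache[key]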
| bsd-2-clause | 2,259,029,456,114,532,400 | 20.298246 | 72 | 0.658155 | false |
SmartElect/SmartElect | civil_registry/migrations/0001_initial.py | 1 | 5409 | # Generated by Django 2.2 on 2019-05-03 14:05
import civil_registry.models
from django.db import migrations, models
import libya_elections.libya_bread
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Citizen',
fields=[
('civil_registry_id', models.BigIntegerField(help_text='Uniquely identifies a person, even across changes of national ID', primary_key=True, serialize=False, verbose_name='civil registry id')),
('national_id', models.BigIntegerField(db_index=True, help_text="The citizen's 12-digit national ID number", unique=True, validators=[civil_registry.models.national_id_validator], verbose_name='national id')),
('fbr_number', models.CharField(help_text='Family Book Record Number', max_length=20, validators=[civil_registry.models.fbr_number_validator], verbose_name='family book record number')),
('first_name', models.CharField(blank=True, db_index=True, max_length=255, verbose_name='first name')),
('father_name', models.CharField(blank=True, max_length=255, verbose_name='father name')),
('grandfather_name', models.CharField(blank=True, max_length=255, verbose_name='grandfather name')),
('family_name', models.CharField(blank=True, db_index=True, max_length=255, verbose_name='family name')),
('mother_name', models.CharField(blank=True, max_length=255, verbose_name='mother name')),
('birth_date', models.DateField(db_index=True, verbose_name='birth date')),
('gender', models.IntegerField(choices=[(2, 'Female'), (1, 'Male')], db_index=True, verbose_name='gender')),
('address', models.CharField(blank=True, max_length=1024, verbose_name='address')),
('office_id', models.IntegerField(default=0, verbose_name='office id')),
('branch_id', models.IntegerField(default=0, verbose_name='branch id')),
('state', models.IntegerField(default=0, verbose_name='state')),
('missing', models.DateTimeField(blank=True, help_text='If set, this citizen was not in the last data dump.', null=True, verbose_name='missing')),
],
options={
'verbose_name': 'citizen',
'verbose_name_plural': 'citizens',
'ordering': ['national_id'],
'permissions': (('read_citizen', 'Can read citizens'), ('browse_citizen', 'Can browse citizens')),
},
bases=(libya_elections.libya_bread.BirthDateFormatterMixin, models.Model),
),
migrations.CreateModel(
name='CitizenMetadata',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('dump_time', models.DateTimeField()),
],
),
migrations.CreateModel(
name='DumpFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('filename', models.CharField(max_length=256)),
],
),
migrations.CreateModel(
name='TempCitizen',
fields=[
('civil_registry_id', models.BigIntegerField(help_text='Uniquely identifies a person, even across changes of national ID', primary_key=True, serialize=False, verbose_name='civil registry id')),
('national_id', models.BigIntegerField(db_index=True, help_text="The citizen's 12-digit national ID number", unique=True, validators=[civil_registry.models.national_id_validator], verbose_name='national id')),
('fbr_number', models.CharField(help_text='Family Book Record Number', max_length=20, validators=[civil_registry.models.fbr_number_validator], verbose_name='family book record number')),
('first_name', models.CharField(blank=True, db_index=True, max_length=255, verbose_name='first name')),
('father_name', models.CharField(blank=True, max_length=255, verbose_name='father name')),
('grandfather_name', models.CharField(blank=True, max_length=255, verbose_name='grandfather name')),
('family_name', models.CharField(blank=True, db_index=True, max_length=255, verbose_name='family name')),
('mother_name', models.CharField(blank=True, max_length=255, verbose_name='mother name')),
('birth_date', models.DateField(db_index=True, verbose_name='birth date')),
('gender', models.IntegerField(choices=[(2, 'Female'), (1, 'Male')], db_index=True, verbose_name='gender')),
('address', models.CharField(blank=True, max_length=1024, verbose_name='address')),
('office_id', models.IntegerField(default=0, verbose_name='office id')),
('branch_id', models.IntegerField(default=0, verbose_name='branch id')),
('state', models.IntegerField(default=0, verbose_name='state')),
('missing', models.DateTimeField(blank=True, help_text='If set, this citizen was not in the last data dump.', null=True, verbose_name='missing')),
],
options={
'abstract': False,
},
),
]
| apache-2.0 | -8,229,213,199,297,642,000 | 66.6125 | 225 | 0.615086 | false |
nirmeshk/oh-mainline | vendor/packages/Django/django/conf/__init__.py | 95 | 9136 | """
Settings and configuration for Django.
Values will be read from the module specified by the DJANGO_SETTINGS_MODULE environment
variable, and then from django.conf.global_settings; see the global settings file for
a list of all possible variables.
"""
import logging
import os
import sys
import time # Needed for Windows
import warnings
from django.conf import global_settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import LazyObject, empty
from django.utils import importlib
from django.utils import six
ENVIRONMENT_VARIABLE = "DJANGO_SETTINGS_MODULE"
class LazySettings(LazyObject):
"""
A lazy proxy for either global Django settings or a custom settings object.
The user can manually configure settings prior to using them. Otherwise,
Django uses the settings module pointed to by DJANGO_SETTINGS_MODULE.
"""
def _setup(self, name=None):
"""
Load the settings module pointed to by the environment variable. This
is used the first time we need any settings at all, if the user has not
previously configured the settings manually.
"""
try:
settings_module = os.environ[ENVIRONMENT_VARIABLE]
if not settings_module: # If it's set but is an empty string.
raise KeyError
except KeyError:
desc = ("setting %s" % name) if name else "settings"
raise ImproperlyConfigured(
"Requested %s, but settings are not configured. "
"You must either define the environment variable %s "
"or call settings.configure() before accessing settings."
% (desc, ENVIRONMENT_VARIABLE))
self._wrapped = Settings(settings_module)
self._configure_logging()
def __getattr__(self, name):
if self._wrapped is empty:
self._setup(name)
return getattr(self._wrapped, name)
def _configure_logging(self):
"""
Setup logging from LOGGING_CONFIG and LOGGING settings.
"""
if not sys.warnoptions:
try:
# Route warnings through python logging
logging.captureWarnings(True)
# Allow DeprecationWarnings through the warnings filters
warnings.simplefilter("default", DeprecationWarning)
except AttributeError:
# No captureWarnings on Python 2.6, DeprecationWarnings are on anyway
pass
if self.LOGGING_CONFIG:
from django.utils.log import DEFAULT_LOGGING
# First find the logging configuration function ...
logging_config_path, logging_config_func_name = self.LOGGING_CONFIG.rsplit('.', 1)
logging_config_module = importlib.import_module(logging_config_path)
logging_config_func = getattr(logging_config_module, logging_config_func_name)
logging_config_func(DEFAULT_LOGGING)
if self.LOGGING:
# Backwards-compatibility shim for #16288 fix
compat_patch_logging_config(self.LOGGING)
# ... then invoke it with the logging settings
logging_config_func(self.LOGGING)
def configure(self, default_settings=global_settings, **options):
"""
Called to manually configure the settings. The 'default_settings'
parameter sets where to retrieve any unspecified values from (its
argument must support attribute access (__getattr__)).
"""
if self._wrapped is not empty:
raise RuntimeError('Settings already configured.')
holder = UserSettingsHolder(default_settings)
for name, value in options.items():
setattr(holder, name, value)
self._wrapped = holder
self._configure_logging()
@property
def configured(self):
"""
Returns True if the settings have already been configured.
"""
return self._wrapped is not empty
class BaseSettings(object):
"""
Common logic for settings whether set by a module or by the user.
"""
def __setattr__(self, name, value):
if name in ("MEDIA_URL", "STATIC_URL") and value and not value.endswith('/'):
raise ImproperlyConfigured("If set, %s must end with a slash" % name)
elif name == "ALLOWED_INCLUDE_ROOTS" and isinstance(value, six.string_types):
raise ValueError("The ALLOWED_INCLUDE_ROOTS setting must be set "
"to a tuple, not a string.")
object.__setattr__(self, name, value)
class Settings(BaseSettings):
def __init__(self, settings_module):
# update this dict from global settings (but only for ALL_CAPS settings)
for setting in dir(global_settings):
if setting == setting.upper():
setattr(self, setting, getattr(global_settings, setting))
# store the settings module in case someone later cares
self.SETTINGS_MODULE = settings_module
try:
mod = importlib.import_module(self.SETTINGS_MODULE)
except ImportError as e:
raise ImportError("Could not import settings '%s' (Is it on sys.path?): %s" % (self.SETTINGS_MODULE, e))
# Settings that should be converted into tuples if they're mistakenly entered
# as strings.
tuple_settings = ("INSTALLED_APPS", "TEMPLATE_DIRS")
for setting in dir(mod):
if setting == setting.upper():
setting_value = getattr(mod, setting)
if setting in tuple_settings and \
isinstance(setting_value, six.string_types):
warnings.warn("The %s setting must be a tuple. Please fix your "
"settings, as auto-correction is now deprecated." % setting,
PendingDeprecationWarning)
setting_value = (setting_value,) # In case the user forgot the comma.
setattr(self, setting, setting_value)
if not self.SECRET_KEY:
raise ImproperlyConfigured("The SECRET_KEY setting must not be empty.")
if hasattr(time, 'tzset') and self.TIME_ZONE:
# When we can, attempt to validate the timezone. If we can't find
# this file, no check happens and it's harmless.
zoneinfo_root = '/usr/share/zoneinfo'
if (os.path.exists(zoneinfo_root) and not
os.path.exists(os.path.join(zoneinfo_root, *(self.TIME_ZONE.split('/'))))):
raise ValueError("Incorrect timezone setting: %s" % self.TIME_ZONE)
# Move the time zone info into os.environ. See ticket #2315 for why
# we don't do this unconditionally (breaks Windows).
os.environ['TZ'] = self.TIME_ZONE
time.tzset()
class UserSettingsHolder(BaseSettings):
"""
Holder for user configured settings.
"""
# SETTINGS_MODULE doesn't make much sense in the manually configured
# (standalone) case.
SETTINGS_MODULE = None
def __init__(self, default_settings):
"""
Requests for configuration variables not in this class are satisfied
from the module specified in default_settings (if possible).
"""
self.__dict__['_deleted'] = set()
self.default_settings = default_settings
def __getattr__(self, name):
if name in self._deleted:
raise AttributeError
return getattr(self.default_settings, name)
def __setattr__(self, name, value):
self._deleted.discard(name)
return super(UserSettingsHolder, self).__setattr__(name, value)
def __delattr__(self, name):
self._deleted.add(name)
return super(UserSettingsHolder, self).__delattr__(name)
def __dir__(self):
return list(self.__dict__) + dir(self.default_settings)
settings = LazySettings()
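# NOTE (editor): illustrative usage sketch, not part of Django's source.
# Standalone scripts can configure settings manually instead of defining
# DJANGO_SETTINGS_MODULE, using the configure() method defined above:
#
#   from django.conf import settings
#   settings.configure(DEBUG=True, TIME_ZONE='UTC')
#   settings.DEBUG        # -> True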
def compat_patch_logging_config(logging_config):
"""
Backwards-compatibility shim for #16288 fix. Takes initial value of
``LOGGING`` setting and patches it in-place (issuing deprecation warning)
if "mail_admins" logging handler is configured but has no filters.
"""
# Shim only if LOGGING["handlers"]["mail_admins"] exists,
# but has no "filters" key
if "filters" not in logging_config.get(
"handlers", {}).get(
"mail_admins", {"filters": []}):
warnings.warn(
"You have no filters defined on the 'mail_admins' logging "
"handler: adding implicit debug-false-only filter. "
"See http://docs.djangoproject.com/en/dev/releases/1.4/"
"#request-exceptions-are-now-always-logged",
DeprecationWarning)
filter_name = "require_debug_false"
filters = logging_config.setdefault("filters", {})
while filter_name in filters:
filter_name = filter_name + "_"
filters[filter_name] = {
"()": "django.utils.log.RequireDebugFalse",
}
logging_config["handlers"]["mail_admins"]["filters"] = [filter_name]
| agpl-3.0 | -2,055,214,328,506,800,600 | 38.2103 | 116 | 0.624124 | false |
epssy/hue | desktop/core/ext-py/pysaml2-2.4.0/example/idp2_repoze/idp_user.py | 33 | 1617 | USERS = {
"haho0032": {
"sn": "Hoerberg",
"givenName": "Hans",
"eduPersonScopedAffiliation": "[email protected]",
"eduPersonPrincipalName": "[email protected]",
"uid": "haho",
"eduPersonTargetedID": "one!for!all",
"c": "SE",
"o": "Example Co.",
"ou": "IT",
"initials": "P",
"schacHomeOrganization": "example.com",
"email": "[email protected]",
"displayName": "Hans Hoerberg",
"labeledURL": "http://www.example.com/haho My homepage",
"norEduPersonNIN": "SE199012315555"
},
"roland": {
"sn": "Hedberg",
"givenName": "Roland",
"eduPersonScopedAffiliation": "[email protected]",
"eduPersonPrincipalName": "[email protected]",
"uid": "rohe",
"eduPersonTargetedID": "one!for!all",
"c": "SE",
"o": "Example Co.",
"ou": "IT",
"initials": "P",
#"schacHomeOrganization": "example.com",
"email": "[email protected]",
"displayName": "P. Roland Hedberg",
"labeledURL": "http://www.example.com/rohe My homepage",
"norEduPersonNIN": "SE197001012222"
},
"babs": {
"surname": "Babs",
"givenName": "Ozzie",
"eduPersonAffiliation": "affiliate"
},
"upper": {
"surname": "Jeter",
"givenName": "Derek",
"eduPersonAffiliation": "affiliate"
},
}
EXTRA = {
"roland": {
"eduPersonEntitlement": "urn:mace:swamid.se:foo:bar",
"schacGender": "male",
"schacUserPresenceID": "skype:pepe.perez"
}
} | apache-2.0 | 4,938,187,637,861,631,000 | 28.962963 | 64 | 0.525665 | false |
moijes12/oh-mainline | vendor/packages/Django/django/middleware/common.py | 101 | 7433 | import hashlib
import logging
import re
from django.conf import settings
from django import http
from django.core.mail import mail_managers
from django.utils.http import urlquote
from django.utils import six
from django.core import urlresolvers
logger = logging.getLogger('django.request')
class CommonMiddleware(object):
"""
"Common" middleware for taking care of some basic operations:
- Forbids access to User-Agents in settings.DISALLOWED_USER_AGENTS
- URL rewriting: Based on the APPEND_SLASH and PREPEND_WWW settings,
this middleware appends missing slashes and/or prepends missing
"www."s.
- If APPEND_SLASH is set and the initial URL doesn't end with a
slash, and it is not found in urlpatterns, a new URL is formed by
appending a slash at the end. If this new URL is found in
urlpatterns, then an HTTP-redirect is returned to this new URL;
otherwise the initial URL is processed as usual.
- ETags: If the USE_ETAGS setting is set, ETags will be calculated from
the entire page content and Not Modified responses will be returned
appropriately.
"""
def process_request(self, request):
"""
Check for denied User-Agents and rewrite the URL based on
settings.APPEND_SLASH and settings.PREPEND_WWW
"""
# Check for denied User-Agents
if 'HTTP_USER_AGENT' in request.META:
for user_agent_regex in settings.DISALLOWED_USER_AGENTS:
if user_agent_regex.search(request.META['HTTP_USER_AGENT']):
logger.warning('Forbidden (User agent): %s', request.path,
extra={
'status_code': 403,
'request': request
}
)
return http.HttpResponseForbidden('<h1>Forbidden</h1>')
# Check for a redirect based on settings.APPEND_SLASH
# and settings.PREPEND_WWW
host = request.get_host()
old_url = [host, request.path]
new_url = old_url[:]
if (settings.PREPEND_WWW and old_url[0] and
not old_url[0].startswith('www.')):
new_url[0] = 'www.' + old_url[0]
# Append a slash if APPEND_SLASH is set and the URL doesn't have a
# trailing slash and there is no pattern for the current path
if settings.APPEND_SLASH and (not old_url[1].endswith('/')):
urlconf = getattr(request, 'urlconf', None)
if (not urlresolvers.is_valid_path(request.path_info, urlconf) and
urlresolvers.is_valid_path("%s/" % request.path_info, urlconf)):
new_url[1] = new_url[1] + '/'
if settings.DEBUG and request.method == 'POST':
raise RuntimeError((""
"You called this URL via POST, but the URL doesn't end "
"in a slash and you have APPEND_SLASH set. Django can't "
"redirect to the slash URL while maintaining POST data. "
"Change your form to point to %s%s (note the trailing "
"slash), or set APPEND_SLASH=False in your Django "
"settings.") % (new_url[0], new_url[1]))
if new_url == old_url:
# No redirects required.
return
if new_url[0]:
newurl = "%s://%s%s" % (
request.is_secure() and 'https' or 'http',
new_url[0], urlquote(new_url[1]))
else:
newurl = urlquote(new_url[1])
if request.META.get('QUERY_STRING', ''):
if six.PY3:
newurl += '?' + request.META['QUERY_STRING']
else:
# `query_string` is a bytestring. Appending it to the unicode
# string `newurl` will fail if it isn't ASCII-only. This isn't
# allowed; only broken software generates such query strings.
# Better drop the invalid query string than crash (#15152).
try:
newurl += '?' + request.META['QUERY_STRING'].decode()
except UnicodeDecodeError:
pass
return http.HttpResponsePermanentRedirect(newurl)
def process_response(self, request, response):
"Send broken link emails and calculate the Etag, if needed."
if response.status_code == 404:
if settings.SEND_BROKEN_LINK_EMAILS and not settings.DEBUG:
# If the referrer was from an internal link or a non-search-engine site,
# send a note to the managers.
domain = request.get_host()
referer = request.META.get('HTTP_REFERER', None)
is_internal = _is_internal_request(domain, referer)
path = request.get_full_path()
if referer and not _is_ignorable_404(path) and (is_internal or '?' not in referer):
ua = request.META.get('HTTP_USER_AGENT', '<none>')
ip = request.META.get('REMOTE_ADDR', '<none>')
mail_managers("Broken %slink on %s" % ((is_internal and 'INTERNAL ' or ''), domain),
"Referrer: %s\nRequested URL: %s\nUser agent: %s\nIP address: %s\n" \
% (referer, request.get_full_path(), ua, ip),
fail_silently=True)
return response
# Use ETags, if requested.
if settings.USE_ETAGS:
if response.has_header('ETag'):
etag = response['ETag']
elif response.streaming:
etag = None
else:
etag = '"%s"' % hashlib.md5(response.content).hexdigest()
if etag is not None:
if (200 <= response.status_code < 300
and request.META.get('HTTP_IF_NONE_MATCH') == etag):
cookies = response.cookies
response = http.HttpResponseNotModified()
response.cookies = cookies
else:
response['ETag'] = etag
return response
def _is_ignorable_404(uri):
"""
Returns True if a 404 at the given URL *shouldn't* notify the site managers.
"""
if getattr(settings, 'IGNORABLE_404_STARTS', ()):
import warnings
warnings.warn('The IGNORABLE_404_STARTS setting has been deprecated '
'in favor of IGNORABLE_404_URLS.', DeprecationWarning)
for start in settings.IGNORABLE_404_STARTS:
if uri.startswith(start):
return True
if getattr(settings, 'IGNORABLE_404_ENDS', ()):
import warnings
warnings.warn('The IGNORABLE_404_ENDS setting has been deprecated '
'in favor of IGNORABLE_404_URLS.', DeprecationWarning)
for end in settings.IGNORABLE_404_ENDS:
if uri.endswith(end):
return True
return any(pattern.search(uri) for pattern in settings.IGNORABLE_404_URLS)
def _is_internal_request(domain, referer):
"""
Returns true if the referring URL is the same domain as the current request.
"""
# Different subdomains are treated as different domains.
return referer is not None and re.match("^https?://%s/" % re.escape(domain), referer)
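# NOTE (editor): illustrative usage sketch, not part of Django's source.
# In this Django version the middleware is enabled by listing it in the
# project settings, e.g.:
#
#   MIDDLEWARE_CLASSES = (
#       'django.middleware.common.CommonMiddleware',
#       # ... other middleware ...
#   )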
| agpl-3.0 | 5,329,805,152,882,812,000 | 43.508982 | 104 | 0.56599 | false |
neumerance/deploy | .venv/lib/python2.7/site-packages/requests/structures.py | 67 | 3576 | # -*- coding: utf-8 -*-
"""
requests.structures
~~~~~~~~~~~~~~~~~~~
Data structures that power Requests.
"""
import os
import collections
from itertools import islice
class IteratorProxy(object):
"""docstring for IteratorProxy"""
def __init__(self, i):
self.i = i
# self.i = chain.from_iterable(i)
def __iter__(self):
return self.i
def __len__(self):
if hasattr(self.i, '__len__'):
return len(self.i)
if hasattr(self.i, 'len'):
return self.i.len
if hasattr(self.i, 'fileno'):
return os.fstat(self.i.fileno()).st_size
def read(self, n):
return "".join(islice(self.i, None, n))
class CaseInsensitiveDict(collections.MutableMapping):
"""
A case-insensitive ``dict``-like object.
Implements all methods and operations of
``collections.MutableMapping`` as well as dict's ``copy``. Also
provides ``lower_items``.
All keys are expected to be strings. The structure remembers the
case of the last key to be set, and ``iter(instance)``,
``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
will contain case-sensitive keys. However, querying and contains
testing is case insensitive:
cid = CaseInsensitiveDict()
cid['Accept'] = 'application/json'
cid['aCCEPT'] == 'application/json' # True
list(cid) == ['Accept'] # True
For example, ``headers['content-encoding']`` will return the
value of a ``'Content-Encoding'`` response header, regardless
of how the header name was originally stored.
If the constructor, ``.update``, or equality comparison
operations are given keys that have equal ``.lower()``s, the
behavior is undefined.
"""
def __init__(self, data=None, **kwargs):
self._store = dict()
if data is None:
data = {}
self.update(data, **kwargs)
def __setitem__(self, key, value):
# Use the lowercased key for lookups, but store the actual
# key alongside the value.
self._store[key.lower()] = (key, value)
def __getitem__(self, key):
return self._store[key.lower()][1]
def __delitem__(self, key):
del self._store[key.lower()]
def __iter__(self):
return (casedkey for casedkey, mappedvalue in self._store.values())
def __len__(self):
return len(self._store)
def lower_items(self):
"""Like iteritems(), but with all lowercase keys."""
return (
(lowerkey, keyval[1])
for (lowerkey, keyval)
in self._store.items()
)
def __eq__(self, other):
if isinstance(other, collections.Mapping):
other = CaseInsensitiveDict(other)
else:
return NotImplemented
# Compare insensitively
return dict(self.lower_items()) == dict(other.lower_items())
# Copy is required
def copy(self):
return CaseInsensitiveDict(self._store.values())
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, dict(self.items()))
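# NOTE (editor): illustrative usage sketch, not part of the original module.
# Lookups and membership tests ignore case, while iteration preserves the
# casing of the most recently set key (see the class docstring above):
#
#   cid = CaseInsensitiveDict()
#   cid['Accept'] = 'application/json'
#   cid['aCCEPT'] == 'application/json'   # True
#   list(cid) == ['Accept']               # True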
class LookupDict(dict):
"""Dictionary lookup object."""
def __init__(self, name=None):
self.name = name
super(LookupDict, self).__init__()
def __repr__(self):
return '<lookup \'%s\'>' % (self.name)
def __getitem__(self, key):
# We allow fall-through here, so values default to None
return self.__dict__.get(key, None)
def get(self, key, default=None):
return self.__dict__.get(key, default)
| apache-2.0 | 441,589,889,107,711,940 | 26.9375 | 75 | 0.582494 | false |
xq262144/hue | desktop/core/ext-py/pysaml2-2.4.0/src/saml2/metadata.py | 31 | 24394 | #!/usr/bin/env python
from saml2.md import AttributeProfile
from saml2.sigver import security_context
from saml2.config import Config
from saml2.validate import valid_instance
from saml2.time_util import in_a_while
from saml2.extension import mdui
from saml2.extension import idpdisc
from saml2.extension import shibmd
from saml2.extension import mdattr
from saml2.saml import NAME_FORMAT_URI
from saml2.saml import AttributeValue
from saml2.saml import Attribute
from saml2.attribute_converter import from_local_name
from saml2 import md, SAMLError
from saml2 import BINDING_HTTP_POST
from saml2 import BINDING_HTTP_REDIRECT
from saml2 import BINDING_SOAP
from saml2 import samlp
from saml2 import class_name
import xmldsig as ds
from saml2.sigver import pre_signature_part
from saml2.s_utils import factory
from saml2.s_utils import rec_factory
from saml2.s_utils import sid
__author__ = 'rolandh'
NSPAIR = {
"saml2p": "urn:oasis:names:tc:SAML:2.0:protocol",
"saml2": "urn:oasis:names:tc:SAML:2.0:assertion",
"soap11": "http://schemas.xmlsoap.org/soap/envelope/",
"meta": "urn:oasis:names:tc:SAML:2.0:metadata",
"xsi": "http://www.w3.org/2001/XMLSchema-instance",
"ds": "http://www.w3.org/2000/09/xmldsig#",
"shibmd": "urn:mace:shibboleth:metadata:1.0",
"md": "urn:oasis:names:tc:SAML:2.0:metadata",
}
DEFAULTS = {
"want_assertions_signed": "true",
"authn_requests_signed": "false",
"want_authn_requests_signed": "true",
"want_authn_requests_only_with_valid_cert": "false",
}
ORG_ATTR_TRANSL = {
"organization_name": ("name", md.OrganizationName),
"organization_display_name": ("display_name", md.OrganizationDisplayName),
"organization_url": ("url", md.OrganizationURL)
}
MDNS = '"urn:oasis:names:tc:SAML:2.0:metadata"'
XMLNSXS = " xmlns:xs=\"http://www.w3.org/2001/XMLSchema\""
def metadata_tostring_fix(desc, nspair, xmlstring=""):
if not xmlstring:
xmlstring = desc.to_string(nspair)
if "\"xs:string\"" in xmlstring and XMLNSXS not in xmlstring:
xmlstring = xmlstring.replace(MDNS, MDNS+XMLNSXS)
return xmlstring
def create_metadata_string(configfile, config, valid, cert, keyfile, mid, name,
sign):
valid_for = 0
nspair = {"xs": "http://www.w3.org/2001/XMLSchema"}
#paths = [".", "/opt/local/bin"]
if valid:
valid_for = int(valid) # Hours
eds = []
if config is not None:
eds.append(entity_descriptor(config))
else:
if configfile.endswith(".py"):
configfile = configfile[:-3]
config = Config().load_file(configfile, metadata_construction=True)
eds.append(entity_descriptor(config))
conf = Config()
conf.key_file = keyfile
conf.cert_file = cert
conf.debug = 1
conf.xmlsec_binary = config.xmlsec_binary
secc = security_context(conf)
if mid:
desc = entities_descriptor(eds, valid_for, name, mid,
sign, secc)
valid_instance(desc)
return metadata_tostring_fix(desc, nspair)
else:
eid = eds[0]
if sign:
eid, xmldoc = sign_entity_descriptor(eid, mid, secc)
else:
xmldoc = None
valid_instance(eid)
xmldoc = metadata_tostring_fix(eid, nspair, xmldoc)
return xmldoc
def _localized_name(val, klass):
"""If no language is defined 'en' is the default"""
try:
(text, lang) = val
return klass(text=text, lang=lang)
except ValueError:
return klass(text=val, lang="en")
def do_organization_info(ava):
"""
Description of an organization in the configuration is
a dictionary of keys and values, where the values might be tuples::
"organization": {
"name": ("AB Exempel", "se"),
"display_name": ("AB Exempel", "se"),
"url": "http://www.example.org"
}
"""
if ava is None:
return None
org = md.Organization()
for dkey, (ckey, klass) in ORG_ATTR_TRANSL.items():
if ckey not in ava:
continue
if isinstance(ava[ckey], basestring):
setattr(org, dkey, [_localized_name(ava[ckey], klass)])
elif isinstance(ava[ckey], list):
setattr(org, dkey,
[_localized_name(n, klass) for n in ava[ckey]])
else:
setattr(org, dkey, [_localized_name(ava[ckey], klass)])
return org
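# NOTE (editor): illustrative usage sketch, not part of the original module.
# Feeding the configuration dictionary shown in the docstring above yields an
# md.Organization element with localized name/display_name/url children:
#
#   org = do_organization_info({
#       "name": ("AB Exempel", "se"),
#       "display_name": ("AB Exempel", "se"),
#       "url": "http://www.example.org",
#   })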
def do_contact_person_info(lava):
""" Creates a ContactPerson instance from configuration information"""
cps = []
if lava is None:
return cps
contact_person = md.ContactPerson
for ava in lava:
cper = md.ContactPerson()
for (key, classpec) in contact_person.c_children.values():
try:
value = ava[key]
data = []
if isinstance(classpec, list):
# What if value is not a list ?
if isinstance(value, basestring):
data = [classpec[0](text=value)]
else:
for val in value:
data.append(classpec[0](text=val))
else:
data = classpec(text=value)
setattr(cper, key, data)
except KeyError:
pass
for (prop, classpec, _) in contact_person.c_attributes.values():
try:
# should do a check for valid value
setattr(cper, prop, ava[prop])
except KeyError:
pass
# ContactType must have a value
typ = getattr(cper, "contact_type")
if not typ:
setattr(cper, "contact_type", "technical")
cps.append(cper)
return cps
def do_key_descriptor(cert, use="both"):
if use == "both":
return [
md.KeyDescriptor(
key_info=ds.KeyInfo(
x509_data=ds.X509Data(
x509_certificate=ds.X509Certificate(text=cert)
)
),
use="encryption"
),
md.KeyDescriptor(
key_info=ds.KeyInfo(
x509_data=ds.X509Data(
x509_certificate=ds.X509Certificate(text=cert)
)
),
use="signing"
)
]
elif use in ["signing", "encryption"]:
return md.KeyDescriptor(
key_info=ds.KeyInfo(
x509_data=ds.X509Data(
x509_certificate=ds.X509Certificate(text=cert)
)
),
use=use
)
else:
return md.KeyDescriptor(
key_info=ds.KeyInfo(
x509_data=ds.X509Data(
x509_certificate=ds.X509Certificate(text=cert)
)
)
)
def do_requested_attribute(attributes, acs, is_required="false"):
lista = []
for attr in attributes:
attr = from_local_name(acs, attr, NAME_FORMAT_URI)
args = {}
for key in attr.keyswv():
args[key] = getattr(attr, key)
args["is_required"] = is_required
args["name_format"] = NAME_FORMAT_URI
lista.append(md.RequestedAttribute(**args))
return lista
def do_uiinfo(_uiinfo):
uii = mdui.UIInfo()
for attr in ['display_name', 'description', "information_url",
'privacy_statement_url']:
try:
val = _uiinfo[attr]
except KeyError:
continue
aclass = uii.child_class(attr)
inst = getattr(uii, attr)
if isinstance(val, basestring):
ainst = aclass(text=val)
inst.append(ainst)
elif isinstance(val, dict):
ainst = aclass()
ainst.text = val["text"]
ainst.lang = val["lang"]
inst.append(ainst)
else:
for value in val:
if isinstance(value, basestring):
ainst = aclass(text=value)
inst.append(ainst)
elif isinstance(value, dict):
ainst = aclass()
ainst.text = value["text"]
ainst.lang = value["lang"]
inst.append(ainst)
try:
_attr = "logo"
val = _uiinfo[_attr]
inst = getattr(uii, _attr)
# dictionary or list of dictionaries
if isinstance(val, dict):
logo = mdui.Logo()
for attr, value in val.items():
if attr in logo.keys():
setattr(logo, attr, value)
inst.append(logo)
elif isinstance(val, list):
for logga in val:
if not isinstance(logga, dict):
raise SAMLError("Configuration error !!")
logo = mdui.Logo()
for attr, value in logga.items():
if attr in logo.keys():
setattr(logo, attr, value)
inst.append(logo)
except KeyError:
pass
try:
_attr = "keywords"
val = _uiinfo[_attr]
inst = getattr(uii, _attr)
# list of basestrings, dictionary or list of dictionaries
if isinstance(val, list):
for value in val:
keyw = mdui.Keywords()
if isinstance(value, basestring):
keyw.text = value
elif isinstance(value, dict):
keyw.text = " ".join(value["text"])
try:
keyw.lang = value["lang"]
except KeyError:
pass
else:
raise SAMLError("Configuration error: ui_info keywords")
inst.append(keyw)
elif isinstance(val, dict):
keyw = mdui.Keywords()
keyw.text = " ".join(val["text"])
try:
keyw.lang = val["lang"]
except KeyError:
pass
inst.append(keyw)
else:
raise SAMLError("Configuration Error: ui_info keywords")
except KeyError:
pass
return uii
def do_idpdisc(discovery_response):
return idpdisc.DiscoveryResponse(index="0", location=discovery_response,
binding=idpdisc.NAMESPACE)
ENDPOINTS = {
"sp": {
"artifact_resolution_service": (md.ArtifactResolutionService, True),
"single_logout_service": (md.SingleLogoutService, False),
"manage_name_id_service": (md.ManageNameIDService, False),
"assertion_consumer_service": (md.AssertionConsumerService, True),
},
"idp": {
"artifact_resolution_service": (md.ArtifactResolutionService, True),
"single_logout_service": (md.SingleLogoutService, False),
"manage_name_id_service": (md.ManageNameIDService, False),
"single_sign_on_service": (md.SingleSignOnService, False),
"name_id_mapping_service": (md.NameIDMappingService, False),
"assertion_id_request_service": (md.AssertionIDRequestService, False),
},
"aa": {
"artifact_resolution_service": (md.ArtifactResolutionService, True),
"single_logout_service": (md.SingleLogoutService, False),
"manage_name_id_service": (md.ManageNameIDService, False),
"assertion_id_request_service": (md.AssertionIDRequestService, False),
"attribute_service": (md.AttributeService, False)
},
"pdp": {
"authz_service": (md.AuthzService, True)
},
"aq": {
"authn_query_service": (md.AuthnQueryService, True)
}
}
ENDPOINT_EXT = {
"sp": {
"discovery_response": (idpdisc.DiscoveryResponse, True)
}
}
DEFAULT_BINDING = {
"assertion_consumer_service": BINDING_HTTP_POST,
"single_sign_on_service": BINDING_HTTP_REDIRECT,
"single_logout_service": BINDING_HTTP_POST,
"attribute_service": BINDING_SOAP,
"artifact_resolution_service": BINDING_SOAP,
"authn_query_service": BINDING_SOAP
}
def do_extensions(mname, item):
try:
_mod = __import__("saml2.extension.%s" % mname, globals(), locals(),
mname)
except ImportError:
return None
else:
res = []
for _cname, ava in item.items():
cls = getattr(_mod, _cname)
res.append(rec_factory(cls, **ava))
return res
def _do_nameid_format(cls, conf, typ):
namef = conf.getattr("name_id_format", typ)
if namef:
if isinstance(namef, basestring):
ids = [md.NameIDFormat(namef)]
else:
ids = [md.NameIDFormat(text=form) for form in namef]
setattr(cls, "name_id_format", ids)
def do_endpoints(conf, endpoints):
service = {}
for endpoint, (eclass, indexed) in endpoints.items():
try:
servs = []
i = 1
for args in conf[endpoint]:
if isinstance(args, basestring): # Assume it's the location
args = {"location": args,
"binding": DEFAULT_BINDING[endpoint]}
elif isinstance(args, tuple) or isinstance(args, list):
if len(args) == 2: # (location, binding)
args = {"location": args[0], "binding": args[1]}
elif len(args) == 3: # (location, binding, index)
args = {"location": args[0], "binding": args[1],
"index": args[2]}
if indexed:
if "index" not in args:
args["index"] = "%d" % i
i += 1
else:
try:
int(args["index"])
except ValueError:
raise
else:
args["index"] = str(args["index"])
servs.append(factory(eclass, **args))
service[endpoint] = servs
except KeyError:
pass
return service
DEFAULT = {
"want_assertions_signed": "true",
"authn_requests_signed": "false",
"want_authn_requests_signed": "false",
#"want_authn_requests_only_with_valid_cert": "false",
}
def do_attribute_consuming_service(conf, spsso):
service_description = service_name = None
requested_attributes = []
acs = conf.attribute_converters
req = conf.getattr("required_attributes", "sp")
if req:
requested_attributes.extend(do_requested_attribute(req, acs,
is_required="true"))
opt = conf.getattr("optional_attributes", "sp")
if opt:
requested_attributes.extend(do_requested_attribute(opt, acs))
try:
if conf.description:
try:
(text, lang) = conf.description
except ValueError:
text = conf.description
lang = "en"
service_description = [md.ServiceDescription(text=text, lang=lang)]
except KeyError:
pass
try:
if conf.name:
try:
(text, lang) = conf.name
except ValueError:
text = conf.name
lang = "en"
service_name = [md.ServiceName(text=text, lang=lang)]
except KeyError:
pass
# Must be both requested attributes and service name
if requested_attributes:
if not service_name:
service_name = [md.ServiceName(text="", lang="en")]
ac_serv = md.AttributeConsumingService(
index="1", service_name=service_name,
requested_attribute=requested_attributes)
if service_description:
ac_serv.service_description = service_description
spsso.attribute_consuming_service = [ac_serv]
def do_spsso_descriptor(conf, cert=None):
spsso = md.SPSSODescriptor()
spsso.protocol_support_enumeration = samlp.NAMESPACE
exts = conf.getattr("extensions", "sp")
if exts:
if spsso.extensions is None:
spsso.extensions = md.Extensions()
for key, val in exts.items():
_ext = do_extensions(key, val)
if _ext:
for _e in _ext:
spsso.extensions.add_extension_element(_e)
endps = conf.getattr("endpoints", "sp")
if endps:
for (endpoint, instlist) in do_endpoints(endps,
ENDPOINTS["sp"]).items():
setattr(spsso, endpoint, instlist)
ext = do_endpoints(endps, ENDPOINT_EXT["sp"])
if ext:
if spsso.extensions is None:
spsso.extensions = md.Extensions()
for vals in ext.values():
for val in vals:
spsso.extensions.add_extension_element(val)
ui_info = conf.getattr("ui_info", "sp")
if ui_info:
if spsso.extensions is None:
spsso.extensions = md.Extensions()
spsso.extensions.add_extension_element(do_uiinfo(ui_info))
if cert:
encryption_type = conf.encryption_type
spsso.key_descriptor = do_key_descriptor(cert, encryption_type)
for key in ["want_assertions_signed", "authn_requests_signed"]:
try:
val = conf.getattr(key, "sp")
if val is None:
setattr(spsso, key, DEFAULT[key]) # default ?!
else:
strval = "{0:>s}".format(str(val))
setattr(spsso, key, strval.lower())
except KeyError:
setattr(spsso, key, DEFAULTS[key])
do_attribute_consuming_service(conf, spsso)
_do_nameid_format(spsso, conf, "sp")
return spsso
def do_idpsso_descriptor(conf, cert=None):
idpsso = md.IDPSSODescriptor()
idpsso.protocol_support_enumeration = samlp.NAMESPACE
endps = conf.getattr("endpoints", "idp")
if endps:
for (endpoint, instlist) in do_endpoints(endps,
ENDPOINTS["idp"]).items():
setattr(idpsso, endpoint, instlist)
_do_nameid_format(idpsso, conf, "idp")
scopes = conf.getattr("scope", "idp")
if scopes:
if idpsso.extensions is None:
idpsso.extensions = md.Extensions()
for scope in scopes:
mdscope = shibmd.Scope()
mdscope.text = scope
# unless scope contains '*'/'+'/'?' assume non regexp ?
mdscope.regexp = "false"
idpsso.extensions.add_extension_element(mdscope)
ui_info = conf.getattr("ui_info", "idp")
if ui_info:
if idpsso.extensions is None:
idpsso.extensions = md.Extensions()
idpsso.extensions.add_extension_element(do_uiinfo(ui_info))
if cert:
idpsso.key_descriptor = do_key_descriptor(cert)
for key in ["want_authn_requests_signed"]:
#"want_authn_requests_only_with_valid_cert"]:
try:
val = conf.getattr(key, "idp")
if val is None:
setattr(idpsso, key, DEFAULT[key])
else:
setattr(idpsso, key, ("%s" % val).lower())
except KeyError:
setattr(idpsso, key, DEFAULTS[key])
return idpsso
def do_aa_descriptor(conf, cert):
aad = md.AttributeAuthorityDescriptor()
aad.protocol_support_enumeration = samlp.NAMESPACE
endps = conf.getattr("endpoints", "aa")
if endps:
for (endpoint, instlist) in do_endpoints(endps,
ENDPOINTS["aa"]).items():
setattr(aad, endpoint, instlist)
_do_nameid_format(aad, conf, "aa")
if cert:
aad.key_descriptor = do_key_descriptor(cert)
attributes = conf.getattr("attribute", "aa")
if attributes:
for attribute in attributes:
aad.attribute.append(Attribute(text=attribute))
attribute_profiles = conf.getattr("attribute_profile", "aa")
if attribute_profiles:
for attribute_profile in attribute_profiles:
aad.attribute.append(AttributeProfile(text=attribute_profile))
return aad
def do_aq_descriptor(conf, cert):
aqs = md.AuthnAuthorityDescriptor()
aqs.protocol_support_enumeration = samlp.NAMESPACE
endps = conf.getattr("endpoints", "aq")
if endps:
for (endpoint, instlist) in do_endpoints(endps,
ENDPOINTS["aq"]).items():
setattr(aqs, endpoint, instlist)
_do_nameid_format(aqs, conf, "aq")
if cert:
aqs.key_descriptor = do_key_descriptor(cert)
return aqs
def do_pdp_descriptor(conf, cert):
""" Create a Policy Decision Point descriptor """
pdp = md.PDPDescriptor()
pdp.protocol_support_enumeration = samlp.NAMESPACE
endps = conf.getattr("endpoints", "pdp")
if endps:
for (endpoint, instlist) in do_endpoints(endps,
ENDPOINTS["pdp"]).items():
setattr(pdp, endpoint, instlist)
_do_nameid_format(pdp, conf, "pdp")
if cert:
pdp.key_descriptor = do_key_descriptor(cert)
return pdp
def entity_descriptor(confd):
mycert = "".join(open(confd.cert_file).readlines()[1:-1])
entd = md.EntityDescriptor()
entd.entity_id = confd.entityid
if confd.valid_for:
entd.valid_until = in_a_while(hours=int(confd.valid_for))
if confd.organization is not None:
entd.organization = do_organization_info(confd.organization)
if confd.contact_person is not None:
entd.contact_person = do_contact_person_info(confd.contact_person)
if confd.entity_category:
entd.extensions = md.Extensions()
ava = [AttributeValue(text=c) for c in confd.entity_category]
attr = Attribute(attribute_value=ava,
name="http://macedir.org/entity-category")
item = mdattr.EntityAttributes(attribute=attr)
entd.extensions.add_extension_element(item)
serves = confd.serves
if not serves:
raise SAMLError(
'No service type ("sp","idp","aa") provided in the configuration')
if "sp" in serves:
confd.context = "sp"
entd.spsso_descriptor = do_spsso_descriptor(confd, mycert)
if "idp" in serves:
confd.context = "idp"
entd.idpsso_descriptor = do_idpsso_descriptor(confd, mycert)
if "aa" in serves:
confd.context = "aa"
entd.attribute_authority_descriptor = do_aa_descriptor(confd, mycert)
if "pdp" in serves:
confd.context = "pdp"
entd.pdp_descriptor = do_pdp_descriptor(confd, mycert)
if "aq" in serves:
confd.context = "aq"
entd.authn_authority_descriptor = do_aq_descriptor(confd, mycert)
return entd
def entities_descriptor(eds, valid_for, name, ident, sign, secc):
entities = md.EntitiesDescriptor(entity_descriptor=eds)
if valid_for:
entities.valid_until = in_a_while(hours=valid_for)
if name:
entities.name = name
if ident:
entities.id = ident
if sign:
if not ident:
ident = sid()
if not secc.key_file:
raise SAMLError("If you want to do signing you should define " +
"a key to sign with")
if not secc.my_cert:
raise SAMLError("If you want to do signing you should define " +
"where your public key are")
entities.signature = pre_signature_part(ident, secc.my_cert, 1)
entities.id = ident
xmldoc = secc.sign_statement("%s" % entities, class_name(entities))
entities = md.entities_descriptor_from_string(xmldoc)
else:
xmldoc = None
return entities, xmldoc
def sign_entity_descriptor(edesc, ident, secc):
"""
:param edesc: EntityDescriptor instance
:param ident: EntityDescriptor identifier
:param secc: Security context
:return: Tuple with EntityDescriptor instance and Signed XML document
"""
if not ident:
ident = sid()
edesc.signature = pre_signature_part(ident, secc.my_cert, 1)
edesc.id = ident
xmldoc = secc.sign_statement("%s" % edesc, class_name(edesc))
edesc = md.entity_descriptor_from_string(xmldoc)
return edesc, xmldoc | apache-2.0 | 977,239,929,829,353,700 | 30.805737 | 79 | 0.563745 | false |
mysz/versionner | versionner/config.py | 2 | 5820 | """Configuration-related classes for versionner"""
import codecs
import configparser
import pathlib
import re
import sys
from versionner import defaults
ENV_VERSIONNER_PROJECT_CONFIG_FILE = 'VERSIONNER_PROJECT_CONFIG_FILE'
# pylint: disable=too-many-instance-attributes,too-few-public-methods
class FileConfig:
"""Single project file configuration"""
def __init__(self, filename, cfg):
"""
Evaluate single file configuration
:param filename:
:param cfg:
"""
self.filename = filename
self.file = pathlib.Path(filename)
self.enabled = cfg.getboolean('enabled', True)
self.search = cfg['search']
self.replace = cfg['replace']
self.date_format = cfg.get('date_format', None)
self.match = cfg.get('match', 'line')
self.search_flags = 0
self.encoding = cfg.get('encoding', 'utf-8')
search_flags = cfg.get('search_flags', '')
if search_flags:
search_flags = re.split(r'\s*,\s*', search_flags)
for search_flag in search_flags:
self.search_flags |= getattr(re, search_flag.upper())
def validate(self):
"""Validate current file configuration
:raise ValueError:
"""
if not self.file.exists():
raise ValueError("File \"%s\" doesn't exists")
if not self.search:
raise ValueError("Search cannot be empty")
if not self.replace:
raise ValueError("Replace cannot be empty")
if self.match not in ('file', 'line'):
raise ValueError("Match must be one of: file, line")
try:
codecs.lookup(self.encoding)
except LookupError:
raise ValueError("Unknown encoding: \"%s\"" % self.encoding)
def __repr__(self):
return '<FileConfig(%s)>' % self.filename
class Config:
"""Configuration"""
__slots__ = (
'command',
'commit',
'date_format',
'default_init_version',
'default_increase_value',
'files',
'value',
'up_part',
'vcs_commit_message',
'vcs_engine',
'vcs_tag_params',
'verbose',
'version_file',
)
def __init__(self, files=None):
"""Evaluate configuration
:return:
"""
self.command = None
self.commit = False
self.date_format = defaults.DEFAULT_DATE_FORMAT
self.default_init_version = defaults.DEFAULT_INIT_VERSION
self.default_increase_value = defaults.DEFAULT_INCREASE_VALUE
self.files = []
self.value = None
self.up_part = defaults.DEFAULT_UP_PART
self.vcs_commit_message = defaults.DEFAULT_VCS_COMMIT_MESSAGE
self.vcs_engine = 'git'
self.vcs_tag_params = []
self.verbose = False
self.version_file = defaults.DEFAULT_VERSION_FILE
if files:
self._parse_config_file(files)
def _parse_config_file(self, cfg_files):
"""Parse config file (ini) and set properties
:return:
"""
cfg_handler = configparser.ConfigParser(interpolation=None)
if not cfg_handler.read(map(str, cfg_files)):
return
self._parse_global_section(cfg_handler)
self._parse_vcs_section(cfg_handler)
self._parse_file_section(cfg_handler)
def _parse_global_section(self, cfg_handler):
"""Parse global ([versionner]) section
:param cfg_handler:
:return:
"""
# global configuration
if 'versionner' in cfg_handler:
cfg = cfg_handler['versionner']
if 'file' in cfg:
self.version_file = cfg['file']
if 'date_format' in cfg:
self.date_format = cfg['date_format']
if 'up_part' in cfg:
self.up_part = cfg['up_part']
if 'default_init_version' in cfg:
self.default_init_version = cfg['default_init_version']
if 'default_increase_value' in cfg:
self.default_increase_value = cfg.getint('default_increase_value')
def _parse_vcs_section(self, cfg_handler):
"""Parse [vcs] section
:param cfg_handler:
:return:
"""
if 'vcs' in cfg_handler:
cfg = cfg_handler['vcs']
if 'engine' in cfg:
self.vcs_engine = cfg['engine']
if 'tag_params' in cfg:
self.vcs_tag_params = list(filter(None, cfg['tag_params'].split("\n")))
if 'commit_message' in cfg:
self.vcs_commit_message = cfg['commit_message']
def _parse_file_section(self, cfg_handler):
"""Parse [file:*] sections
:param cfg_handler:
:return:
"""
_number_rxp = re.compile(r'^\d+:(.)')
# project files configuration
for section in cfg_handler.sections():
if section.startswith('file:'):
path = section[5:]
path = _number_rxp.sub(r'\1', path)
project_file = FileConfig(path, cfg_handler[section])
if not project_file.date_format:
project_file.date_format = self.date_format
if project_file.enabled:
try:
project_file.validate()
except ValueError as exc:
print("Incorrect configuration for file \"%s\": %s" % (project_file.filename, exc.args[0]), file=sys.stderr)
else:
self.files.append(project_file)
def __repr__(self):
ret = '<' + self.__class__.__name__ + ': '
ret += ', '.join('%s=%r' % (name, getattr(self, name)) for name in self.__slots__)
return ret
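# NOTE (editor): illustrative usage sketch, not part of the original module;
# the configuration file name below is hypothetical.
#
#   cfg = Config()                                   # built-in defaults only
#   cfg = Config(files=[pathlib.Path('versionner.ini')])  # also merge INI files
#   cfg.version_file, cfg.up_part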
| mit | 6,680,834,695,407,195,000 | 30.122995 | 132 | 0.549828 | false |
jimmymunoz/jeuxdemots | public/package/node_modules/node-gyp/gyp/pylib/gyp/common.py | 1292 | 20063 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import with_statement
import collections
import errno
import filecmp
import os.path
import re
import tempfile
import sys
# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
# among other "problems".
class memoize(object):
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
try:
return self.cache[args]
except KeyError:
result = self.func(*args)
self.cache[args] = result
return result
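# NOTE (editor): illustrative usage sketch, not part of the original module.
# The decorator works for functions whose positional arguments are hashable:
#
#   @memoize
#   def Fibonacci(n):
#     return n if n < 2 else Fibonacci(n - 1) + Fibonacci(n - 2)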
class GypError(Exception):
"""Error class representing an error, which is to be presented
to the user. The main entry point will catch and display this.
"""
pass
def ExceptionAppend(e, msg):
"""Append a message to the given exception's message."""
if not e.args:
e.args = (msg,)
elif len(e.args) == 1:
e.args = (str(e.args[0]) + ' ' + msg,)
else:
e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
def FindQualifiedTargets(target, qualified_list):
"""
Given a list of qualified targets, return the qualified targets for the
specified |target|.
"""
return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]
def ParseQualifiedTarget(target):
# Splits a qualified target into a build file, target name and toolset.
# NOTE: rsplit is used to disambiguate the Windows drive letter separator.
target_split = target.rsplit(':', 1)
if len(target_split) == 2:
[build_file, target] = target_split
else:
build_file = None
target_split = target.rsplit('#', 1)
if len(target_split) == 2:
[target, toolset] = target_split
else:
toolset = None
return [build_file, target, toolset]
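# NOTE (editor): illustrative examples, not part of the original module.
#
#   ParseQualifiedTarget('path/to/chrome.gyp:base#host')
#   # -> ['path/to/chrome.gyp', 'base', 'host']
#   ParseQualifiedTarget('base')
#   # -> [None, 'base', None]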
def ResolveTarget(build_file, target, toolset):
# This function resolves a target into a canonical form:
# - a fully defined build file, either absolute or relative to the current
# directory
# - a target name
# - a toolset
#
# build_file is the file relative to which 'target' is defined.
# target is the qualified target.
# toolset is the default toolset for that target.
[parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)
if parsed_build_file:
if build_file:
# If a relative path, parsed_build_file is relative to the directory
# containing build_file. If build_file is not in the current directory,
# parsed_build_file is not a usable path as-is. Resolve it by
# interpreting it as relative to build_file. If parsed_build_file is
# absolute, it is usable as a path regardless of the current directory,
# and os.path.join will return it as-is.
build_file = os.path.normpath(os.path.join(os.path.dirname(build_file),
parsed_build_file))
# Further (to handle cases like ../cwd), make it relative to cwd)
if not os.path.isabs(build_file):
build_file = RelativePath(build_file, '.')
else:
build_file = parsed_build_file
if parsed_toolset:
toolset = parsed_toolset
return [build_file, target, toolset]
def BuildFile(fully_qualified_target):
# Extracts the build file from the fully qualified target.
return ParseQualifiedTarget(fully_qualified_target)[0]
def GetEnvironFallback(var_list, default):
"""Look up a key in the environment, with fallback to secondary keys
and finally falling back to a default value."""
for var in var_list:
if var in os.environ:
return os.environ[var]
return default
def QualifiedTarget(build_file, target, toolset):
# "Qualified" means the file that a target was defined in and the target
# name, separated by a colon, suffixed by a # and the toolset name:
# /path/to/file.gyp:target_name#toolset
fully_qualified = build_file + ':' + target
if toolset:
fully_qualified = fully_qualified + '#' + toolset
return fully_qualified
@memoize
def RelativePath(path, relative_to, follow_path_symlink=True):
# Assuming both |path| and |relative_to| are relative to the current
# directory, returns a relative path that identifies path relative to
# relative_to.
# If |follow_symlink_path| is true (default) and |path| is a symlink, then
# this method returns a path to the real file represented by |path|. If it is
# false, this method returns a path to the symlink. If |path| is not a
# symlink, this option has no effect.
# Convert to normalized (and therefore absolute paths).
if follow_path_symlink:
path = os.path.realpath(path)
else:
path = os.path.abspath(path)
relative_to = os.path.realpath(relative_to)
# On Windows, we can't create a relative path to a different drive, so just
# use the absolute path.
if sys.platform == 'win32':
if (os.path.splitdrive(path)[0].lower() !=
os.path.splitdrive(relative_to)[0].lower()):
return path
# Split the paths into components.
path_split = path.split(os.path.sep)
relative_to_split = relative_to.split(os.path.sep)
# Determine how much of the prefix the two paths share.
prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
# Put enough ".." components to back up out of relative_to to the common
# prefix, and then append the part of path_split after the common prefix.
relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \
path_split[prefix_len:]
if len(relative_split) == 0:
# The paths were the same.
return ''
# Turn it back into a string and we're done.
return os.path.join(*relative_split)
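# NOTE (editor): illustrative example, not part of the original module
# (assuming neither path involves symlinks, so realpath leaves them unchanged):
#
#   RelativePath('/usr/lib/python', '/usr/bin')   # -> '../lib/python'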
@memoize
def InvertRelativePath(path, toplevel_dir=None):
"""Given a path like foo/bar that is relative to toplevel_dir, return
the inverse relative path back to the toplevel_dir.
E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
should always produce the empty string, unless the path contains symlinks.
"""
if not path:
return path
toplevel_dir = '.' if toplevel_dir is None else toplevel_dir
return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))
def FixIfRelativePath(path, relative_to):
# Like RelativePath but returns |path| unchanged if it is absolute.
if os.path.isabs(path):
return path
return RelativePath(path, relative_to)
def UnrelativePath(path, relative_to):
# Assuming that |relative_to| is relative to the current directory, and |path|
# is a path relative to the dirname of |relative_to|, returns a path that
# identifies |path| relative to the current directory.
rel_dir = os.path.dirname(relative_to)
return os.path.normpath(os.path.join(rel_dir, path))
# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
# and the documentation for various shells.
# _quote is a pattern that should match any argument that needs to be quoted
# with double-quotes by EncodePOSIXShellArgument. It matches the following
# characters appearing anywhere in an argument:
# \t, \n, space parameter separators
# # comments
# $ expansions (quoted to always expand within one argument)
# % called out by IEEE 1003.1 XCU.2.2
# & job control
# ' quoting
# (, ) subshell execution
# *, ?, [ pathname expansion
# ; command delimiter
# <, >, | redirection
# = assignment
# {, } brace expansion (bash)
# ~ tilde expansion
# It also matches the empty string, because "" (or '') is the only way to
# represent an empty string literal argument to a POSIX shell.
#
# This does not match the characters in _escape, because those need to be
# backslash-escaped regardless of whether they appear in a double-quoted
# string.
_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')
# _escape is a pattern that should match any character that needs to be
# escaped with a backslash, whether or not the argument matched the _quote
# pattern. _escape is used with re.sub to backslash anything in _escape's
# first match group, hence the (parentheses) in the regular expression.
#
# _escape matches the following characters appearing anywhere in an argument:
# " to prevent POSIX shells from interpreting this character for quoting
# \ to prevent POSIX shells from interpreting this character for escaping
# ` to prevent POSIX shells from interpreting this character for command
# substitution
# Missing from this list is $, because the desired behavior of
# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
#
# Also missing from this list is !, which bash will interpret as the history
# expansion character when history is enabled. bash does not enable history
# by default in non-interactive shells, so this is not thought to be a problem.
# ! was omitted from this list because bash interprets "\!" as a literal string
# including the backslash character (avoiding history expansion but retaining
# the backslash), which would not be correct for argument encoding. Handling
# this case properly would also be problematic because bash allows the history
# character to be changed with the histchars shell variable. Fortunately,
# as history is not enabled in non-interactive shells and
# EncodePOSIXShellArgument is only expected to encode for non-interactive
# shells, there is no room for error here by ignoring !.
_escape = re.compile(r'(["\\`])')
def EncodePOSIXShellArgument(argument):
"""Encodes |argument| suitably for consumption by POSIX shells.
argument may be quoted and escaped as necessary to ensure that POSIX shells
treat the returned value as a literal representing the argument passed to
this function. Parameter (variable) expansions beginning with $ are allowed
to remain intact without escaping the $, to allow the argument to contain
references to variables to be expanded by the shell.
"""
if not isinstance(argument, str):
argument = str(argument)
if _quote.search(argument):
quote = '"'
else:
quote = ''
encoded = quote + re.sub(_escape, r'\\\1', argument) + quote
return encoded
def EncodePOSIXShellList(list):
"""Encodes |list| suitably for consumption by POSIX shells.
Returns EncodePOSIXShellArgument for each item in list, and joins them
together using the space character as an argument separator.
"""
encoded_arguments = []
for argument in list:
encoded_arguments.append(EncodePOSIXShellArgument(argument))
return ' '.join(encoded_arguments)
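# NOTE (editor): illustrative example, not part of the original module.
# Arguments containing separators or expansion characters are double-quoted;
# plain words pass through unchanged:
#
#   EncodePOSIXShellList(['echo', 'hello world', '$HOME'])
#   # -> 'echo "hello world" "$HOME"'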
def DeepDependencyTargets(target_dicts, roots):
"""Returns the recursive list of target dependencies."""
dependencies = set()
pending = set(roots)
while pending:
# Pluck out one.
r = pending.pop()
# Skip if visited already.
if r in dependencies:
continue
# Add it.
dependencies.add(r)
# Add its children.
spec = target_dicts[r]
pending.update(set(spec.get('dependencies', [])))
pending.update(set(spec.get('dependencies_original', [])))
return list(dependencies - set(roots))
def BuildFileTargets(target_list, build_file):
"""From a target_list, returns the subset from the specified build_file.
"""
return [p for p in target_list if BuildFile(p) == build_file]
def AllTargets(target_list, target_dicts, build_file):
"""Returns all targets (direct and dependencies) for the specified build_file.
"""
bftargets = BuildFileTargets(target_list, build_file)
deptargets = DeepDependencyTargets(target_dicts, bftargets)
return bftargets + deptargets
def WriteOnDiff(filename):
"""Write to a file only if the new contents differ.
Arguments:
filename: name of the file to potentially write to.
Returns:
A file like object which will write to temporary file and only overwrite
the target if it differs (on close).
"""
class Writer(object):
"""Wrapper around file which only covers the target if it differs."""
def __init__(self):
# Pick temporary file.
tmp_fd, self.tmp_path = tempfile.mkstemp(
suffix='.tmp',
prefix=os.path.split(filename)[1] + '.gyp.',
dir=os.path.split(filename)[0])
try:
self.tmp_file = os.fdopen(tmp_fd, 'wb')
except Exception:
# Don't leave turds behind.
os.unlink(self.tmp_path)
raise
def __getattr__(self, attrname):
# Delegate everything else to self.tmp_file
return getattr(self.tmp_file, attrname)
def close(self):
try:
# Close tmp file.
self.tmp_file.close()
# Determine if different.
same = False
try:
same = filecmp.cmp(self.tmp_path, filename, False)
except OSError, e:
if e.errno != errno.ENOENT:
raise
if same:
# The new file is identical to the old one, just get rid of the new
# one.
os.unlink(self.tmp_path)
else:
# The new file is different from the old one, or there is no old one.
# Rename the new file to the permanent name.
#
# tempfile.mkstemp uses an overly restrictive mode, resulting in a
# file that can only be read by the owner, regardless of the umask.
# There's no reason to not respect the umask here, which means that
# an extra hoop is required to fetch it and reset the new file's mode.
#
# No way to get the umask without setting a new one? Set a safe one
# and then set it back to the old value.
umask = os.umask(077)
os.umask(umask)
os.chmod(self.tmp_path, 0666 & ~umask)
if sys.platform == 'win32' and os.path.exists(filename):
# NOTE: on windows (but not cygwin) rename will not replace an
# existing file, so it must be preceded with a remove. Sadly there
# is no way to make the switch atomic.
os.remove(filename)
os.rename(self.tmp_path, filename)
except Exception:
# Don't leave turds behind.
os.unlink(self.tmp_path)
raise
return Writer()
def EnsureDirExists(path):
"""Make sure the directory for |path| exists."""
try:
os.makedirs(os.path.dirname(path))
except OSError:
pass
def GetFlavor(params):
"""Returns |params.flavor| if it's set, the system's default flavor else."""
flavors = {
'cygwin': 'win',
'win32': 'win',
'darwin': 'mac',
}
if 'flavor' in params:
return params['flavor']
if sys.platform in flavors:
return flavors[sys.platform]
if sys.platform.startswith('sunos'):
return 'solaris'
if sys.platform.startswith('freebsd'):
return 'freebsd'
if sys.platform.startswith('openbsd'):
return 'openbsd'
if sys.platform.startswith('netbsd'):
return 'netbsd'
if sys.platform.startswith('aix'):
return 'aix'
return 'linux'
def CopyTool(flavor, out_path):
"""Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
to |out_path|."""
# aix and solaris just need flock emulation. mac and win use more complicated
# support scripts.
prefix = {
'aix': 'flock',
'solaris': 'flock',
'mac': 'mac',
'win': 'win'
}.get(flavor, None)
if not prefix:
return
# Slurp input file.
source_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), '%s_tool.py' % prefix)
with open(source_path) as source_file:
source = source_file.readlines()
# Add header and write it out.
tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix)
with open(tool_path, 'w') as tool_file:
tool_file.write(
''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:]))
# Make file executable.
os.chmod(tool_path, 0755)
# From Alex Martelli,
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
# ASPN: Python Cookbook: Remove duplicates from a sequence
# First comment, dated 2001/10/13.
# (Also in the printed Python Cookbook.)
def uniquer(seq, idfun=None):
if idfun is None:
idfun = lambda x: x
seen = {}
result = []
for item in seq:
marker = idfun(item)
if marker in seen: continue
seen[marker] = 1
result.append(item)
return result
# Based on http://code.activestate.com/recipes/576694/.
class OrderedSet(collections.MutableSet):
def __init__(self, iterable=None):
self.end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.map = {} # key --> [key, prev, next]
if iterable is not None:
self |= iterable
def __len__(self):
return len(self.map)
def __contains__(self, key):
return key in self.map
def add(self, key):
if key not in self.map:
end = self.end
curr = end[1]
curr[2] = end[1] = self.map[key] = [key, curr, end]
def discard(self, key):
if key in self.map:
key, prev_item, next_item = self.map.pop(key)
prev_item[2] = next_item
next_item[1] = prev_item
def __iter__(self):
end = self.end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
# The second argument is an addition that causes a pylint warning.
def pop(self, last=True): # pylint: disable=W0221
if not self:
raise KeyError('set is empty')
key = self.end[1][0] if last else self.end[2][0]
self.discard(key)
return key
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self))
def __eq__(self, other):
if isinstance(other, OrderedSet):
return len(self) == len(other) and list(self) == list(other)
return set(self) == set(other)
# Extensions to the recipe.
def update(self, iterable):
for i in iterable:
if i not in self:
self.add(i)
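# Editor's illustrative sketch (not part of the original gyp file): OrderedSet
# offers set semantics while iterating in insertion order.
def _ordered_set_example():
  s = OrderedSet('abracadabra')
  assert list(s) == ['a', 'b', 'r', 'c', 'd']
  s.discard('b')
  assert list(s) == ['a', 'r', 'c', 'd']
  assert s.pop() == 'd'            # newest item by default
  assert s.pop(last=False) == 'a'  # oldest item instead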
class CycleError(Exception):
"""An exception raised when an unexpected cycle is detected."""
def __init__(self, nodes):
self.nodes = nodes
def __str__(self):
return 'CycleError: cycle involving: ' + str(self.nodes)
def TopologicallySorted(graph, get_edges):
r"""Topologically sort based on a user provided edge definition.
Args:
graph: A list of node names.
get_edges: A function mapping from node name to a hashable collection
of node names which this node has outgoing edges to.
Returns:
    A list containing all of the nodes in graph in topological order.
It is assumed that calling get_edges once for each node and caching is
cheaper than repeatedly calling get_edges.
Raises:
CycleError in the event of a cycle.
Example:
graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
def GetEdges(node):
      return re.findall(r'\$\(([^)]+)\)', graph[node])
print TopologicallySorted(graph.keys(), GetEdges)
==>
    ['a', 'c', 'b']
"""
get_edges = memoize(get_edges)
visited = set()
visiting = set()
ordered_nodes = []
def Visit(node):
if node in visiting:
raise CycleError(visiting)
if node in visited:
return
visited.add(node)
visiting.add(node)
for neighbor in get_edges(node):
Visit(neighbor)
visiting.remove(node)
ordered_nodes.insert(0, node)
for node in sorted(graph):
Visit(node)
return ordered_nodes
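# Editor's illustrative sketch (not part of the original gyp file): a node that
# can reach itself makes TopologicallySorted() raise CycleError.
def _topologically_sorted_example():
  deps = {'a': ['b'], 'b': [], 'c': ['c']}
  assert TopologicallySorted(['a', 'b'], lambda node: deps[node]) == ['a', 'b']
  try:
    TopologicallySorted(['c'], lambda node: deps[node])
  except CycleError:
    pass
  else:
    assert False, 'expected a CycleError'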
def CrossCompileRequested():
# TODO: figure out how to not build extra host objects in the
# non-cross-compile case when this is enabled, and enable unconditionally.
return (os.environ.get('GYP_CROSSCOMPILE') or
os.environ.get('AR_host') or
os.environ.get('CC_host') or
os.environ.get('CXX_host') or
os.environ.get('AR_target') or
os.environ.get('CC_target') or
os.environ.get('CXX_target'))
| bsd-3-clause | 8,775,038,177,345,021,000 | 31.998355 | 83 | 0.66361 | false |
williamthegrey/swift | test/unit/proxy/controllers/test_account.py | 2 | 15815 | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import unittest
from swift.common.swob import Request, Response
from swift.common.middleware.acl import format_acl
from swift.proxy import server as proxy_server
from swift.proxy.controllers.base import headers_to_account_info
from swift.common import constraints
from test.unit import fake_http_connect, FakeRing, FakeMemcache
from swift.common.storage_policy import StoragePolicy
from swift.common.request_helpers import get_sys_meta_prefix
import swift.proxy.controllers.base
from test.unit import patch_policies
@patch_policies([StoragePolicy(0, 'zero', True, object_ring=FakeRing())])
class TestAccountController(unittest.TestCase):
def setUp(self):
self.app = proxy_server.Application(
None, FakeMemcache(),
account_ring=FakeRing(), container_ring=FakeRing())
def _make_callback_func(self, context):
def callback(ipaddr, port, device, partition, method, path,
headers=None, query_string=None, ssl=False):
context['method'] = method
context['path'] = path
context['headers'] = headers or {}
return callback
def _assert_responses(self, method, test_cases):
if method in ('PUT', 'DELETE'):
self.app.allow_account_management = True
controller = proxy_server.AccountController(self.app, 'AUTH_bob')
for responses, expected in test_cases:
with mock.patch(
'swift.proxy.controllers.base.http_connect',
fake_http_connect(*responses)):
req = Request.blank('/v1/AUTH_bob')
resp = getattr(controller, method)(req)
self.assertEqual(expected,
resp.status_int,
'Expected %s but got %s. Failed case: %s' %
(expected, resp.status_int, str(responses)))
def test_account_info_in_response_env(self):
controller = proxy_server.AccountController(self.app, 'AUTH_bob')
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, body='')):
req = Request.blank('/v1/AUTH_bob', {'PATH_INFO': '/v1/AUTH_bob'})
resp = controller.HEAD(req)
self.assertEqual(2, resp.status_int // 100)
self.assertTrue('swift.account/AUTH_bob' in resp.environ)
self.assertEqual(headers_to_account_info(resp.headers),
resp.environ['swift.account/AUTH_bob'])
def test_swift_owner(self):
owner_headers = {
'x-account-meta-temp-url-key': 'value',
'x-account-meta-temp-url-key-2': 'value'}
controller = proxy_server.AccountController(self.app, 'a')
req = Request.blank('/v1/a')
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, headers=owner_headers)):
resp = controller.HEAD(req)
self.assertEqual(2, resp.status_int // 100)
for key in owner_headers:
self.assertTrue(key not in resp.headers)
req = Request.blank('/v1/a', environ={'swift_owner': True})
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, headers=owner_headers)):
resp = controller.HEAD(req)
self.assertEqual(2, resp.status_int // 100)
for key in owner_headers:
self.assertTrue(key in resp.headers)
def test_get_deleted_account(self):
resp_headers = {
'x-account-status': 'deleted',
}
controller = proxy_server.AccountController(self.app, 'a')
req = Request.blank('/v1/a')
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(404, headers=resp_headers)):
resp = controller.HEAD(req)
self.assertEqual(410, resp.status_int)
def test_long_acct_names(self):
long_acct_name = '%sLongAccountName' % (
'Very' * (constraints.MAX_ACCOUNT_NAME_LENGTH // 4))
controller = proxy_server.AccountController(self.app, long_acct_name)
req = Request.blank('/v1/%s' % long_acct_name)
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200)):
resp = controller.HEAD(req)
self.assertEqual(400, resp.status_int)
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200)):
resp = controller.GET(req)
self.assertEqual(400, resp.status_int)
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200)):
resp = controller.POST(req)
self.assertEqual(400, resp.status_int)
def test_sys_meta_headers_PUT(self):
# check that headers in sys meta namespace make it through
# the proxy controller
sys_meta_key = '%stest' % get_sys_meta_prefix('account')
sys_meta_key = sys_meta_key.title()
user_meta_key = 'X-Account-Meta-Test'
# allow PUTs to account...
self.app.allow_account_management = True
controller = proxy_server.AccountController(self.app, 'a')
context = {}
callback = self._make_callback_func(context)
hdrs_in = {sys_meta_key: 'foo',
user_meta_key: 'bar',
'x-timestamp': '1.0'}
req = Request.blank('/v1/a', headers=hdrs_in)
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, 200, give_connect=callback)):
controller.PUT(req)
self.assertEqual(context['method'], 'PUT')
self.assertTrue(sys_meta_key in context['headers'])
self.assertEqual(context['headers'][sys_meta_key], 'foo')
self.assertTrue(user_meta_key in context['headers'])
self.assertEqual(context['headers'][user_meta_key], 'bar')
self.assertNotEqual(context['headers']['x-timestamp'], '1.0')
def test_sys_meta_headers_POST(self):
# check that headers in sys meta namespace make it through
# the proxy controller
sys_meta_key = '%stest' % get_sys_meta_prefix('account')
sys_meta_key = sys_meta_key.title()
user_meta_key = 'X-Account-Meta-Test'
controller = proxy_server.AccountController(self.app, 'a')
context = {}
callback = self._make_callback_func(context)
hdrs_in = {sys_meta_key: 'foo',
user_meta_key: 'bar',
'x-timestamp': '1.0'}
req = Request.blank('/v1/a', headers=hdrs_in)
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, 200, give_connect=callback)):
controller.POST(req)
self.assertEqual(context['method'], 'POST')
self.assertTrue(sys_meta_key in context['headers'])
self.assertEqual(context['headers'][sys_meta_key], 'foo')
self.assertTrue(user_meta_key in context['headers'])
self.assertEqual(context['headers'][user_meta_key], 'bar')
self.assertNotEqual(context['headers']['x-timestamp'], '1.0')
def _make_user_and_sys_acl_headers_data(self):
acl = {
'admin': ['AUTH_alice', 'AUTH_bob'],
'read-write': ['AUTH_carol'],
'read-only': [],
}
user_prefix = 'x-account-' # external, user-facing
user_headers = {(user_prefix + 'access-control'): format_acl(
version=2, acl_dict=acl)}
sys_prefix = get_sys_meta_prefix('account') # internal, system-facing
sys_headers = {(sys_prefix + 'core-access-control'): format_acl(
version=2, acl_dict=acl)}
return user_headers, sys_headers
def test_account_acl_headers_translated_for_GET_HEAD(self):
# Verify that a GET/HEAD which receives X-Account-Sysmeta-Acl-* headers
# from the account server will remap those headers to X-Account-Acl-*
hdrs_ext, hdrs_int = self._make_user_and_sys_acl_headers_data()
controller = proxy_server.AccountController(self.app, 'acct')
for verb in ('GET', 'HEAD'):
req = Request.blank('/v1/acct', environ={'swift_owner': True})
controller.GETorHEAD_base = lambda *_: Response(
headers=hdrs_int, environ={
'PATH_INFO': '/acct',
'REQUEST_METHOD': verb,
})
method = getattr(controller, verb)
resp = method(req)
for header, value in hdrs_ext.items():
if value:
self.assertEqual(resp.headers.get(header), value)
else:
# blank ACLs should result in no header
self.assertTrue(header not in resp.headers)
def test_add_acls_impossible_cases(self):
# For test coverage: verify that defensive coding does defend, in cases
# that shouldn't arise naturally
# add_acls should do nothing if REQUEST_METHOD isn't HEAD/GET/PUT/POST
resp = Response()
controller = proxy_server.AccountController(self.app, 'a')
resp.environ['PATH_INFO'] = '/a'
resp.environ['REQUEST_METHOD'] = 'OPTIONS'
controller.add_acls_from_sys_metadata(resp)
self.assertEqual(1, len(resp.headers)) # we always get Content-Type
self.assertEqual(2, len(resp.environ))
def test_memcache_key_impossible_cases(self):
# For test coverage: verify that defensive coding does defend, in cases
# that shouldn't arise naturally
self.assertRaises(
ValueError,
lambda: swift.proxy.controllers.base.get_container_memcache_key(
'/a', None))
def test_stripping_swift_admin_headers(self):
# Verify that a GET/HEAD which receives privileged headers from the
# account server will strip those headers for non-swift_owners
headers = {
'x-account-meta-harmless': 'hi mom',
'x-account-meta-temp-url-key': 's3kr1t',
}
controller = proxy_server.AccountController(self.app, 'acct')
for verb in ('GET', 'HEAD'):
for env in ({'swift_owner': True}, {'swift_owner': False}):
req = Request.blank('/v1/acct', environ=env)
controller.GETorHEAD_base = lambda *_: Response(
headers=headers, environ={
'PATH_INFO': '/acct',
'REQUEST_METHOD': verb,
})
method = getattr(controller, verb)
resp = method(req)
self.assertEqual(resp.headers.get('x-account-meta-harmless'),
'hi mom')
privileged_header_present = (
'x-account-meta-temp-url-key' in resp.headers)
self.assertEqual(privileged_header_present, env['swift_owner'])
def test_response_code_for_PUT(self):
PUT_TEST_CASES = [
((201, 201, 201), 201),
((201, 201, 404), 201),
((201, 201, 503), 201),
((201, 404, 404), 404),
((201, 404, 503), 503),
((201, 503, 503), 503),
((404, 404, 404), 404),
((404, 404, 503), 404),
((404, 503, 503), 503),
((503, 503, 503), 503)
]
self._assert_responses('PUT', PUT_TEST_CASES)
def test_response_code_for_DELETE(self):
DELETE_TEST_CASES = [
((204, 204, 204), 204),
((204, 204, 404), 204),
((204, 204, 503), 204),
((204, 404, 404), 404),
((204, 404, 503), 503),
((204, 503, 503), 503),
((404, 404, 404), 404),
((404, 404, 503), 404),
((404, 503, 503), 503),
((503, 503, 503), 503)
]
self._assert_responses('DELETE', DELETE_TEST_CASES)
def test_response_code_for_POST(self):
POST_TEST_CASES = [
((204, 204, 204), 204),
((204, 204, 404), 204),
((204, 204, 503), 204),
((204, 404, 404), 404),
((204, 404, 503), 503),
((204, 503, 503), 503),
((404, 404, 404), 404),
((404, 404, 503), 404),
((404, 503, 503), 503),
((503, 503, 503), 503)
]
self._assert_responses('POST', POST_TEST_CASES)
@patch_policies(
[StoragePolicy(0, 'zero', True, object_ring=FakeRing(replicas=4))])
class TestAccountController4Replicas(TestAccountController):
def setUp(self):
self.app = proxy_server.Application(
None,
FakeMemcache(),
account_ring=FakeRing(replicas=4),
container_ring=FakeRing(replicas=4))
def test_response_code_for_PUT(self):
PUT_TEST_CASES = [
((201, 201, 201, 201), 201),
((201, 201, 201, 404), 201),
((201, 201, 201, 503), 201),
((201, 201, 404, 404), 503),
((201, 201, 404, 503), 503),
((201, 201, 503, 503), 503),
((201, 404, 404, 404), 404),
((201, 404, 404, 503), 503),
((201, 404, 503, 503), 503),
((201, 503, 503, 503), 503),
((404, 404, 404, 404), 404),
((404, 404, 404, 503), 404),
((404, 404, 503, 503), 503),
((404, 503, 503, 503), 503),
((503, 503, 503, 503), 503)
]
self._assert_responses('PUT', PUT_TEST_CASES)
def test_response_code_for_DELETE(self):
DELETE_TEST_CASES = [
((204, 204, 204, 204), 204),
((204, 204, 204, 404), 204),
((204, 204, 204, 503), 204),
((204, 204, 404, 404), 503),
((204, 204, 404, 503), 503),
((204, 204, 503, 503), 503),
((204, 404, 404, 404), 404),
((204, 404, 404, 503), 503),
((204, 404, 503, 503), 503),
((204, 503, 503, 503), 503),
((404, 404, 404, 404), 404),
((404, 404, 404, 503), 404),
((404, 404, 503, 503), 503),
((404, 503, 503, 503), 503),
((503, 503, 503, 503), 503)
]
self._assert_responses('DELETE', DELETE_TEST_CASES)
def test_response_code_for_POST(self):
POST_TEST_CASES = [
((204, 204, 204, 204), 204),
((204, 204, 204, 404), 204),
((204, 204, 204, 503), 204),
((204, 204, 404, 404), 503),
((204, 204, 404, 503), 503),
((204, 204, 503, 503), 503),
((204, 404, 404, 404), 404),
((204, 404, 404, 503), 503),
((204, 404, 503, 503), 503),
((204, 503, 503, 503), 503),
((404, 404, 404, 404), 404),
((404, 404, 404, 503), 404),
((404, 404, 503, 503), 503),
((404, 503, 503, 503), 503),
((503, 503, 503, 503), 503)
]
self._assert_responses('POST', POST_TEST_CASES)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -4,818,286,084,352,963,000 | 40.618421 | 79 | 0.553525 | false |
applicationdevm/XlsxWriter | xlsxwriter/test/comparison/test_chart_name04.py | 8 | 1935 | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, [email protected]
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'chart_font04.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({'type': 'bar'})
chart.axis_ids = [43944960, 45705472]
data = [
[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
]
worksheet.write_column('A1', data[0])
worksheet.write_column('B1', data[1])
worksheet.write_column('C1', data[2])
chart.add_series({'values': '=Sheet1!$A$1:$A$5'})
chart.add_series({'values': '=Sheet1!$B$1:$B$5'})
chart.add_series({'values': '=Sheet1!$C$1:$C$5'})
chart.set_title({
'name': ['Sheet1', 0, 0],
'name_font': {'bold': 0, 'italic': 1},
})
chart.set_x_axis({
'name': ['Sheet1', 1, 0],
'name_font': {'bold': 0, 'italic': 1},
})
chart.set_y_axis({
'name': ['Sheet1', 2, 0],
'name_font': {'bold': 1, 'italic': 1},
})
worksheet.insert_chart('E9', chart)
workbook.close()
self.assertExcelEqual()
| bsd-2-clause | -7,750,065,231,574,683,000 | 25.148649 | 79 | 0.509044 | false |
nmearl/pyqtgraph | pyqtgraph/SRTTransform3D.py | 45 | 10879 | # -*- coding: utf-8 -*-
from .Qt import QtCore, QtGui
from .Vector import Vector
from .Transform3D import Transform3D
from .Vector import Vector
import numpy as np
class SRTTransform3D(Transform3D):
"""4x4 Transform matrix that can always be represented as a combination of 3 matrices: scale * rotate * translate
This transform has no shear; angles are always preserved.
"""
def __init__(self, init=None):
Transform3D.__init__(self)
self.reset()
if init is None:
return
if init.__class__ is QtGui.QTransform:
init = SRTTransform(init)
if isinstance(init, dict):
self.restoreState(init)
elif isinstance(init, SRTTransform3D):
self._state = {
'pos': Vector(init._state['pos']),
'scale': Vector(init._state['scale']),
'angle': init._state['angle'],
'axis': Vector(init._state['axis']),
}
self.update()
elif isinstance(init, SRTTransform):
self._state = {
'pos': Vector(init._state['pos']),
'scale': Vector(init._state['scale']),
'angle': init._state['angle'],
'axis': Vector(0, 0, 1),
}
self._state['scale'][2] = 1.0
self.update()
elif isinstance(init, QtGui.QMatrix4x4):
self.setFromMatrix(init)
else:
raise Exception("Cannot build SRTTransform3D from argument type:", type(init))
def getScale(self):
return Vector(self._state['scale'])
def getRotation(self):
"""Return (angle, axis) of rotation"""
return self._state['angle'], Vector(self._state['axis'])
def getTranslation(self):
return Vector(self._state['pos'])
def reset(self):
self._state = {
'pos': Vector(0,0,0),
'scale': Vector(1,1,1),
'angle': 0.0, ## in degrees
'axis': (0, 0, 1)
}
self.update()
def translate(self, *args):
"""Adjust the translation of this transform"""
t = Vector(*args)
self.setTranslate(self._state['pos']+t)
def setTranslate(self, *args):
"""Set the translation of this transform"""
self._state['pos'] = Vector(*args)
self.update()
    def scale(self, *args):
        """Adjust the scale of this transform"""
## try to prevent accidentally setting 0 scale on z axis
if len(args) == 1 and hasattr(args[0], '__len__'):
args = args[0]
if len(args) == 2:
args = args + (1,)
s = Vector(*args)
self.setScale(self._state['scale'] * s)
def setScale(self, *args):
"""Set the scale of this transform"""
if len(args) == 1 and hasattr(args[0], '__len__'):
args = args[0]
if len(args) == 2:
args = args + (1,)
self._state['scale'] = Vector(*args)
self.update()
def rotate(self, angle, axis=(0,0,1)):
"""Adjust the rotation of this transform"""
origAxis = self._state['axis']
if axis[0] == origAxis[0] and axis[1] == origAxis[1] and axis[2] == origAxis[2]:
self.setRotate(self._state['angle'] + angle)
else:
m = QtGui.QMatrix4x4()
m.translate(*self._state['pos'])
m.rotate(self._state['angle'], *self._state['axis'])
m.rotate(angle, *axis)
m.scale(*self._state['scale'])
self.setFromMatrix(m)
def setRotate(self, angle, axis=(0,0,1)):
"""Set the transformation rotation to angle (in degrees)"""
self._state['angle'] = angle
self._state['axis'] = Vector(axis)
self.update()
def setFromMatrix(self, m):
"""
        Set this transform based on the elements of *m*.
The input matrix must be affine AND have no shear,
otherwise the conversion will most likely fail.
"""
import numpy.linalg
for i in range(4):
self.setRow(i, m.row(i))
m = self.matrix().reshape(4,4)
## translation is 4th column
self._state['pos'] = m[:3,3]
## scale is vector-length of first three columns
scale = (m[:3,:3]**2).sum(axis=0)**0.5
## see whether there is an inversion
z = np.cross(m[0, :3], m[1, :3])
if np.dot(z, m[2, :3]) < 0:
scale[1] *= -1 ## doesn't really matter which axis we invert
self._state['scale'] = scale
## rotation axis is the eigenvector with eigenvalue=1
r = m[:3, :3] / scale[np.newaxis, :]
try:
evals, evecs = numpy.linalg.eig(r)
except:
print("Rotation matrix: %s" % str(r))
print("Scale: %s" % str(scale))
print("Original matrix: %s" % str(m))
raise
eigIndex = np.argwhere(np.abs(evals-1) < 1e-6)
if len(eigIndex) < 1:
print("eigenvalues: %s" % str(evals))
print("eigenvectors: %s" % str(evecs))
print("index: %s, %s" % (str(eigIndex), str(evals-1)))
raise Exception("Could not determine rotation axis.")
axis = evecs[:,eigIndex[0,0]].real
axis /= ((axis**2).sum())**0.5
self._state['axis'] = axis
## trace(r) == 2 cos(angle) + 1, so:
cos = (r.trace()-1)*0.5 ## this only gets us abs(angle)
## The off-diagonal values can be used to correct the angle ambiguity,
## but we need to figure out which element to use:
axisInd = np.argmax(np.abs(axis))
rInd,sign = [((1,2), -1), ((0,2), 1), ((0,1), -1)][axisInd]
## Then we have r-r.T = sin(angle) * 2 * sign * axis[axisInd];
## solve for sin(angle)
sin = (r-r.T)[rInd] / (2. * sign * axis[axisInd])
## finally, we get the complete angle from arctan(sin/cos)
self._state['angle'] = np.arctan2(sin, cos) * 180 / np.pi
if self._state['angle'] == 0:
self._state['axis'] = (0,0,1)
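    ## Editor's worked check (illustrative, not part of the original source):
    ## for a 90 degree rotation about +z (taking the eigenvector as +z),
    ##   r = [[0,-1,0],[1,0,0],[0,0,1]],  trace(r) = 1  =>  cos = 0
    ##   axisInd = 2  =>  rInd = (0, 1), sign = -1
    ##   sin = (r - r.T)[0, 1] / (2 * sign * axis[2]) = -2 / -2 = 1
    ##   angle = arctan2(1, 0) * 180 / pi = 90 degrees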
def as2D(self):
"""Return a QTransform representing the x,y portion of this transform (if possible)"""
return SRTTransform(self)
#def __div__(self, t):
#"""A / B == B^-1 * A"""
#dt = t.inverted()[0] * self
#return SRTTransform(dt)
#def __mul__(self, t):
#return SRTTransform(QtGui.QTransform.__mul__(self, t))
def saveState(self):
p = self._state['pos']
s = self._state['scale']
ax = self._state['axis']
#if s[0] == 0:
#raise Exception('Invalid scale: %s' % str(s))
return {
'pos': (p[0], p[1], p[2]),
'scale': (s[0], s[1], s[2]),
'angle': self._state['angle'],
'axis': (ax[0], ax[1], ax[2])
}
def restoreState(self, state):
self._state['pos'] = Vector(state.get('pos', (0.,0.,0.)))
scale = state.get('scale', (1.,1.,1.))
scale = tuple(scale) + (1.,) * (3-len(scale))
self._state['scale'] = Vector(scale)
self._state['angle'] = state.get('angle', 0.)
self._state['axis'] = state.get('axis', (0, 0, 1))
self.update()
def update(self):
Transform3D.setToIdentity(self)
## modifications to the transform are multiplied on the right, so we need to reverse order here.
Transform3D.translate(self, *self._state['pos'])
Transform3D.rotate(self, self._state['angle'], *self._state['axis'])
Transform3D.scale(self, *self._state['scale'])
def __repr__(self):
return str(self.saveState())
def matrix(self, nd=3):
if nd == 3:
return np.array(self.copyDataTo()).reshape(4,4)
elif nd == 2:
m = np.array(self.copyDataTo()).reshape(4,4)
m[2] = m[3]
m[:,2] = m[:,3]
return m[:3,:3]
else:
raise Exception("Argument 'nd' must be 2 or 3")
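# Editor's illustrative sketch (not part of the original module): recovering
# translate / scale / rotate from a plain QMatrix4x4 via setFromMatrix().
def _srt_transform3d_example():
    m = QtGui.QMatrix4x4()
    m.translate(1, 2, 3)
    m.rotate(90, 0, 0, 1)
    tr = SRTTransform3D(m)
    # Expected: Vector(1, 2, 3), ~Vector(1, 1, 1), and ~90 degrees about +z
    # (the recovered axis/angle pair may come back with both signs flipped).
    return tr.getTranslation(), tr.getScale(), tr.getRotation()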
if __name__ == '__main__':
import widgets
import GraphicsView
from functions import *
app = QtGui.QApplication([])
win = QtGui.QMainWindow()
win.show()
cw = GraphicsView.GraphicsView()
#cw.enableMouse()
win.setCentralWidget(cw)
s = QtGui.QGraphicsScene()
cw.setScene(s)
win.resize(600,600)
cw.enableMouse()
cw.setRange(QtCore.QRectF(-100., -100., 200., 200.))
class Item(QtGui.QGraphicsItem):
def __init__(self):
QtGui.QGraphicsItem.__init__(self)
self.b = QtGui.QGraphicsRectItem(20, 20, 20, 20, self)
self.b.setPen(QtGui.QPen(mkPen('y')))
self.t1 = QtGui.QGraphicsTextItem(self)
self.t1.setHtml('<span style="color: #F00">R</span>')
self.t1.translate(20, 20)
self.l1 = QtGui.QGraphicsLineItem(10, 0, -10, 0, self)
self.l2 = QtGui.QGraphicsLineItem(0, 10, 0, -10, self)
self.l1.setPen(QtGui.QPen(mkPen('y')))
self.l2.setPen(QtGui.QPen(mkPen('y')))
def boundingRect(self):
return QtCore.QRectF()
def paint(self, *args):
pass
#s.addItem(b)
#s.addItem(t1)
item = Item()
s.addItem(item)
l1 = QtGui.QGraphicsLineItem(10, 0, -10, 0)
l2 = QtGui.QGraphicsLineItem(0, 10, 0, -10)
l1.setPen(QtGui.QPen(mkPen('r')))
l2.setPen(QtGui.QPen(mkPen('r')))
s.addItem(l1)
s.addItem(l2)
tr1 = SRTTransform()
tr2 = SRTTransform()
tr3 = QtGui.QTransform()
tr3.translate(20, 0)
tr3.rotate(45)
print("QTransform -> Transform: %s" % str(SRTTransform(tr3)))
print("tr1: %s" % str(tr1))
tr2.translate(20, 0)
tr2.rotate(45)
print("tr2: %s" % str(tr2))
dt = tr2/tr1
print("tr2 / tr1 = %s" % str(dt))
print("tr2 * tr1 = %s" % str(tr2*tr1))
tr4 = SRTTransform()
tr4.scale(-1, 1)
tr4.rotate(30)
print("tr1 * tr4 = %s" % str(tr1*tr4))
w1 = widgets.TestROI((19,19), (22, 22), invertible=True)
#w2 = widgets.TestROI((0,0), (150, 150))
w1.setZValue(10)
s.addItem(w1)
#s.addItem(w2)
w1Base = w1.getState()
#w2Base = w2.getState()
def update():
tr1 = w1.getGlobalTransform(w1Base)
#tr2 = w2.getGlobalTransform(w2Base)
item.setTransform(tr1)
#def update2():
#tr1 = w1.getGlobalTransform(w1Base)
#tr2 = w2.getGlobalTransform(w2Base)
#t1.setTransform(tr1)
#w1.setState(w1Base)
#w1.applyGlobalTransform(tr2)
w1.sigRegionChanged.connect(update)
#w2.sigRegionChanged.connect(update2)
from .SRTTransform import SRTTransform
| mit | 1,438,805,295,212,093,000 | 33.536508 | 117 | 0.524221 | false |
sander76/home-assistant | homeassistant/components/thethingsnetwork/sensor.py | 5 | 4925 | """Support for The Things Network's Data storage integration."""
import asyncio
import logging
import aiohttp
from aiohttp.hdrs import ACCEPT, AUTHORIZATION
import async_timeout
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import (
ATTR_DEVICE_ID,
ATTR_TIME,
CONF_DEVICE_ID,
CONTENT_TYPE_JSON,
HTTP_NOT_FOUND,
HTTP_UNAUTHORIZED,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from . import DATA_TTN, TTN_ACCESS_KEY, TTN_APP_ID, TTN_DATA_STORAGE_URL
_LOGGER = logging.getLogger(__name__)
ATTR_RAW = "raw"
DEFAULT_TIMEOUT = 10
CONF_VALUES = "values"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DEVICE_ID): cv.string,
vol.Required(CONF_VALUES): {cv.string: cv.string},
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up The Things Network Data storage sensors."""
ttn = hass.data.get(DATA_TTN)
device_id = config.get(CONF_DEVICE_ID)
values = config.get(CONF_VALUES)
app_id = ttn.get(TTN_APP_ID)
access_key = ttn.get(TTN_ACCESS_KEY)
ttn_data_storage = TtnDataStorage(hass, app_id, device_id, access_key, values)
success = await ttn_data_storage.async_update()
if not success:
return
devices = []
for value, unit_of_measurement in values.items():
devices.append(
TtnDataSensor(ttn_data_storage, device_id, value, unit_of_measurement)
)
async_add_entities(devices, True)
class TtnDataSensor(SensorEntity):
"""Representation of a The Things Network Data Storage sensor."""
def __init__(self, ttn_data_storage, device_id, value, unit_of_measurement):
"""Initialize a The Things Network Data Storage sensor."""
self._ttn_data_storage = ttn_data_storage
self._state = None
self._device_id = device_id
self._unit_of_measurement = unit_of_measurement
self._value = value
self._name = f"{self._device_id} {self._value}"
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the entity."""
if self._ttn_data_storage.data is not None:
try:
return self._state[self._value]
except KeyError:
return None
return None
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return self._unit_of_measurement
@property
def extra_state_attributes(self):
"""Return the state attributes of the sensor."""
if self._ttn_data_storage.data is not None:
return {
ATTR_DEVICE_ID: self._device_id,
ATTR_RAW: self._state["raw"],
ATTR_TIME: self._state["time"],
}
async def async_update(self):
"""Get the current state."""
await self._ttn_data_storage.async_update()
self._state = self._ttn_data_storage.data
class TtnDataStorage:
"""Get the latest data from The Things Network Data Storage."""
def __init__(self, hass, app_id, device_id, access_key, values):
"""Initialize the data object."""
self.data = None
self._hass = hass
self._app_id = app_id
self._device_id = device_id
self._values = values
self._url = TTN_DATA_STORAGE_URL.format(
app_id=app_id, endpoint="api/v2/query", device_id=device_id
)
self._headers = {ACCEPT: CONTENT_TYPE_JSON, AUTHORIZATION: f"key {access_key}"}
async def async_update(self):
"""Get the current state from The Things Network Data Storage."""
try:
session = async_get_clientsession(self._hass)
with async_timeout.timeout(DEFAULT_TIMEOUT):
response = await session.get(self._url, headers=self._headers)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.error("Error while accessing: %s", self._url)
return None
status = response.status
if status == 204:
_LOGGER.error("The device is not available: %s", self._device_id)
return None
if status == HTTP_UNAUTHORIZED:
_LOGGER.error("Not authorized for Application ID: %s", self._app_id)
return None
if status == HTTP_NOT_FOUND:
_LOGGER.error("Application ID is not available: %s", self._app_id)
return None
data = await response.json()
self.data = data[-1]
for value in self._values.items():
if value[0] not in self.data:
_LOGGER.warning("Value not available: %s", value[0])
return response
| apache-2.0 | 5,296,179,162,585,522,000 | 30.570513 | 87 | 0.620508 | false |
slowfranklin/samba | source4/heimdal/lib/wind/rfc3454.py | 88 | 2296 | #!/usr/local/bin/python
# -*- coding: iso-8859-1 -*-
# $Id$
# Copyright (c) 2004 Kungliga Tekniska Högskolan
# (Royal Institute of Technology, Stockholm, Sweden).
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Institute nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
import re
import string
def read(filename):
"""return a dict of tables from rfc3454"""
f = open(filename, 'r')
inTable = False
ret = {}
while True:
l = f.readline()
if not l:
break
if inTable:
m = re.search('^ *----- End Table ([A-Z0-9\.]+) ----- *$', l)
if m:
ret[m.group(1)] = t
inTable = False
else:
t.append(l)
if re.search('^ *----- Start Table ([A-Z0-9\.]+) ----- *$', l):
inTable = True
t = []
f.close()
return ret
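# Editor's illustrative sketch (not part of the original script): read() maps
# table names such as 'A.1' or 'C.2.1' (whatever appears between the
# "----- Start Table ... -----" and "----- End Table ... -----" markers in the
# RFC 3454 text file) to the list of raw lines found inside each table.
def _read_example(rfc3454_txt_path):
    tables = read(rfc3454_txt_path)
    for name in sorted(tables):
        print('%s: %d lines' % (name, len(tables[name])))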
| gpl-3.0 | -7,528,530,628,747,683,000 | 37.25 | 77 | 0.664924 | false |
rockneurotiko/django | tests/field_deconstruction/tests.py | 189 | 18358 | from __future__ import unicode_literals
from django.apps import apps
from django.db import models
from django.test import SimpleTestCase, override_settings
from django.test.utils import isolate_lru_cache
from django.utils import six
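# Editor's note (illustrative, not part of the original tests): deconstruct()
# returns a 4-tuple of (name, import path, positional args, keyword args), e.g.
#   models.CharField(max_length=65).deconstruct()
#   -> (None, 'django.db.models.CharField', [], {'max_length': 65})
# where name stays None until set_attributes_from_name() is called.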
class FieldDeconstructionTests(SimpleTestCase):
"""
Tests the deconstruct() method on all core fields.
"""
def test_name(self):
"""
Tests the outputting of the correct name if assigned one.
"""
# First try using a "normal" field
field = models.CharField(max_length=65)
name, path, args, kwargs = field.deconstruct()
self.assertIsNone(name)
field.set_attributes_from_name("is_awesome_test")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(name, "is_awesome_test")
self.assertIsInstance(name, six.text_type)
# Now try with a ForeignKey
field = models.ForeignKey("some_fake.ModelName", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertIsNone(name)
field.set_attributes_from_name("author")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(name, "author")
def test_auto_field(self):
field = models.AutoField(primary_key=True)
field.set_attributes_from_name("id")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.AutoField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"primary_key": True})
def test_big_integer_field(self):
field = models.BigIntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.BigIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_boolean_field(self):
field = models.BooleanField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.BooleanField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.BooleanField(default=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.BooleanField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"default": True})
def test_char_field(self):
field = models.CharField(max_length=65)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.CharField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 65})
field = models.CharField(max_length=65, null=True, blank=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.CharField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 65, "null": True, "blank": True})
def test_char_field_choices(self):
field = models.CharField(max_length=1, choices=(("A", "One"), ("B", "Two")))
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.CharField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"choices": [("A", "One"), ("B", "Two")], "max_length": 1})
def test_csi_field(self):
field = models.CommaSeparatedIntegerField(max_length=100)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.CommaSeparatedIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 100})
def test_date_field(self):
field = models.DateField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.DateField(auto_now=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"auto_now": True})
def test_datetime_field(self):
field = models.DateTimeField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateTimeField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.DateTimeField(auto_now_add=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateTimeField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"auto_now_add": True})
# Bug #21785
field = models.DateTimeField(auto_now=True, auto_now_add=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateTimeField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"auto_now_add": True, "auto_now": True})
def test_decimal_field(self):
field = models.DecimalField(max_digits=5, decimal_places=2)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DecimalField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 2})
def test_decimal_field_0_decimal_places(self):
"""
A DecimalField with decimal_places=0 should work (#22272).
"""
field = models.DecimalField(max_digits=5, decimal_places=0)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DecimalField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 0})
def test_email_field(self):
field = models.EmailField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.EmailField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 254})
field = models.EmailField(max_length=255)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.EmailField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 255})
def test_file_field(self):
field = models.FileField(upload_to="foo/bar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FileField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"upload_to": "foo/bar"})
# Test max_length
field = models.FileField(upload_to="foo/bar", max_length=200)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FileField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"upload_to": "foo/bar", "max_length": 200})
def test_file_path_field(self):
field = models.FilePathField(match=".*\.txt$")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FilePathField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"match": ".*\.txt$"})
field = models.FilePathField(recursive=True, allow_folders=True, max_length=123)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FilePathField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"recursive": True, "allow_folders": True, "max_length": 123})
def test_float_field(self):
field = models.FloatField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FloatField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_foreign_key(self):
# Test basic pointing
from django.contrib.auth.models import Permission
field = models.ForeignKey("auth.Permission", models.CASCADE)
field.remote_field.model = Permission
field.remote_field.field_name = "id"
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "on_delete": models.CASCADE})
self.assertFalse(hasattr(kwargs['to'], "setting_name"))
# Test swap detection for swappable model
field = models.ForeignKey("auth.User", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.User", "on_delete": models.CASCADE})
self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
# Test nonexistent (for now) model
field = models.ForeignKey("something.Else", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "something.Else", "on_delete": models.CASCADE})
# Test on_delete
field = models.ForeignKey("auth.User", models.SET_NULL)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.User", "on_delete": models.SET_NULL})
# Test to_field preservation
field = models.ForeignKey("auth.Permission", models.CASCADE, to_field="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "to_field": "foobar", "on_delete": models.CASCADE})
# Test related_name preservation
field = models.ForeignKey("auth.Permission", models.CASCADE, related_name="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "related_name": "foobar", "on_delete": models.CASCADE})
@override_settings(AUTH_USER_MODEL="auth.Permission")
def test_foreign_key_swapped(self):
with isolate_lru_cache(apps.get_swappable_settings_name):
# It doesn't matter that we swapped out user for permission;
# there's no validation. We just want to check the setting stuff works.
field = models.ForeignKey("auth.Permission", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "on_delete": models.CASCADE})
self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
def test_image_field(self):
field = models.ImageField(upload_to="foo/barness", width_field="width", height_field="height")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ImageField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"upload_to": "foo/barness", "width_field": "width", "height_field": "height"})
def test_integer_field(self):
field = models.IntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.IntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_ip_address_field(self):
field = models.IPAddressField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.IPAddressField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_generic_ip_address_field(self):
field = models.GenericIPAddressField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.GenericIPAddressField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.GenericIPAddressField(protocol="IPv6")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.GenericIPAddressField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"protocol": "IPv6"})
def test_many_to_many_field(self):
# Test normal
field = models.ManyToManyField("auth.Permission")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission"})
self.assertFalse(hasattr(kwargs['to'], "setting_name"))
# Test swappable
field = models.ManyToManyField("auth.User")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.User"})
self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
# Test through
field = models.ManyToManyField("auth.Permission", through="auth.Group")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "through": "auth.Group"})
# Test custom db_table
field = models.ManyToManyField("auth.Permission", db_table="custom_table")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "db_table": "custom_table"})
# Test related_name
field = models.ManyToManyField("auth.Permission", related_name="custom_table")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "related_name": "custom_table"})
@override_settings(AUTH_USER_MODEL="auth.Permission")
def test_many_to_many_field_swapped(self):
with isolate_lru_cache(apps.get_swappable_settings_name):
# It doesn't matter that we swapped out user for permission;
# there's no validation. We just want to check the setting stuff works.
field = models.ManyToManyField("auth.Permission")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission"})
self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
def test_null_boolean_field(self):
field = models.NullBooleanField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.NullBooleanField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_positive_integer_field(self):
field = models.PositiveIntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.PositiveIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_positive_small_integer_field(self):
field = models.PositiveSmallIntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.PositiveSmallIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_slug_field(self):
field = models.SlugField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.SlugField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.SlugField(db_index=False, max_length=231)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.SlugField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"db_index": False, "max_length": 231})
def test_small_integer_field(self):
field = models.SmallIntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.SmallIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_text_field(self):
field = models.TextField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.TextField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_time_field(self):
field = models.TimeField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.TimeField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.TimeField(auto_now=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(args, [])
self.assertEqual(kwargs, {'auto_now': True})
field = models.TimeField(auto_now_add=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(args, [])
self.assertEqual(kwargs, {'auto_now_add': True})
def test_url_field(self):
field = models.URLField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.URLField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.URLField(max_length=231)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.URLField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 231})
def test_binary_field(self):
field = models.BinaryField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.BinaryField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
| bsd-3-clause | -3,762,096,207,059,471,000 | 45.358586 | 114 | 0.638032 | false |
aaigner/LIGGGHTS-PUBLIC | python/install.py | 2 | 2208 | #!/usr/bin/env python
# copy LIGGGHTS src/libliggghts.so and liggghts.py to system dirs
instructions = """
Syntax: python install.py [-h] [libdir] [pydir]
libdir = target dir for src/libliggghts.so, default = /usr/local/lib
pydir = target dir for liggghts.py, default = Python site-packages dir
"""
import sys,os # ,commands
if sys.version_info[0] == 3:
import subprocess as commands
else:
import commands
if (len(sys.argv) > 1 and sys.argv[1] == "-h") or len(sys.argv) > 3:
print(instructions)
sys.exit()
if len(sys.argv) >= 2: libdir = sys.argv[1]
else: libdir = "/usr/local/lib"
if len(sys.argv) == 3: pydir = sys.argv[2]
else: pydir = ""
# copy C lib to libdir if it exists
# warn if not in LD_LIBRARY_PATH or LD_LIBRARY_PATH is undefined
if not os.path.isdir(libdir):
print("ERROR: libdir %s does not exist" % libdir)
sys.exit()
if "LD_LIBRARY_PATH" not in os.environ:
print("WARNING: LD_LIBRARY_PATH undefined, cannot check libdir %s" % libdir)
else:
libpaths = os.environ['LD_LIBRARY_PATH'].split(':')
if libdir not in libpaths:
print("WARNING: libdir %s not in LD_LIBRARY_PATH" % libdir)
str = "cp ../src/libliggghts.so %s" % libdir
print(str)
outstr = commands.getoutput(str)
if len(outstr.strip()): print(outstr)
# copy liggghts.py to pydir if it exists
# if pydir not specified, install in site-packages via distutils setup()
if pydir:
if not os.path.isdir(pydir):
print("ERROR: pydir %s does not exist" % pydir)
sys.exit()
str = "cp ../python/liggghts.py %s" % pydir
print(str)
outstr = commands.getoutput(str)
if len(outstr.strip()): print(outstr)
sys.exit()
print("installing liggghts.py in Python site-packages dir")
os.chdir('../python') # in case invoked via make in src dir
from distutils.core import setup
sys.argv = ["setup.py","install"] # as if had run "python setup.py install"
setup(name = "liggghts",
version = "3.8.0",
author = "Christoph Kloss",
author_email = "[email protected]",
url = "http://www.cfdem.com",
description = "LIGGGHTS - LAMMPS improved for general granular and granular heat transfer simulations",
py_modules = ["liggghts"])
| gpl-2.0 | 9,108,740,366,643,753,000 | 30.098592 | 109 | 0.671649 | false |
turon/openthread | tests/scripts/thread-cert/Cert_5_1_10_RouterAttachLinkQuality.py | 1 | 6969 | #!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
import config
import mle
import node
LEADER = 1
ROUTER1 = 2
ROUTER2 = 3
ROUTER3 = 4
class Cert_5_1_10_RouterAttachLinkQuality(unittest.TestCase):
def setUp(self):
self.simulator = config.create_default_simulator()
self.nodes = {}
for i in range(1, 5):
self.nodes[i] = node.Node(i, simulator=self.simulator)
self.nodes[LEADER].set_panid(0xface)
self.nodes[LEADER].set_mode('rsdn')
self.nodes[LEADER].add_whitelist(self.nodes[ROUTER1].get_addr64())
self.nodes[LEADER].add_whitelist(self.nodes[ROUTER2].get_addr64())
self.nodes[LEADER].enable_whitelist()
self.nodes[ROUTER1].set_panid(0xface)
self.nodes[ROUTER1].set_mode('rsdn')
self.nodes[ROUTER1].add_whitelist(self.nodes[LEADER].get_addr64())
self.nodes[ROUTER1].add_whitelist(self.nodes[ROUTER3].get_addr64())
self.nodes[ROUTER1].enable_whitelist()
self.nodes[ROUTER1].set_router_selection_jitter(1)
self.nodes[ROUTER2].set_panid(0xface)
self.nodes[ROUTER2].set_mode('rsdn')
self.nodes[ROUTER2].add_whitelist(self.nodes[LEADER].get_addr64())
self.nodes[ROUTER2].add_whitelist(
self.nodes[ROUTER3].get_addr64(), rssi=-85
)
self.nodes[ROUTER2].enable_whitelist()
self.nodes[ROUTER2].set_router_selection_jitter(1)
self.nodes[ROUTER3].set_panid(0xface)
self.nodes[ROUTER3].set_mode('rsdn')
self.nodes[ROUTER3].add_whitelist(self.nodes[ROUTER1].get_addr64())
self.nodes[ROUTER3].add_whitelist(self.nodes[ROUTER2].get_addr64())
self.nodes[ROUTER3].enable_whitelist()
self.nodes[ROUTER3].set_router_selection_jitter(1)
def tearDown(self):
for n in list(self.nodes.values()):
n.stop()
n.destroy()
self.simulator.stop()
def test(self):
self.nodes[LEADER].start()
self.simulator.go(5)
self.assertEqual(self.nodes[LEADER].get_state(), 'leader')
self.nodes[ROUTER1].start()
self.simulator.go(5)
self.assertEqual(self.nodes[ROUTER1].get_state(), 'router')
self.nodes[ROUTER2].start()
self.simulator.go(5)
self.assertEqual(self.nodes[ROUTER2].get_state(), 'router')
self.nodes[ROUTER3].start()
self.simulator.go(5)
self.assertEqual(self.nodes[ROUTER3].get_state(), 'router')
leader_messages = self.simulator.get_messages_sent_by(LEADER)
router1_messages = self.simulator.get_messages_sent_by(ROUTER1)
router2_messages = self.simulator.get_messages_sent_by(ROUTER2)
router3_messages = self.simulator.get_messages_sent_by(ROUTER3)
# 1 - Leader, Router1, Router2
leader_messages.next_mle_message(mle.CommandType.ADVERTISEMENT)
router1_messages.next_mle_message(mle.CommandType.PARENT_REQUEST)
leader_messages.next_mle_message(mle.CommandType.PARENT_RESPONSE)
router1_messages.next_mle_message(mle.CommandType.CHILD_ID_REQUEST)
leader_messages.next_mle_message(mle.CommandType.CHILD_ID_RESPONSE)
msg = router1_messages.next_coap_message("0.02")
msg.assertCoapMessageRequestUriPath("/a/as")
msg = leader_messages.next_coap_message("2.04")
router2_messages.next_mle_message(mle.CommandType.PARENT_REQUEST)
leader_messages.next_mle_message(mle.CommandType.PARENT_RESPONSE)
router2_messages.next_mle_message(mle.CommandType.CHILD_ID_REQUEST)
leader_messages.next_mle_message(mle.CommandType.CHILD_ID_RESPONSE)
msg = router2_messages.next_coap_message("0.02")
msg.assertCoapMessageRequestUriPath("/a/as")
msg = leader_messages.next_coap_message("2.04")
router1_messages.next_mle_message(mle.CommandType.ADVERTISEMENT)
router2_messages.next_mle_message(mle.CommandType.ADVERTISEMENT)
# 3 - Router3
msg = router3_messages.next_mle_message(mle.CommandType.PARENT_REQUEST)
msg.assertSentWithHopLimit(255)
msg.assertSentToDestinationAddress("ff02::2")
msg.assertMleMessageContainsTlv(mle.Mode)
msg.assertMleMessageContainsTlv(mle.Challenge)
msg.assertMleMessageContainsTlv(mle.ScanMask)
msg.assertMleMessageContainsTlv(mle.Version)
# 4 - Router1, Router2
msg = router1_messages.next_mle_message(
mle.CommandType.PARENT_RESPONSE
)
msg.assertSentToNode(self.nodes[ROUTER3])
msg = router2_messages.next_mle_message(
mle.CommandType.PARENT_RESPONSE
)
msg.assertSentToNode(self.nodes[ROUTER3])
# 5 - Router3
msg = router3_messages.next_mle_message(
mle.CommandType.CHILD_ID_REQUEST
)
msg.assertSentToNode(self.nodes[ROUTER1])
msg.assertMleMessageContainsTlv(mle.Response)
msg.assertMleMessageContainsTlv(mle.LinkLayerFrameCounter)
msg.assertMleMessageContainsOptionalTlv(mle.MleFrameCounter)
msg.assertMleMessageContainsTlv(mle.Mode)
msg.assertMleMessageContainsTlv(mle.Timeout)
msg.assertMleMessageContainsTlv(mle.Version)
msg.assertMleMessageContainsTlv(mle.TlvRequest)
msg.assertMleMessageDoesNotContainTlv(mle.AddressRegistration)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | 9,154,540,873,371,013,000 | 39.754386 | 79 | 0.694361 | false |
kingland/runtime | deps/v8/tools/testrunner/server/main.py | 12 | 8953 | # Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import multiprocessing
import os
import shutil
import subprocess
import threading
import time
from . import daemon
from . import local_handler
from . import presence_handler
from . import signatures
from . import status_handler
from . import work_handler
from ..network import perfdata
class Server(daemon.Daemon):
def __init__(self, pidfile, root, stdin="/dev/null",
stdout="/dev/null", stderr="/dev/null"):
super(Server, self).__init__(pidfile, stdin, stdout, stderr)
self.root = root
self.local_handler = None
self.local_handler_thread = None
self.work_handler = None
self.work_handler_thread = None
self.status_handler = None
self.status_handler_thread = None
self.presence_daemon = None
self.presence_daemon_thread = None
self.peers = []
self.jobs = multiprocessing.cpu_count()
self.peer_list_lock = threading.Lock()
self.perf_data_lock = None
self.presence_daemon_lock = None
self.datadir = os.path.join(self.root, "data")
pubkey_fingerprint_filename = os.path.join(self.datadir, "mypubkey")
with open(pubkey_fingerprint_filename) as f:
self.pubkey_fingerprint = f.read().strip()
self.relative_perf_filename = os.path.join(self.datadir, "myperf")
if os.path.exists(self.relative_perf_filename):
with open(self.relative_perf_filename) as f:
try:
self.relative_perf = float(f.read())
except:
self.relative_perf = 1.0
else:
self.relative_perf = 1.0
def run(self):
os.nice(20)
self.ip = presence_handler.GetOwnIP()
self.perf_data_manager = perfdata.PerfDataManager(self.datadir)
self.perf_data_lock = threading.Lock()
self.local_handler = local_handler.LocalSocketServer(self)
self.local_handler_thread = threading.Thread(
target=self.local_handler.serve_forever)
self.local_handler_thread.start()
self.work_handler = work_handler.WorkSocketServer(self)
self.work_handler_thread = threading.Thread(
target=self.work_handler.serve_forever)
self.work_handler_thread.start()
self.status_handler = status_handler.StatusSocketServer(self)
self.status_handler_thread = threading.Thread(
target=self.status_handler.serve_forever)
self.status_handler_thread.start()
self.presence_daemon = presence_handler.PresenceDaemon(self)
self.presence_daemon_thread = threading.Thread(
target=self.presence_daemon.serve_forever)
self.presence_daemon_thread.start()
self.presence_daemon.FindPeers()
time.sleep(0.5) # Give those peers some time to reply.
with self.peer_list_lock:
for p in self.peers:
if p.address == self.ip: continue
status_handler.RequestTrustedPubkeys(p, self)
while True:
try:
self.PeriodicTasks()
time.sleep(60)
except Exception, e:
print("MAIN LOOP EXCEPTION: %s" % e)
self.Shutdown()
break
except KeyboardInterrupt:
self.Shutdown()
break
def Shutdown(self):
with open(self.relative_perf_filename, "w") as f:
f.write("%s" % self.relative_perf)
self.presence_daemon.shutdown()
self.presence_daemon.server_close()
self.local_handler.shutdown()
self.local_handler.server_close()
self.work_handler.shutdown()
self.work_handler.server_close()
self.status_handler.shutdown()
self.status_handler.server_close()
def PeriodicTasks(self):
# If we know peers we don't trust, see if someone else trusts them.
with self.peer_list_lock:
for p in self.peers:
if p.trusted: continue
if self.IsTrusted(p.pubkey):
p.trusted = True
status_handler.ITrustYouNow(p)
continue
for p2 in self.peers:
if not p2.trusted: continue
status_handler.TryTransitiveTrust(p2, p.pubkey, self)
# TODO: Ping for more peers waiting to be discovered.
# TODO: Update the checkout (if currently idle).
def AddPeer(self, peer):
with self.peer_list_lock:
for p in self.peers:
if p.address == peer.address:
return
self.peers.append(peer)
if peer.trusted:
status_handler.ITrustYouNow(peer)
def DeletePeer(self, peer_address):
with self.peer_list_lock:
for i in xrange(len(self.peers)):
if self.peers[i].address == peer_address:
del self.peers[i]
return
def MarkPeerAsTrusting(self, peer_address):
with self.peer_list_lock:
for p in self.peers:
if p.address == peer_address:
p.trusting_me = True
break
def UpdatePeerPerformance(self, peer_address, performance):
with self.peer_list_lock:
for p in self.peers:
if p.address == peer_address:
p.relative_performance = performance
def CopyToTrusted(self, pubkey_filename):
with open(pubkey_filename, "r") as f:
lines = f.readlines()
fingerprint = lines[-1].strip()
target_filename = self._PubkeyFilename(fingerprint)
shutil.copy(pubkey_filename, target_filename)
with self.peer_list_lock:
for peer in self.peers:
if peer.address == self.ip: continue
if peer.pubkey == fingerprint:
status_handler.ITrustYouNow(peer)
else:
result = self.SignTrusted(fingerprint)
status_handler.NotifyNewTrusted(peer, result)
return fingerprint
def _PubkeyFilename(self, pubkey_fingerprint):
return os.path.join(self.root, "trusted", "%s.pem" % pubkey_fingerprint)
def IsTrusted(self, pubkey_fingerprint):
return os.path.exists(self._PubkeyFilename(pubkey_fingerprint))
def ListTrusted(self):
path = os.path.join(self.root, "trusted")
if not os.path.exists(path): return []
return [ f[:-4] for f in os.listdir(path) if f.endswith(".pem") ]
def SignTrusted(self, pubkey_fingerprint):
if not self.IsTrusted(pubkey_fingerprint):
return []
filename = self._PubkeyFilename(pubkey_fingerprint)
result = signatures.ReadFileAndSignature(filename) # Format: [key, sig].
return [pubkey_fingerprint, result[0], result[1], self.pubkey_fingerprint]
def AcceptNewTrusted(self, data):
# The format of |data| matches the return value of |SignTrusted()|.
if not data: return
fingerprint = data[0]
pubkey = data[1]
signature = data[2]
signer = data[3]
if not self.IsTrusted(signer):
return
if self.IsTrusted(fingerprint):
return # Already trusted.
filename = self._PubkeyFilename(fingerprint)
signer_pubkeyfile = self._PubkeyFilename(signer)
if not signatures.VerifySignature(filename, pubkey, signature,
signer_pubkeyfile):
return
return # Nothing more to do.
def AddPerfData(self, test_key, duration, arch, mode):
data_store = self.perf_data_manager.GetStore(arch, mode)
data_store.RawUpdatePerfData(str(test_key), duration)
def CompareOwnPerf(self, test, arch, mode):
data_store = self.perf_data_manager.GetStore(arch, mode)
observed = data_store.FetchPerfData(test)
if not observed: return
own_perf_estimate = observed / test.duration
with self.perf_data_lock:
kLearnRateLimiter = 9999
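      # The next three lines keep a slow-moving running average: the stored
      # relative_perf is weighted 9999:1 against the new own_perf_estimate,
      # i.e. new = (old * 9999 + estimate) / 10000, so a single sample only
      # nudges the stored value slightly.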
self.relative_perf *= kLearnRateLimiter
self.relative_perf += own_perf_estimate
self.relative_perf /= (kLearnRateLimiter + 1)
| apache-2.0 | 6,778,309,564,730,051,000 | 35.542857 | 78 | 0.685804 | false |
alikins/ansible | lib/ansible/module_utils/network/common/config.py | 38 | 13532 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2016 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import re
import hashlib
from ansible.module_utils.six.moves import zip
from ansible.module_utils._text import to_bytes, to_native
from ansible.module_utils.network.common.utils import to_list
DEFAULT_COMMENT_TOKENS = ['#', '!', '/*', '*/', 'echo']
DEFAULT_IGNORE_LINES_RE = set([
re.compile(r"Using \d+ out of \d+ bytes"),
re.compile(r"Building configuration"),
re.compile(r"Current configuration : \d+ bytes")
])
class ConfigLine(object):
def __init__(self, raw):
self.text = str(raw).strip()
self.raw = raw
self._children = list()
self._parents = list()
def __str__(self):
return self.raw
def __eq__(self, other):
return self.line == other.line
def __ne__(self, other):
return not self.__eq__(other)
def __getitem__(self, key):
for item in self._children:
if item.text == key:
return item
raise KeyError(key)
@property
def line(self):
line = self.parents
line.append(self.text)
return ' '.join(line)
@property
def children(self):
return _obj_to_text(self._children)
@property
def child_objs(self):
return self._children
@property
def parents(self):
return _obj_to_text(self._parents)
@property
def path(self):
config = _obj_to_raw(self._parents)
config.append(self.raw)
return '\n'.join(config)
@property
def has_children(self):
return len(self._children) > 0
@property
def has_parents(self):
return len(self._parents) > 0
def add_child(self, obj):
if not isinstance(obj, ConfigLine):
raise AssertionError('child must be of type `ConfigLine`')
self._children.append(obj)
def ignore_line(text, tokens=None):
for item in (tokens or DEFAULT_COMMENT_TOKENS):
if text.startswith(item):
return True
for regex in DEFAULT_IGNORE_LINES_RE:
if regex.match(text):
return True
def _obj_to_text(x):
return [o.text for o in x]
def _obj_to_raw(x):
return [o.raw for o in x]
def _obj_to_block(objects, visited=None):
items = list()
for o in objects:
if o not in items:
items.append(o)
for child in o._children:
if child not in items:
items.append(child)
return _obj_to_raw(items)
def dumps(objects, output='block', comments=False):
if output == 'block':
items = _obj_to_block(objects)
elif output == 'commands':
items = _obj_to_text(objects)
else:
raise TypeError('unknown value supplied for keyword output')
if output != 'commands':
if comments:
for index, item in enumerate(items):
nextitem = index + 1
if nextitem < len(items) and not item.startswith(' ') and items[nextitem].startswith(' '):
item = '!\n%s' % item
items[index] = item
items.append('!')
items.append('end')
return '\n'.join(items)
class NetworkConfig(object):
def __init__(self, indent=1, contents=None, ignore_lines=None):
self._indent = indent
self._items = list()
self._config_text = None
if ignore_lines:
for item in ignore_lines:
if not isinstance(item, re._pattern_type):
item = re.compile(item)
DEFAULT_IGNORE_LINES_RE.add(item)
if contents:
self.load(contents)
@property
def items(self):
return self._items
@property
def config_text(self):
return self._config_text
@property
def sha1(self):
sha1 = hashlib.sha1()
sha1.update(to_bytes(str(self), errors='surrogate_or_strict'))
return sha1.digest()
def __getitem__(self, key):
for line in self:
if line.text == key:
return line
raise KeyError(key)
def __iter__(self):
return iter(self._items)
def __str__(self):
return '\n'.join([c.raw for c in self.items])
def __len__(self):
return len(self._items)
def load(self, s):
self._config_text = s
self._items = self.parse(s)
def loadfp(self, fp):
return self.load(open(fp).read())
def parse(self, lines, comment_tokens=None):
toplevel = re.compile(r'\S')
childline = re.compile(r'^\s*(.+)$')
entry_reg = re.compile(r'([{};])')
ancestors = list()
config = list()
curlevel = 0
prevlevel = 0
for linenum, line in enumerate(to_native(lines, errors='surrogate_or_strict').split('\n')):
text = entry_reg.sub('', line).strip()
cfg = ConfigLine(line)
if not text or ignore_line(text, comment_tokens):
continue
# handle top level commands
if toplevel.match(line):
ancestors = [cfg]
prevlevel = curlevel
curlevel = 0
# handle sub level commands
else:
match = childline.match(line)
line_indent = match.start(1)
prevlevel = curlevel
curlevel = int(line_indent / self._indent)
if (curlevel - 1) > prevlevel:
curlevel = prevlevel + 1
parent_level = curlevel - 1
cfg._parents = ancestors[:curlevel]
if curlevel > len(ancestors):
config.append(cfg)
continue
for i in range(curlevel, len(ancestors)):
ancestors.pop()
ancestors.append(cfg)
ancestors[parent_level].add_child(cfg)
config.append(cfg)
return config
def get_object(self, path):
for item in self.items:
if item.text == path[-1]:
if item.parents == path[:-1]:
return item
def get_block(self, path):
if not isinstance(path, list):
raise AssertionError('path argument must be a list object')
obj = self.get_object(path)
if not obj:
raise ValueError('path does not exist in config')
return self._expand_block(obj)
def get_block_config(self, path):
block = self.get_block(path)
return dumps(block, 'block')
def _expand_block(self, configobj, S=None):
if S is None:
S = list()
S.append(configobj)
for child in configobj._children:
if child in S:
continue
self._expand_block(child, S)
return S
def _diff_line(self, other):
updates = list()
for item in self.items:
if item not in other:
updates.append(item)
return updates
def _diff_strict(self, other):
updates = list()
for index, line in enumerate(self.items):
try:
if str(line).strip() != str(other[index]).strip():
updates.append(line)
except (AttributeError, IndexError):
updates.append(line)
return updates
def _diff_exact(self, other):
updates = list()
if len(other) != len(self.items):
updates.extend(self.items)
else:
for ours, theirs in zip(self.items, other):
if ours != theirs:
updates.extend(self.items)
break
return updates
def difference(self, other, match='line', path=None, replace=None):
"""Perform a config diff against the another network config
:param other: instance of NetworkConfig to diff against
:param match: type of diff to perform. valid values are 'line',
'strict', 'exact'
:param path: context in the network config to filter the diff
:param replace: the method used to generate the replacement lines.
valid values are 'block', 'line'
:returns: a string of lines that are different
"""
if path and match != 'line':
try:
other = other.get_block(path)
except ValueError:
other = list()
else:
other = other.items
# generate a list of ConfigLines that aren't in other
meth = getattr(self, '_diff_%s' % match)
updates = meth(other)
if replace == 'block':
parents = list()
for item in updates:
if not item.has_parents:
parents.append(item)
else:
for p in item._parents:
if p not in parents:
parents.append(p)
updates = list()
for item in parents:
updates.extend(self._expand_block(item))
visited = set()
expanded = list()
for item in updates:
for p in item._parents:
if p.line not in visited:
visited.add(p.line)
expanded.append(p)
expanded.append(item)
visited.add(item.line)
return expanded
def add(self, lines, parents=None):
ancestors = list()
offset = 0
obj = None
# global config command
if not parents:
for line in lines:
item = ConfigLine(line)
item.raw = line
if item not in self.items:
self.items.append(item)
else:
for index, p in enumerate(parents):
try:
i = index + 1
obj = self.get_block(parents[:i])[0]
ancestors.append(obj)
except ValueError:
# add parent to config
offset = index * self._indent
obj = ConfigLine(p)
obj.raw = p.rjust(len(p) + offset)
if ancestors:
obj._parents = list(ancestors)
ancestors[-1]._children.append(obj)
self.items.append(obj)
ancestors.append(obj)
# add child objects
for line in lines:
# check if child already exists
for child in ancestors[-1]._children:
if child.text == line:
break
else:
offset = len(parents) * self._indent
item = ConfigLine(line)
item.raw = line.rjust(len(line) + offset)
item._parents = ancestors
ancestors[-1]._children.append(item)
self.items.append(item)
class CustomNetworkConfig(NetworkConfig):
def items_text(self):
return [item.text for item in self.items]
def expand_section(self, configobj, S=None):
if S is None:
S = list()
S.append(configobj)
for child in configobj.child_objs:
if child in S:
continue
self.expand_section(child, S)
return S
def to_block(self, section):
return '\n'.join([item.raw for item in section])
def get_section(self, path):
try:
section = self.get_section_objects(path)
return self.to_block(section)
except ValueError:
return list()
def get_section_objects(self, path):
if not isinstance(path, list):
path = [path]
obj = self.get_object(path)
if not obj:
raise ValueError('path does not exist in config')
return self.expand_section(obj)
| gpl-3.0 | -1,920,168,921,701,054,200 | 29.477477 | 106 | 0.551803 | false |
Thhhza/XlsxWriter | xlsxwriter/test/comparison/test_chart_title01.py | 8 | 1535 | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, [email protected]
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'chart_title01.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of an XlsxWriter file with default title."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({'type': 'column'})
chart.axis_ids = [46165376, 54462720]
data = [
[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
]
worksheet.write_column('A1', data[0])
worksheet.write_column('B1', data[1])
worksheet.write_column('C1', data[2])
chart.add_series({'values': '=Sheet1!$A$1:$A$5',
'name': 'Foo'})
chart.set_title({'none': True})
worksheet.insert_chart('E9', chart)
workbook.close()
self.assertExcelEqual()
| bsd-2-clause | -8,200,257,901,563,032,000 | 24.583333 | 79 | 0.542671 | false |
trbs/django-oauth-toolkit | oauth2_provider/ext/rest_framework/permissions.py | 11 | 1922 | import logging
from django.core.exceptions import ImproperlyConfigured
from rest_framework.permissions import BasePermission
from ...settings import oauth2_settings
log = logging.getLogger('oauth2_provider')
SAFE_HTTP_METHODS = ['GET', 'HEAD', 'OPTIONS']
class TokenHasScope(BasePermission):
"""
The request is authenticated as a user and the token used has the right scope
"""
def has_permission(self, request, view):
token = request.auth
if not token:
return False
if hasattr(token, 'scope'): # OAuth 2
required_scopes = self.get_scopes(request, view)
log.debug("Required scopes to access resource: {0}".format(required_scopes))
return token.is_valid(required_scopes)
        assert False, ('TokenHasScope requires the '
                       '`oauth2_provider.rest_framework.OAuth2Authentication` '
                       'authentication class to be used.')
def get_scopes(self, request, view):
try:
return getattr(view, 'required_scopes')
except AttributeError:
raise ImproperlyConfigured(
'TokenHasScope requires the view to define the required_scopes attribute')
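# Illustrative sketch (not part of this module; the view name and scope are
# hypothetical): a DRF view opts in to this permission roughly like so.
#   class SongView(views.APIView):
#       authentication_classes = [OAuth2Authentication]
#       permission_classes = [TokenHasScope]
#       required_scopes = ['music']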
class TokenHasReadWriteScope(TokenHasScope):
"""
The request is authenticated as a user and the token used has the right scope
"""
def get_scopes(self, request, view):
try:
required_scopes = super(TokenHasReadWriteScope, self).get_scopes(request, view)
except ImproperlyConfigured:
required_scopes = []
# TODO: code duplication!! see dispatch in ReadWriteScopedResourceMixin
if request.method.upper() in SAFE_HTTP_METHODS:
read_write_scope = oauth2_settings.READ_SCOPE
else:
read_write_scope = oauth2_settings.WRITE_SCOPE
return required_scopes + [read_write_scope]
| bsd-2-clause | 6,172,054,866,437,923,000 | 30.508197 | 94 | 0.651925 | false |
saurabh6790/OFF-RISAPP | patches/april_2013/p05_update_file_data.py | 30 | 2406 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import webnotes, webnotes.utils, os
def execute():
webnotes.reload_doc("core", "doctype", "file_data")
webnotes.reset_perms("File Data")
singles = get_single_doctypes()
for doctype in webnotes.conn.sql_list("""select parent from tabDocField where
fieldname='file_list'"""):
# the other scenario is handled in p07_update_file_data_2
if doctype in singles:
update_file_list(doctype, singles)
# export_to_files([["DocType", doctype]])
def get_single_doctypes():
return webnotes.conn.sql_list("""select name from tabDocType
where ifnull(issingle,0)=1""")
def update_file_list(doctype, singles):
if doctype in singles:
doc = webnotes.doc(doctype, doctype)
if doc.file_list:
update_for_doc(doctype, doc)
webnotes.conn.set_value(doctype, None, "file_list", None)
else:
try:
for doc in webnotes.conn.sql("""select name, file_list from `tab%s` where
ifnull(file_list, '')!=''""" % doctype, as_dict=True):
update_for_doc(doctype, doc)
webnotes.conn.commit()
webnotes.conn.sql("""alter table `tab%s` drop column `file_list`""" % doctype)
except Exception, e:
print webnotes.getTraceback()
if (e.args and e.args[0]!=1054) or not e.args:
raise
def update_for_doc(doctype, doc):
for filedata in doc.file_list.split("\n"):
if not filedata:
continue
filedata = filedata.split(",")
if len(filedata)==2:
filename, fileid = filedata[0], filedata[1]
else:
continue
exists = True
if not (filename.startswith("http://") or filename.startswith("https://")):
if not os.path.exists(webnotes.utils.get_site_path(webnotes.conf.files_path, filename)):
exists = False
if exists:
if webnotes.conn.exists("File Data", fileid):
try:
fd = webnotes.bean("File Data", fileid)
if not (fd.doc.attached_to_doctype and fd.doc.attached_to_name):
fd.doc.attached_to_doctype = doctype
fd.doc.attached_to_name = doc.name
fd.save()
else:
fd = webnotes.bean("File Data", copy=fd.doclist)
fd.doc.attached_to_doctype = doctype
fd.doc.attached_to_name = doc.name
fd.doc.name = None
fd.insert()
except webnotes.DuplicateEntryError:
pass
else:
webnotes.conn.sql("""delete from `tabFile Data` where name=%s""",
fileid)
| agpl-3.0 | -1,996,610,951,925,007,600 | 30.657895 | 91 | 0.672901 | false |
wyvernnot/learn_python_through_unittest | tests/ClassMagicTest.py | 1 | 2708 | import unittest
from unittest.mock import Mock
class ClassMagicTest(unittest.TestCase):
def test_new(self):
class A(object):
def __new__(cls, *args, **kwargs):
                # Overriding __new__ must return the new instance
return object.__new__(cls, *args, **kwargs)
def __init__(self):
self.foo = 0
a = A()
self.assertEqual(a.foo, 0)
def test_toString(self):
class A(object):
def __str__(self):
return 'a instance'
a = A()
self.assertEqual(str(a), 'a instance')
def test_iter(self):
class Range(object):
def __init__(self, count):
self.count = count
def __iter__(self):
return self
def __next__(self):
if self.count == 0:
raise StopIteration
self.count -= 1
return self.count
r = Range(3)
m = Mock()
for a in r:
m(a)
self.assertEqual(m.call_count, 3)
def test_items(self):
class A(object):
ok = 'foo'
def __getitem__(self, item):
return item.upper()
a = A()
self.assertEqual(a["ok"], 'OK')
class B(object):
def __init__(self):
self.foo = 1
def __getattr__(self, item):
return item
b = B()
self.assertEqual(b.z, 'z')
self.assertEqual(b.foo, 1)
def test_call(self):
class A(object):
pass
class B(A):
def __call__(self, *args, **kwargs):
return 0
b = B()
self.assertTrue(callable(b))
def test_property(self):
class Exam(object):
def __init__(self, score):
self.__score = score
@property
def score(self):
return self.__score
            # Why does this name have to be the same as 'score'?
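            # The @property above binds a property object to the name 'score';
            # score.setter registers the setter on that same object, and
            # re-binding the result to 'score' keeps getter and setter on one
            # attribute.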
@score.setter
def score(self, val):
self.__score = val
e = Exam(99)
self.assertEqual(e.score, 99)
e.score = 8
self.assertEqual(e.score, 8)
def test_super(self):
class Animal(object):
def __init__(self, name):
self.name = name
class Cat(Animal):
def __init__(self, name, age):
super().__init__(name)
self.age = age
@property
def super(self):
return super()
c = Cat('Lulu', 3)
self.assertEqual(c.name, 'Lulu')
self.assertEqual(c.age, 3)
| mit | 8,066,743,159,398,102,000 | 21.25 | 59 | 0.437828 | false |
kaushik94/boto | boto/ec2/reservedinstance.py | 17 | 12948 | # Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.resultset import ResultSet
from boto.ec2.ec2object import EC2Object
from boto.utils import parse_ts
class ReservedInstancesOffering(EC2Object):
def __init__(self, connection=None, id=None, instance_type=None,
availability_zone=None, duration=None, fixed_price=None,
usage_price=None, description=None, instance_tenancy=None,
currency_code=None, offering_type=None,
recurring_charges=None, pricing_details=None):
super(ReservedInstancesOffering, self).__init__(connection)
self.id = id
self.instance_type = instance_type
self.availability_zone = availability_zone
self.duration = duration
self.fixed_price = fixed_price
self.usage_price = usage_price
self.description = description
self.instance_tenancy = instance_tenancy
self.currency_code = currency_code
self.offering_type = offering_type
self.recurring_charges = recurring_charges
self.pricing_details = pricing_details
def __repr__(self):
return 'ReservedInstanceOffering:%s' % self.id
def startElement(self, name, attrs, connection):
if name == 'recurringCharges':
self.recurring_charges = ResultSet([('item', RecurringCharge)])
return self.recurring_charges
elif name == 'pricingDetailsSet':
self.pricing_details = ResultSet([('item', PricingDetail)])
return self.pricing_details
return None
def endElement(self, name, value, connection):
if name == 'reservedInstancesOfferingId':
self.id = value
elif name == 'instanceType':
self.instance_type = value
elif name == 'availabilityZone':
self.availability_zone = value
elif name == 'duration':
self.duration = int(value)
elif name == 'fixedPrice':
self.fixed_price = value
elif name == 'usagePrice':
self.usage_price = value
elif name == 'productDescription':
self.description = value
elif name == 'instanceTenancy':
self.instance_tenancy = value
elif name == 'currencyCode':
self.currency_code = value
elif name == 'offeringType':
self.offering_type = value
elif name == 'marketplace':
self.marketplace = True if value == 'true' else False
def describe(self):
print 'ID=%s' % self.id
print '\tInstance Type=%s' % self.instance_type
print '\tZone=%s' % self.availability_zone
print '\tDuration=%s' % self.duration
print '\tFixed Price=%s' % self.fixed_price
print '\tUsage Price=%s' % self.usage_price
print '\tDescription=%s' % self.description
def purchase(self, instance_count=1, dry_run=False):
return self.connection.purchase_reserved_instance_offering(
self.id,
instance_count,
dry_run=dry_run
)
class RecurringCharge(object):
def __init__(self, connection=None, frequency=None, amount=None):
self.frequency = frequency
self.amount = amount
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
setattr(self, name, value)
class PricingDetail(object):
def __init__(self, connection=None, price=None, count=None):
self.price = price
self.count = count
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
setattr(self, name, value)
class ReservedInstance(ReservedInstancesOffering):
def __init__(self, connection=None, id=None, instance_type=None,
availability_zone=None, duration=None, fixed_price=None,
usage_price=None, description=None,
instance_count=None, state=None):
super(ReservedInstance, self).__init__(connection, id, instance_type,
availability_zone, duration,
fixed_price, usage_price,
description)
self.instance_count = instance_count
self.state = state
self.start = None
def __repr__(self):
return 'ReservedInstance:%s' % self.id
def endElement(self, name, value, connection):
if name == 'reservedInstancesId':
self.id = value
if name == 'instanceCount':
self.instance_count = int(value)
elif name == 'state':
self.state = value
elif name == 'start':
self.start = value
else:
super(ReservedInstance, self).endElement(name, value, connection)
class ReservedInstanceListing(EC2Object):
def __init__(self, connection=None, listing_id=None, id=None,
create_date=None, update_date=None,
status=None, status_message=None, client_token=None):
self.connection = connection
self.listing_id = listing_id
self.id = id
self.create_date = create_date
self.update_date = update_date
self.status = status
self.status_message = status_message
self.client_token = client_token
def startElement(self, name, attrs, connection):
if name == 'instanceCounts':
self.instance_counts = ResultSet([('item', InstanceCount)])
return self.instance_counts
elif name == 'priceSchedules':
self.price_schedules = ResultSet([('item', PriceSchedule)])
return self.price_schedules
return None
def endElement(self, name, value, connection):
if name == 'reservedInstancesListingId':
self.listing_id = value
elif name == 'reservedInstancesId':
self.id = value
elif name == 'createDate':
self.create_date = value
elif name == 'updateDate':
self.update_date = value
elif name == 'status':
self.status = value
elif name == 'statusMessage':
self.status_message = value
else:
setattr(self, name, value)
class InstanceCount(object):
def __init__(self, connection=None, state=None, instance_count=None):
self.state = state
self.instance_count = instance_count
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'state':
self.state = value
elif name == 'instanceCount':
self.instance_count = int(value)
else:
setattr(self, name, value)
class PriceSchedule(object):
def __init__(self, connection=None, term=None, price=None,
currency_code=None, active=None):
self.connection = connection
self.term = term
self.price = price
self.currency_code = currency_code
self.active = active
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'term':
self.term = int(value)
elif name == 'price':
self.price = value
elif name == 'currencyCode':
self.currency_code = value
elif name == 'active':
self.active = True if value == 'true' else False
else:
setattr(self, name, value)
class ReservedInstancesConfiguration(object):
def __init__(self, connection=None, availability_zone=None, platform=None,
instance_count=None, instance_type=None):
self.connection = connection
self.availability_zone = availability_zone
self.platform = platform
self.instance_count = instance_count
self.instance_type = instance_type
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'availabilityZone':
self.availability_zone = value
elif name == 'platform':
self.platform = value
elif name == 'instanceCount':
self.instance_count = int(value)
elif name == 'instanceType':
self.instance_type = value
else:
setattr(self, name, value)
class ModifyReservedInstancesResult(object):
def __init__(self, connection=None, modification_id=None):
self.connection = connection
self.modification_id = modification_id
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'reservedInstancesModificationId':
self.modification_id = value
else:
setattr(self, name, value)
class ModificationResult(object):
def __init__(self, connection=None, modification_id=None,
availability_zone=None, platform=None, instance_count=None,
instance_type=None):
self.connection = connection
self.modification_id = modification_id
self.availability_zone = availability_zone
self.platform = platform
self.instance_count = instance_count
self.instance_type = instance_type
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'reservedInstancesModificationId':
self.modification_id = value
elif name == 'availabilityZone':
self.availability_zone = value
elif name == 'platform':
self.platform = value
elif name == 'instanceCount':
self.instance_count = int(value)
elif name == 'instanceType':
self.instance_type = value
else:
setattr(self, name, value)
class ReservedInstancesModification(object):
def __init__(self, connection=None, modification_id=None,
reserved_instances=None, modification_results=None,
create_date=None, update_date=None, effective_date=None,
status=None, status_message=None, client_token=None):
self.connection = connection
self.modification_id = modification_id
self.reserved_instances = reserved_instances
self.modification_results = modification_results
self.create_date = create_date
self.update_date = update_date
self.effective_date = effective_date
self.status = status
self.status_message = status_message
self.client_token = client_token
def startElement(self, name, attrs, connection):
if name == 'reservedInstancesSet':
self.reserved_instances = ResultSet([
('item', ReservedInstance)
])
return self.reserved_instances
elif name == 'modificationResultSet':
self.modification_results = ResultSet([
('item', ModificationResult)
])
return self.modification_results
return None
def endElement(self, name, value, connection):
if name == 'reservedInstancesModificationId':
self.modification_id = value
elif name == 'createDate':
self.create_date = parse_ts(value)
elif name == 'updateDate':
self.update_date = parse_ts(value)
elif name == 'effectiveDate':
self.effective_date = parse_ts(value)
elif name == 'status':
self.status = value
elif name == 'statusMessage':
self.status_message = value
elif name == 'clientToken':
self.client_token = value
else:
setattr(self, name, value)
| mit | -130,505,028,038,792,800 | 35.994286 | 78 | 0.614149 | false |
bqbn/addons-server | src/olympia/git/tests/test_admin.py | 1 | 2320 | from django.contrib.admin.sites import AdminSite
from pyquery import PyQuery as pq
from olympia.amo.tests import TestCase, addon_factory, user_factory
from olympia.amo.urlresolvers import reverse
from olympia.git.admin import GitExtractionEntryAdmin
from olympia.git.models import GitExtractionEntry
class TestGitExtractionEntryAdmin(TestCase):
def setUp(self):
super().setUp()
self.user = user_factory()
self.grant_permission(self.user, 'Admin:GitExtractionEdit')
self.client.login(email=self.user.email)
self.list_url = reverse('admin:git_gitextractionentry_changelist')
self.admin = GitExtractionEntryAdmin(
model=GitExtractionEntry, admin_site=AdminSite()
)
def test_has_add_permission(self):
assert self.admin.has_add_permission(request=None) is False
def test_has_change_permission(self):
assert self.admin.has_change_permission(request=None) is False
def test_list_view(self):
GitExtractionEntry.objects.create(addon=addon_factory())
# 9 queries:
# - 2 transaction savepoints because of tests
# - 2 request user and groups
# - 2 COUNT(*) on extraction entries for pagination and total display
# - 1 all git extraction entries in one query
# - 1 all add-ons in one query
# - 1 all add-ons translations in one query
with self.assertNumQueries(9):
response = self.client.get(self.list_url)
assert response.status_code == 200
html = pq(response.content)
assert html('.column-id').length == 1
assert html('.actions option[value="delete_selected"]').length == 1
def test_list_view_is_restricted(self):
user = user_factory()
self.grant_permission(user, 'Admin:Curation')
self.client.login(email=user.email)
response = self.client.get(self.list_url)
assert response.status_code == 403
def test_formatted_addon(self):
addon = addon_factory()
entry = GitExtractionEntry.objects.create(addon=addon)
formatted_addon = self.admin.formatted_addon(entry)
assert (
reverse('admin:addons_addon_change', args=(addon.pk,))
in formatted_addon
)
assert str(addon) in formatted_addon
| bsd-3-clause | -6,525,388,924,771,073,000 | 34.692308 | 77 | 0.66681 | false |
pranavtendolkr/horizon | openstack_dashboard/dashboards/admin/metadata_defs/urls.py | 62 | 1253 | # (c) Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns # noqa
from django.conf.urls import url # noqa
from openstack_dashboard.dashboards.admin.metadata_defs import views
NAMESPACES = r'^(?P<namespace_id>[^/]+)/%s$'
urlpatterns = patterns(
'openstack_dashboard.dashboards.admin.metadata_defs.views',
url(r'^$', views.AdminIndexView.as_view(), name='index'),
url(r'^create/$', views.CreateView.as_view(), name='create'),
url(NAMESPACES % 'detail', views.DetailView.as_view(), name='detail'),
url(r'^(?P<id>[^/]+)/resource_types/$',
views.ManageResourceTypes.as_view(), name='resource_types'),
)
| apache-2.0 | 5,348,798,606,228,972,000 | 39.419355 | 78 | 0.702314 | false |
ghtmtt/QGIS | tests/src/python/test_layer_dependencies.py | 31 | 14442 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsSnappingUtils (complement to C++-based tests)
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Hugo Mercier'
__date__ = '12/07/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
import qgis # NOQA
import os
from qgis.core import (QgsProject,
QgsVectorLayer,
QgsMapSettings,
QgsSnappingUtils,
QgsSnappingConfig,
QgsTolerance,
QgsRectangle,
QgsPointXY,
QgsFeature,
QgsGeometry,
QgsLayerDefinition,
QgsMapLayerDependency
)
from qgis.testing import start_app, unittest
from qgis.PyQt.QtCore import QSize, QPoint
from qgis.PyQt.QtTest import QSignalSpy
import tempfile
from qgis.utils import spatialite_connect
# Convenience instances in case you may need them
start_app()
class TestLayerDependencies(unittest.TestCase):
@classmethod
def setUpClass(cls):
"""Run before all tests"""
pass
@classmethod
def tearDownClass(cls):
"""Run after all tests"""
pass
def setUp(self):
"""Run before each test."""
# create a temp SpatiaLite db with a trigger
fo = tempfile.NamedTemporaryFile()
fn = fo.name
fo.close()
self.fn = fn
con = spatialite_connect(fn)
cur = con.cursor()
cur.execute("SELECT InitSpatialMetadata(1)")
cur.execute("create table node(id integer primary key autoincrement);")
cur.execute("select AddGeometryColumn('node', 'geom', 4326, 'POINT');")
cur.execute("create table section(id integer primary key autoincrement, node1 integer, node2 integer);")
cur.execute("select AddGeometryColumn('section', 'geom', 4326, 'LINESTRING');")
cur.execute("create trigger add_nodes after insert on section begin insert into node (geom) values (st_startpoint(NEW.geom)); insert into node (geom) values (st_endpoint(NEW.geom)); end;")
cur.execute("insert into node (geom) values (geomfromtext('point(0 0)', 4326));")
cur.execute("insert into node (geom) values (geomfromtext('point(1 0)', 4326));")
cur.execute("create table node2(id integer primary key autoincrement);")
cur.execute("select AddGeometryColumn('node2', 'geom', 4326, 'POINT');")
cur.execute("create trigger add_nodes2 after insert on node begin insert into node2 (geom) values (st_translate(NEW.geom, 0.2, 0, 0)); end;")
con.commit()
con.close()
self.pointsLayer = QgsVectorLayer("dbname='%s' table=\"node\" (geom) sql=" % fn, "points", "spatialite")
assert (self.pointsLayer.isValid())
self.linesLayer = QgsVectorLayer("dbname='%s' table=\"section\" (geom) sql=" % fn, "lines", "spatialite")
assert (self.linesLayer.isValid())
self.pointsLayer2 = QgsVectorLayer("dbname='%s' table=\"node2\" (geom) sql=" % fn, "_points2", "spatialite")
assert (self.pointsLayer2.isValid())
QgsProject.instance().addMapLayers([self.pointsLayer, self.linesLayer, self.pointsLayer2])
# save the project file
fo = tempfile.NamedTemporaryFile()
fn = fo.name
fo.close()
self.projectFile = fn
QgsProject.instance().setFileName(self.projectFile)
QgsProject.instance().write()
def tearDown(self):
"""Run after each test."""
QgsProject.instance().clear()
pass
def test_resetSnappingIndex(self):
self.pointsLayer.setDependencies([])
self.linesLayer.setDependencies([])
self.pointsLayer2.setDependencies([])
ms = QgsMapSettings()
ms.setOutputSize(QSize(100, 100))
ms.setExtent(QgsRectangle(0, 0, 1, 1))
self.assertTrue(ms.hasValidSettings())
u = QgsSnappingUtils()
u.setMapSettings(ms)
cfg = u.config()
cfg.setEnabled(True)
cfg.setMode(QgsSnappingConfig.AdvancedConfiguration)
cfg.setIndividualLayerSettings(self.pointsLayer,
QgsSnappingConfig.IndividualLayerSettings(True,
QgsSnappingConfig.VertexFlag, 20, QgsTolerance.Pixels, 0.0, 0.0))
u.setConfig(cfg)
m = u.snapToMap(QPoint(95, 100))
self.assertTrue(m.isValid())
self.assertTrue(m.hasVertex())
self.assertEqual(m.point(), QgsPointXY(1, 0))
f = QgsFeature(self.linesLayer.fields())
f.setId(1)
geom = QgsGeometry.fromWkt("LINESTRING(0 0,1 1)")
f.setGeometry(geom)
self.linesLayer.startEditing()
self.linesLayer.addFeatures([f])
self.linesLayer.commitChanges()
l1 = len([f for f in self.pointsLayer.getFeatures()])
self.assertEqual(l1, 4)
m = u.snapToMap(QPoint(95, 0))
# snapping not updated
self.pointsLayer.setDependencies([])
self.assertEqual(m.isValid(), False)
# set layer dependencies
self.pointsLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())])
# add another line
f = QgsFeature(self.linesLayer.fields())
f.setId(2)
geom = QgsGeometry.fromWkt("LINESTRING(0 0,0.5 0.5)")
f.setGeometry(geom)
self.linesLayer.startEditing()
self.linesLayer.addFeatures([f])
self.linesLayer.commitChanges()
# check the snapped point is OK
m = u.snapToMap(QPoint(45, 50))
self.assertTrue(m.isValid())
self.assertTrue(m.hasVertex())
self.assertEqual(m.point(), QgsPointXY(0.5, 0.5))
self.pointsLayer.setDependencies([])
# test chained layer dependencies A -> B -> C
cfg.setIndividualLayerSettings(self.pointsLayer2,
QgsSnappingConfig.IndividualLayerSettings(True,
QgsSnappingConfig.VertexFlag, 20, QgsTolerance.Pixels, 0.0, 0.0))
u.setConfig(cfg)
self.pointsLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())])
self.pointsLayer2.setDependencies([QgsMapLayerDependency(self.pointsLayer.id())])
# add another line
f = QgsFeature(self.linesLayer.fields())
f.setId(3)
geom = QgsGeometry.fromWkt("LINESTRING(0 0.2,0.5 0.8)")
f.setGeometry(geom)
self.linesLayer.startEditing()
self.linesLayer.addFeatures([f])
self.linesLayer.commitChanges()
# check the second snapped point is OK
m = u.snapToMap(QPoint(75, 100 - 80))
self.assertTrue(m.isValid())
self.assertTrue(m.hasVertex())
self.assertEqual(m.point(), QgsPointXY(0.7, 0.8))
self.pointsLayer.setDependencies([])
self.pointsLayer2.setDependencies([])
def test_circular_dependencies_with_2_layers(self):
spy_points_data_changed = QSignalSpy(self.pointsLayer.dataChanged)
spy_lines_data_changed = QSignalSpy(self.linesLayer.dataChanged)
spy_points_repaint_requested = QSignalSpy(self.pointsLayer.repaintRequested)
spy_lines_repaint_requested = QSignalSpy(self.linesLayer.repaintRequested)
# only points fire dataChanged because we change its dependencies
self.assertTrue(self.pointsLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())]))
self.assertEqual(len(spy_points_data_changed), 1)
self.assertEqual(len(spy_lines_data_changed), 0)
# lines fire dataChanged because we changes its dependencies
# points fire dataChanged because it depends on line
self.assertTrue(self.linesLayer.setDependencies([QgsMapLayerDependency(self.pointsLayer.id())]))
self.assertEqual(len(spy_points_data_changed), 2)
self.assertEqual(len(spy_lines_data_changed), 1)
f = QgsFeature(self.pointsLayer.fields())
f.setId(1)
geom = QgsGeometry.fromWkt("POINT(0 0)")
f.setGeometry(geom)
self.pointsLayer.startEditing()
# new point fire featureAdded so depending line fire dataChanged
# point depends on line, so fire dataChanged
self.pointsLayer.addFeatures([f])
self.assertEqual(len(spy_points_data_changed), 3)
self.assertEqual(len(spy_lines_data_changed), 2)
# added feature is deleted and added with its new defined id
# (it was -1 before) so it fires 2 more signal dataChanged on
# depending line (on featureAdded and on featureDeleted)
# and so 2 more signal on points because it depends on line
self.pointsLayer.commitChanges()
self.assertEqual(len(spy_points_data_changed), 5)
self.assertEqual(len(spy_lines_data_changed), 4)
# repaintRequested is called on commit changes on point
# so it is on depending line
self.assertEqual(len(spy_lines_repaint_requested), 1)
self.assertEqual(len(spy_points_repaint_requested), 1)
def test_circular_dependencies_with_1_layer(self):
# You can define a layer dependent on it self (for instance, a line
# layer that trigger connected lines modifications when you modify
# one line)
spy_lines_data_changed = QSignalSpy(self.linesLayer.dataChanged)
spy_lines_repaint_requested = QSignalSpy(self.linesLayer.repaintRequested)
# line fire dataChanged because we change its dependencies
self.assertTrue(self.linesLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())]))
self.assertEqual(len(spy_lines_data_changed), 1)
f = QgsFeature(self.linesLayer.fields())
f.setId(1)
geom = QgsGeometry.fromWkt("LINESTRING(0 0,1 1)")
f.setGeometry(geom)
self.linesLayer.startEditing()
# line fire featureAdded so depending line fire dataChanged once more
self.linesLayer.addFeatures([f])
self.assertEqual(len(spy_lines_data_changed), 2)
# added feature is deleted and added with its new defined id
# (it was -1 before) so it fires 2 more signal dataChanged on
# depending line (on featureAdded and on featureDeleted)
self.linesLayer.commitChanges()
self.assertEqual(len(spy_lines_data_changed), 4)
# repaintRequested is called only once on commit changes on line
self.assertEqual(len(spy_lines_repaint_requested), 1)
def test_layerDefinitionRewriteId(self):
tmpfile = os.path.join(tempfile.tempdir, "test.qlr")
ltr = QgsProject.instance().layerTreeRoot()
self.pointsLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())])
QgsLayerDefinition.exportLayerDefinition(tmpfile, [ltr])
grp = ltr.addGroup("imported")
QgsLayerDefinition.loadLayerDefinition(tmpfile, QgsProject.instance(), grp)
newPointsLayer = None
newLinesLayer = None
for l in grp.findLayers():
if l.layerId().startswith('points'):
newPointsLayer = l.layer()
elif l.layerId().startswith('lines'):
newLinesLayer = l.layer()
self.assertIsNotNone(newPointsLayer)
self.assertIsNotNone(newLinesLayer)
self.assertTrue(newLinesLayer.id() in [dep.layerId() for dep in newPointsLayer.dependencies()])
self.pointsLayer.setDependencies([])
def test_signalConnection(self):
# remove all layers
QgsProject.instance().removeAllMapLayers()
# set dependencies and add back layers
self.pointsLayer = QgsVectorLayer("dbname='%s' table=\"node\" (geom) sql=" % self.fn, "points", "spatialite")
assert (self.pointsLayer.isValid())
self.linesLayer = QgsVectorLayer("dbname='%s' table=\"section\" (geom) sql=" % self.fn, "lines", "spatialite")
assert (self.linesLayer.isValid())
self.pointsLayer2 = QgsVectorLayer("dbname='%s' table=\"node2\" (geom) sql=" % self.fn, "_points2", "spatialite")
assert (self.pointsLayer2.isValid())
self.pointsLayer.setDependencies([QgsMapLayerDependency(self.linesLayer.id())])
self.pointsLayer2.setDependencies([QgsMapLayerDependency(self.pointsLayer.id())])
# this should update connections between layers
QgsProject.instance().addMapLayers([self.pointsLayer])
QgsProject.instance().addMapLayers([self.linesLayer])
QgsProject.instance().addMapLayers([self.pointsLayer2])
ms = QgsMapSettings()
ms.setOutputSize(QSize(100, 100))
ms.setExtent(QgsRectangle(0, 0, 1, 1))
self.assertTrue(ms.hasValidSettings())
u = QgsSnappingUtils()
u.setMapSettings(ms)
cfg = u.config()
cfg.setEnabled(True)
cfg.setMode(QgsSnappingConfig.AdvancedConfiguration)
cfg.setIndividualLayerSettings(self.pointsLayer,
QgsSnappingConfig.IndividualLayerSettings(True,
QgsSnappingConfig.VertexFlag, 20, QgsTolerance.Pixels, 0.0, 0.0))
cfg.setIndividualLayerSettings(self.pointsLayer2,
QgsSnappingConfig.IndividualLayerSettings(True,
QgsSnappingConfig.VertexFlag, 20, QgsTolerance.Pixels, 0.0, 0.0))
u.setConfig(cfg)
# add another line
f = QgsFeature(self.linesLayer.fields())
f.setId(4)
geom = QgsGeometry.fromWkt("LINESTRING(0.5 0.2,0.6 0)")
f.setGeometry(geom)
self.linesLayer.startEditing()
self.linesLayer.addFeatures([f])
self.linesLayer.commitChanges()
# check the second snapped point is OK
m = u.snapToMap(QPoint(75, 100 - 0))
self.assertTrue(m.isValid())
self.assertTrue(m.hasVertex())
self.assertEqual(m.point(), QgsPointXY(0.8, 0.0))
self.pointsLayer.setDependencies([])
self.pointsLayer2.setDependencies([])
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | -150,756,860,004,319,140 | 42.63142 | 196 | 0.632876 | false |
topxiaoke/myedx | lms/djangoapps/certificates/migrations/0011_auto__del_field_generatedcertificate_certificate_id__add_field_generat.py | 188 | 5961 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'GeneratedCertificate.certificate_id'
db.delete_column('certificates_generatedcertificate', 'certificate_id')
# Adding field 'GeneratedCertificate.verify_uuid'
db.add_column('certificates_generatedcertificate', 'verify_uuid',
self.gf('django.db.models.fields.CharField')(default='', max_length=32, blank=True),
keep_default=False)
# Adding field 'GeneratedCertificate.download_uuid'
db.add_column('certificates_generatedcertificate', 'download_uuid',
self.gf('django.db.models.fields.CharField')(default='', max_length=32, blank=True),
keep_default=False)
def backwards(self, orm):
# Adding field 'GeneratedCertificate.certificate_id'
db.add_column('certificates_generatedcertificate', 'certificate_id',
self.gf('django.db.models.fields.CharField')(default='', max_length=32, blank=True),
keep_default=False)
# Deleting field 'GeneratedCertificate.verify_uuid'
db.delete_column('certificates_generatedcertificate', 'verify_uuid')
# Deleting field 'GeneratedCertificate.download_uuid'
db.delete_column('certificates_generatedcertificate', 'download_uuid')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'certificates.generatedcertificate': {
'Meta': {'unique_together': "(('user', 'course_id'),)", 'object_name': 'GeneratedCertificate'},
'course_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'distinction': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'download_url': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'blank': 'True'}),
'download_uuid': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'blank': 'True'}),
'grade': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '5', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'unavailable'", 'max_length': '32'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'verify_uuid': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['certificates']
| agpl-3.0 | 3,137,861,653,596,457,000 | 65.233333 | 182 | 0.572052 | false |
JuliBakagianni/CEF-ELRC | lib/python2.7/site-packages/django/conf/locale/pt/formats.py | 232 | 1532 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'j \de F \de Y'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = r'j \de F \de Y à\s H:i'
YEAR_MONTH_FORMAT = r'F \de Y'
MONTH_DAY_FORMAT = r'j \de F'
SHORT_DATE_FORMAT = 'd/m/Y'
SHORT_DATETIME_FORMAT = 'd/m/Y H:i'
FIRST_DAY_OF_WEEK = 0 # Sunday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%Y-%m-%d', '%d/%m/%Y', '%d/%m/%y', # '2006-10-25', '25/10/2006', '25/10/06'
# '%d de %b de %Y', '%d de %b, %Y', # '25 de Out de 2006', '25 Out, 2006'
# '%d de %B de %Y', '%d de %B, %Y', # '25 de Outubro de 2006', '25 de Outubro, 2006'
)
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
'%d/%m/%Y %H:%M', # '25/10/2006 14:30'
'%d/%m/%Y', # '25/10/2006'
'%d/%m/%y %H:%M:%S', # '25/10/06 14:30:59'
'%d/%m/%y %H:%M', # '25/10/06 14:30'
'%d/%m/%y', # '25/10/06'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| bsd-3-clause | 2,282,631,868,811,984,000 | 37.275 | 90 | 0.52776 | false |
catlinman/input-dungeon | interface.py | 1 | 5859 |
# This interface module contains additional behavior for curses to make it more suited for
# the current aim of this project. It enables quick interaction between curses and the
# actual game while still attempting to be flexible and extensible at the same time.
# Developer note: It should be good practice to allow the user to interrupt the program at
# any given point. With that said - there should be checks in place that catch signals
# such as your usual interrupt and actually handle them as expected. This is however not
# automatically the case since curses catches key presses in a raw format, suppressing their
# default behavior and as such completely removing their functionality and use.
import time
import curses
import helpers
NUMUPDATES = 30 # The number of interface updates per second.
DEBUG = True # Enables development settings to make debugging easier.
# Menu interface class. Creates a scrolling menu inside a specified window.
class Menu:
def __init__(self, win, x=0, y=0):
self.window = win # The window this menu is attached to.
self.x, self.y = x, y # Positional offset of the menu.
self.max_length = 4 # Maximum number of displayed selections.
self.height, self.width = self.window.getmaxyx() # Get the maximum width and height of the assigned window.
self.selections = [] # Stores the possible selections the user can make.
self.total_selections = 0 # Stores the number of selections in the selection array.
self.finished = False # Stores the state of the menu.
self.line_symbol = "> " # Symbol displayed at the beginning of each menu option.
self.cursor = 0 # Current hovered selection index.
self.cursor_symbol = " => "
self.shift = 0 # The amount of lines the menu is shifted by.
self.shift_symbol = "+"
    # Add a selection option to the menu. New items are appended to the end of the selection list; the index argument is not currently used for placement.
def set_selection(self, item, index):
# Make sure the option dictionary contains the correct keys. Else replace them with default values.
if not item.get("desc"):
item["desc"] = "Menu item"
if not item.get("out"):
item["out"] = "None"
self.selections.append(item) # Append the selection item.
self.total_selections += 1 # Update the selection count.
    # Remove a selection by its index.
def remove_selection(self, index):
del self.selections[index]
self.total_selections -= 1 # Update the selection count.
    # Clear the list of selections.
    def remove_all(self):
        del self.selections[:]  # Drop every stored selection.
        self.total_selections = 0 # Update the selection count.
# Displays the menu and requires input.
def interact(self):
self.window.nodelay(1) # Make getch non-blocking.
self.window.keypad(1) # Enable special keys to return keycodes.
self.draw() # Draw the current menu.
while not self.finished:
key = self.window.getch() # Get the current pressed keycode. Also, refresh the screen.
if key == curses.KEY_DOWN: # Arrow down
self.move(-1) # Move the cursor down. Also redraws the menu.
elif key == curses.KEY_UP:
self.move(1) # Move the cursor up. Also redraws the menu.
elif key == curses.KEY_ENTER or key == 10:
self.finished = True
return self.selections[self.cursor + self.shift]["out"], self.selections[self.cursor + self.shift]["desc"]
elif key == 3:
exit() # Exit the entire program if the user presses the interrupt key.
if DEBUG == True:
if key == 27:
self.finished = True # Exit the menu if the user presses the escape key.
if key != -1: # Show the keycode.
self.clearline(self.height - 1)
self.window.addstr(self.height - 1, self.width - len(str(key)) - 1, str(key))
time.sleep(1 / NUMUPDATES) # Sleep between checks
# Clear a specific line.
def clearline(self, column):
self.window.move(column, 0)
self.window.clrtoeol()
# Draw all menu options/items.
def draw(self):
for i in range(min(self.max_length, self.total_selections)):
entry = self.selections[i + self.shift]
self.clearline(i + self.y) # Clear the line beneath.
self.window.addstr(i + self.y, self.x + 1, "%s%s" % (self.line_symbol, entry["desc"]))
if self.shift < self.total_selections - min(self.total_selections, self.max_length):
self.window.addstr(self.max_length + self.y - 1, self.x, self.shift_symbol)
if self.shift > 0:
self.window.addstr(self.y, self.x, self.shift_symbol)
self.window.addstr(self.cursor + self.y, self.x + 1, "%s%s" % (self.cursor_symbol, self.selections[self.cursor + self.shift]["desc"]))
# Move the current selected line and redraw.
def move(self, value):
self.clearline(self.cursor + self.y) # Clear the previously selected line to avoid leftover characters.
if self.cursor == min(self.total_selections, self.max_length) - 1 and value < 0: # Shift the displayed selections up if possible.
self.shift = min(self.shift - value, self.total_selections - min(self.total_selections, self.max_length))
if self.cursor == 0 and value > 0: # Shift the displayed selections down if possible.
self.shift = max(self.shift - value, 0)
self.cursor = min(min(self.total_selections, self.max_length) - 1, max(0, self.cursor - value)) # Move and clamp the cursor.
self.draw() # Redraw the menu to avoid lines from staying selected.
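# Illustrative usage sketch (not part of the original module): builds a small
# menu inside an existing curses window and runs its input loop. It assumes the
# screen has already been initialised elsewhere (for example via
# curses.wrapper()), and the "out" values shown here are hypothetical.
def _demo_menu(stdscr):
    demo = Menu(stdscr, x=2, y=2)
    demo.set_selection({"desc": "Start game", "out": "start"}, 0)
    demo.set_selection({"desc": "Quit", "out": "quit"}, 1)
    return demo.interact()  # Returns the chosen ("out", "desc") pair once confirmed.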
class Bar:
pass
| mit | 5,287,872,878,099,049,000 | 42.080882 | 142 | 0.644479 | false |
robinro/ansible-modules-core | cloud/google/gce_pd.py | 51 | 9869 | #!/usr/bin/python
# Copyright 2013 Google Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: gce_pd
version_added: "1.4"
short_description: utilize GCE persistent disk resources
description:
- This module can create and destroy unformatted GCE persistent disks
U(https://developers.google.com/compute/docs/disks#persistentdisks).
It also supports attaching and detaching disks from running instances.
Full install/configuration instructions for the gce* modules can
be found in the comments of ansible/test/gce_tests.py.
options:
detach_only:
description:
- do not destroy the disk, merely detach it from an instance
required: false
default: "no"
choices: ["yes", "no"]
aliases: []
instance_name:
description:
- instance name if you wish to attach or detach the disk
required: false
default: null
aliases: []
mode:
description:
- GCE mount mode of disk, READ_ONLY (default) or READ_WRITE
required: false
default: "READ_ONLY"
choices: ["READ_WRITE", "READ_ONLY"]
aliases: []
name:
description:
- name of the disk
required: true
default: null
aliases: []
size_gb:
description:
- whole integer size of disk (in GB) to create, default is 10 GB
required: false
default: 10
aliases: []
image:
description:
- the source image to use for the disk
required: false
default: null
aliases: []
version_added: "1.7"
snapshot:
description:
- the source snapshot to use for the disk
required: false
default: null
aliases: []
version_added: "1.7"
state:
description:
- desired state of the persistent disk
required: false
default: "present"
choices: ["active", "present", "absent", "deleted"]
aliases: []
zone:
description:
- zone in which to create the disk
required: false
default: "us-central1-b"
aliases: []
service_account_email:
version_added: "1.6"
description:
- service account email
required: false
default: null
aliases: []
pem_file:
version_added: "1.6"
description:
      - path to the pem file associated with the service account email.
        This option is deprecated. Use 'credentials_file'.
required: false
default: null
aliases: []
credentials_file:
version_added: "2.1.0"
description:
- path to the JSON file associated with the service account email
required: false
default: null
aliases: []
project_id:
version_added: "1.6"
description:
- your GCE project ID
required: false
default: null
aliases: []
disk_type:
version_added: "1.9"
description:
- type of disk provisioned
required: false
default: "pd-standard"
choices: ["pd-standard", "pd-ssd"]
aliases: []
requirements:
- "python >= 2.6"
- "apache-libcloud >= 0.13.3, >= 0.17.0 if using JSON credentials"
author: "Eric Johnson (@erjohnso) <[email protected]>"
'''
EXAMPLES = '''
# Simple attachment action to an existing instance
- local_action:
module: gce_pd
instance_name: notlocalhost
size_gb: 5
name: pd
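# A second, illustrative example (not taken from the module's original
# documentation): detach the same disk from the instance without destroying
# it, using only options documented above.
- local_action:
    module: gce_pd
    instance_name: notlocalhost
    name: pd
    detach_only: yes
    state: absent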
'''
try:
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from libcloud.common.google import GoogleBaseError, QuotaExceededError, \
ResourceExistsError, ResourceNotFoundError, ResourceInUseError
_ = Provider.GCE
HAS_LIBCLOUD = True
except ImportError:
HAS_LIBCLOUD = False
def main():
module = AnsibleModule(
argument_spec = dict(
detach_only = dict(type='bool'),
instance_name = dict(),
mode = dict(default='READ_ONLY', choices=['READ_WRITE', 'READ_ONLY']),
name = dict(required=True),
size_gb = dict(default=10),
disk_type = dict(default='pd-standard'),
image = dict(),
snapshot = dict(),
state = dict(default='present'),
zone = dict(default='us-central1-b'),
service_account_email = dict(),
pem_file = dict(),
credentials_file = dict(),
project_id = dict(),
)
)
if not HAS_LIBCLOUD:
module.fail_json(msg='libcloud with GCE support (0.17.0+) is required for this module')
gce = gce_connect(module)
detach_only = module.params.get('detach_only')
instance_name = module.params.get('instance_name')
mode = module.params.get('mode')
name = module.params.get('name')
size_gb = module.params.get('size_gb')
disk_type = module.params.get('disk_type')
image = module.params.get('image')
snapshot = module.params.get('snapshot')
state = module.params.get('state')
zone = module.params.get('zone')
if detach_only and not instance_name:
module.fail_json(
msg='Must specify an instance name when detaching a disk',
changed=False)
disk = inst = None
changed = is_attached = False
json_output = { 'name': name, 'zone': zone, 'state': state, 'disk_type': disk_type }
if detach_only:
json_output['detach_only'] = True
json_output['detached_from_instance'] = instance_name
if instance_name:
# user wants to attach/detach from an existing instance
try:
inst = gce.ex_get_node(instance_name, zone)
# is the disk attached?
for d in inst.extra['disks']:
if d['deviceName'] == name:
is_attached = True
json_output['attached_mode'] = d['mode']
json_output['attached_to_instance'] = inst.name
except:
pass
# find disk if it already exists
try:
disk = gce.ex_get_volume(name)
json_output['size_gb'] = int(disk.size)
except ResourceNotFoundError:
pass
except Exception as e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
# user wants a disk to exist. If "instance_name" is supplied the user
# also wants it attached
if state in ['active', 'present']:
if not size_gb:
module.fail_json(msg="Must supply a size_gb", changed=False)
try:
size_gb = int(round(float(size_gb)))
if size_gb < 1:
raise Exception
except:
module.fail_json(msg="Must supply a size_gb larger than 1 GB",
changed=False)
if instance_name and inst is None:
module.fail_json(msg='Instance %s does not exist in zone %s' % (
instance_name, zone), changed=False)
if not disk:
if image is not None and snapshot is not None:
module.fail_json(
msg='Cannot give both image (%s) and snapshot (%s)' % (
image, snapshot), changed=False)
lc_image = None
lc_snapshot = None
if image is not None:
lc_image = gce.ex_get_image(image)
elif snapshot is not None:
lc_snapshot = gce.ex_get_snapshot(snapshot)
try:
disk = gce.create_volume(
size_gb, name, location=zone, image=lc_image,
snapshot=lc_snapshot, ex_disk_type=disk_type)
except ResourceExistsError:
pass
except QuotaExceededError:
module.fail_json(msg='Requested disk size exceeds quota',
changed=False)
except Exception as e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
json_output['size_gb'] = size_gb
if image is not None:
json_output['image'] = image
if snapshot is not None:
json_output['snapshot'] = snapshot
changed = True
if inst and not is_attached:
try:
gce.attach_volume(inst, disk, device=name, ex_mode=mode)
except Exception as e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
json_output['attached_to_instance'] = inst.name
json_output['attached_mode'] = mode
changed = True
# user wants to delete a disk (or perhaps just detach it).
if state in ['absent', 'deleted'] and disk:
if inst and is_attached:
try:
gce.detach_volume(disk, ex_node=inst)
except Exception as e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
changed = True
if not detach_only:
try:
gce.destroy_volume(disk)
except ResourceInUseError as e:
module.fail_json(msg=str(e.value), changed=False)
except Exception as e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
changed = True
json_output['changed'] = changed
module.exit_json(**json_output)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.gce import *
if __name__ == '__main__':
main()
| gpl-3.0 | -5,226,139,787,287,253,000 | 31.357377 | 95 | 0.599959 | false |
jhartford/pybo | tests/test_acquisitions.py | 1 | 1134 | """
Unit tests for different acquisition functions. This mainly tests that the
gradients of each acquisition function are computed correctly.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import numpy as np
import numpy.testing as nt
import scipy.optimize as spop
# local imports
import pygp
import pybo.bayesopt.policies as policies
def check_acq_gradient(policy):
# randomly generate some data.
rng = np.random.RandomState(0)
X = rng.rand(10, 2)
y = rng.rand(10)
# create the model.
model = pygp.BasicGP(0.5, 1, [1, 1])
model.add_data(X, y)
# get the computed gradients.
index = policy(model)
xtest = rng.rand(20, 2)
_, grad = index(xtest, grad=True)
    # numerically approximate the gradients
index_ = lambda x: index(x[None])
grad_ = np.array([spop.approx_fprime(x, index_, 1e-8) for x in xtest])
nt.assert_allclose(grad, grad_, rtol=1e-6, atol=1e-6)
def test_acqs():
for fname in policies.__all__:
yield check_acq_gradient, getattr(policies, fname)
| bsd-2-clause | -8,453,243,416,616,783,000 | 24.2 | 74 | 0.685185 | false |
egor-tensin/sorting_algorithms | algorithms/impl/heapsort.py | 2 | 1858 | # Copyright (c) 2015 Egor Tensin <[email protected]>
# This file is part of the "Sorting algorithms" project.
# For details, see https://github.com/egor-tensin/sorting-algorithms.
# Distributed under the MIT License.
import sys
from ..algorithm import SortingAlgorithm
# Disclaimer: implemented in the most literate way.
def heapsort(xs):
_heapify(xs)
first, last = 0, len(xs) - 1
for end in range(last, first, -1):
xs[end], xs[first] = xs[first], xs[end]
_siftdown(xs, first, end - 1)
return xs
# In a heap stored in a zero-based array,
# left_child = node * 2 + 1
# right_child = node * 2 + 2
# parent = (node - 1) // 2
def _get_parent(node):
return (node - 1) // 2
def _get_left_child(node):
return node * 2 + 1
def _get_right_child(node):
return node * 2 + 2
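# A concrete instance of the index arithmetic above: in an eight-element
# array, the node at index 2 has its left child at index 5 (2 * 2 + 1), its
# right child at index 6 (2 * 2 + 2) and its parent at index 0 ((2 - 1) // 2).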
def _heapify(xs):
last = len(xs) - 1
first_parent, last_parent = 0, _get_parent(last)
for parent in range(last_parent, first_parent - 1, -1):
_siftdown(xs, parent, last)
def _siftdown(xs, start, end):
root = start
while True:
# We swap if there is at least one child
child = _get_left_child(root)
if child > end:
break
        # If there are two children, select the larger one (max-heap order)
right_child = _get_right_child(root)
if right_child <= end and xs[child] < xs[right_child]:
child = right_child
if xs[root] < xs[child]:
xs[root], xs[child] = xs[child], xs[root]
root = child
else:
break
_ALGORITHMS = [
SortingAlgorithm('heapsort', 'Heapsort', heapsort),
]
def _parse_args(args=None):
if args is None:
args = sys.argv[1:]
return list(map(int, args))
def main(args=None):
xs = _parse_args(args)
print(heapsort(list(xs)))
if __name__ == '__main__':
main()
| mit | -6,606,844,577,005,573,000 | 21.658537 | 69 | 0.593111 | false |
mlcommons/training | single_stage_detector/ssd/distributed.py | 1 | 3611 | import torch
from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors
import torch.distributed as dist
from torch.nn.modules import Module
'''
This version of DistributedDataParallel is designed to be used in conjunction with the multiproc.py
launcher included with this example. It assumes that your run uses multiprocessing with one
GPU per process, that the model is already on the correct device, and that torch.cuda.set_device
has been used to select the device.
Parameters are broadcasted to the other processes on initialization of DistributedDataParallel,
and will be allreduced at the finish of the backward pass.
'''
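# Illustrative usage sketch (not part of the original file). Assuming each
# process has already selected its GPU with torch.cuda.set_device() and joined
# the process group via torch.distributed.init_process_group(), wrapping a
# hypothetical model looks roughly like:
#
#     model = MyModel().cuda()
#     model = DistributedDataParallel(model)
#     loss = model(batch).sum()
#     loss.backward()  # gradients are allreduced across processes here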
class DistributedDataParallel(Module):
def __init__(self, module):
super(DistributedDataParallel, self).__init__()
self.warn_on_half = True if dist._backend == dist.dist_backend.GLOO else False
self.module = module
for p in self.module.state_dict().values():
if not torch.is_tensor(p):
continue
if dist._backend == dist.dist_backend.NCCL:
assert p.is_cuda, "NCCL backend only supports model parameters to be on GPU."
dist.broadcast(p, 0)
def allreduce_params():
if(self.needs_reduction):
self.needs_reduction = False
buckets = {}
for param in self.module.parameters():
if param.requires_grad and param.grad is not None:
tp = param.data.type()
if tp not in buckets:
buckets[tp] = []
buckets[tp].append(param)
if self.warn_on_half:
if torch.cuda.HalfTensor in buckets:
print("WARNING: gloo dist backend for half parameters may be extremely slow." +
" It is recommended to use the NCCL backend in this case.")
self.warn_on_half = False
for tp in buckets:
bucket = buckets[tp]
grads = [param.grad.data for param in bucket]
coalesced = _flatten_dense_tensors(grads)
dist.all_reduce(coalesced)
coalesced /= dist.get_world_size()
for buf, synced in zip(grads, _unflatten_dense_tensors(coalesced, grads)):
buf.copy_(synced)
for param in list(self.module.parameters()):
def allreduce_hook(*unused):
param._execution_engine.queue_callback(allreduce_params)
if param.requires_grad:
param.register_hook(allreduce_hook)
def forward(self, *inputs, **kwargs):
self.needs_reduction = True
return self.module(*inputs, **kwargs)
'''
def _sync_buffers(self):
buffers = list(self.module._all_buffers())
if len(buffers) > 0:
# cross-node buffer sync
flat_buffers = _flatten_dense_tensors(buffers)
dist.broadcast(flat_buffers, 0)
for buf, synced in zip(buffers, _unflatten_dense_tensors(flat_buffers, buffers)):
buf.copy_(synced)
def train(self, mode=True):
# Clear NCCL communicator and CUDA event cache of the default group ID,
# These cache will be recreated at the later call. This is currently a
# work-around for a potential NCCL deadlock.
if dist._backend == dist.dist_backend.NCCL:
dist._clear_group_cache()
super(DistributedDataParallel, self).train(mode)
self.module.train(mode)
'''
| apache-2.0 | 253,998,514,403,053,860 | 43.036585 | 103 | 0.593464 | false |
amir-qayyum-khan/edx-platform | lms/djangoapps/courseware/tests/test_lti_integration.py | 10 | 9292 | """LTI integration tests"""
from collections import OrderedDict
import json
import mock
from nose.plugins.attrib import attr
import oauthlib
import urllib
from django.conf import settings
from django.core.urlresolvers import reverse
from courseware.tests import BaseTestXmodule
from courseware.views.views import get_course_lti_endpoints
from openedx.core.lib.url_utils import quote_slashes
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.x_module import STUDENT_VIEW
@attr(shard=1)
class TestLTI(BaseTestXmodule):
"""
Integration test for lti xmodule.
    It checks the overall code path by asserting that the context passed to the template is correct.
    As part of that, it checks OAuth signature generation by mocking the signing
    function of the `oauthlib` library.
"""
CATEGORY = "lti"
def setUp(self):
"""
Mock oauth1 signing of requests library for testing.
"""
super(TestLTI, self).setUp()
mocked_nonce = u'135685044251684026041377608307'
mocked_timestamp = u'1234567890'
mocked_signature_after_sign = u'my_signature%3D'
mocked_decoded_signature = u'my_signature='
# Note: this course_id is actually a course_key
context_id = self.item_descriptor.course_id.to_deprecated_string()
user_id = unicode(self.item_descriptor.xmodule_runtime.anonymous_student_id)
hostname = self.item_descriptor.xmodule_runtime.hostname
resource_link_id = unicode(urllib.quote('{}-{}'.format(hostname, self.item_descriptor.location.html_id())))
sourcedId = "{context}:{resource_link}:{user_id}".format(
context=urllib.quote(context_id),
resource_link=resource_link_id,
user_id=user_id
)
self.correct_headers = {
u'user_id': user_id,
u'oauth_callback': u'about:blank',
u'launch_presentation_return_url': '',
u'lti_message_type': u'basic-lti-launch-request',
u'lti_version': 'LTI-1p0',
u'roles': u'Student',
u'context_id': context_id,
u'resource_link_id': resource_link_id,
u'lis_result_sourcedid': sourcedId,
u'oauth_nonce': mocked_nonce,
u'oauth_timestamp': mocked_timestamp,
u'oauth_consumer_key': u'',
u'oauth_signature_method': u'HMAC-SHA1',
u'oauth_version': u'1.0',
u'oauth_signature': mocked_decoded_signature
}
saved_sign = oauthlib.oauth1.Client.sign
self.expected_context = {
'display_name': self.item_descriptor.display_name,
'input_fields': self.correct_headers,
'element_class': self.item_descriptor.category,
'element_id': self.item_descriptor.location.html_id(),
'launch_url': 'http://www.example.com', # default value
'open_in_a_new_page': True,
'form_url': self.item_descriptor.xmodule_runtime.handler_url(self.item_descriptor,
'preview_handler').rstrip('/?'),
'hide_launch': False,
'has_score': False,
'module_score': None,
'comment': u'',
'weight': 1.0,
'ask_to_send_username': self.item_descriptor.ask_to_send_username,
'ask_to_send_email': self.item_descriptor.ask_to_send_email,
'description': self.item_descriptor.description,
'button_text': self.item_descriptor.button_text,
'accept_grades_past_due': self.item_descriptor.accept_grades_past_due,
}
def mocked_sign(self, *args, **kwargs):
"""
Mocked oauth1 sign function.
"""
# self is <oauthlib.oauth1.rfc5849.Client object> here:
__, headers, __ = saved_sign(self, *args, **kwargs)
# we should replace nonce, timestamp and signed_signature in headers:
old = headers[u'Authorization']
old_parsed = OrderedDict([param.strip().replace('"', '').split('=') for param in old.split(',')])
old_parsed[u'OAuth oauth_nonce'] = mocked_nonce
old_parsed[u'oauth_timestamp'] = mocked_timestamp
old_parsed[u'oauth_signature'] = mocked_signature_after_sign
headers[u'Authorization'] = ', '.join([k + '="' + v + '"' for k, v in old_parsed.items()])
return None, headers, None
patcher = mock.patch.object(oauthlib.oauth1.Client, "sign", mocked_sign)
patcher.start()
self.addCleanup(patcher.stop)
def test_lti_constructor(self):
generated_content = self.item_descriptor.render(STUDENT_VIEW).content
expected_content = self.runtime.render_template('lti.html', self.expected_context)
self.assertEqual(generated_content, expected_content)
def test_lti_preview_handler(self):
generated_content = self.item_descriptor.preview_handler(None, None).body
expected_content = self.runtime.render_template('lti_form.html', self.expected_context)
self.assertEqual(generated_content, expected_content)
@attr(shard=1)
class TestLTIModuleListing(SharedModuleStoreTestCase):
"""
a test for the rest endpoint that lists LTI modules in a course
"""
# arbitrary constant
COURSE_SLUG = "100"
COURSE_NAME = "test_course"
@classmethod
def setUpClass(cls):
super(TestLTIModuleListing, cls).setUpClass()
cls.course = CourseFactory.create(display_name=cls.COURSE_NAME, number=cls.COURSE_SLUG)
cls.chapter1 = ItemFactory.create(
parent_location=cls.course.location,
display_name="chapter1",
category='chapter')
cls.section1 = ItemFactory.create(
parent_location=cls.chapter1.location,
display_name="section1",
category='sequential')
cls.chapter2 = ItemFactory.create(
parent_location=cls.course.location,
display_name="chapter2",
category='chapter')
cls.section2 = ItemFactory.create(
parent_location=cls.chapter2.location,
display_name="section2",
category='sequential')
# creates one draft and one published lti module, in different sections
cls.lti_published = ItemFactory.create(
parent_location=cls.section1.location,
display_name="lti published",
category="lti",
location=cls.course.id.make_usage_key('lti', 'lti_published'),
)
cls.lti_draft = ItemFactory.create(
parent_location=cls.section2.location,
display_name="lti draft",
category="lti",
location=cls.course.id.make_usage_key('lti', 'lti_draft'),
publish_item=False,
)
def setUp(self):
"""Create course, 2 chapters, 2 sections"""
super(TestLTIModuleListing, self).setUp()
def expected_handler_url(self, handler):
"""convenience method to get the reversed handler urls"""
return "https://{}{}".format(settings.SITE_NAME, reverse(
'courseware.module_render.handle_xblock_callback_noauth',
args=[
self.course.id.to_deprecated_string(),
quote_slashes(unicode(self.lti_published.scope_ids.usage_id.to_deprecated_string()).encode('utf-8')),
handler
]
))
def test_lti_rest_bad_course(self):
"""Tests what happens when the lti listing rest endpoint gets a bad course_id"""
bad_ids = [u"sf", u"dne/dne/dne", u"fo/ey/\\u5305"]
for bad_course_id in bad_ids:
lti_rest_endpoints_url = 'courses/{}/lti_rest_endpoints/'.format(bad_course_id)
response = self.client.get(lti_rest_endpoints_url)
self.assertEqual(404, response.status_code)
def test_lti_rest_listing(self):
"""tests that the draft lti module is part of the endpoint response"""
request = mock.Mock()
request.method = 'GET'
response = get_course_lti_endpoints(request, course_id=self.course.id.to_deprecated_string())
self.assertEqual(200, response.status_code)
self.assertEqual('application/json', response['Content-Type'])
expected = {
"lti_1_1_result_service_xml_endpoint": self.expected_handler_url('grade_handler'),
"lti_2_0_result_service_json_endpoint":
self.expected_handler_url('lti_2_0_result_rest_handler') + "/user/{anon_user_id}",
"display_name": self.lti_published.display_name,
}
self.assertEqual([expected], json.loads(response.content))
def test_lti_rest_non_get(self):
"""tests that the endpoint returns 404 when hit with NON-get"""
DISALLOWED_METHODS = ("POST", "PUT", "DELETE", "HEAD", "OPTIONS") # pylint: disable=invalid-name
for method in DISALLOWED_METHODS:
request = mock.Mock()
request.method = method
response = get_course_lti_endpoints(request, self.course.id.to_deprecated_string())
self.assertEqual(405, response.status_code)
| agpl-3.0 | 6,483,814,333,690,496,000 | 41.429224 | 117 | 0.620426 | false |
Thhhza/XlsxWriter | xlsxwriter/test/worksheet/test_worksheet04.py | 8 | 2180 | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, [email protected]
#
import unittest
from ...compatibility import StringIO
from ..helperfunctions import _xml_to_list
from ...worksheet import Worksheet
from ...format import Format
class TestAssembleWorksheet(unittest.TestCase):
"""
Test assembling a complete Worksheet file.
"""
def test_assemble_xml_file(self):
"""Test writing a worksheet with row formatting set."""
self.maxDiff = None
fh = StringIO()
worksheet = Worksheet()
worksheet._set_filehandle(fh)
cell_format = Format({'xf_index': 1})
worksheet.set_row(1, 30)
worksheet.set_row(3, None, None, {'hidden': 1})
worksheet.set_row(6, None, cell_format)
worksheet.set_row(9, 3)
worksheet.set_row(12, 24, None, {'hidden': 1})
worksheet.set_row(14, 0)
worksheet.select()
worksheet._assemble_xml_file()
exp = _xml_to_list("""
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<worksheet xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships">
<dimension ref="A2:A15"/>
<sheetViews>
<sheetView tabSelected="1" workbookViewId="0"/>
</sheetViews>
<sheetFormatPr defaultRowHeight="15"/>
<sheetData>
<row r="2" ht="30" customHeight="1"/>
<row r="4" hidden="1"/>
<row r="7" s="1" customFormat="1"/>
<row r="10" ht="3" customHeight="1"/>
<row r="13" ht="24" hidden="1" customHeight="1"/>
<row r="15" hidden="1"/>
</sheetData>
<pageMargins left="0.7" right="0.7" top="0.75" bottom="0.75" header="0.3" footer="0.3"/>
</worksheet>
""")
got = _xml_to_list(fh.getvalue())
self.assertEqual(got, exp)
| bsd-2-clause | -3,940,445,809,904,033,000 | 34.737705 | 171 | 0.52156 | false |
bobobox/ansible | lib/ansible/module_utils/dellos6.py | 21 | 8111 |
#
# (c) 2015 Peter Sprygada, <[email protected]>
#
# Copyright (c) 2016 Dell Inc.
#
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import re
from ansible.module_utils.shell import CliBase
from ansible.module_utils.network import Command, register_transport, to_list
from ansible.module_utils.netcfg import NetworkConfig, ConfigLine, ignore_line, DEFAULT_COMMENT_TOKENS
def get_config(module):
contents = module.params['config']
if not contents:
contents = module.config.get_config()
module.params['config'] = contents
return Dellos6NetworkConfig(indent=0, contents=contents[0])
else:
return Dellos6NetworkConfig(indent=0, contents=contents)
def get_sublevel_config(running_config, module):
contents = list()
current_config_contents = list()
sublevel_config = Dellos6NetworkConfig(indent=0)
obj = running_config.get_object(module.params['parents'])
if obj:
contents = obj.children
for c in contents:
if isinstance(c, ConfigLine):
current_config_contents.append(c.raw)
sublevel_config.add(current_config_contents, module.params['parents'])
return sublevel_config
def os6_parse(lines, indent=None, comment_tokens=None):
sublevel_cmds = [
re.compile(r'^vlan.*$'),
re.compile(r'^stack.*$'),
re.compile(r'^interface.*$'),
re.compile(r'datacenter-bridging.*$'),
re.compile(r'line (console|telnet|ssh).*$'),
re.compile(r'ip ssh !(server).*$'),
re.compile(r'ip (dhcp|vrf).*$'),
re.compile(r'(ip|mac|management|arp) access-list.*$'),
re.compile(r'ipv6 (dhcp|router).*$'),
re.compile(r'mail-server.*$'),
re.compile(r'vpc domain.*$'),
re.compile(r'router.*$'),
re.compile(r'route-map.*$'),
re.compile(r'policy-map.*$'),
re.compile(r'class-map match-all.*$'),
re.compile(r'captive-portal.*$'),
re.compile(r'admin-profile.*$'),
re.compile(r'link-dependency group.*$'),
re.compile(r'banner motd.*$'),
re.compile(r'openflow.*$'),
re.compile(r'support-assist.*$'),
re.compile(r'template.*$'),
re.compile(r'address-family.*$'),
re.compile(r'spanning-tree mst.*$'),
re.compile(r'logging.*$'),
re.compile(r'(radius-server|tacacs-server) host.*$')]
childline = re.compile(r'^exit$')
config = list()
parent = list()
children = []
parent_match = False
for line in str(lines).split('\n'):
text = str(re.sub(r'([{};])', '', line)).strip()
cfg = ConfigLine(text)
cfg.raw = line
if not text or ignore_line(text, comment_tokens):
parent = list()
children = []
continue
else:
parent_match=False
# handle sublevel parent
for pr in sublevel_cmds:
if pr.match(line):
if len(parent) != 0:
cfg.parents.extend(parent)
parent.append(cfg)
config.append(cfg)
if children:
children.insert(len(parent) - 1,[])
children[len(parent) - 2].append(cfg)
parent_match=True
continue
# handle exit
if childline.match(line):
if children:
parent[len(children) - 1].children.extend(children[len(children) - 1])
if len(children)>1:
parent[len(children) - 2].children.extend(parent[len(children) - 1].children)
cfg.parents.extend(parent)
children.pop()
parent.pop()
if not children:
children = list()
if parent:
cfg.parents.extend(parent)
parent = list()
config.append(cfg)
# handle sublevel children
        elif parent_match is False and len(parent) > 0:
if not children:
cfglist=[cfg]
children.append(cfglist)
else:
children[len(parent) - 1].append(cfg)
cfg.parents.extend(parent)
config.append(cfg)
# handle global commands
elif not parent:
config.append(cfg)
return config
class Dellos6NetworkConfig(NetworkConfig):
def load(self, contents):
self._config = os6_parse(contents, self.indent, DEFAULT_COMMENT_TOKENS)
def diff_line(self, other, path=None):
diff = list()
for item in self.items:
if str(item) == "exit":
for diff_item in diff:
if item.parents == diff_item.parents:
diff.append(item)
break
elif item not in other:
diff.append(item)
return diff
class Cli(CliBase):
NET_PASSWD_RE = re.compile(r"[\r\n]?password:\s?$", re.I)
CLI_PROMPTS_RE = [
re.compile(r"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$"),
re.compile(r"\[\w+\@[\w\-\.]+(?: [^\]])\] ?[>#\$] ?$")
]
CLI_ERRORS_RE = [
re.compile(r"% ?Error"),
re.compile(r"% ?Bad secret"),
re.compile(r"invalid input", re.I),
re.compile(r"(?:incomplete|ambiguous) command", re.I),
re.compile(r"connection timed out", re.I),
re.compile(r"[^\r\n]+ not found", re.I),
re.compile(r"'[^']' +returned error code: ?\d+")]
def connect(self, params, **kwargs):
super(Cli, self).connect(params, kickstart=False, **kwargs)
def authorize(self, params, **kwargs):
passwd = params['auth_pass']
self.run_commands(
Command('enable', prompt=self.NET_PASSWD_RE, response=passwd)
)
self.run_commands('terminal length 0')
def configure(self, commands, **kwargs):
cmds = ['configure terminal']
cmds.extend(to_list(commands))
cmds.append('end')
responses = self.execute(cmds)
responses.pop(0)
return responses
def get_config(self, **kwargs):
return self.execute(['show running-config'])
def load_config(self, commands, **kwargs):
return self.configure(commands)
def save_config(self):
self.execute(['copy running-config startup-config'])
Cli = register_transport('cli', default=True)(Cli)
| gpl-3.0 | -8,090,575,197,356,559,000 | 35.868182 | 102 | 0.58772 | false |
stxnext-csr/volontulo | apps/volontulo/models.py | 1 | 10920 | # -*- coding: utf-8 -*-
u"""
.. module:: models
"""
import logging
import os
# pylint: disable=unused-import
import uuid
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models import F
from django.utils import timezone
logger = logging.getLogger('volontulo.models')
class Organization(models.Model):
    u"""Model that handles organizations/institutions."""
name = models.CharField(max_length=150)
address = models.CharField(max_length=150)
description = models.TextField()
    def __str__(self):
        u"""Organization model string representation."""
return self.name
class OffersManager(models.Manager):
u"""Offers Manager."""
def get_active(self):
u"""Return active offers."""
return self.filter(
offer_status='published',
action_status__in=('ongoing', 'future'),
recruitment_status__in=('open', 'supplemental'),
).all()
def get_for_administrator(self):
u"""Return all offers for administrator to allow management."""
return self.filter(offer_status='unpublished').all()
def get_weightened(self, count=10):
u"""Return all published offers ordered by weight.
:param count: Integer
:return:
"""
return self.filter(
offer_status='published').order_by('weight')[:count]
def get_archived(self):
u"""Return archived offers."""
return self.filter(
offer_status='published',
action_status__in=('ongoing', 'finished'),
recruitment_status='closed',
).all()
class Offer(models.Model):
u"""Offer model."""
OFFER_STATUSES = (
('unpublished', u'Unpublished'),
('published', u'Published'),
('rejected', u'Rejected'),
)
RECRUITMENT_STATUSES = (
('open', u'Open'),
('supplemental', u'Supplemental'),
('closed', u'Closed'),
)
ACTION_STATUSES = (
('future', u'Future'),
('ongoing', u'Ongoing'),
('finished', u'Finished'),
)
objects = OffersManager()
organization = models.ForeignKey(Organization)
volunteers = models.ManyToManyField(User)
description = models.TextField()
requirements = models.TextField(blank=True, default='')
time_commitment = models.TextField()
benefits = models.TextField()
location = models.CharField(max_length=150)
title = models.CharField(max_length=150)
started_at = models.DateTimeField(blank=True, null=True)
finished_at = models.DateTimeField(blank=True, null=True)
time_period = models.CharField(max_length=150, default='', blank=True)
status_old = models.CharField(
max_length=30,
default='NEW',
null=True,
unique=False
)
offer_status = models.CharField(
max_length=16,
choices=OFFER_STATUSES,
default='unpublished',
)
recruitment_status = models.CharField(
max_length=16,
choices=RECRUITMENT_STATUSES,
default='open',
)
action_status = models.CharField(
max_length=16,
choices=ACTION_STATUSES,
default='ongoing',
)
votes = models.BooleanField(default=0)
recruitment_start_date = models.DateTimeField(blank=True, null=True)
recruitment_end_date = models.DateTimeField(blank=True, null=True)
reserve_recruitment = models.BooleanField(blank=True, default=True)
reserve_recruitment_start_date = models.DateTimeField(
blank=True,
null=True
)
reserve_recruitment_end_date = models.DateTimeField(
blank=True,
null=True
)
action_ongoing = models.BooleanField(default=False, blank=True)
constant_coop = models.BooleanField(default=False, blank=True)
action_start_date = models.DateTimeField(blank=True, null=True)
action_end_date = models.DateTimeField(blank=True, null=True)
volunteers_limit = models.IntegerField(default=0, null=True, blank=True)
weight = models.IntegerField(default=0, null=True, blank=True)
def __str__(self):
u"""Offer string representation."""
return self.title
def set_main_image(self, is_main):
u"""Set main image flag unsetting other offers images.
:param is_main: Boolean flag resetting offer main image
"""
if is_main:
OfferImage.objects.filter(offer=self).update(is_main=False)
return True
return False
def save_offer_image(self, gallery, userprofile, is_main=False):
u"""Handle image upload for user profile page.
:param gallery: UserProfile model instance
:param userprofile: UserProfile model instance
:param is_main: Boolean main image flag
"""
gallery.offer = self
gallery.userprofile = userprofile
gallery.is_main = self.set_main_image(is_main)
gallery.save()
return self
def create_new(self):
u"""Set status while creating new offer."""
self.offer_status = 'unpublished'
self.recruitment_status = 'open'
if self.started_at or self.finished_at:
self.action_status = self.determine_action_status()
def determine_action_status(self):
u"""Determine action status by offer dates."""
if (
(
self.finished_at and
self.started_at < timezone.now() < self.finished_at
) or
(
self.started_at < timezone.now() and
not self.finished_at
)
):
return 'ongoing'
elif self.started_at > timezone.now():
return 'future'
else:
return 'finished'
def change_status(self, status):
u"""Change offer status.
:param status: string Offer status
"""
if status in ('published', 'rejected', 'unpublished'):
self.offer_status = status
self.save()
return self
def unpublish(self):
u"""Unpublish offer."""
self.offer_status = 'unpublished'
self.save()
return self
def publish(self):
u"""Publish offer."""
self.offer_status = 'published'
Offer.objects.all().update(weight=F('weight') + 1)
self.weight = 0
self.save()
return self
def reject(self):
u"""Reject offer."""
self.offer_status = 'rejected'
self.save()
return self
def close_offer(self):
u"""Change offer status to close."""
self.offer_status = 'unpublished'
self.action_status = 'finished'
self.recruitment_status = 'closed'
self.save()
return self
class UserProfile(models.Model):
u"""Model that handles users' profiles."""
user = models.OneToOneField(User)
organizations = models.ManyToManyField(
Organization,
related_name='userprofiles',
)
is_administrator = models.BooleanField(default=False, blank=True)
phone_no = models.CharField(
max_length=32,
blank=True,
default='',
null=True
)
uuid = models.UUIDField(default=uuid.uuid4, unique=True)
def is_admin(self):
u"""Return True if current user is administrator, else return False"""
return self.is_administrator
def is_volunteer(self):
u"""Return True if current user is volunteer, else return False"""
return not (self.is_administrator and self.organizations)
def can_edit_offer(self, offer=None, offer_id=None):
u"""Checks if the user can edit an offer based on its ID"""
if offer is None:
offer = Offer.objects.get(id=offer_id)
return self.is_administrator or self.organizations.filter(
id=offer.organization_id).exists()
def get_avatar(self):
u"""Return avatar for current user."""
return UserGallery.objects.filter(
userprofile=self,
is_avatar=True
)
def clean_images(self):
u"""Clean user images."""
images = UserGallery.objects.filter(userprofile=self)
for image in images:
try:
os.remove(os.path.join(settings.MEDIA_ROOT, str(image.image)))
except OSError as ex:
logger.error(ex)
image.delete()
def __str__(self):
return self.user.email
class UserGallery(models.Model):
u"""Handling user images."""
userprofile = models.ForeignKey(UserProfile, related_name='images')
image = models.ImageField(upload_to='profile/')
is_avatar = models.BooleanField(default=False)
def __str__(self):
u"""String representation of an image."""
return str(self.image)
class OfferImage(models.Model):
u"""Handling offer image."""
userprofile = models.ForeignKey(UserProfile, related_name='offerimages')
offer = models.ForeignKey(Offer, related_name='images')
path = models.ImageField(upload_to='offers/')
is_main = models.BooleanField(default=False)
created_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
u"""String representation of an image."""
return str(self.path)
class OrganizationGallery(models.Model):
u"""Handling organizations gallery."""
organization = models.ForeignKey(Organization, related_name='images')
published_by = models.ForeignKey(UserProfile, related_name='gallery')
path = models.ImageField(upload_to='gallery/')
is_main = models.BooleanField(default=False, blank=True)
def __str__(self):
u"""String representation of an image."""
return str(self.path)
def remove(self):
u"""Remove image."""
        self.delete()
def set_as_main(self, organization):
u"""Save image as main.
:param organization: Organization model instance
"""
OrganizationGallery.objects.filter(organization_id=organization.id)\
.update(
is_main=False
)
self.is_main = True
self.save()
@staticmethod
def get_organizations_galleries(userprofile):
u"""Get images grouped by organizations
:param userprofile: UserProfile model instance
"""
organizations = Organization.objects.filter(
userprofiles=userprofile
).all()
return {o.name: o.images.all() for o in organizations}
class Page(models.Model):
"""Static page model."""
title = models.CharField(max_length=255)
content = models.TextField()
author = models.ForeignKey(UserProfile)
published = models.BooleanField(default=False)
modified_at = models.DateTimeField(auto_now=True)
created_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.title
| mit | 5,361,875,853,644,664,000 | 29.674157 | 78 | 0.616117 | false |
WendellDuncan/or-tools | examples/data/nonogram_regular/nonogram_p200.py | 74 | 1810 | # Copyright 2010 Hakan Kjellerstrand [email protected]
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Nonogram problem from Gecode: P200
# http://www.gecode.org/gecode-doc-latest/classNonogram.html
#
rows = 25
row_rule_len = 7
row_rules = [
[0,0,0,0,2,2,3],
[0,0,4,1,1,1,4],
[0,0,4,1,2,1,1],
[4,1,1,1,1,1,1],
[0,2,1,1,2,3,5],
[0,1,1,1,1,2,1],
[0,0,3,1,5,1,2],
[0,3,2,2,1,2,2],
[2,1,4,1,1,1,1],
[0,2,2,1,2,1,2],
[0,1,1,1,3,2,3],
[0,0,1,1,2,7,3],
[0,0,1,2,2,1,5],
[0,0,3,2,2,1,2],
[0,0,0,3,2,1,2],
[0,0,0,0,5,1,2],
[0,0,0,2,2,1,2],
[0,0,0,4,2,1,2],
[0,0,0,6,2,3,2],
[0,0,0,7,4,3,2],
[0,0,0,0,7,4,4],
[0,0,0,0,7,1,4],
[0,0,0,0,6,1,4],
[0,0,0,0,4,2,2],
[0,0,0,0,0,2,1]
]
cols = 25
col_rule_len = 6
col_rules = [
[0,0,1,1,2,2],
[0,0,0,5,5,7],
[0,0,5,2,2,9],
[0,0,3,2,3,9],
[0,1,1,3,2,7],
[0,0,0,3,1,5],
[0,7,1,1,1,3],
[1,2,1,1,2,1],
[0,0,0,4,2,4],
[0,0,1,2,2,2],
[0,0,0,4,6,2],
[0,0,1,2,2,1],
[0,0,3,3,2,1],
[0,0,0,4,1,15],
[1,1,1,3,1,1],
[2,1,1,2,2,3],
[0,0,1,4,4,1],
[0,0,1,4,3,2],
[0,0,1,1,2,2],
[0,7,2,3,1,1],
[0,2,1,1,1,5],
[0,0,0,1,2,5],
[0,0,1,1,1,3],
[0,0,0,4,2,1],
[0,0,0,0,0,3]
]
| apache-2.0 | -6,396,195,999,448,267,000 | 22.506494 | 75 | 0.507182 | false |
pennersr/django-allauth | allauth/socialaccount/providers/draugiem/tests.py | 2 | 4796 | from hashlib import md5
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.urls import reverse
from django.utils.http import urlencode
from allauth.socialaccount import providers
from allauth.socialaccount.models import SocialApp, SocialToken
from allauth.tests import Mock, TestCase, patch
from . import views
from .provider import DraugiemProvider
class DraugiemTests(TestCase):
def setUp(self):
# workaround to create a session. see:
# https://code.djangoproject.com/ticket/11475
User.objects.create_user(
"anakin", "[email protected]", "s1thrul3s"
)
self.client.login(username="anakin", password="s1thrul3s")
self.provider = providers.registry.by_id(DraugiemProvider.id)
app = SocialApp.objects.create(
provider=self.provider.id,
name=self.provider.id,
client_id="app123id",
key=self.provider.id,
secret="dummy",
)
app.sites.add(Site.objects.get_current())
self.app = app
def get_draugiem_login_response(self):
"""
Sample Draugiem.lv response
"""
return {
"apikey": "12345",
"uid": "42",
"users": {
"42": {
"age": "266",
"imgl": "http://cdn.memegenerator.net/instances/500x/23395689.jpg",
"surname": "Skywalker",
"url": "/user/42/",
"imgi": "http://cdn.memegenerator.net/instances/500x/23395689.jpg",
"nick": "Sky Guy",
"created": "09.11.1812 11:26:15",
"deleted": "false",
"imgm": "http://cdn.memegenerator.net/instances/500x/23395689.jpg",
"sex": "M",
"type": "User_Default",
"uid": "42",
"place": "London",
"emailHash": "3f198f21434gfd2f2b4rs05939shk93f3815bc6aa",
"name": "Anakin",
"adult": "1",
"birthday": "1750-09-13",
"img": "http://cdn.memegenerator.net/instances/500x/23395689.jpg",
}
},
}
def get_socialaccount(self, response, token):
"""
Returns SocialLogin based on the data from the request
"""
request = Mock()
login = self.provider.sociallogin_from_response(request, response)
login.token = token
return login
def mock_socialaccount_state(self):
"""
SocialLogin depends on Session state - a tuple of request
params and a random string
"""
session = self.client.session
session["socialaccount_state"] = (
{"process": "login", "scope": "", "auth_params": ""},
"12345",
)
session.save()
def test_login_redirect(self):
response = self.client.get(reverse(views.login))
redirect_url = reverse(views.callback)
full_redirect_url = "http://testserver" + redirect_url
secret = self.app.secret + full_redirect_url
redirect_url_hash = md5(secret.encode("utf-8")).hexdigest()
params = {
"app": self.app.client_id,
"hash": redirect_url_hash,
"redirect": full_redirect_url,
}
self.assertRedirects(
response,
"%s?%s" % (views.AUTHORIZE_URL, urlencode(params)),
fetch_redirect_response=False,
)
def test_callback_no_auth_status(self):
response = self.client.get(reverse(views.callback))
self.assertTemplateUsed(response, "socialaccount/authentication_error.html")
def test_callback_invalid_auth_status(self):
response = self.client.get(reverse(views.callback), {"dr_auth_status": "fail"})
self.assertTemplateUsed(response, "socialaccount/authentication_error.html")
def test_callback(self):
with patch(
"allauth.socialaccount.providers.draugiem.views" ".draugiem_complete_login"
) as draugiem_complete_login:
self.mock_socialaccount_state()
response_json = self.get_draugiem_login_response()
token = SocialToken(app=self.app, token=response_json["apikey"])
login = self.get_socialaccount(response_json, token)
draugiem_complete_login.return_value = login
response = self.client.get(
reverse(views.callback),
{"dr_auth_status": "ok", "dr_auth_code": "42"},
)
self.assertRedirects(
response, "/accounts/profile/", fetch_redirect_response=False
)
| mit | 2,807,772,772,233,389,600 | 35.333333 | 87 | 0.562761 | false |
40223117cda/w16cdaa | static/Brython3.1.0-20150301-090019/Lib/multiprocessing/pool.py | 694 | 23263 | #
# Module providing the `Pool` class for managing a process pool
#
# multiprocessing/pool.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
__all__ = ['Pool']
#
# Imports
#
import threading
import queue
import itertools
import collections
import time
from multiprocessing import Process, cpu_count, TimeoutError
from multiprocessing.util import Finalize, debug
#
# Constants representing the state of a pool
#
RUN = 0
CLOSE = 1
TERMINATE = 2
#
# Miscellaneous
#
job_counter = itertools.count()
def mapstar(args):
return list(map(*args))
def starmapstar(args):
return list(itertools.starmap(args[0], args[1]))
#
# Code run by worker processes
#
class MaybeEncodingError(Exception):
"""Wraps possible unpickleable errors, so they can be
safely sent through the socket."""
def __init__(self, exc, value):
self.exc = repr(exc)
self.value = repr(value)
super(MaybeEncodingError, self).__init__(self.exc, self.value)
def __str__(self):
return "Error sending result: '%s'. Reason: '%s'" % (self.value,
self.exc)
def __repr__(self):
return "<MaybeEncodingError: %s>" % str(self)
def worker(inqueue, outqueue, initializer=None, initargs=(), maxtasks=None):
assert maxtasks is None or (type(maxtasks) == int and maxtasks > 0)
put = outqueue.put
get = inqueue.get
if hasattr(inqueue, '_writer'):
inqueue._writer.close()
outqueue._reader.close()
if initializer is not None:
initializer(*initargs)
completed = 0
while maxtasks is None or (maxtasks and completed < maxtasks):
try:
task = get()
except (EOFError, IOError):
debug('worker got EOFError or IOError -- exiting')
break
if task is None:
debug('worker got sentinel -- exiting')
break
job, i, func, args, kwds = task
try:
result = (True, func(*args, **kwds))
except Exception as e:
result = (False, e)
try:
put((job, i, result))
except Exception as e:
wrapped = MaybeEncodingError(e, result[1])
debug("Possible encoding error while sending result: %s" % (
wrapped))
put((job, i, (False, wrapped)))
completed += 1
debug('worker exiting after %d tasks' % completed)
#
# Class representing a process pool
#
class Pool(object):
'''
Class which supports an async version of applying functions to arguments.
'''
Process = Process
def __init__(self, processes=None, initializer=None, initargs=(),
maxtasksperchild=None):
self._setup_queues()
self._taskqueue = queue.Queue()
self._cache = {}
self._state = RUN
self._maxtasksperchild = maxtasksperchild
self._initializer = initializer
self._initargs = initargs
if processes is None:
try:
processes = cpu_count()
except NotImplementedError:
processes = 1
if processes < 1:
raise ValueError("Number of processes must be at least 1")
if initializer is not None and not callable(initializer):
raise TypeError('initializer must be a callable')
self._processes = processes
self._pool = []
self._repopulate_pool()
self._worker_handler = threading.Thread(
target=Pool._handle_workers,
args=(self, )
)
self._worker_handler.daemon = True
self._worker_handler._state = RUN
self._worker_handler.start()
self._task_handler = threading.Thread(
target=Pool._handle_tasks,
args=(self._taskqueue, self._quick_put, self._outqueue, self._pool)
)
self._task_handler.daemon = True
self._task_handler._state = RUN
self._task_handler.start()
self._result_handler = threading.Thread(
target=Pool._handle_results,
args=(self._outqueue, self._quick_get, self._cache)
)
self._result_handler.daemon = True
self._result_handler._state = RUN
self._result_handler.start()
self._terminate = Finalize(
self, self._terminate_pool,
args=(self._taskqueue, self._inqueue, self._outqueue, self._pool,
self._worker_handler, self._task_handler,
self._result_handler, self._cache),
exitpriority=15
)
def _join_exited_workers(self):
"""Cleanup after any worker processes which have exited due to reaching
their specified lifetime. Returns True if any workers were cleaned up.
"""
cleaned = False
for i in reversed(range(len(self._pool))):
worker = self._pool[i]
if worker.exitcode is not None:
# worker exited
debug('cleaning up worker %d' % i)
worker.join()
cleaned = True
del self._pool[i]
return cleaned
def _repopulate_pool(self):
"""Bring the number of pool processes up to the specified number,
for use after reaping workers which have exited.
"""
for i in range(self._processes - len(self._pool)):
w = self.Process(target=worker,
args=(self._inqueue, self._outqueue,
self._initializer,
self._initargs, self._maxtasksperchild)
)
self._pool.append(w)
w.name = w.name.replace('Process', 'PoolWorker')
w.daemon = True
w.start()
debug('added worker')
def _maintain_pool(self):
"""Clean up any exited workers and start replacements for them.
"""
if self._join_exited_workers():
self._repopulate_pool()
def _setup_queues(self):
from .queues import SimpleQueue
self._inqueue = SimpleQueue()
self._outqueue = SimpleQueue()
self._quick_put = self._inqueue._writer.send
self._quick_get = self._outqueue._reader.recv
def apply(self, func, args=(), kwds={}):
'''
Equivalent of `func(*args, **kwds)`.
'''
assert self._state == RUN
return self.apply_async(func, args, kwds).get()
def map(self, func, iterable, chunksize=None):
'''
Apply `func` to each element in `iterable`, collecting the results
in a list that is returned.
'''
return self._map_async(func, iterable, mapstar, chunksize).get()
def starmap(self, func, iterable, chunksize=None):
'''
Like `map()` method but the elements of the `iterable` are expected to
be iterables as well and will be unpacked as arguments. Hence
`func` and (a, b) becomes func(a, b).
'''
return self._map_async(func, iterable, starmapstar, chunksize).get()
def starmap_async(self, func, iterable, chunksize=None, callback=None,
error_callback=None):
'''
Asynchronous version of `starmap()` method.
'''
return self._map_async(func, iterable, starmapstar, chunksize,
callback, error_callback)
def imap(self, func, iterable, chunksize=1):
'''
Equivalent of `map()` -- can be MUCH slower than `Pool.map()`.
'''
if self._state != RUN:
raise ValueError("Pool not running")
if chunksize == 1:
result = IMapIterator(self._cache)
self._taskqueue.put((((result._job, i, func, (x,), {})
for i, x in enumerate(iterable)), result._set_length))
return result
else:
assert chunksize > 1
task_batches = Pool._get_tasks(func, iterable, chunksize)
result = IMapIterator(self._cache)
self._taskqueue.put((((result._job, i, mapstar, (x,), {})
for i, x in enumerate(task_batches)), result._set_length))
return (item for chunk in result for item in chunk)
def imap_unordered(self, func, iterable, chunksize=1):
'''
Like `imap()` method but ordering of results is arbitrary.
'''
if self._state != RUN:
raise ValueError("Pool not running")
if chunksize == 1:
result = IMapUnorderedIterator(self._cache)
self._taskqueue.put((((result._job, i, func, (x,), {})
for i, x in enumerate(iterable)), result._set_length))
return result
else:
assert chunksize > 1
task_batches = Pool._get_tasks(func, iterable, chunksize)
result = IMapUnorderedIterator(self._cache)
self._taskqueue.put((((result._job, i, mapstar, (x,), {})
for i, x in enumerate(task_batches)), result._set_length))
return (item for chunk in result for item in chunk)
def apply_async(self, func, args=(), kwds={}, callback=None,
error_callback=None):
'''
Asynchronous version of `apply()` method.
'''
if self._state != RUN:
raise ValueError("Pool not running")
result = ApplyResult(self._cache, callback, error_callback)
self._taskqueue.put(([(result._job, None, func, args, kwds)], None))
return result
def map_async(self, func, iterable, chunksize=None, callback=None,
error_callback=None):
'''
Asynchronous version of `map()` method.
'''
return self._map_async(func, iterable, mapstar, chunksize, callback,
error_callback)
def _map_async(self, func, iterable, mapper, chunksize=None, callback=None,
error_callback=None):
'''
Helper function to implement map, starmap and their async counterparts.
'''
if self._state != RUN:
raise ValueError("Pool not running")
if not hasattr(iterable, '__len__'):
iterable = list(iterable)
if chunksize is None:
chunksize, extra = divmod(len(iterable), len(self._pool) * 4)
if extra:
chunksize += 1
if len(iterable) == 0:
chunksize = 0
task_batches = Pool._get_tasks(func, iterable, chunksize)
result = MapResult(self._cache, chunksize, len(iterable), callback,
error_callback=error_callback)
self._taskqueue.put((((result._job, i, mapper, (x,), {})
for i, x in enumerate(task_batches)), None))
return result
@staticmethod
def _handle_workers(pool):
thread = threading.current_thread()
# Keep maintaining workers until the cache gets drained, unless the pool
# is terminated.
while thread._state == RUN or (pool._cache and thread._state != TERMINATE):
pool._maintain_pool()
time.sleep(0.1)
# send sentinel to stop workers
pool._taskqueue.put(None)
debug('worker handler exiting')
@staticmethod
def _handle_tasks(taskqueue, put, outqueue, pool):
thread = threading.current_thread()
for taskseq, set_length in iter(taskqueue.get, None):
i = -1
for i, task in enumerate(taskseq):
if thread._state:
debug('task handler found thread._state != RUN')
break
try:
put(task)
except IOError:
debug('could not put task on queue')
break
else:
if set_length:
debug('doing set_length()')
set_length(i+1)
continue
break
else:
debug('task handler got sentinel')
try:
# tell result handler to finish when cache is empty
debug('task handler sending sentinel to result handler')
outqueue.put(None)
# tell workers there is no more work
debug('task handler sending sentinel to workers')
for p in pool:
put(None)
except IOError:
debug('task handler got IOError when sending sentinels')
debug('task handler exiting')
@staticmethod
def _handle_results(outqueue, get, cache):
thread = threading.current_thread()
while 1:
try:
task = get()
except (IOError, EOFError):
debug('result handler got EOFError/IOError -- exiting')
return
if thread._state:
assert thread._state == TERMINATE
debug('result handler found thread._state=TERMINATE')
break
if task is None:
debug('result handler got sentinel')
break
job, i, obj = task
try:
cache[job]._set(i, obj)
except KeyError:
pass
while cache and thread._state != TERMINATE:
try:
task = get()
except (IOError, EOFError):
debug('result handler got EOFError/IOError -- exiting')
return
if task is None:
debug('result handler ignoring extra sentinel')
continue
job, i, obj = task
try:
cache[job]._set(i, obj)
except KeyError:
pass
if hasattr(outqueue, '_reader'):
debug('ensuring that outqueue is not full')
# If we don't make room available in outqueue then
# attempts to add the sentinel (None) to outqueue may
# block. There is guaranteed to be no more than 2 sentinels.
try:
for i in range(10):
if not outqueue._reader.poll():
break
get()
except (IOError, EOFError):
pass
debug('result handler exiting: len(cache)=%s, thread._state=%s',
len(cache), thread._state)
@staticmethod
def _get_tasks(func, it, size):
it = iter(it)
while 1:
x = tuple(itertools.islice(it, size))
if not x:
return
yield (func, x)
def __reduce__(self):
raise NotImplementedError(
'pool objects cannot be passed between processes or pickled'
)
def close(self):
debug('closing pool')
if self._state == RUN:
self._state = CLOSE
self._worker_handler._state = CLOSE
def terminate(self):
debug('terminating pool')
self._state = TERMINATE
self._worker_handler._state = TERMINATE
self._terminate()
def join(self):
debug('joining pool')
assert self._state in (CLOSE, TERMINATE)
self._worker_handler.join()
self._task_handler.join()
self._result_handler.join()
for p in self._pool:
p.join()
@staticmethod
def _help_stuff_finish(inqueue, task_handler, size):
# task_handler may be blocked trying to put items on inqueue
debug('removing tasks from inqueue until task handler finished')
inqueue._rlock.acquire()
while task_handler.is_alive() and inqueue._reader.poll():
inqueue._reader.recv()
time.sleep(0)
@classmethod
def _terminate_pool(cls, taskqueue, inqueue, outqueue, pool,
worker_handler, task_handler, result_handler, cache):
# this is guaranteed to only be called once
debug('finalizing pool')
worker_handler._state = TERMINATE
task_handler._state = TERMINATE
debug('helping task handler/workers to finish')
cls._help_stuff_finish(inqueue, task_handler, len(pool))
assert result_handler.is_alive() or len(cache) == 0
result_handler._state = TERMINATE
outqueue.put(None) # sentinel
# We must wait for the worker handler to exit before terminating
# workers because we don't want workers to be restarted behind our back.
debug('joining worker handler')
if threading.current_thread() is not worker_handler:
worker_handler.join()
# Terminate workers which haven't already finished.
if pool and hasattr(pool[0], 'terminate'):
debug('terminating workers')
for p in pool:
if p.exitcode is None:
p.terminate()
debug('joining task handler')
if threading.current_thread() is not task_handler:
task_handler.join()
debug('joining result handler')
if threading.current_thread() is not result_handler:
result_handler.join()
if pool and hasattr(pool[0], 'terminate'):
debug('joining pool workers')
for p in pool:
if p.is_alive():
# worker has not yet exited
debug('cleaning up worker %d' % p.pid)
p.join()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.terminate()
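# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): typical use of the
# Pool class defined above. The helper names `_square_for_example` and
# `_example_pool_usage` are assumptions added purely for illustration; the
# sketch relies only on the public methods shown above (map, apply_async,
# imap_unordered) and on the context-manager protocol (__enter__/__exit__).
# ---------------------------------------------------------------------------
def _square_for_example(x):
    # defined at module level so it can be pickled and sent to worker processes
    return x * x
def _example_pool_usage():
    with Pool(processes=4) as pool:
        squares = pool.map(_square_for_example, range(10))     # blocks until done
        promise = pool.apply_async(_square_for_example, (7,))  # returns ApplyResult
        seven_squared = promise.get(timeout=10)                # 49
        unordered = sorted(pool.imap_unordered(_square_for_example, range(5)))
    return squares, seven_squared, unordered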
#
# Class whose instances are returned by `Pool.apply_async()`
#
class ApplyResult(object):
def __init__(self, cache, callback, error_callback):
self._event = threading.Event()
self._job = next(job_counter)
self._cache = cache
self._callback = callback
self._error_callback = error_callback
cache[self._job] = self
def ready(self):
return self._event.is_set()
def successful(self):
assert self.ready()
return self._success
def wait(self, timeout=None):
self._event.wait(timeout)
def get(self, timeout=None):
self.wait(timeout)
if not self.ready():
raise TimeoutError
if self._success:
return self._value
else:
raise self._value
def _set(self, i, obj):
self._success, self._value = obj
if self._callback and self._success:
self._callback(self._value)
if self._error_callback and not self._success:
self._error_callback(self._value)
self._event.set()
del self._cache[self._job]
AsyncResult = ApplyResult # create alias -- see #17805
#
# Class whose instances are returned by `Pool.map_async()`
#
class MapResult(ApplyResult):
def __init__(self, cache, chunksize, length, callback, error_callback):
ApplyResult.__init__(self, cache, callback,
error_callback=error_callback)
self._success = True
self._value = [None] * length
self._chunksize = chunksize
if chunksize <= 0:
self._number_left = 0
self._event.set()
del cache[self._job]
else:
self._number_left = length//chunksize + bool(length % chunksize)
def _set(self, i, success_result):
success, result = success_result
if success:
self._value[i*self._chunksize:(i+1)*self._chunksize] = result
self._number_left -= 1
if self._number_left == 0:
if self._callback:
self._callback(self._value)
del self._cache[self._job]
self._event.set()
else:
self._success = False
self._value = result
if self._error_callback:
self._error_callback(self._value)
del self._cache[self._job]
self._event.set()
#
# Class whose instances are returned by `Pool.imap()`
#
class IMapIterator(object):
def __init__(self, cache):
self._cond = threading.Condition(threading.Lock())
self._job = next(job_counter)
self._cache = cache
self._items = collections.deque()
self._index = 0
self._length = None
self._unsorted = {}
cache[self._job] = self
def __iter__(self):
return self
def next(self, timeout=None):
self._cond.acquire()
try:
try:
item = self._items.popleft()
except IndexError:
if self._index == self._length:
raise StopIteration
self._cond.wait(timeout)
try:
item = self._items.popleft()
except IndexError:
if self._index == self._length:
raise StopIteration
raise TimeoutError
finally:
self._cond.release()
success, value = item
if success:
return value
raise value
__next__ = next # XXX
def _set(self, i, obj):
self._cond.acquire()
try:
if self._index == i:
self._items.append(obj)
self._index += 1
while self._index in self._unsorted:
obj = self._unsorted.pop(self._index)
self._items.append(obj)
self._index += 1
self._cond.notify()
else:
self._unsorted[i] = obj
if self._index == self._length:
del self._cache[self._job]
finally:
self._cond.release()
def _set_length(self, length):
self._cond.acquire()
try:
self._length = length
if self._index == self._length:
self._cond.notify()
del self._cache[self._job]
finally:
self._cond.release()
#
# Class whose instances are returned by `Pool.imap_unordered()`
#
class IMapUnorderedIterator(IMapIterator):
def _set(self, i, obj):
self._cond.acquire()
try:
self._items.append(obj)
self._index += 1
self._cond.notify()
if self._index == self._length:
del self._cache[self._job]
finally:
self._cond.release()
#
#
#
class ThreadPool(Pool):
from .dummy import Process
def __init__(self, processes=None, initializer=None, initargs=()):
Pool.__init__(self, processes, initializer, initargs)
def _setup_queues(self):
self._inqueue = queue.Queue()
self._outqueue = queue.Queue()
self._quick_put = self._inqueue.put
self._quick_get = self._outqueue.get
@staticmethod
def _help_stuff_finish(inqueue, task_handler, size):
# put sentinels at head of inqueue to make workers finish
inqueue.not_empty.acquire()
try:
inqueue.queue.clear()
inqueue.queue.extend([None] * size)
inqueue.not_empty.notify_all()
finally:
inqueue.not_empty.release()
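# Illustrative sketch (not part of the original module): ThreadPool exposes the
# same interface as Pool but runs the workers as threads, so the callable does
# not need to be picklable. The helper name below is an assumption added for
# illustration only.
def _example_thread_pool_usage():
    tp = ThreadPool(processes=2)
    try:
        lengths = tp.map(len, ['alpha', 'beta', 'gamma'])   # [5, 4, 5]
    finally:
        tp.close()
        tp.join()
    return lengths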
| gpl-3.0 | -7,496,801,489,265,051,000 | 30.867123 | 83 | 0.543438 | false |
peterbarker/MAVProxy | MAVProxy/modules/mavproxy_antenna.py | 16 | 2064 | #!/usr/bin/env python
'''
antenna pointing module
Andrew Tridgell
June 2012
'''
import sys, os, time
from cuav.lib import cuav_util
from MAVProxy.modules.lib import mp_module
class AntennaModule(mp_module.MPModule):
def __init__(self, mpstate):
super(AntennaModule, self).__init__(mpstate, "antenna", "antenna pointing module")
self.gcs_location = None
self.last_bearing = 0
self.last_announce = 0
self.add_command('antenna', self.cmd_antenna, "antenna link control")
def cmd_antenna(self, args):
'''set gcs location'''
if len(args) != 2:
if self.gcs_location is None:
print("GCS location not set")
else:
print("GCS location %s" % str(self.gcs_location))
return
self.gcs_location = (float(args[0]), float(args[1]))
def mavlink_packet(self, m):
'''handle an incoming mavlink packet'''
if self.gcs_location is None and self.module('wp').wploader.count() > 0:
home = self.module('wp').wploader.wp(0)
self.gcs_location = (home.x, home.y)
print("Antenna home set")
if self.gcs_location is None:
return
if m.get_type() == 'GPS_RAW' and self.gcs_location is not None:
(gcs_lat, gcs_lon) = self.gcs_location
bearing = cuav_util.gps_bearing(gcs_lat, gcs_lon, m.lat, m.lon)
elif m.get_type() == 'GPS_RAW_INT' and self.gcs_location is not None:
(gcs_lat, gcs_lon) = self.gcs_location
bearing = cuav_util.gps_bearing(gcs_lat, gcs_lon, m.lat / 1.0e7, m.lon / 1.0e7)
else:
return
self.console.set_status('Antenna', 'Antenna %.0f' % bearing, row=0)
if abs(bearing - self.last_bearing) > 5 and (time.time() - self.last_announce) > 15:
self.last_bearing = bearing
self.last_announce = time.time()
self.say("Antenna %u" % int(bearing + 0.5))
def init(mpstate):
'''initialise module'''
return AntennaModule(mpstate)
| gpl-3.0 | 3,427,887,423,603,498,500 | 35.857143 | 92 | 0.58624 | false |
garnaat/boto | boto/sns/__init__.py | 131 | 2117 | # Copyright (c) 2010-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010-2011, Eucalyptus Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# this is here for backward compatibility
# originally, the SNSConnection class was defined here
from boto.sns.connection import SNSConnection
from boto.regioninfo import RegionInfo, get_regions
def regions():
"""
Get all available regions for the SNS service.
:rtype: list
:return: A list of :class:`boto.regioninfo.RegionInfo` instances
"""
return get_regions('sns', connection_cls=SNSConnection)
def connect_to_region(region_name, **kw_params):
"""
Given a valid region name, return a
:class:`boto.sns.connection.SNSConnection`.
:type: str
:param region_name: The name of the region to connect to.
:rtype: :class:`boto.sns.connection.SNSConnection` or ``None``
:return: A connection to the given region, or None if an invalid region
name is given
"""
for region in regions():
if region.name == region_name:
return region.connect(**kw_params)
return None
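# Illustrative sketch (not part of boto): how the two helpers above are
# typically combined. The region name 'us-east-1' is an assumption, and
# credentials are expected to come from the usual boto configuration
# (environment variables or a boto config file).
def _example_sns_usage():
    available = [region.name for region in regions()]
    connection = connect_to_region('us-east-1')
    # further calls (create_topic, publish, subscribe, ...) are made on the
    # returned SNSConnection instance, or connection is None for a bad name
    return available, connection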
| mit | -5,651,908,945,571,717,000 | 38.203704 | 75 | 0.729806 | false |
jbedorf/tensorflow | tensorflow/contrib/gan/python/eval/python/summaries_impl.py | 5 | 11802 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Common TF-GAN summaries."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.gan.python import namedtuples
from tensorflow.contrib.gan.python.eval.python import eval_utils
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import map_fn
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops.losses import util as loss_util
from tensorflow.python.summary import summary
__all__ = [
'add_gan_model_image_summaries',
'add_image_comparison_summaries',
'add_gan_model_summaries',
'add_regularization_loss_summaries',
'add_cyclegan_image_summaries',
'add_stargan_image_summaries'
]
def _assert_is_image(data):
data.shape.assert_has_rank(4)
data.shape[1:].assert_is_fully_defined()
def add_gan_model_image_summaries(gan_model, grid_size=4, model_summaries=True):
"""Adds image summaries for real and fake images.
Args:
gan_model: A GANModel tuple.
grid_size: The size of an image grid.
model_summaries: Also add summaries of the model.
Raises:
ValueError: If real and generated data aren't images.
"""
if isinstance(gan_model, namedtuples.CycleGANModel):
raise ValueError(
'`add_gan_model_image_summaries` does not take CycleGANModels. Please '
'use `add_cyclegan_image_summaries` instead.')
_assert_is_image(gan_model.real_data)
_assert_is_image(gan_model.generated_data)
num_images = grid_size ** 2
real_image_shape = gan_model.real_data.shape.as_list()[1:3]
generated_image_shape = gan_model.generated_data.shape.as_list()[1:3]
real_channels = gan_model.real_data.shape.as_list()[3]
generated_channels = gan_model.generated_data.shape.as_list()[3]
summary.image(
'real_data',
eval_utils.image_grid(
gan_model.real_data[:num_images],
grid_shape=(grid_size, grid_size),
image_shape=real_image_shape,
num_channels=real_channels),
max_outputs=1)
summary.image(
'generated_data',
eval_utils.image_grid(
gan_model.generated_data[:num_images],
grid_shape=(grid_size, grid_size),
image_shape=generated_image_shape,
num_channels=generated_channels),
max_outputs=1)
if model_summaries:
add_gan_model_summaries(gan_model)
def add_cyclegan_image_summaries(cyclegan_model):
"""Adds image summaries for CycleGAN.
There are two summaries, one for each generator. The first image is the
generator input, the second is the generator output, and the third is G(F(x)).
Args:
cyclegan_model: A CycleGANModel tuple.
Raises:
ValueError: If `cyclegan_model` isn't a CycleGANModel.
ValueError: If generated data, generator inputs, and reconstructions aren't
images.
ValueError: If the generator input, generated data, and reconstructions
aren't all the same size.
"""
if not isinstance(cyclegan_model, namedtuples.CycleGANModel):
raise ValueError('`cyclegan_model` was not a CycleGANModel. Instead, was '
'%s' % type(cyclegan_model))
_assert_is_image(cyclegan_model.model_x2y.generator_inputs)
_assert_is_image(cyclegan_model.model_x2y.generated_data)
_assert_is_image(cyclegan_model.reconstructed_x)
_assert_is_image(cyclegan_model.model_y2x.generator_inputs)
_assert_is_image(cyclegan_model.model_y2x.generated_data)
_assert_is_image(cyclegan_model.reconstructed_y)
def _add_comparison_summary(gan_model, reconstructions):
image_list = (array_ops.unstack(gan_model.generator_inputs[:1]) +
array_ops.unstack(gan_model.generated_data[:1]) +
array_ops.unstack(reconstructions[:1]))
summary.image(
'image_comparison', eval_utils.image_reshaper(
image_list, num_cols=len(image_list)), max_outputs=1)
with ops.name_scope('x2y_image_comparison_summaries'):
_add_comparison_summary(
cyclegan_model.model_x2y, cyclegan_model.reconstructed_x)
with ops.name_scope('y2x_image_comparison_summaries'):
_add_comparison_summary(
cyclegan_model.model_y2x, cyclegan_model.reconstructed_y)
def add_image_comparison_summaries(gan_model, num_comparisons=2,
display_diffs=False):
"""Adds image summaries to compare triplets of images.
The first image is the generator input, the second is the generator output,
and the third is the real data. This style of comparison is useful for
image translation problems, where the generator input is a corrupted image,
the generator output is the reconstruction, and the real data is the target.
Args:
gan_model: A GANModel tuple.
num_comparisons: The number of image triplets to display.
display_diffs: Also display the difference between generated and target.
Raises:
ValueError: If real data, generated data, and generator inputs aren't
images.
ValueError: If the generator input, real, and generated data aren't all the
same size.
"""
_assert_is_image(gan_model.generator_inputs)
_assert_is_image(gan_model.generated_data)
_assert_is_image(gan_model.real_data)
gan_model.generated_data.shape.assert_is_compatible_with(
gan_model.generator_inputs.shape)
gan_model.real_data.shape.assert_is_compatible_with(
gan_model.generated_data.shape)
image_list = []
image_list.extend(
array_ops.unstack(gan_model.generator_inputs[:num_comparisons]))
image_list.extend(
array_ops.unstack(gan_model.generated_data[:num_comparisons]))
image_list.extend(array_ops.unstack(gan_model.real_data[:num_comparisons]))
if display_diffs:
generated_list = array_ops.unstack(
gan_model.generated_data[:num_comparisons])
real_list = array_ops.unstack(gan_model.real_data[:num_comparisons])
diffs = [
math_ops.abs(math_ops.cast(generated, dtypes.float32) -
math_ops.cast(real, dtypes.float32))
for generated, real in zip(generated_list, real_list)
]
image_list.extend(diffs)
# Reshape image and display.
summary.image(
'image_comparison',
eval_utils.image_reshaper(image_list, num_cols=num_comparisons),
max_outputs=1)
def add_stargan_image_summaries(stargan_model,
num_images=2,
display_diffs=False):
"""Adds image summaries to see StarGAN image results.
If display_diffs is True, each image result has `2` rows and `num_domains + 1`
columns.
The first row looks like:
[original_image, transformed_to_domain_0, transformed_to_domain_1, ...]
The second row looks like:
[no_modification_baseline, transformed_to_domain_0-original_image, ...]
If display_diffs is False, only the first row is shown.
IMPORTANT:
  Since the model does not originally transform the image to every domain,
  we will transform them on the fly within this function in parallel.
Args:
stargan_model: A StarGANModel tuple.
num_images: The number of examples/images to be transformed and shown.
display_diffs: Also display the difference between generated and target.
Raises:
ValueError: If input_data is not images.
ValueError: If input_data_domain_label is not rank 2.
ValueError: If dimension 2 of input_data_domain_label is not fully defined.
"""
_assert_is_image(stargan_model.input_data)
stargan_model.input_data_domain_label.shape.assert_has_rank(2)
stargan_model.input_data_domain_label.shape[1:].assert_is_fully_defined()
num_domains = stargan_model.input_data_domain_label.get_shape().as_list()[-1]
def _build_image(image):
"""Helper function to create a result for each image on the fly."""
# Expand the first dimension as batch_size = 1.
images = array_ops.expand_dims(image, axis=0)
# Tile the image num_domains times, so we can get all transformed together.
images = array_ops.tile(images, [num_domains, 1, 1, 1])
# Create the targets to 0, 1, 2, ..., num_domains-1.
targets = array_ops.one_hot(list(range(num_domains)), num_domains)
with variable_scope.variable_scope(
stargan_model.generator_scope, reuse=True):
# Add the original image.
output_images_list = [image]
# Generate the image and add to the list.
gen_images = stargan_model.generator_fn(images, targets)
gen_images_list = array_ops.split(gen_images, num_domains)
gen_images_list = [
array_ops.squeeze(img, axis=0) for img in gen_images_list
]
output_images_list.extend(gen_images_list)
# Display diffs.
if display_diffs:
diff_images = gen_images - images
diff_images_list = array_ops.split(diff_images, num_domains)
diff_images_list = [
array_ops.squeeze(img, axis=0) for img in diff_images_list
]
output_images_list.append(array_ops.zeros_like(image))
output_images_list.extend(diff_images_list)
# Create the final image.
final_image = eval_utils.image_reshaper(
output_images_list, num_cols=num_domains + 1)
# Reduce the first rank.
return array_ops.squeeze(final_image, axis=0)
summary.image(
'stargan_image_generation',
map_fn.map_fn(
_build_image,
stargan_model.input_data[:num_images],
parallel_iterations=num_images,
back_prop=False,
swap_memory=True),
max_outputs=num_images)
def add_gan_model_summaries(gan_model):
"""Adds typical GANModel summaries.
Args:
gan_model: A GANModel tuple.
"""
if isinstance(gan_model, namedtuples.CycleGANModel):
with ops.name_scope('cyclegan_x2y_summaries'):
add_gan_model_summaries(gan_model.model_x2y)
with ops.name_scope('cyclegan_y2x_summaries'):
add_gan_model_summaries(gan_model.model_y2x)
return
with ops.name_scope('generator_variables'):
for var in gan_model.generator_variables:
summary.histogram(var.name, var)
with ops.name_scope('discriminator_variables'):
for var in gan_model.discriminator_variables:
summary.histogram(var.name, var)
def add_regularization_loss_summaries(gan_model):
"""Adds summaries for a regularization losses..
Args:
gan_model: A GANModel tuple.
"""
if isinstance(gan_model, namedtuples.CycleGANModel):
with ops.name_scope('cyclegan_x2y_regularization_loss_summaries'):
add_regularization_loss_summaries(gan_model.model_x2y)
with ops.name_scope('cyclegan_y2x_regularization_loss_summaries'):
add_regularization_loss_summaries(gan_model.model_y2x)
return
if gan_model.generator_scope:
summary.scalar(
'generator_regularization_loss',
loss_util.get_regularization_loss(gan_model.generator_scope.name))
if gan_model.discriminator_scope:
summary.scalar(
'discriminator_regularization_loss',
loss_util.get_regularization_loss(gan_model.discriminator_scope.name))
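# Illustrative sketch (not part of the library): the helpers above are meant to
# be called once a GANModel tuple has been constructed (e.g. via
# tf.contrib.gan.gan_model). `gan_model` below is an assumption standing in for
# such a tuple; the resulting summaries land in the default summary collection.
def _example_add_summaries(gan_model):
  add_gan_model_image_summaries(gan_model, grid_size=4)
  add_image_comparison_summaries(gan_model, num_comparisons=2,
                                 display_diffs=True)
  add_gan_model_summaries(gan_model)
  add_regularization_loss_summaries(gan_model)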
| apache-2.0 | 9,098,161,463,842,176,000 | 36.230284 | 80 | 0.694882 | false |
jcoady9/youtube-dl | youtube_dl/extractor/beatportpro.py | 142 | 3423 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import int_or_none
class BeatportProIE(InfoExtractor):
_VALID_URL = r'https?://pro\.beatport\.com/track/(?P<display_id>[^/]+)/(?P<id>[0-9]+)'
_TESTS = [{
'url': 'https://pro.beatport.com/track/synesthesia-original-mix/5379371',
'md5': 'b3c34d8639a2f6a7f734382358478887',
'info_dict': {
'id': '5379371',
'display_id': 'synesthesia-original-mix',
'ext': 'mp4',
'title': 'Froxic - Synesthesia (Original Mix)',
},
}, {
'url': 'https://pro.beatport.com/track/love-and-war-original-mix/3756896',
'md5': 'e44c3025dfa38c6577fbaeb43da43514',
'info_dict': {
'id': '3756896',
'display_id': 'love-and-war-original-mix',
'ext': 'mp3',
'title': 'Wolfgang Gartner - Love & War (Original Mix)',
},
}, {
'url': 'https://pro.beatport.com/track/birds-original-mix/4991738',
'md5': 'a1fd8e8046de3950fd039304c186c05f',
'info_dict': {
'id': '4991738',
'display_id': 'birds-original-mix',
'ext': 'mp4',
'title': "Tos, Middle Milk, Mumblin' Johnsson - Birds (Original Mix)",
}
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
track_id = mobj.group('id')
display_id = mobj.group('display_id')
webpage = self._download_webpage(url, display_id)
playables = self._parse_json(
self._search_regex(
r'window\.Playables\s*=\s*({.+?});', webpage,
'playables info', flags=re.DOTALL),
track_id)
track = next(t for t in playables['tracks'] if t['id'] == int(track_id))
title = ', '.join((a['name'] for a in track['artists'])) + ' - ' + track['name']
if track['mix']:
title += ' (' + track['mix'] + ')'
formats = []
for ext, info in track['preview'].items():
if not info['url']:
continue
fmt = {
'url': info['url'],
'ext': ext,
'format_id': ext,
'vcodec': 'none',
}
if ext == 'mp3':
fmt['preference'] = 0
fmt['acodec'] = 'mp3'
fmt['abr'] = 96
fmt['asr'] = 44100
elif ext == 'mp4':
fmt['preference'] = 1
fmt['acodec'] = 'aac'
fmt['abr'] = 96
fmt['asr'] = 44100
formats.append(fmt)
self._sort_formats(formats)
images = []
for name, info in track['images'].items():
image_url = info.get('url')
if name == 'dynamic' or not image_url:
continue
image = {
'id': name,
'url': image_url,
'height': int_or_none(info.get('height')),
'width': int_or_none(info.get('width')),
}
images.append(image)
return {
'id': compat_str(track.get('id')) or track_id,
'display_id': track.get('slug') or display_id,
'title': title,
'formats': formats,
'thumbnails': images,
}
| unlicense | 725,830,475,123,532,300 | 32.23301 | 90 | 0.472393 | false |
nhuntwalker/astroML | examples/algorithms/plot_bayesian_blocks.py | 3 | 2706 | """
Bayesian Blocks for Histograms
------------------------------
.. currentmodule:: astroML
Bayesian Blocks is a dynamic histogramming method which optimizes one of
several possible fitness functions to determine an optimal binning for
data, where the bins are not necessarily uniform width. The astroML
implementation is based on [1]_. For more discussion of this technique,
see the blog post at [2]_.
The code below uses a fitness function suitable for event data with possible
repeats. More fitness functions are available: see :mod:`density_estimation`
References
~~~~~~~~~~
.. [1] Scargle, J `et al.` (2012)
http://adsabs.harvard.edu/abs/2012arXiv1207.5578S
.. [2] http://jakevdp.github.com/blog/2012/09/12/dynamic-programming-in-python/
"""
# Author: Jake VanderPlas <[email protected]>
# License: BSD
# The figure is an example from astroML: see http://astroML.github.com
import numpy as np
from scipy import stats
from matplotlib import pyplot as plt
from astroML.plotting import hist
# draw a set of variables
np.random.seed(0)
t = np.concatenate([stats.cauchy(-5, 1.8).rvs(500),
stats.cauchy(-4, 0.8).rvs(2000),
stats.cauchy(-1, 0.3).rvs(500),
stats.cauchy(2, 0.8).rvs(1000),
stats.cauchy(4, 1.5).rvs(500)])
# truncate values to a reasonable range
t = t[(t > -15) & (t < 15)]
#------------------------------------------------------------
# First figure: show normal histogram binning
fig = plt.figure(figsize=(10, 4))
fig.subplots_adjust(left=0.1, right=0.95, bottom=0.15)
ax1 = fig.add_subplot(121)
ax1.hist(t, bins=15, histtype='stepfilled', alpha=0.2, normed=True)
ax1.set_xlabel('t')
ax1.set_ylabel('P(t)')
ax2 = fig.add_subplot(122)
ax2.hist(t, bins=200, histtype='stepfilled', alpha=0.2, normed=True)
ax2.set_xlabel('t')
ax2.set_ylabel('P(t)')
#------------------------------------------------------------
# Second & Third figure: Knuth bins & Bayesian Blocks
fig = plt.figure(figsize=(10, 4))
fig.subplots_adjust(left=0.1, right=0.95, bottom=0.15)
for bins, title, subplot in zip(['knuth', 'blocks'],
["Knuth's rule", 'Bayesian blocks'],
[121, 122]):
ax = fig.add_subplot(subplot)
# plot a standard histogram in the background, with alpha transparency
hist(t, bins=200, histtype='stepfilled',
alpha=0.2, normed=True, label='standard histogram')
# plot an adaptive-width histogram on top
hist(t, bins=bins, ax=ax, color='black',
histtype='step', normed=True, label=title)
ax.legend(prop=dict(size=12))
ax.set_xlabel('t')
ax.set_ylabel('P(t)')
plt.show()
| bsd-2-clause | -5,005,010,556,680,249,000 | 33.692308 | 79 | 0.627494 | false |
jalavik/invenio-records | invenio_records/providers/recid.py | 4 | 1834 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Define PID provider for recids."""
from invenio_ext.sqlalchemy import db
from invenio_pidstore.provider import PidProvider
from sqlalchemy.exc import SQLAlchemyError
from ..models import Record
class RecordID(PidProvider):
"""Provider for recids."""
pid_type = 'recid'
def create_new_pid(self, pid_value):
"""Create a new row inside the ``Record`` table.
If ``pid_value`` is not ``None`` will be use as ``id`` to create this
new row.
"""
if pid_value is not None:
record = Record(id=int(pid_value))
else:
record = Record()
with db.session.begin_nested():
db.session.add(record)
return str(record.id)
def reserve(self, pid, *args, **kwargs):
"""Reserve recid."""
pid.log("RESERVE", "Successfully reserved recid")
return True
@classmethod
def is_provider_for_pid(cls, pid_str):
"""A recid is valid is it is ``None`` or ``Integer``."""
return pid_str is None or pid_str.isdigit()
| gpl-2.0 | -2,090,704,761,326,657,000 | 30.62069 | 77 | 0.663032 | false |
cycotech/WAR-app | env/lib/python3.5/site-packages/django/conf/locale/fr/formats.py | 504 | 1454 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'j N Y'
SHORT_DATETIME_FORMAT = 'j N Y H:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06'
'%d.%m.%Y', '%d.%m.%y', # Swiss [fr_CH), '25.10.2006', '25.10.06'
# '%d %B %Y', '%d %b %Y', # '25 octobre 2006', '25 oct. 2006'
]
DATETIME_INPUT_FORMATS = [
'%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
'%d/%m/%Y %H:%M:%S.%f', # '25/10/2006 14:30:59.000200'
'%d/%m/%Y %H:%M', # '25/10/2006 14:30'
'%d/%m/%Y', # '25/10/2006'
'%d.%m.%Y %H:%M:%S', # Swiss [fr_CH), '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M:%S.%f', # Swiss (fr_CH), '25.10.2006 14:30:59.000200'
'%d.%m.%Y %H:%M', # Swiss (fr_CH), '25.10.2006 14:30'
'%d.%m.%Y', # Swiss (fr_CH), '25.10.2006'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
| mit | 754,449,604,454,348,900 | 39.388889 | 77 | 0.566713 | false |
linjoahow/W16_test1 | static/Brython3.1.3-20150514-095342/Lib/VFS_import.py | 738 | 3059 | import os
from browser import doc
#_scripts=doc.createElement('script')
#_scripts.src="/src/py_VFS.js"
#_scripts.type="text/javascript"
#doc.get(tag='head')[0].appendChild(_scripts)
VFS=dict(JSObject(__BRYTHON__.py_VFS))
class VFSModuleFinder:
def __init__(self, path_entry):
print("in VFSModuleFinder")
if path_entry.startswith('/libs') or path_entry.startswith('/Lib'):
self.path_entry=path_entry
else:
raise ImportError()
def __str__(self):
return '<%s for "%s">' % (self.__class__.__name__, self.path_entry)
def find_module(self, fullname, path=None):
path = path or self.path_entry
#print('looking for "%s" in %s ...' % (fullname, path))
for _ext in ['js', 'pyj', 'py']:
_filepath=os.path.join(self.path_entry, '%s.%s' % (fullname, _ext))
if _filepath in VFS:
print("module found at %s:%s" % (_filepath, fullname))
return VFSModuleLoader(_filepath, fullname)
print('module %s not found' % fullname)
raise ImportError()
return None
class VFSModuleLoader:
"""Load source for modules"""
def __init__(self, filepath, name):
self._filepath=filepath
self._name=name
def get_source(self):
if self._filepath in VFS:
return JSObject(readFromVFS(self._filepath))
        raise ImportError('could not find source for %s' % self._name)
def is_package(self):
return '.' in self._name
def load_module(self):
if self._name in sys.modules:
#print('reusing existing module from previous import of "%s"' % fullname)
mod = sys.modules[self._name]
return mod
_src=self.get_source()
if self._filepath.endswith('.js'):
mod=JSObject(import_js_module(_src, self._filepath, self._name))
elif self._filepath.endswith('.py'):
mod=JSObject(import_py_module(_src, self._filepath, self._name))
elif self._filepath.endswith('.pyj'):
mod=JSObject(import_pyj_module(_src, self._filepath, self._name))
else:
raise ImportError('Invalid Module: %s' % self._filepath)
# Set a few properties required by PEP 302
mod.__file__ = self._filepath
mod.__name__ = self._name
mod.__path__ = os.path.abspath(self._filepath)
mod.__loader__ = self
mod.__package__ = '.'.join(self._name.split('.')[:-1])
if self.is_package():
print('adding path for package')
# Set __path__ for packages
# so we can find the sub-modules.
mod.__path__ = [ self.path_entry ]
else:
print('imported as regular module')
print('creating a new module object for "%s"' % self._name)
sys.modules.setdefault(self._name, mod)
JSObject(__BRYTHON__.imported)[self._name]=mod
return mod
JSObject(__BRYTHON__.path_hooks.insert(0, VFSModuleFinder))
| gpl-3.0 | -2,127,311,724,851,457,800 | 34.16092 | 84 | 0.567833 | false |
antonve/s4-project-mooc | lms/djangoapps/instructor/access.py | 83 | 2634 | """
Access control operations for use by instructor APIs.
Does not include any access control, be sure to check access before calling.
TO DO sync instructor and staff flags
e.g. should these be possible?
{instructor: true, staff: false}
{instructor: true, staff: true}
"""
import logging
from django_comment_common.models import Role
from student.roles import (
CourseBetaTesterRole,
CourseInstructorRole,
CourseCcxCoachRole,
CourseStaffRole,
)
log = logging.getLogger(__name__)
ROLES = {
'beta': CourseBetaTesterRole,
'instructor': CourseInstructorRole,
'staff': CourseStaffRole,
'ccx_coach': CourseCcxCoachRole,
}
def list_with_level(course, level):
"""
List users who have 'level' access.
`level` is in ['instructor', 'staff', 'beta'] for standard courses.
There could be other levels specific to the course.
If there is no Group for that course-level, returns an empty list
"""
return ROLES[level](course.id).users_with_role()
def allow_access(course, user, level):
"""
Allow user access to course modification.
`level` is one of ['instructor', 'staff', 'beta']
"""
_change_access(course, user, level, 'allow')
def revoke_access(course, user, level):
"""
Revoke access from user to course modification.
`level` is one of ['instructor', 'staff', 'beta']
"""
_change_access(course, user, level, 'revoke')
def _change_access(course, user, level, action):
"""
Change access of user.
`level` is one of ['instructor', 'staff', 'beta']
action is one of ['allow', 'revoke']
NOTE: will create a group if it does not yet exist.
"""
try:
role = ROLES[level](course.id)
except KeyError:
raise ValueError("unrecognized level '{}'".format(level))
if action == 'allow':
role.add_users(user)
elif action == 'revoke':
role.remove_users(user)
else:
raise ValueError("unrecognized action '{}'".format(action))
def update_forum_role(course_id, user, rolename, action):
"""
Change forum access of user.
`rolename` is one of [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]
`action` is one of ['allow', 'revoke']
if `action` is bad, raises ValueError
if `rolename` does not exist, raises Role.DoesNotExist
"""
role = Role.objects.get(course_id=course_id, name=rolename)
if action == 'allow':
role.users.add(user)
elif action == 'revoke':
role.users.remove(user)
else:
raise ValueError("unrecognized action '{}'".format(action))
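# Illustrative sketch (not part of the original module): how the helpers above
# are typically called from instructor API code. `course` and `user` are
# assumptions standing in for objects obtained elsewhere (from the modulestore
# and django.contrib.auth respectively).
def _example_access_usage(course, user):
    allow_access(course, user, 'staff')               # grant staff rights
    current_staff = list_with_level(course, 'staff')
    revoke_access(course, user, 'staff')              # revoke them again
    update_forum_role(course.id, user, 'Moderator', 'allow')
    return current_staff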
| agpl-3.0 | -4,383,224,409,923,555,300 | 24.823529 | 98 | 0.651481 | false |
guewen/odoo | openerp/report/render/rml2pdf/__init__.py | 381 | 1101 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from trml2pdf import parseString, parseNode
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -5,183,721,119,589,142,000 | 41.346154 | 79 | 0.614896 | false |
gechr/ansible-modules-extras | packaging/os/slackpkg.py | 131 | 6382 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Kim Nørgaard
# Written by Kim Nørgaard <[email protected]>
# Based on pkgng module written by bleader <[email protected]>
# that was based on pkgin module written by Shaun Zinck <shaun.zinck at gmail.com>
# that was based on pacman module written by Afterburn <http://github.com/afterburn>
# that was based on apt module written by Matthew Williams <[email protected]>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: slackpkg
short_description: Package manager for Slackware >= 12.2
description:
- Manage binary packages for Slackware using 'slackpkg' which
is available in versions after 12.2.
version_added: "2.0"
options:
name:
description:
- name of package to install/remove
required: true
state:
description:
            - state of the package, you can use "installed" as an alias for C(present) and "removed" as one for C(absent).
choices: [ 'present', 'absent', 'latest' ]
required: false
default: present
update_cache:
description:
- update the package database first
required: false
default: false
choices: [ true, false ]
author: Kim Nørgaard (@KimNorgaard)
requirements: [ "Slackware >= 12.2" ]
'''
EXAMPLES = '''
# Install package foo
- slackpkg: name=foo state=present
# Remove packages foo and bar
- slackpkg: name=foo,bar state=absent
# Make sure that it is the most updated package
- slackpkg: name=foo state=latest
'''
def query_package(module, slackpkg_path, name):
import glob
import platform
machine = platform.machine()
packages = glob.glob("/var/log/packages/%s-*-[%s|noarch]*" % (name,
machine))
if len(packages) > 0:
return True
return False
def remove_packages(module, slackpkg_path, packages):
remove_c = 0
    # Using a for loop in case of error, we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
if not query_package(module, slackpkg_path, package):
continue
if not module.check_mode:
rc, out, err = module.run_command("%s -default_answer=y -batch=on \
remove %s" % (slackpkg_path,
package))
if not module.check_mode and query_package(module, slackpkg_path,
package):
module.fail_json(msg="failed to remove %s: %s" % (package, out))
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, slackpkg_path, packages):
install_c = 0
for package in packages:
if query_package(module, slackpkg_path, package):
continue
if not module.check_mode:
rc, out, err = module.run_command("%s -default_answer=y -batch=on \
install %s" % (slackpkg_path,
package))
if not module.check_mode and not query_package(module, slackpkg_path,
package):
module.fail_json(msg="failed to install %s: %s" % (package, out),
stderr=err)
install_c += 1
if install_c > 0:
module.exit_json(changed=True, msg="present %s package(s)"
% (install_c))
module.exit_json(changed=False, msg="package(s) already present")
def upgrade_packages(module, slackpkg_path, packages):
install_c = 0
for package in packages:
if not module.check_mode:
rc, out, err = module.run_command("%s -default_answer=y -batch=on \
upgrade %s" % (slackpkg_path,
package))
if not module.check_mode and not query_package(module, slackpkg_path,
package):
module.fail_json(msg="failed to install %s: %s" % (package, out),
stderr=err)
install_c += 1
if install_c > 0:
module.exit_json(changed=True, msg="present %s package(s)"
% (install_c))
module.exit_json(changed=False, msg="package(s) already present")
def update_cache(module, slackpkg_path):
rc, out, err = module.run_command("%s -batch=on update" % (slackpkg_path))
if rc != 0:
module.fail_json(msg="Could not update package cache")
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(default="installed", choices=['installed', 'removed', 'absent', 'present', 'latest']),
name=dict(aliases=["pkg"], required=True, type='list'),
update_cache=dict(default=False, aliases=["update-cache"],
type='bool'),
),
supports_check_mode=True)
slackpkg_path = module.get_bin_path('slackpkg', True)
p = module.params
pkgs = p['name']
if p["update_cache"]:
update_cache(module, slackpkg_path)
if p['state'] == 'latest':
upgrade_packages(module, slackpkg_path, pkgs)
elif p['state'] in ['present', 'installed']:
install_packages(module, slackpkg_path, pkgs)
elif p["state"] in ['removed', 'absent']:
remove_packages(module, slackpkg_path, pkgs)
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 | -7,659,245,710,266,280,000 | 31.055276 | 120 | 0.58865 | false |
KohlsTechnology/ansible | lib/ansible/modules/utilities/logic/async_status.py | 22 | 3006 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <[email protected]>, and others
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: async_status
short_description: Obtain status of asynchronous task
description:
- This module gets the status of an asynchronous task.
- This module is also supported for Windows targets.
version_added: "0.5"
options:
jid:
description:
- Job or task identifier
required: true
mode:
description:
- if C(status), obtain the status; if C(cleanup), clean up the async job cache (by default in C(~/.ansible_async/)) for the specified job I(jid).
choices: [ "status", "cleanup" ]
default: "status"
notes:
- See also U(https://docs.ansible.com/playbooks_async.html)
- This module is also supported for Windows targets.
author:
- "Ansible Core Team"
- "Michael DeHaan"
'''
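# Illustrative example block (an addition for this listing, not copied from the
# module): typical playbook usage, polling a job started with `async`/`poll: 0`.
# The task names and the `long_job` variable are assumptions.
EXAMPLES = '''
- name: start a long running operation in the background
  command: /usr/bin/long_running_operation
  async: 3600
  poll: 0
  register: long_job
- name: wait for the background job to finish
  async_status:
    jid: "{{ long_job.ansible_job_id }}"
  register: job_result
  until: job_result.finished
  retries: 30
  delay: 10
'''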
import json
import os
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
def main():
module = AnsibleModule(argument_spec=dict(
jid=dict(required=True),
mode=dict(default='status', choices=['status', 'cleanup']),
))
mode = module.params['mode']
jid = module.params['jid']
async_dir = os.environ.get('ANSIBLE_ASYNC_DIR', '~/.ansible_async')
# setup logging directory
logdir = os.path.expanduser(async_dir)
log_path = os.path.join(logdir, jid)
if not os.path.exists(log_path):
module.fail_json(msg="could not find job", ansible_job_id=jid, started=1, finished=1)
if mode == 'cleanup':
os.unlink(log_path)
module.exit_json(ansible_job_id=jid, erased=log_path)
# NOT in cleanup mode, assume regular status mode
# no remote kill mode currently exists, but probably should
# consider log_path + ".pid" file and also unlink that above
data = None
try:
data = open(log_path).read()
data = json.loads(data)
except Exception:
if not data:
# file not written yet? That means it is running
module.exit_json(results_file=log_path, ansible_job_id=jid, started=1, finished=0)
else:
module.fail_json(ansible_job_id=jid, results_file=log_path,
msg="Could not parse job output: %s" % data, started=1, finished=1)
if 'started' not in data:
data['finished'] = 1
data['ansible_job_id'] = jid
elif 'finished' not in data:
data['finished'] = 0
# Fix error: TypeError: exit_json() keywords must be strings
data = dict([(str(k), v) for k, v in iteritems(data)])
module.exit_json(**data)
if __name__ == '__main__':
main()
| gpl-3.0 | 3,813,427,270,067,078,700 | 28.762376 | 151 | 0.634398 | false |
bplancher/odoo | addons/account_tax_exigible/models/account_move.py | 12 | 1726 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp import fields, models, api
class AccountMoveLine(models.Model):
_inherit = "account.move.line"
tax_exigible = fields.Boolean(string='Appears in VAT report', default=True,
help="Technical field used to mark a tax line as exigible in the vat report or not (only exigible journal items are displayed). By default all new journal items are directly exigible, but with the module account_tax_cash_basis, some will become exigible only when the payment is recorded.")
@api.model
def create(self, vals, apply_taxes=True):
taxes = False
if vals.get('tax_line_id'):
taxes = [{'use_cash_basis': self.env['account.tax'].browse(vals['tax_line_id']).use_cash_basis}]
if vals.get('tax_ids'):
taxes = self.env['account.move.line'].resolve_2many_commands('tax_ids', vals['tax_ids'])
if taxes and any([tax['use_cash_basis'] for tax in taxes]) and not vals.get('tax_exigible'):
vals['tax_exigible'] = False
return super(AccountMoveLine, self).create(vals, apply_taxes=apply_taxes)
class AccountPartialReconcileCashBasis(models.Model):
_inherit = 'account.partial.reconcile'
def _check_tax_exigible(self, line):
return line.tax_exigible
def _get_tax_cash_basis_lines(self, value_before_reconciliation):
lines, move_date = super(AccountPartialReconcileCashBasis, self)._get_tax_cash_basis_lines(value_before_reconciliation)
for i in range(len(lines)):
vals = lines[i][2]
vals['tax_exigible'] = True
lines[i] = (0, 0, vals)
return lines, move_date
| agpl-3.0 | -5,645,034,495,406,240,000 | 48.314286 | 298 | 0.670336 | false |
TylerKirby/cltk | cltk/stem/akkadian/cv_pattern.py | 2 | 2145 | """
Return a CV patterned string based on the word.
"""
__author__ = ['M. Willis Monroe <[email protected]>']
__license__ = 'MIT License. See LICENSE.'
from cltk.stem.akkadian.syllabifier import AKKADIAN
class CVPattern(object):
"""Return a patterned string representing the consonants
and vowels of the input word."""
def __init__(self):
self.akkadian = AKKADIAN
def get_cv_pattern(self, word, pprint=False):
"""
input = iparras
pattern = [('V', 1, 'i'), ('C', 1, 'p'), ('V', 2, 'a'), ('C', 2, 'r'),
('C', 2, 'r'), ('V', 2, 'a'), ('C', 3, 's')]
pprint = V₁C₁V₂C₂C₂V₂C₃
"""
subscripts = {
1: '₁',
2: '₂',
3: '₃',
4: '₄',
5: '₅',
6: '₆',
7: '₇',
8: '₈',
9: '₉',
0: '₀'
}
pattern = []
c_count = 1
v_count = 1
count = 0
for char in word:
if char in self.akkadian['consonants']:
cv = 'C'
else:
cv = 'V'
# remove length:
if char in self.akkadian['macron_vowels']:
char = self.akkadian['short_vowels'][self.akkadian['macron_vowels'].index(char)]
elif char in self.akkadian['circumflex_vowels']:
char = self.akkadian['short_vowels'][self.akkadian['circumflex_vowels'].index(char)]
if char not in [x[2] for x in pattern]:
if cv == 'C':
count = c_count
c_count += 1
elif cv == 'V':
count = v_count
v_count += 1
pattern.append((cv, count, char))
elif char in [x[2] for x in pattern]:
pattern.append((cv, next(x[1] for x in pattern if x[2] == char), char))
if pprint:
output = ''
for item in pattern:
output += (item[0] + subscripts[item[1]])
return output
return pattern
| mit | -7,393,484,848,156,466,000 | 30.984848 | 104 | 0.44055 | false |
ky822/scikit-learn | sklearn/utils/estimator_checks.py | 9 | 51912 | from __future__ import print_function
import types
import warnings
import sys
import traceback
import inspect
import pickle
from copy import deepcopy
import numpy as np
from scipy import sparse
import struct
from sklearn.externals.six.moves import zip
from sklearn.externals.joblib import hash, Memory
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_raises_regex
from sklearn.utils.testing import assert_raise_message
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_in
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_warns_message
from sklearn.utils.testing import META_ESTIMATORS
from sklearn.utils.testing import set_random_state
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import SkipTest
from sklearn.utils.testing import ignore_warnings
from sklearn.utils.testing import assert_warns
from sklearn.base import (clone, ClassifierMixin, RegressorMixin,
TransformerMixin, ClusterMixin, BaseEstimator)
from sklearn.metrics import accuracy_score, adjusted_rand_score, f1_score
from sklearn.lda import LDA
from sklearn.random_projection import BaseRandomProjection
from sklearn.feature_selection import SelectKBest
from sklearn.svm.base import BaseLibSVM
from sklearn.pipeline import make_pipeline
from sklearn.utils.validation import DataConversionWarning
from sklearn.utils import ConvergenceWarning
from sklearn.cross_validation import train_test_split
from sklearn.utils import shuffle
from sklearn.preprocessing import StandardScaler
from sklearn.datasets import load_iris, load_boston, make_blobs
BOSTON = None
CROSS_DECOMPOSITION = ['PLSCanonical', 'PLSRegression', 'CCA', 'PLSSVD']
MULTI_OUTPUT = ['CCA', 'DecisionTreeRegressor', 'ElasticNet',
'ExtraTreeRegressor', 'ExtraTreesRegressor', 'GaussianProcess',
'KNeighborsRegressor', 'KernelRidge', 'Lars', 'Lasso',
'LassoLars', 'LinearRegression', 'MultiTaskElasticNet',
'MultiTaskElasticNetCV', 'MultiTaskLasso', 'MultiTaskLassoCV',
'OrthogonalMatchingPursuit', 'PLSCanonical', 'PLSRegression',
'RANSACRegressor', 'RadiusNeighborsRegressor',
'RandomForestRegressor', 'Ridge', 'RidgeCV']
def _yield_non_meta_checks(name, Estimator):
yield check_estimators_dtypes
yield check_fit_score_takes_y
yield check_dtype_object
yield check_estimators_fit_returns_self
# Check that all estimator yield informative messages when
# trained on empty datasets
yield check_estimators_empty_data_messages
if name not in CROSS_DECOMPOSITION + ['SpectralEmbedding']:
# SpectralEmbedding is non-deterministic,
# see issue #4236
# cross-decomposition's "transform" returns X and Y
yield check_pipeline_consistency
if name not in ['Imputer']:
# Test that all estimators check their input for NaN's and infs
yield check_estimators_nan_inf
if name not in ['GaussianProcess']:
# FIXME!
# in particular GaussianProcess!
yield check_estimators_overwrite_params
if hasattr(Estimator, 'sparsify'):
yield check_sparsify_coefficients
yield check_estimator_sparse_data
# Test that estimators can be pickled, and once pickled
# give the same answer as before.
yield check_estimators_pickle
def _yield_classifier_checks(name, Classifier):
    # test classifiers can handle non-array data
yield check_classifier_data_not_an_array
# test classifiers trained on a single label always return this label
yield check_classifiers_one_label
yield check_classifiers_classes
yield check_estimators_partial_fit_n_features
# basic consistency testing
yield check_classifiers_train
if (name not in ["MultinomialNB", "LabelPropagation", "LabelSpreading"]
# TODO some complication with -1 label
and name not in ["DecisionTreeClassifier",
"ExtraTreeClassifier"]):
# We don't raise a warning in these classifiers, as
# the column y interface is used by the forests.
yield check_supervised_y_2d
# test if NotFittedError is raised
yield check_estimators_unfitted
if 'class_weight' in Classifier().get_params().keys():
yield check_class_weight_classifiers
def _yield_regressor_checks(name, Regressor):
# TODO: test with intercept
# TODO: test with multiple responses
# basic testing
yield check_regressors_train
yield check_regressor_data_not_an_array
yield check_estimators_partial_fit_n_features
yield check_regressors_no_decision_function
yield check_supervised_y_2d
if name != 'CCA':
# check that the regressor handles int input
yield check_regressors_int
# Test if NotFittedError is raised
yield check_estimators_unfitted
def _yield_transformer_checks(name, Transformer):
# All transformers should either deal with sparse data or raise an
# exception with type TypeError and an intelligible error message
if name not in ['AdditiveChi2Sampler', 'Binarizer', 'Normalizer',
'PLSCanonical', 'PLSRegression', 'CCA', 'PLSSVD']:
yield check_transformer_data_not_an_array
# these don't actually fit the data, so don't raise errors
if name not in ['AdditiveChi2Sampler', 'Binarizer',
'FunctionTransformer', 'Normalizer']:
# basic tests
yield check_transformer_general
yield check_transformers_unfitted
def _yield_clustering_checks(name, Clusterer):
yield check_clusterer_compute_labels_predict
if name not in ('WardAgglomeration', "FeatureAgglomeration"):
# this is clustering on the features
# let's not test that here.
yield check_clustering
yield check_estimators_partial_fit_n_features
def _yield_all_checks(name, Estimator):
for check in _yield_non_meta_checks(name, Estimator):
yield check
if issubclass(Estimator, ClassifierMixin):
for check in _yield_classifier_checks(name, Estimator):
yield check
if issubclass(Estimator, RegressorMixin):
for check in _yield_regressor_checks(name, Estimator):
yield check
if issubclass(Estimator, TransformerMixin):
for check in _yield_transformer_checks(name, Estimator):
yield check
if issubclass(Estimator, ClusterMixin):
for check in _yield_clustering_checks(name, Estimator):
yield check
yield check_fit2d_predict1d
yield check_fit2d_1sample
yield check_fit2d_1feature
yield check_fit1d_1feature
yield check_fit1d_1sample
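# Illustrative use of the dispatcher above (a sketch, not part of the original
# module): LinearRegression is an assumption -- any estimator class would yield
# the battery of checks that matches the mixins it inherits from.
def _example_yield_all_checks():  # pragma: no cover
    from sklearn.linear_model import LinearRegression
    checks = list(_yield_all_checks('LinearRegression', LinearRegression))
    assert check_regressors_train in checks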
def check_estimator(Estimator):
"""Check if estimator adheres to sklearn conventions.
This estimator will run an extensive test-suite for input validation,
shapes, etc.
Additional tests for classifiers, regressors, clustering or transformers
will be run if the Estimator class inherits from the corresponding mixin
from sklearn.base.
Parameters
----------
Estimator : class
Class to check.
"""
name = Estimator.__name__
check_parameters_default_constructible(name, Estimator)
for check in _yield_all_checks(name, Estimator):
check(name, Estimator)
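# Typical invocation of the public entry point above (a sketch; LinearSVC is an
# assumption -- any scikit-learn estimator class can be passed in):
def _example_check_estimator():  # pragma: no cover
    from sklearn.svm import LinearSVC
    check_estimator(LinearSVC)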
def _boston_subset(n_samples=200):
global BOSTON
if BOSTON is None:
boston = load_boston()
X, y = boston.data, boston.target
X, y = shuffle(X, y, random_state=0)
X, y = X[:n_samples], y[:n_samples]
X = StandardScaler().fit_transform(X)
BOSTON = X, y
return BOSTON
def set_fast_parameters(estimator):
# speed up some estimators
params = estimator.get_params()
if ("n_iter" in params
and estimator.__class__.__name__ != "TSNE"):
estimator.set_params(n_iter=5)
if "max_iter" in params:
warnings.simplefilter("ignore", ConvergenceWarning)
if estimator.max_iter is not None:
estimator.set_params(max_iter=min(5, estimator.max_iter))
# LinearSVR
if estimator.__class__.__name__ == 'LinearSVR':
estimator.set_params(max_iter=20)
if "n_resampling" in params:
# randomized lasso
estimator.set_params(n_resampling=5)
if "n_estimators" in params:
# especially gradient boosting with default 100
estimator.set_params(n_estimators=min(5, estimator.n_estimators))
if "max_trials" in params:
# RANSAC
estimator.set_params(max_trials=10)
if "n_init" in params:
# K-Means
estimator.set_params(n_init=2)
if estimator.__class__.__name__ == "SelectFdr":
# be tolerant of noisy datasets (not actually speed)
estimator.set_params(alpha=.5)
if estimator.__class__.__name__ == "TheilSenRegressor":
estimator.max_subpopulation = 100
if isinstance(estimator, BaseRandomProjection):
# Due to the Johnson-Lindenstrauss lemma and often very few samples, the
# number of components of the random matrix projection will probably be
# greater than the number of features.
# So we impose a smaller number (avoid "auto" mode)
estimator.set_params(n_components=1)
if isinstance(estimator, SelectKBest):
# SelectKBest has a default of k=10
# which is more features than we have in most cases.
estimator.set_params(k=1)
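# Illustrative effect of set_fast_parameters (a sketch; GradientBoostingRegressor
# is an assumption -- any estimator exposing n_estimators is capped the same way):
def _example_set_fast_parameters():  # pragma: no cover
    from sklearn.ensemble import GradientBoostingRegressor
    est = GradientBoostingRegressor()
    set_fast_parameters(est)
    assert est.n_estimators == 5  # reduced from the default of 100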
class NotAnArray(object):
" An object that is convertable to an array"
def __init__(self, data):
self.data = data
def __array__(self, dtype=None):
return self.data
def _is_32bit():
"""Detect if process is 32bit Python."""
return struct.calcsize('P') * 8 == 32
def check_estimator_sparse_data(name, Estimator):
rng = np.random.RandomState(0)
X = rng.rand(40, 10)
X[X < .8] = 0
X_csr = sparse.csr_matrix(X)
y = (4 * rng.rand(40)).astype(np.int)
for sparse_format in ['csr', 'csc', 'dok', 'lil', 'coo', 'dia', 'bsr']:
X = X_csr.asformat(sparse_format)
# catch deprecation warnings
with warnings.catch_warnings():
if name in ['Scaler', 'StandardScaler']:
estimator = Estimator(with_mean=False)
else:
estimator = Estimator()
set_fast_parameters(estimator)
# fit and predict
try:
estimator.fit(X, y)
if hasattr(estimator, "predict"):
pred = estimator.predict(X)
assert_equal(pred.shape, (X.shape[0],))
if hasattr(estimator, 'predict_proba'):
probs = estimator.predict_proba(X)
assert_equal(probs.shape, (X.shape[0], 4))
except TypeError as e:
if 'sparse' not in repr(e):
print("Estimator %s doesn't seem to fail gracefully on "
"sparse data: error message state explicitly that "
"sparse input is not supported if this is not the case."
% name)
raise
except Exception:
print("Estimator %s doesn't seem to fail gracefully on "
"sparse data: it should raise a TypeError if sparse input "
"is explicitly not supported." % name)
raise
def check_dtype_object(name, Estimator):
# check that estimators treat dtype object as numeric if possible
rng = np.random.RandomState(0)
X = rng.rand(40, 10).astype(object)
y = (X[:, 0] * 4).astype(np.int)
y = multioutput_estimator_convert_y_2d(name, y)
with warnings.catch_warnings():
estimator = Estimator()
set_fast_parameters(estimator)
estimator.fit(X, y)
if hasattr(estimator, "predict"):
estimator.predict(X)
if hasattr(estimator, "transform"):
estimator.transform(X)
try:
estimator.fit(X, y.astype(object))
except Exception as e:
if "Unknown label type" not in str(e):
raise
X[0, 0] = {'foo': 'bar'}
msg = "argument must be a string or a number"
assert_raises_regex(TypeError, msg, estimator.fit, X, y)
@ignore_warnings
def check_fit2d_predict1d(name, Estimator):
# check by fitting a 2d array and predicting with a 1d array
rnd = np.random.RandomState(0)
X = 3 * rnd.uniform(size=(20, 3))
y = X[:, 0].astype(np.int)
y = multioutput_estimator_convert_y_2d(name, y)
estimator = Estimator()
set_fast_parameters(estimator)
if hasattr(estimator, "n_components"):
estimator.n_components = 1
if hasattr(estimator, "n_clusters"):
estimator.n_clusters = 1
set_random_state(estimator, 1)
estimator.fit(X, y)
for method in ["predict", "transform", "decision_function",
"predict_proba"]:
if hasattr(estimator, method):
try:
assert_warns(DeprecationWarning,
getattr(estimator, method), X[0])
except ValueError:
pass
@ignore_warnings
def check_fit2d_1sample(name, Estimator):
# check fitting a 2d array with only one sample
rnd = np.random.RandomState(0)
X = 3 * rnd.uniform(size=(1, 10))
y = X[:, 0].astype(np.int)
y = multioutput_estimator_convert_y_2d(name, y)
estimator = Estimator()
set_fast_parameters(estimator)
if hasattr(estimator, "n_components"):
estimator.n_components = 1
if hasattr(estimator, "n_clusters"):
estimator.n_clusters = 1
set_random_state(estimator, 1)
try:
estimator.fit(X, y)
except ValueError:
pass
@ignore_warnings
def check_fit2d_1feature(name, Estimator):
# check fitting a 2d array with only one feature
rnd = np.random.RandomState(0)
X = 3 * rnd.uniform(size=(10, 1))
y = X[:, 0].astype(np.int)
y = multioutput_estimator_convert_y_2d(name, y)
estimator = Estimator()
set_fast_parameters(estimator)
if hasattr(estimator, "n_components"):
estimator.n_components = 1
if hasattr(estimator, "n_clusters"):
estimator.n_clusters = 1
set_random_state(estimator, 1)
try:
estimator.fit(X, y)
except ValueError:
pass
@ignore_warnings
def check_fit1d_1feature(name, Estimator):
# check fitting 1d array with 1 feature
rnd = np.random.RandomState(0)
X = 3 * rnd.uniform(size=(20))
y = X.astype(np.int)
y = multioutput_estimator_convert_y_2d(name, y)
estimator = Estimator()
set_fast_parameters(estimator)
if hasattr(estimator, "n_components"):
estimator.n_components = 1
if hasattr(estimator, "n_clusters"):
estimator.n_clusters = 1
set_random_state(estimator, 1)
try:
estimator.fit(X, y)
except ValueError:
pass
@ignore_warnings
def check_fit1d_1sample(name, Estimator):
# check fitting a 1d array with only one sample
rnd = np.random.RandomState(0)
X = 3 * rnd.uniform(size=(20))
y = np.array([1])
y = multioutput_estimator_convert_y_2d(name, y)
estimator = Estimator()
set_fast_parameters(estimator)
if hasattr(estimator, "n_components"):
estimator.n_components = 1
if hasattr(estimator, "n_clusters"):
estimator.n_clusters = 1
set_random_state(estimator, 1)
try:
estimator.fit(X, y)
except ValueError:
pass
def check_transformer_general(name, Transformer):
X, y = make_blobs(n_samples=30, centers=[[0, 0, 0], [1, 1, 1]],
random_state=0, n_features=2, cluster_std=0.1)
X = StandardScaler().fit_transform(X)
X -= X.min()
_check_transformer(name, Transformer, X, y)
_check_transformer(name, Transformer, X.tolist(), y.tolist())
def check_transformer_data_not_an_array(name, Transformer):
X, y = make_blobs(n_samples=30, centers=[[0, 0, 0], [1, 1, 1]],
random_state=0, n_features=2, cluster_std=0.1)
X = StandardScaler().fit_transform(X)
# We need to make sure that we have non negative data, for things
# like NMF
X -= X.min() - .1
this_X = NotAnArray(X)
this_y = NotAnArray(np.asarray(y))
_check_transformer(name, Transformer, this_X, this_y)
def check_transformers_unfitted(name, Transformer):
X, y = _boston_subset()
with warnings.catch_warnings(record=True):
transformer = Transformer()
assert_raises((AttributeError, ValueError), transformer.transform, X)
def _check_transformer(name, Transformer, X, y):
if name in ('CCA', 'LocallyLinearEmbedding', 'KernelPCA') and _is_32bit():
# Those transformers yield non-deterministic output when executed on
# a 32bit Python. The same transformers are stable on 64bit Python.
# FIXME: try to isolate a minimalistic reproduction case only depending
# on numpy & scipy and/or maybe generate a test dataset that does not
# cause such unstable behaviors.
msg = name + ' is non deterministic on 32bit Python'
raise SkipTest(msg)
n_samples, n_features = np.asarray(X).shape
# catch deprecation warnings
with warnings.catch_warnings(record=True):
transformer = Transformer()
set_random_state(transformer)
set_fast_parameters(transformer)
# fit
if name in CROSS_DECOMPOSITION:
y_ = np.c_[y, y]
y_[::2, 1] *= 2
else:
y_ = y
transformer.fit(X, y_)
X_pred = transformer.fit_transform(X, y=y_)
if isinstance(X_pred, tuple):
for x_pred in X_pred:
assert_equal(x_pred.shape[0], n_samples)
else:
# check for consistent n_samples
assert_equal(X_pred.shape[0], n_samples)
if hasattr(transformer, 'transform'):
if name in CROSS_DECOMPOSITION:
X_pred2 = transformer.transform(X, y_)
X_pred3 = transformer.fit_transform(X, y=y_)
else:
X_pred2 = transformer.transform(X)
X_pred3 = transformer.fit_transform(X, y=y_)
if isinstance(X_pred, tuple) and isinstance(X_pred2, tuple):
for x_pred, x_pred2, x_pred3 in zip(X_pred, X_pred2, X_pred3):
assert_array_almost_equal(
x_pred, x_pred2, 2,
"fit_transform and transform outcomes not consistent in %s"
% Transformer)
assert_array_almost_equal(
x_pred, x_pred3, 2,
"consecutive fit_transform outcomes not consistent in %s"
% Transformer)
else:
assert_array_almost_equal(
X_pred, X_pred2, 2,
"fit_transform and transform outcomes not consistent in %s"
% Transformer)
assert_array_almost_equal(
X_pred, X_pred3, 2,
"consecutive fit_transform outcomes not consistent in %s"
% Transformer)
assert_equal(len(X_pred2), n_samples)
assert_equal(len(X_pred3), n_samples)
# raises error on malformed input for transform
if hasattr(X, 'T'):
# If it's not an array, it does not have a 'T' property
assert_raises(ValueError, transformer.transform, X.T)
@ignore_warnings
def check_pipeline_consistency(name, Estimator):
if name in ('CCA', 'LocallyLinearEmbedding', 'KernelPCA') and _is_32bit():
# Those transformers yield non-deterministic output when executed on
# a 32bit Python. The same transformers are stable on 64bit Python.
# FIXME: try to isolate a minimalistic reproduction case only depending
# on numpy & scipy and/or maybe generate a test dataset that does not
# cause such unstable behaviors.
msg = name + ' is non deterministic on 32bit Python'
raise SkipTest(msg)
# check that make_pipeline(est) gives same score as est
X, y = make_blobs(n_samples=30, centers=[[0, 0, 0], [1, 1, 1]],
random_state=0, n_features=2, cluster_std=0.1)
X -= X.min()
y = multioutput_estimator_convert_y_2d(name, y)
estimator = Estimator()
set_fast_parameters(estimator)
set_random_state(estimator)
pipeline = make_pipeline(estimator)
estimator.fit(X, y)
pipeline.fit(X, y)
funcs = ["score", "fit_transform"]
for func_name in funcs:
func = getattr(estimator, func_name, None)
if func is not None:
func_pipeline = getattr(pipeline, func_name)
result = func(X, y)
result_pipe = func_pipeline(X, y)
assert_array_almost_equal(result, result_pipe)
@ignore_warnings
def check_fit_score_takes_y(name, Estimator):
# check that all estimators accept an optional y
# in fit and score so they can be used in pipelines
rnd = np.random.RandomState(0)
X = rnd.uniform(size=(10, 3))
y = np.arange(10) % 3
y = multioutput_estimator_convert_y_2d(name, y)
estimator = Estimator()
set_fast_parameters(estimator)
set_random_state(estimator)
funcs = ["fit", "score", "partial_fit", "fit_predict", "fit_transform"]
for func_name in funcs:
func = getattr(estimator, func_name, None)
if func is not None:
func(X, y)
args = inspect.getargspec(func).args
assert_true(args[2] in ["y", "Y"])
@ignore_warnings
def check_estimators_dtypes(name, Estimator):
rnd = np.random.RandomState(0)
X_train_32 = 3 * rnd.uniform(size=(20, 5)).astype(np.float32)
X_train_64 = X_train_32.astype(np.float64)
X_train_int_64 = X_train_32.astype(np.int64)
X_train_int_32 = X_train_32.astype(np.int32)
y = X_train_int_64[:, 0]
y = multioutput_estimator_convert_y_2d(name, y)
for X_train in [X_train_32, X_train_64, X_train_int_64, X_train_int_32]:
with warnings.catch_warnings(record=True):
estimator = Estimator()
set_fast_parameters(estimator)
set_random_state(estimator, 1)
estimator.fit(X_train, y)
for method in ["predict", "transform", "decision_function",
"predict_proba"]:
if hasattr(estimator, method):
getattr(estimator, method)(X_train)
def check_estimators_empty_data_messages(name, Estimator):
e = Estimator()
set_fast_parameters(e)
set_random_state(e, 1)
X_zero_samples = np.empty(0).reshape(0, 3)
# The precise message can change depending on whether X or y is
# validated first. Let us test the type of exception only:
assert_raises(ValueError, e.fit, X_zero_samples, [])
X_zero_features = np.empty(0).reshape(3, 0)
# the following y should be accepted by both classifiers and regressors
# and ignored by unsupervised models
y = multioutput_estimator_convert_y_2d(name, np.array([1, 0, 1]))
msg = "0 feature\(s\) \(shape=\(3, 0\)\) while a minimum of \d* is required."
assert_raises_regex(ValueError, msg, e.fit, X_zero_features, y)
def check_estimators_nan_inf(name, Estimator):
rnd = np.random.RandomState(0)
X_train_finite = rnd.uniform(size=(10, 3))
X_train_nan = rnd.uniform(size=(10, 3))
X_train_nan[0, 0] = np.nan
X_train_inf = rnd.uniform(size=(10, 3))
X_train_inf[0, 0] = np.inf
y = np.ones(10)
y[:5] = 0
y = multioutput_estimator_convert_y_2d(name, y)
error_string_fit = "Estimator doesn't check for NaN and inf in fit."
error_string_predict = ("Estimator doesn't check for NaN and inf in"
" predict.")
error_string_transform = ("Estimator doesn't check for NaN and inf in"
" transform.")
for X_train in [X_train_nan, X_train_inf]:
# catch deprecation warnings
with warnings.catch_warnings(record=True):
estimator = Estimator()
set_fast_parameters(estimator)
set_random_state(estimator, 1)
# try to fit
try:
estimator.fit(X_train, y)
except ValueError as e:
if 'inf' not in repr(e) and 'NaN' not in repr(e):
print(error_string_fit, Estimator, e)
traceback.print_exc(file=sys.stdout)
raise e
except Exception as exc:
print(error_string_fit, Estimator, exc)
traceback.print_exc(file=sys.stdout)
raise exc
else:
raise AssertionError(error_string_fit, Estimator)
# actually fit
estimator.fit(X_train_finite, y)
# predict
if hasattr(estimator, "predict"):
try:
estimator.predict(X_train)
except ValueError as e:
if 'inf' not in repr(e) and 'NaN' not in repr(e):
print(error_string_predict, Estimator, e)
traceback.print_exc(file=sys.stdout)
raise e
except Exception as exc:
print(error_string_predict, Estimator, exc)
traceback.print_exc(file=sys.stdout)
else:
raise AssertionError(error_string_predict, Estimator)
# transform
if hasattr(estimator, "transform"):
try:
estimator.transform(X_train)
except ValueError as e:
if 'inf' not in repr(e) and 'NaN' not in repr(e):
print(error_string_transform, Estimator, e)
traceback.print_exc(file=sys.stdout)
raise e
except Exception as exc:
print(error_string_transform, Estimator, exc)
traceback.print_exc(file=sys.stdout)
else:
raise AssertionError(error_string_transform, Estimator)
def check_estimators_pickle(name, Estimator):
"""Test that we can pickle all estimators"""
check_methods = ["predict", "transform", "decision_function",
"predict_proba"]
X, y = make_blobs(n_samples=30, centers=[[0, 0, 0], [1, 1, 1]],
random_state=0, n_features=2, cluster_std=0.1)
# some estimators can't do features less than 0
X -= X.min()
# some estimators only take multioutputs
y = multioutput_estimator_convert_y_2d(name, y)
# catch deprecation warnings
with warnings.catch_warnings(record=True):
estimator = Estimator()
set_random_state(estimator)
set_fast_parameters(estimator)
estimator.fit(X, y)
result = dict()
for method in check_methods:
if hasattr(estimator, method):
result[method] = getattr(estimator, method)(X)
# pickle and unpickle!
pickled_estimator = pickle.dumps(estimator)
unpickled_estimator = pickle.loads(pickled_estimator)
for method in result:
unpickled_result = getattr(unpickled_estimator, method)(X)
assert_array_almost_equal(result[method], unpickled_result)
def check_estimators_partial_fit_n_features(name, Alg):
# check if number of features changes between calls to partial_fit.
if not hasattr(Alg, 'partial_fit'):
return
X, y = make_blobs(n_samples=50, random_state=1)
X -= X.min()
with warnings.catch_warnings(record=True):
alg = Alg()
set_fast_parameters(alg)
if isinstance(alg, ClassifierMixin):
classes = np.unique(y)
alg.partial_fit(X, y, classes=classes)
else:
alg.partial_fit(X, y)
assert_raises(ValueError, alg.partial_fit, X[:, :-1], y)
def check_clustering(name, Alg):
X, y = make_blobs(n_samples=50, random_state=1)
X, y = shuffle(X, y, random_state=7)
X = StandardScaler().fit_transform(X)
n_samples, n_features = X.shape
# catch deprecation and neighbors warnings
with warnings.catch_warnings(record=True):
alg = Alg()
set_fast_parameters(alg)
if hasattr(alg, "n_clusters"):
alg.set_params(n_clusters=3)
set_random_state(alg)
if name == 'AffinityPropagation':
alg.set_params(preference=-100)
alg.set_params(max_iter=100)
# fit
alg.fit(X)
# with lists
alg.fit(X.tolist())
assert_equal(alg.labels_.shape, (n_samples,))
pred = alg.labels_
assert_greater(adjusted_rand_score(pred, y), 0.4)
# fit another time with ``fit_predict`` and compare results
if name == 'SpectralClustering':
# there is no way to make Spectral clustering deterministic :(
return
set_random_state(alg)
with warnings.catch_warnings(record=True):
pred2 = alg.fit_predict(X)
assert_array_equal(pred, pred2)
def check_clusterer_compute_labels_predict(name, Clusterer):
"""Check that predict is invariant of compute_labels"""
X, y = make_blobs(n_samples=20, random_state=0)
clusterer = Clusterer()
if hasattr(clusterer, "compute_labels"):
# MiniBatchKMeans
if hasattr(clusterer, "random_state"):
clusterer.set_params(random_state=0)
X_pred1 = clusterer.fit(X).predict(X)
clusterer.set_params(compute_labels=False)
X_pred2 = clusterer.fit(X).predict(X)
assert_array_equal(X_pred1, X_pred2)
def check_classifiers_one_label(name, Classifier):
error_string_fit = "Classifier can't train when only one class is present."
error_string_predict = ("Classifier can't predict when only one class is "
"present.")
rnd = np.random.RandomState(0)
X_train = rnd.uniform(size=(10, 3))
X_test = rnd.uniform(size=(10, 3))
y = np.ones(10)
# catch deprecation warnings
with warnings.catch_warnings(record=True):
classifier = Classifier()
set_fast_parameters(classifier)
# try to fit
try:
classifier.fit(X_train, y)
except ValueError as e:
if 'class' not in repr(e):
print(error_string_fit, Classifier, e)
traceback.print_exc(file=sys.stdout)
raise e
else:
return
except Exception as exc:
print(error_string_fit, Classifier, exc)
traceback.print_exc(file=sys.stdout)
raise exc
# predict
try:
assert_array_equal(classifier.predict(X_test), y)
except Exception as exc:
print(error_string_predict, Classifier, exc)
raise exc
def check_classifiers_train(name, Classifier):
X_m, y_m = make_blobs(n_samples=300, random_state=0)
X_m, y_m = shuffle(X_m, y_m, random_state=7)
X_m = StandardScaler().fit_transform(X_m)
# generate binary problem from multi-class one
y_b = y_m[y_m != 2]
X_b = X_m[y_m != 2]
for (X, y) in [(X_m, y_m), (X_b, y_b)]:
# catch deprecation warnings
classes = np.unique(y)
n_classes = len(classes)
n_samples, n_features = X.shape
with warnings.catch_warnings(record=True):
classifier = Classifier()
if name in ['BernoulliNB', 'MultinomialNB']:
X -= X.min()
set_fast_parameters(classifier)
set_random_state(classifier)
# raises error on malformed input for fit
assert_raises(ValueError, classifier.fit, X, y[:-1])
# fit
classifier.fit(X, y)
# with lists
classifier.fit(X.tolist(), y.tolist())
assert_true(hasattr(classifier, "classes_"))
y_pred = classifier.predict(X)
assert_equal(y_pred.shape, (n_samples,))
# training set performance
if name not in ['BernoulliNB', 'MultinomialNB']:
assert_greater(accuracy_score(y, y_pred), 0.83)
# raises error on malformed input for predict
assert_raises(ValueError, classifier.predict, X.T)
if hasattr(classifier, "decision_function"):
try:
# decision_function agrees with predict
decision = classifier.decision_function(X)
if n_classes == 2:
assert_equal(decision.shape, (n_samples,))
dec_pred = (decision.ravel() > 0).astype(np.int)
assert_array_equal(dec_pred, y_pred)
if (n_classes == 3
and not isinstance(classifier, BaseLibSVM)):
# the one-vs-one scheme of LibSVM works differently
assert_equal(decision.shape, (n_samples, n_classes))
assert_array_equal(np.argmax(decision, axis=1), y_pred)
# raises error on malformed input
assert_raises(ValueError,
classifier.decision_function, X.T)
# raises error on malformed input for decision_function
assert_raises(ValueError,
classifier.decision_function, X.T)
except NotImplementedError:
pass
if hasattr(classifier, "predict_proba"):
# predict_proba agrees with predict
y_prob = classifier.predict_proba(X)
assert_equal(y_prob.shape, (n_samples, n_classes))
assert_array_equal(np.argmax(y_prob, axis=1), y_pred)
# check that probas for all classes sum to one
assert_array_almost_equal(np.sum(y_prob, axis=1),
np.ones(n_samples))
# raises error on malformed input
assert_raises(ValueError, classifier.predict_proba, X.T)
# raises error on malformed input for predict_proba
assert_raises(ValueError, classifier.predict_proba, X.T)
def check_estimators_fit_returns_self(name, Estimator):
"""Check if self is returned when calling fit"""
X, y = make_blobs(random_state=0, n_samples=9, n_features=4)
y = multioutput_estimator_convert_y_2d(name, y)
# some want non-negative input
X -= X.min()
estimator = Estimator()
set_fast_parameters(estimator)
set_random_state(estimator)
assert_true(estimator.fit(X, y) is estimator)
@ignore_warnings
def check_estimators_unfitted(name, Estimator):
"""Check that predict raises an exception in an unfitted estimator.
Unfitted estimators should raise either AttributeError or ValueError.
The specific exception type NotFittedError inherits from both and can
therefore be adequately raised for that purpose.
"""
# Common test for Regressors as well as Classifiers
X, y = _boston_subset()
with warnings.catch_warnings(record=True):
est = Estimator()
msg = "fit"
if hasattr(est, 'predict'):
assert_raise_message((AttributeError, ValueError), msg,
est.predict, X)
if hasattr(est, 'decision_function'):
assert_raise_message((AttributeError, ValueError), msg,
est.decision_function, X)
if hasattr(est, 'predict_proba'):
assert_raise_message((AttributeError, ValueError), msg,
est.predict_proba, X)
if hasattr(est, 'predict_log_proba'):
assert_raise_message((AttributeError, ValueError), msg,
est.predict_log_proba, X)
def check_supervised_y_2d(name, Estimator):
if "MultiTask" in name:
# These only work on 2d, so this test makes no sense
return
rnd = np.random.RandomState(0)
X = rnd.uniform(size=(10, 3))
y = np.arange(10) % 3
# catch deprecation warnings
with warnings.catch_warnings(record=True):
estimator = Estimator()
set_fast_parameters(estimator)
set_random_state(estimator)
# fit
estimator.fit(X, y)
y_pred = estimator.predict(X)
set_random_state(estimator)
# Check that when a 2D y is given, a DataConversionWarning is
# raised
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always", DataConversionWarning)
warnings.simplefilter("ignore", RuntimeWarning)
estimator.fit(X, y[:, np.newaxis])
y_pred_2d = estimator.predict(X)
msg = "expected 1 DataConversionWarning, got: %s" % (
", ".join([str(w_x) for w_x in w]))
if name not in MULTI_OUTPUT:
# check that we warned if we don't support multi-output
assert_greater(len(w), 0, msg)
assert_true("DataConversionWarning('A column-vector y"
" was passed when a 1d array was expected" in msg)
assert_array_almost_equal(y_pred.ravel(), y_pred_2d.ravel())
def check_classifiers_classes(name, Classifier):
X, y = make_blobs(n_samples=30, random_state=0, cluster_std=0.1)
X, y = shuffle(X, y, random_state=7)
X = StandardScaler().fit_transform(X)
# We need to make sure that we have non negative data, for things
# like NMF
X -= X.min() - .1
y_names = np.array(["one", "two", "three"])[y]
for y_names in [y_names, y_names.astype('O')]:
if name in ["LabelPropagation", "LabelSpreading"]:
# TODO some complication with -1 label
y_ = y
else:
y_ = y_names
classes = np.unique(y_)
# catch deprecation warnings
with warnings.catch_warnings(record=True):
classifier = Classifier()
if name == 'BernoulliNB':
classifier.set_params(binarize=X.mean())
set_fast_parameters(classifier)
set_random_state(classifier)
# fit
classifier.fit(X, y_)
y_pred = classifier.predict(X)
# training set performance
assert_array_equal(np.unique(y_), np.unique(y_pred))
if np.any(classifier.classes_ != classes):
print("Unexpected classes_ attribute for %r: "
"expected %s, got %s" %
(classifier, classes, classifier.classes_))
def check_regressors_int(name, Regressor):
X, _ = _boston_subset()
X = X[:50]
rnd = np.random.RandomState(0)
y = rnd.randint(3, size=X.shape[0])
y = multioutput_estimator_convert_y_2d(name, y)
rnd = np.random.RandomState(0)
# catch deprecation warnings
with warnings.catch_warnings(record=True):
# separate estimators to control random seeds
regressor_1 = Regressor()
regressor_2 = Regressor()
set_fast_parameters(regressor_1)
set_fast_parameters(regressor_2)
set_random_state(regressor_1)
set_random_state(regressor_2)
if name in CROSS_DECOMPOSITION:
y_ = np.vstack([y, 2 * y + rnd.randint(2, size=len(y))])
y_ = y_.T
else:
y_ = y
# fit
regressor_1.fit(X, y_)
pred1 = regressor_1.predict(X)
regressor_2.fit(X, y_.astype(np.float))
pred2 = regressor_2.predict(X)
assert_array_almost_equal(pred1, pred2, 2, name)
def check_regressors_train(name, Regressor):
X, y = _boston_subset()
y = StandardScaler().fit_transform(y.reshape(-1, 1)) # X is already scaled
y = y.ravel()
y = multioutput_estimator_convert_y_2d(name, y)
rnd = np.random.RandomState(0)
# catch deprecation warnings
with warnings.catch_warnings(record=True):
regressor = Regressor()
set_fast_parameters(regressor)
if not hasattr(regressor, 'alphas') and hasattr(regressor, 'alpha'):
# linear regressors need to set alpha, but not generalized CV ones
regressor.alpha = 0.01
if name == 'PassiveAggressiveRegressor':
regressor.C = 0.01
# raises error on malformed input for fit
assert_raises(ValueError, regressor.fit, X, y[:-1])
# fit
if name in CROSS_DECOMPOSITION:
y_ = np.vstack([y, 2 * y + rnd.randint(2, size=len(y))])
y_ = y_.T
else:
y_ = y
set_random_state(regressor)
regressor.fit(X, y_)
regressor.fit(X.tolist(), y_.tolist())
y_pred = regressor.predict(X)
assert_equal(y_pred.shape, y_.shape)
# TODO: find out why PLS and CCA fail. RANSAC is random
# and furthermore assumes the presence of outliers, hence
# skipped
if name not in ('PLSCanonical', 'CCA', 'RANSACRegressor'):
print(regressor)
assert_greater(regressor.score(X, y_), 0.5)
@ignore_warnings
def check_regressors_no_decision_function(name, Regressor):
# checks whether regressors have decision_function or predict_proba
rng = np.random.RandomState(0)
X = rng.normal(size=(10, 4))
y = multioutput_estimator_convert_y_2d(name, X[:, 0])
regressor = Regressor()
set_fast_parameters(regressor)
if hasattr(regressor, "n_components"):
# FIXME CCA, PLS is not robust to rank 1 effects
regressor.n_components = 1
regressor.fit(X, y)
funcs = ["decision_function", "predict_proba", "predict_log_proba"]
for func_name in funcs:
func = getattr(regressor, func_name, None)
if func is None:
# doesn't have function
continue
# has function. Should raise deprecation warning
msg = func_name
assert_warns_message(DeprecationWarning, msg, func, X)
def check_class_weight_classifiers(name, Classifier):
if name == "NuSVC":
# the sparse version has a parameter that doesn't do anything
raise SkipTest
if name.endswith("NB"):
# NaiveBayes classifiers have a somewhat different interface.
# FIXME SOON!
raise SkipTest
for n_centers in [2, 3]:
# create a very noisy dataset
X, y = make_blobs(centers=n_centers, random_state=0, cluster_std=20)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.5,
random_state=0)
n_centers = len(np.unique(y_train))
if n_centers == 2:
class_weight = {0: 1000, 1: 0.0001}
else:
class_weight = {0: 1000, 1: 0.0001, 2: 0.0001}
with warnings.catch_warnings(record=True):
classifier = Classifier(class_weight=class_weight)
if hasattr(classifier, "n_iter"):
classifier.set_params(n_iter=100)
if hasattr(classifier, "min_weight_fraction_leaf"):
classifier.set_params(min_weight_fraction_leaf=0.01)
set_random_state(classifier)
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
assert_greater(np.mean(y_pred == 0), 0.89)
def check_class_weight_balanced_classifiers(name, Classifier, X_train, y_train,
X_test, y_test, weights):
with warnings.catch_warnings(record=True):
classifier = Classifier()
if hasattr(classifier, "n_iter"):
classifier.set_params(n_iter=100)
set_random_state(classifier)
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
classifier.set_params(class_weight='balanced')
classifier.fit(X_train, y_train)
y_pred_balanced = classifier.predict(X_test)
assert_greater(f1_score(y_test, y_pred_balanced, average='weighted'),
f1_score(y_test, y_pred, average='weighted'))
def check_class_weight_balanced_linear_classifier(name, Classifier):
"""Test class weights with non-contiguous class labels."""
X = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0],
[1.0, 1.0], [1.0, 0.0]])
y = np.array([1, 1, 1, -1, -1])
with warnings.catch_warnings(record=True):
classifier = Classifier()
if hasattr(classifier, "n_iter"):
# This is a very small dataset; the default n_iter is likely to prevent
# convergence
classifier.set_params(n_iter=1000)
set_random_state(classifier)
# Let the model compute the class frequencies
classifier.set_params(class_weight='balanced')
coef_balanced = classifier.fit(X, y).coef_.copy()
# Count each label occurrence to reweight manually
n_samples = len(y)
n_classes = float(len(np.unique(y)))
class_weight = {1: n_samples / (np.sum(y == 1) * n_classes),
-1: n_samples / (np.sum(y == -1) * n_classes)}
classifier.set_params(class_weight=class_weight)
coef_manual = classifier.fit(X, y).coef_.copy()
assert_array_almost_equal(coef_balanced, coef_manual)
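# Worked example of the reweighting above (added note, not original code): with
# y = [1, 1, 1, -1, -1] we have n_samples = 5 and n_classes = 2, so 'balanced'
# resolves to weight(1) = 5 / (3 * 2) = 5/6 and weight(-1) = 5 / (2 * 2) = 5/4,
# which is exactly what the manual class_weight dict reproduces.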
def check_estimators_overwrite_params(name, Estimator):
X, y = make_blobs(random_state=0, n_samples=9)
y = multioutput_estimator_convert_y_2d(name, y)
# some want non-negative input
X -= X.min()
with warnings.catch_warnings(record=True):
# catch deprecation warnings
estimator = Estimator()
set_fast_parameters(estimator)
set_random_state(estimator)
# Make a physical copy of the original estimator parameters before fitting.
params = estimator.get_params()
original_params = deepcopy(params)
# Fit the model
estimator.fit(X, y)
# Compare the state of the model parameters with the original parameters
new_params = estimator.get_params()
for param_name, original_value in original_params.items():
new_value = new_params[param_name]
# We should never change or mutate the internal state of input
# parameters by default. To check this we use the joblib.hash function
# that introspects recursively any subobjects to compute a checksum.
# The only exception to this rule of immutable constructor parameters
# is possible RandomState instance but in this check we explicitly
# fixed the random_state params recursively to be integer seeds.
assert_equal(hash(new_value), hash(original_value),
"Estimator %s should not change or mutate "
" the parameter %s from %s to %s during fit."
% (name, param_name, original_value, new_value))
def check_sparsify_coefficients(name, Estimator):
X = np.array([[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1],
[-1, -2], [2, 2], [-2, -2]])
y = [1, 1, 1, 2, 2, 2, 3, 3, 3]
est = Estimator()
est.fit(X, y)
pred_orig = est.predict(X)
# test sparsify with dense inputs
est.sparsify()
assert_true(sparse.issparse(est.coef_))
pred = est.predict(X)
assert_array_equal(pred, pred_orig)
# pickle and unpickle with sparse coef_
est = pickle.loads(pickle.dumps(est))
assert_true(sparse.issparse(est.coef_))
pred = est.predict(X)
assert_array_equal(pred, pred_orig)
def check_classifier_data_not_an_array(name, Estimator):
X = np.array([[3, 0], [0, 1], [0, 2], [1, 1], [1, 2], [2, 1]])
y = [1, 1, 1, 2, 2, 2]
y = multioutput_estimator_convert_y_2d(name, y)
check_estimators_data_not_an_array(name, Estimator, X, y)
def check_regressor_data_not_an_array(name, Estimator):
X, y = _boston_subset(n_samples=50)
y = multioutput_estimator_convert_y_2d(name, y)
check_estimators_data_not_an_array(name, Estimator, X, y)
def check_estimators_data_not_an_array(name, Estimator, X, y):
if name in CROSS_DECOMPOSITION:
raise SkipTest
# catch deprecation warnings
with warnings.catch_warnings(record=True):
# separate estimators to control random seeds
estimator_1 = Estimator()
estimator_2 = Estimator()
set_fast_parameters(estimator_1)
set_fast_parameters(estimator_2)
set_random_state(estimator_1)
set_random_state(estimator_2)
y_ = NotAnArray(np.asarray(y))
X_ = NotAnArray(np.asarray(X))
# fit
estimator_1.fit(X_, y_)
pred1 = estimator_1.predict(X_)
estimator_2.fit(X, y)
pred2 = estimator_2.predict(X)
assert_array_almost_equal(pred1, pred2, 2, name)
def check_parameters_default_constructible(name, Estimator):
classifier = LDA()
# test default-constructibility
# get rid of deprecation warnings
with warnings.catch_warnings(record=True):
if name in META_ESTIMATORS:
estimator = Estimator(classifier)
else:
estimator = Estimator()
# test cloning
clone(estimator)
# test __repr__
repr(estimator)
# test that set_params returns self
assert_true(estimator.set_params() is estimator)
# test if init does nothing but set parameters
# this is important for grid_search etc.
# We get the default parameters from init and then
# compare these against the actual values of the attributes.
# this comes from getattr. Gets rid of deprecation decorator.
init = getattr(estimator.__init__, 'deprecated_original',
estimator.__init__)
try:
args, varargs, kws, defaults = inspect.getargspec(init)
except TypeError:
# init is not a python function.
# true for mixins
return
params = estimator.get_params()
if name in META_ESTIMATORS:
# they need a non-default argument
args = args[2:]
else:
args = args[1:]
if args:
# non-empty list
assert_equal(len(args), len(defaults))
else:
return
for arg, default in zip(args, defaults):
assert_in(type(default), [str, int, float, bool, tuple, type(None),
np.float64, types.FunctionType, Memory])
if arg not in params.keys():
# deprecated parameter, not in get_params
assert_true(default is None)
continue
if isinstance(params[arg], np.ndarray):
assert_array_equal(params[arg], default)
else:
assert_equal(params[arg], default)
def multioutput_estimator_convert_y_2d(name, y):
# Estimators in mono_output_task_error raise ValueError if y is 1-D.
# Convert into a 2-D y for those estimators.
if name in (['MultiTaskElasticNetCV', 'MultiTaskLassoCV',
'MultiTaskLasso', 'MultiTaskElasticNet']):
return y[:, np.newaxis]
return y
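# Illustrative behaviour of the helper above (a sketch): MultiTask estimators get
# a column vector, every other estimator gets y back unchanged.
def _example_convert_y_2d():  # pragma: no cover
    y = np.arange(3)
    assert multioutput_estimator_convert_y_2d('MultiTaskLasso', y).shape == (3, 1)
    assert multioutput_estimator_convert_y_2d('Ridge', y).shape == (3,)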
def check_non_transformer_estimators_n_iter(name, estimator,
multi_output=False):
# Check that all iterative solvers run for more than one iteration
iris = load_iris()
X, y_ = iris.data, iris.target
if multi_output:
y_ = y_[:, np.newaxis]
set_random_state(estimator, 0)
if name == 'AffinityPropagation':
estimator.fit(X)
else:
estimator.fit(X, y_)
assert_greater(estimator.n_iter_, 0)
def check_transformer_n_iter(name, estimator):
if name in CROSS_DECOMPOSITION:
# Check using default data
X = [[0., 0., 1.], [1., 0., 0.], [2., 2., 2.], [2., 5., 4.]]
y_ = [[0.1, -0.2], [0.9, 1.1], [0.1, -0.5], [0.3, -0.2]]
else:
X, y_ = make_blobs(n_samples=30, centers=[[0, 0, 0], [1, 1, 1]],
random_state=0, n_features=2, cluster_std=0.1)
X -= X.min() - 0.1
set_random_state(estimator, 0)
estimator.fit(X, y_)
# These return a n_iter per component.
if name in CROSS_DECOMPOSITION:
for iter_ in estimator.n_iter_:
assert_greater(iter_, 1)
else:
assert_greater(estimator.n_iter_, 1)
def check_get_params_invariance(name, estimator):
class T(BaseEstimator):
"""Mock classifier
"""
def __init__(self):
pass
def fit(self, X, y):
return self
if name in ('FeatureUnion', 'Pipeline'):
e = estimator([('clf', T())])
elif name in ('GridSearchCV', 'RandomizedSearchCV'):
return
else:
e = estimator()
shallow_params = e.get_params(deep=False)
deep_params = e.get_params(deep=True)
assert_true(all(item in deep_params.items() for item in
shallow_params.items()))
| bsd-3-clause | -7,728,675,816,565,337,000 | 34.92526 | 81 | 0.618855 | false |
Rav3nPL/doubloons-0.10 | qa/rpc-tests/mempool_coinbase_spends.py | 143 | 3850 | #!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test re-org scenarios with a mempool that contains transactions
# that spend (directly or indirectly) coinbase transactions.
#
from test_framework import BitcoinTestFramework
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
import os
import shutil
# Create one-input, one-output, no-fee transaction:
class MempoolCoinbaseTest(BitcoinTestFramework):
alert_filename = None # Set by setup_network
def setup_network(self):
args = ["-checkmempool", "-debug=mempool"]
self.nodes = []
self.nodes.append(start_node(0, self.options.tmpdir, args))
self.nodes.append(start_node(1, self.options.tmpdir, args))
connect_nodes(self.nodes[1], 0)
self.is_network_split = False
self.sync_all()
def create_tx(self, from_txid, to_address, amount):
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
signresult = self.nodes[0].signrawtransaction(rawtx)
assert_equal(signresult["complete"], True)
return signresult["hex"]
def run_test(self):
start_count = self.nodes[0].getblockcount()
# Mine four blocks. After this, nodes[0] blocks
# 101, 102, and 103 are spend-able.
new_blocks = self.nodes[1].setgenerate(True, 4)
self.sync_all()
node0_address = self.nodes[0].getnewaddress()
node1_address = self.nodes[1].getnewaddress()
# Three scenarios for re-orging coinbase spends in the memory pool:
# 1. Direct coinbase spend : spend_101
# 2. Indirect (coinbase spend in chain, child in mempool) : spend_102 and spend_102_1
# 3. Indirect (coinbase and child both in chain) : spend_103 and spend_103_1
# Use invalidateblock to make all of the above coinbase spends invalid (immature coinbase),
# and make sure the mempool code behaves correctly.
b = [ self.nodes[0].getblockhash(n) for n in range(102, 105) ]
coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
spend_101_raw = self.create_tx(coinbase_txids[0], node1_address, 50)
spend_102_raw = self.create_tx(coinbase_txids[1], node0_address, 50)
spend_103_raw = self.create_tx(coinbase_txids[2], node0_address, 50)
# Broadcast and mine spend_102 and 103:
spend_102_id = self.nodes[0].sendrawtransaction(spend_102_raw)
spend_103_id = self.nodes[0].sendrawtransaction(spend_103_raw)
self.nodes[0].setgenerate(True, 1)
# Create 102_1 and 103_1:
spend_102_1_raw = self.create_tx(spend_102_id, node1_address, 50)
spend_103_1_raw = self.create_tx(spend_103_id, node1_address, 50)
# Broadcast and mine 103_1:
spend_103_1_id = self.nodes[0].sendrawtransaction(spend_103_1_raw)
self.nodes[0].setgenerate(True, 1)
# ... now put spend_101 and spend_102_1 in memory pools:
spend_101_id = self.nodes[0].sendrawtransaction(spend_101_raw)
spend_102_1_id = self.nodes[0].sendrawtransaction(spend_102_1_raw)
self.sync_all()
assert_equal(set(self.nodes[0].getrawmempool()), set([ spend_101_id, spend_102_1_id ]))
# Use invalidateblock to re-org back and make all those coinbase spends
# immature/invalid:
for node in self.nodes:
node.invalidateblock(new_blocks[0])
self.sync_all()
# mempool should be empty.
assert_equal(set(self.nodes[0].getrawmempool()), set())
if __name__ == '__main__':
MempoolCoinbaseTest().main()
| mit | -2,729,291,827,385,604,000 | 39.957447 | 98 | 0.655844 | false |
kmggh/python-guess-number | guess_num_player.py | 1 | 1325 | #!/usr/bin/env python
# Fri 2013-05-03 23:49:55 -0400
# Copyright (c) 2013 by Ken Guyton. All Rights Reserved.
"""A game for guessing a number. Used as a Python class example.
A computer player plays the game.
"""
__author__ = 'Ken Guyton'
import argparse
import guess_num2
import player
MAX_DEFAULT = 100
MIN_DEFAULT = 0
MAP = {-1: 'low', 1: 'high'}
def parse_args():
"""Parse the command line args for main."""
parser = argparse.ArgumentParser()
parser.add_argument('-m', '--max', help='Max guess amount', type=int,
default=100)
parser.add_argument('-b', '--binary', action='store_true',
help='Use the binary player')
parser.add_argument('-r', '--random', action='store_true',
help='Use the random player')
return parser.parse_args()
def main():
args = parse_args()
game = guess_num2.Game(max_val=args.max)
if args.binary:
the_player = player.BinaryPlayer(game)
elif args.random:
the_player = player.RandomPlayer(game)
else:
the_player = player.BinaryPlayer(game)
print 'Guessing from {0} to {1}: '.format(game.min, game.max)
for guess, direction in the_player.play():
print game.evaluate(guess, direction)
print 'You guessed {0} times.'.format(game.count)
if __name__ == '__main__':
main()
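# Typical invocations (a sketch; flags as defined in parse_args above):
#   ./guess_num_player.py --binary --max 1000
#   ./guess_num_player.py --random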
| artistic-2.0 | -4,024,513,252,216,439,000 | 22.660714 | 71 | 0.634717 | false |
rpip/mpower-python | setup.py | 2 | 2405 | from setuptools import setup, Command
from unittest import TextTestRunner, TestLoader
from glob import glob
from os.path import splitext, basename, join as pjoin
try:
from os.path import walk
except ImportError:
from os import walk
import os
class TestCommand(Command):
user_options = []
def initialize_options(self):
self._dir = os.getcwd()
def finalize_options(self):
pass
def run(self):
'''
Finds all the test modules in tests/ and runs them.
'''
testfiles = []
for t in glob(pjoin(self._dir, 'tests', '*.py')):
if not t.endswith('__init__.py'):
testfiles.append('.'.join(
['tests', splitext(basename(t))[0]])
)
import sys
ROOT = os.path.dirname(os.getcwd())
MPOWER_LIBS = os.path.join(ROOT, "mpower")
sys.path.append(MPOWER_LIBS)
tests = TestLoader().loadTestsFromNames(testfiles)
t = TextTestRunner(verbosity=1)
t.run(tests)
class CleanCommand(Command):
"""Recursively Delete all compile python modules"""
user_options = []
def initialize_options(self):
self._clean_me = []
for root, dirs, files in os.walk('.'):
for f in files:
if f.endswith('.pyc'):
self._clean_me.append(pjoin(root, f))
def finalize_options(self):
pass
def run(self):
for clean_me in self._clean_me:
try:
os.unlink(clean_me)
except:
pass
def readme(filename='README.rst'):
with open(filename) as f:
text = f.read()
return text
setup(
name='mpower',
version=__import__('mpower').__version__,
author='Mawuli Adzaku',
author_email='[email protected]',
packages=['mpower'],
cmdclass={'test': TestCommand, 'clean': CleanCommand},
scripts=[],
url='https://github.com/mawuli/mpower-python',
license='LICENSE.txt',
keywords="mpower mobile money payments",
description='MPower Payments Python client library',
long_description=readme('README.rst'),
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
install_requires=['requests >=2.0'],
)
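# Typical invocations for the custom commands registered above (a sketch; assumes
# this file is used as the project's setup.py):
#   python setup.py test   # runs the tests/ suite via TestCommand
#   python setup.py clean  # removes compiled *.pyc files via CleanCommand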
| mit | 6,796,190,529,577,032,000 | 26.329545 | 61 | 0.577963 | false |
diofant/diofant | diofant/core/power.py | 1 | 47841 | import math
from mpmath.libmp import sqrtrem as mpmath_sqrtrem
from ..logic import true
from ..utilities import sift
from .add import Add
from .cache import cacheit
from .compatibility import as_int
from .evalf import PrecisionExhausted
from .evaluate import global_evaluate
from .expr import Expr
from .function import (_coeff_isneg, expand_complex, expand_mul,
expand_multinomial)
from .logic import fuzzy_or
from .mul import Mul, _keep_coeff
from .numbers import E, I, Integer, Rational, nan, oo, pi, zoo
from .singleton import S
from .symbol import Dummy, symbols
from .sympify import sympify
def integer_nthroot(y, n):
"""
Return a tuple containing x = floor(y**(1/n))
and a boolean indicating whether the result is exact (that is,
whether x**n == y).
>>> integer_nthroot(16, 2)
(4, True)
>>> integer_nthroot(26, 2)
(5, False)
"""
y, n = int(y), int(n)
if y < 0:
raise ValueError('y must be nonnegative')
if n < 1:
raise ValueError('n must be positive')
if y in (0, 1):
return y, True
if n == 1:
return y, True
if n == 2:
x, rem = mpmath_sqrtrem(y)
return int(x), not rem
if n > y:
return 1, False
# Get initial estimate for Newton's method. Care must be taken to
# avoid overflow
try:
guess = int(y**(1./n) + 0.5)
except OverflowError:
exp = math.log(y, 2)/n
if exp > 53:
shift = int(exp - 53)
guess = int(2.0**(exp - shift) + 1) << shift
else:
guess = int(2.0**exp)
if guess > 2**50:
# Newton iteration
xprev, x = -1, guess
while 1:
t = x**(n - 1)
xprev, x = x, ((n - 1)*x + y//t)//n
if abs(x - xprev) < 2:
break
else:
x = guess
# Compensate
t = x**n
while t < y:
x += 1
t = x**n
while t > y:
x -= 1
t = x**n
return x, t == y
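# Illustrative check (a sketch, not part of the original module): a value this
# large gives an initial guess above the 2**50 threshold, so the Newton-iteration
# branch above is exercised.
def _example_integer_nthroot():  # pragma: no cover
    root, exact = integer_nthroot(10**60, 3)
    assert root == 10**20 and exact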
class Pow(Expr):
"""
Defines the expression x**y as "x raised to a power y".
For complex numbers `x` and `y`, ``Pow`` gives the principal
value of `exp(y*log(x))`.
Singleton definitions involving (0, 1, -1, oo, -oo, I, -I):
+--------------+---------+-----------------------------------------------+
| expr | value | reason |
+==============+=========+===============================================+
| z**0 | 1 | Although arguments over 0**0 exist, see [2]. |
+--------------+---------+-----------------------------------------------+
| z**1 | z | |
+--------------+---------+-----------------------------------------------+
| (-oo)**(-1) | 0 | |
+--------------+---------+-----------------------------------------------+
| (-1)**-1 | -1 | |
+--------------+---------+-----------------------------------------------+
| 0**-1 | zoo | This is not strictly true, as 0**-1 may be |
| | | undefined, but is convenient in some contexts |
| | | where the base is assumed to be positive. |
+--------------+---------+-----------------------------------------------+
| 1**-1 | 1 | |
+--------------+---------+-----------------------------------------------+
| oo**-1 | 0 | |
+--------------+---------+-----------------------------------------------+
| 0**oo | 0 | Because for all complex numbers z near |
| | | 0, z**oo -> 0. |
+--------------+---------+-----------------------------------------------+
| 0**-oo | zoo | This is not strictly true, as 0**oo may be |
| | | oscillating between positive and negative |
| | | values or rotating in the complex plane. |
| | | It is convenient, however, when the base |
| | | is positive. |
+--------------+---------+-----------------------------------------------+
| 1**oo | nan | Because there are various cases where |
| 1**-oo | | lim(x(t),t)=1, lim(y(t),t)=oo (or -oo), |
| | | but lim(x(t)**y(t), t) != 1. See [3]. |
+--------------+---------+-----------------------------------------------+
| z**zoo | nan | No limit for z**t for t -> zoo. |
+--------------+---------+-----------------------------------------------+
| (-1)**oo | nan | Because of oscillations in the limit. |
| (-1)**(-oo) | | |
+--------------+---------+-----------------------------------------------+
| oo**oo | oo | |
+--------------+---------+-----------------------------------------------+
| oo**-oo | 0 | |
+--------------+---------+-----------------------------------------------+
| (-oo)**oo | nan | |
| (-oo)**-oo | | |
+--------------+---------+-----------------------------------------------+
| oo**I | nan | oo**e could probably be best thought of as |
| (-oo)**I | | the limit of x**e for real x as x tends to |
| | | oo. If e is I, then the limit does not exist |
| | | and nan is used to indicate that. |
+--------------+---------+-----------------------------------------------+
| oo**(1+I) | zoo | If the real part of e is positive, then the |
| (-oo)**(1+I) | | limit of abs(x**e) is oo. So the limit value |
| | | is zoo. |
+--------------+---------+-----------------------------------------------+
| oo**(-1+I) | 0 | If the real part of e is negative, then the |
| -oo**(-1+I) | | limit is 0. |
+--------------+---------+-----------------------------------------------+
Because symbolic computations are more flexible than floating point
calculations and we prefer to never return an incorrect answer,
we choose not to conform to all IEEE 754 conventions. This helps
us avoid extra test-case code in the calculation of limits.
See Also
========
diofant.core.numbers.Infinity
diofant.core.numbers.NaN
References
==========
* https://en.wikipedia.org/wiki/Exponentiation
* https://en.wikipedia.org/wiki/Zero_to_the_power_of_zero
* https://en.wikipedia.org/wiki/Indeterminate_forms
"""
is_Pow = True
@cacheit
def __new__(cls, b, e, evaluate=None):
if evaluate is None:
evaluate = global_evaluate[0]
from ..functions.elementary.exponential import exp_polar
b = sympify(b, strict=True)
e = sympify(e, strict=True)
if evaluate:
if nan in (b, e):
return nan
elif e is S.Zero:
return Integer(1)
elif e is S.One:
return b
elif e is zoo:
return nan
elif e.is_integer and _coeff_isneg(b):
if e.is_even:
b = -b
elif e.is_odd:
return -Pow(-b, e)
if b is S.One:
if abs(e).is_infinite:
return nan
return Integer(1)
else:
# recognize base as E
if not e.is_Atom and b is not E and not isinstance(b, exp_polar):
from ..functions import im, log, sign
from ..simplify import denom, numer
from .exprtools import factor_terms
c, ex = factor_terms(e, sign=False).as_coeff_Mul()
den = denom(ex)
if isinstance(den, log) and den.args[0] == b:
return E**(c*numer(ex))
elif den.is_Add:
s = sign(im(b))
if s.is_Number and s and den == \
log(-factor_terms(b, sign=False)) + s*I*pi:
return E**(c*numer(ex))
obj = b._eval_power(e)
if obj is not None:
return obj
return Expr.__new__(cls, b, e)
def _eval_is_commutative(self):
return self.base.is_commutative and self.exp.is_commutative
@property
def base(self):
"""Returns base of the power expression."""
return self.args[0]
@property
def exp(self):
"""Returns exponent of the power expression."""
return self.args[1]
@classmethod
def class_key(cls):
"""Nice order of classes."""
return 4, 2, cls.__name__
def _eval_power(self, other):
from ..functions import Abs, arg, exp, floor, im, log, re, sign
b, e = self.as_base_exp()
if b is nan:
return (b**e)**other # let __new__ handle it
s = None
if other.is_integer:
s = 1
elif b.is_polar: # e.g. exp_polar, besselj, var('p', polar=True)...
s = 1
elif e.is_extended_real is not None:
# helper functions ===========================
def _half(e):
"""Return True if the exponent has a literal 2 as the
denominator, else None.
"""
if getattr(e, 'denominator', None) == 2:
return True
n, d = e.as_numer_denom()
if n.is_integer and d == 2:
return True
def _n2(e):
"""Return ``e`` evaluated to a Number with 2 significant
digits, else None.
"""
try:
rv = e.evalf(2)
if rv.is_Number:
return rv
except PrecisionExhausted: # pragma: no cover
pass
# ===================================================
if e.is_extended_real:
# we need _half(other) with constant floor or
# floor(Rational(1, 2) - e*arg(b)/2/pi) == 0
# handle -1 as special case
if (e == -1):
# floor arg. is 1/2 + arg(b)/2/pi
if _half(other):
if b.is_negative is True:
return (-1)**other*Pow(-b, e*other)
if b.is_extended_real is False:
return Pow(b.conjugate()/Abs(b)**2, other)
elif e.is_even:
if b.is_extended_real:
b = abs(b)
if b.is_imaginary:
b = abs(im(b))*I
if (abs(e) < 1) == true or (e == 1):
s = 1 # floor = 0
elif b.is_nonnegative:
s = 1 # floor = 0
elif re(b).is_nonnegative and (abs(e) < 2) == true:
s = 1 # floor = 0
elif im(b).is_nonzero and (abs(e) == 2):
s = 1 # floor = 0
elif b.is_imaginary and (abs(e) == 2):
s = 1 # floor = 0
elif _half(other):
s = exp(2*pi*I*other*floor(
Rational(1, 2) - e*arg(b)/(2*pi)))
if s.is_extended_real and _n2(sign(s) - s) == 0:
s = sign(s)
else:
s = None
else:
# e.is_extended_real is False requires:
# _half(other) with constant floor or
# floor(Rational(1, 2) - im(e*log(b))/2/pi) == 0
s = exp(2*I*pi*other*floor(Rational(1, 2) - im(e*log(b))/2/pi))
# be careful to test that s is -1 or 1 b/c sign(I) == I:
# so check that s is real
if s.is_extended_real and _n2(sign(s) - s) == 0:
s = sign(s)
else:
s = None
if s is not None:
return s*Pow(b, e*other)
def _eval_is_positive(self):
b, e = self.base, self.exp
if b.is_nonnegative and b == e:
return True
elif b.is_positive and (e.is_real or e.is_positive):
return True
elif b.is_negative and e.is_integer and (b.is_finite or e.is_nonnegative):
return e.is_even
elif b.is_nonpositive and e.is_odd and (b.is_finite or e.is_nonnegative):
return False
elif b in {I, -I} and e.is_imaginary:
return True
def _eval_is_nonnegative(self):
b, e = self.base, self.exp
if b.is_imaginary and e.is_nonnegative:
m = e % 4
if m.is_integer:
return m.is_zero
def _eval_is_negative(self):
b, e = self.base, self.exp
if b.is_negative:
if e.is_odd and (b.is_finite or e.is_positive):
return True
if e.is_even:
return False
elif b.is_positive:
if e.is_extended_real:
return False
elif b.is_nonnegative:
if e.is_nonnegative:
return False
elif b.is_nonpositive:
if e.is_even:
return False
elif b.is_extended_real:
if e.is_even:
return False
def _eval_is_zero(self):
b, e = self.base, self.exp
if b.is_zero:
if e.is_positive:
return True
elif e.is_nonpositive:
return False
elif b.is_nonzero:
if e.is_finite:
return False
elif e.is_infinite:
if (1 - abs(b)).is_positive:
return e.is_positive
elif (1 - abs(b)).is_negative:
return e.is_negative
def _eval_is_integer(self):
b, e = self.base, self.exp
if b.is_rational:
if b.is_integer is False and e.is_positive:
return False # rat**nonneg
if b.is_integer and e.is_integer:
if b is S.NegativeOne:
return True
if e.is_nonnegative or e.is_positive:
return True
if b.is_integer and e.is_negative and (e.is_finite or e.is_integer):
if (b - 1).is_nonzero and (b + 1).is_nonzero:
return False
if b.is_Number and e.is_Number:
check = self.func(*self.args)
if check.is_Integer:
return True
def _eval_is_extended_real(self):
from ..functions import arg, log
from .mul import Mul
b, e = self.base, self.exp
if b is E:
if e.is_extended_real:
return True
elif e.is_imaginary:
return (2*I*e/pi).is_even
if b.is_extended_real is None:
if b.func == self.func and b.base is E and b.exp.is_imaginary:
return e.is_imaginary
if e.is_extended_real is None:
return
if b.is_extended_real and e.is_extended_real:
if b.is_positive:
return True
elif b.is_nonnegative:
if e.is_nonnegative:
return True
else:
if e.is_integer:
if b.is_nonzero or e.is_nonnegative:
return True
elif b.is_negative:
if e.is_rational and e.is_noninteger:
return False
if b.is_nonzero and e.is_negative:
return (b**-e).is_extended_real
if b.is_imaginary:
if e.is_integer:
if e.is_even:
if b.is_nonzero or e.is_nonnegative:
return True
elif e.is_odd:
return False
elif e.is_imaginary and log(b).is_imaginary:
return True
elif e.is_Add:
c, a = e.as_coeff_Add()
if c and c.is_Integer:
return Mul(b**c, b**a, evaluate=False).is_extended_real
elif b in (-I, I) and (e/2).is_noninteger:
return False
return
if b.is_extended_real and e.is_imaginary:
if b is S.NegativeOne:
return True
c = e.coeff(I)
if c in (1, -1):
if b == 2:
return False
if b.is_extended_real is False: # we already know it's not imag
i = arg(b)*e/pi
return i.is_integer
def _eval_is_complex(self):
from ..functions import log
b, e = self.base, self.exp
if b.is_complex:
exp = log(b)*e
return fuzzy_or([exp.is_complex, exp.is_negative])
def _eval_is_imaginary(self):
from ..functions import arg, log
b, e = self.base, self.exp
if b.is_imaginary:
if e.is_integer:
return e.is_odd
if e.is_imaginary and e.is_nonzero:
if log(b).is_imaginary:
return False
if b.is_real and e.is_real:
if b.is_positive:
return False
else:
if e.is_integer:
return False
else:
if (2*e).is_integer:
return b.is_negative
if b.is_real is False: # we already know it's not imag
return (2*arg(b)*e/pi).is_odd
def _eval_is_odd(self):
b, e = self.base, self.exp
if e.is_integer:
if e.is_positive:
return b.is_odd
elif e.is_nonnegative and b.is_odd:
return True
elif b is S.NegativeOne:
return True
def _eval_is_finite(self):
b, e = self.base, self.exp
if e.is_negative:
if b.is_zero:
return False
if b.is_finite and e.is_finite:
if e.is_nonnegative or b.is_nonzero:
return True
def _eval_is_polar(self):
b, e = self.base, self.exp
if b.is_polar and e.is_commutative:
return True
def _eval_subs(self, old, new):
from ..functions import log
from .symbol import Symbol
def _check(ct1, ct2, old):
"""Return bool, pow where, if bool is True, then the exponent of
Pow `old` will combine with `pow` so the substitution is valid,
            otherwise bool will be False.
            cti are the coefficient and terms of an exponent of self or old.
In this _eval_subs routine a change like (b**(2*x)).subs({b**x: y})
will give y**2 since (b**x)**2 == b**(2*x); if that equality does
not hold then the substitution should not occur so `bool` will be
False.
"""
coeff1, terms1 = ct1
coeff2, terms2 = ct2
if terms1 == terms2:
pow = coeff1/coeff2
try:
pow = as_int(pow)
combines = True
except ValueError:
combines = Pow._eval_power(
Pow(*old.as_base_exp(), evaluate=False),
pow)
if isinstance(combines, Pow):
combines = combines.base is old.base
else:
combines = False
return combines, pow
return False, None
if old == self.base:
return new**self.exp._subs(old, new)
if old.func is self.func and self.exp == old.exp:
l = log(self.base, old.base)
if l.is_Number:
return Pow(new, l)
if isinstance(old, self.func) and self.base == old.base:
if self.exp.is_Add is False:
ct2 = old.exp.as_independent(Symbol, as_Add=False)
ct1 = (self.exp/ct2[1], ct2[1])
ok, pow = _check(ct1, ct2, old)
if ok:
# issue sympy/sympy#5180: (x**(6*y)).subs({x**(3*y):z})->z**2
return self.func(new, pow)
else: # b**(6*x+a).subs({b**(3*x): y}) -> y**2 * b**a
# exp(exp(x) + exp(x**2)).subs({exp(exp(x)): w}) -> w * exp(exp(x**2))
oarg = old.exp
new_l = []
o_al = []
ct2 = oarg.as_coeff_mul()
for a in self.exp.args:
newa = a._subs(old, new)
ct1 = newa.as_coeff_mul()
ok, pow = _check(ct1, ct2, old)
if ok:
new_l.append(new**pow)
continue
o_al.append(newa)
if new_l:
new_l.append(Pow(self.base, Add(*o_al), evaluate=False))
return Mul(*new_l)
if old.is_Pow and old.base is E and self.exp.is_extended_real and self.base.is_positive:
ct1 = old.exp.as_independent(Symbol, as_Add=False)
ct2 = (self.exp*log(self.base)).as_independent(
Symbol, as_Add=False)
ok, pow = _check(ct1, ct2, old)
if ok:
return self.func(new, pow) # (2**x).subs({exp(x*log(2)): z}) -> z
def as_base_exp(self):
"""Return base and exp of self.
If base is 1/Integer, then return Integer, -exp. If this extra
processing is not needed, the base and exp properties will
        give the raw arguments.
Examples
========
>>> p = Pow(Rational(1, 2), 2, evaluate=False)
>>> p.as_base_exp()
(2, -2)
>>> p.args
(1/2, 2)
"""
b, e = self.base, self.exp
if b.is_Rational and b.numerator == 1 and b.denominator != 1:
return Integer(b.denominator), -e
return b, e
def _eval_adjoint(self):
from ..functions.elementary.complexes import adjoint
i, p = self.exp.is_integer, self.base.is_positive
if i:
return adjoint(self.base)**self.exp
if p:
return self.base**adjoint(self.exp)
def _eval_conjugate(self):
if self.is_extended_real:
return self
from ..functions.elementary.complexes import conjugate as c
i, p = self.exp.is_integer, self.base.is_positive
if i:
return c(self.base)**self.exp
if p:
return self.base**c(self.exp)
if i is False and p is False:
expanded = expand_complex(self)
assert expanded != self
return c(expanded)
def _eval_transpose(self):
from ..functions.elementary.complexes import transpose
i, p = self.exp.is_integer, self.base.is_complex
if p:
return self.base**self.exp
if i:
return transpose(self.base)**self.exp
def _eval_expand_power_exp(self, **hints):
"""a**(n+m) -> a**n*a**m."""
b = self.base
e = self.exp
if e.is_Add and e.is_commutative:
expr = []
for x in e.args:
expr.append(self.func(self.base, x))
return Mul(*expr)
return self.func(b, e)
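    # Illustrative sketch, not part of the original class: assuming the usual
    # expand_power_exp helper routes through this hint, a sum in the exponent
    # is split into a product, e.g. expand_power_exp(x**(y + 2)) -> x**2*x**y.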
def _eval_expand_power_base(self, **hints):
"""(a*b)**n -> a**n * b**n."""
force = hints.get('force', False)
b = self.base
e = self.exp
if not b.is_Mul:
return self
cargs, nc = b.args_cnc(split_1=False)
# expand each term - this is top-level-only
# expansion but we have to watch out for things
# that don't have an _eval_expand method
if nc:
nc = [i._eval_expand_power_base(**hints)
if hasattr(i, '_eval_expand_power_base') else i
for i in nc]
if e.is_Integer:
if e.is_positive:
rv = Mul(*nc*e)
else:
rv = 1/Mul(*nc*-e)
if cargs:
rv *= Mul(*cargs)**e
return rv
if not cargs:
return self.func(Mul(*nc), e, evaluate=False)
nc = [Mul(*nc)]
# sift the commutative bases
def pred(x):
if x is I:
return I
polar = x.is_polar
if polar:
return True
if polar is None:
return fuzzy_or([x.is_nonnegative, (1/x).is_nonnegative])
sifted = sift(cargs, pred)
nonneg = sifted[True]
other = sifted[None]
neg = sifted[False]
imag = sifted[I]
if imag:
i = len(imag) % 4
if i == 0:
pass
elif i == 1:
other.append(I)
elif i == 2:
if neg:
nonn = -neg.pop()
if nonn is not S.One:
nonneg.append(nonn)
else:
neg.append(Integer(-1))
else:
if neg:
nonn = -neg.pop()
if nonn is not S.One:
nonneg.append(nonn)
else:
neg.append(Integer(-1))
other.append(I)
del imag
# bring out the bases that can be separated from the base
if force or e.is_integer:
# treat all commutatives the same and put nc in other
cargs = nonneg + neg + other
other = nc
else:
# this is just like what is happening automatically, except
# that now we are doing it for an arbitrary exponent for which
# no automatic expansion is done
assert not e.is_Integer
# handle negatives by making them all positive and putting
# the residual -1 in other
if len(neg) > 1:
o = Integer(1)
if not other and neg[0].is_Number:
o *= neg.pop(0)
if len(neg) % 2:
o = -o
for n in neg:
nonneg.append(-n)
if o is not S.One:
other.append(o)
elif neg and other:
if neg[0].is_Number and neg[0] is not S.NegativeOne:
other.append(Integer(-1))
nonneg.append(-neg[0])
else:
other.extend(neg)
else:
other.extend(neg)
del neg
cargs = nonneg
other += nc
rv = Integer(1)
if cargs:
rv *= Mul(*[self.func(b, e, evaluate=False) for b in cargs])
if other:
rv *= self.func(Mul(*other), e, evaluate=False)
return rv
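    # Illustrative sketch, not part of the original class: assuming the usual
    # expand_power_base helper routes through this hint, (x*y)**z is left
    # untouched while the signs of x and y are unknown, whereas force=True
    # (or an integer exponent) separates it into x**z*y**z.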
def _eval_expand_multinomial(self, **hints):
"""(a+b+..) ** n -> a**n + n*a**(n-1)*b + .., n is nonzero integer."""
base, exp = self.base, self.exp
result = self
if exp.is_Rational and exp.numerator > 0 and base.is_Add:
if not exp.is_Integer:
n = Integer(exp.numerator // exp.denominator)
if not n:
return result
else:
radical, result = self.func(base, exp - n), []
expanded_base_n = self.func(base, n)
if expanded_base_n.is_Pow:
expanded_base_n = \
expanded_base_n._eval_expand_multinomial()
for term in Add.make_args(expanded_base_n):
result.append(term*radical)
return Add(*result)
n = int(exp)
if base.is_commutative:
order_terms, other_terms = [], []
for b in base.args:
if b.is_Order:
order_terms.append(b)
else:
other_terms.append(b)
if order_terms:
# (f(x) + O(x^n))^m -> f(x)^m + m*f(x)^{m-1} *O(x^n)
f = Add(*other_terms)
o = Add(*order_terms)
if n == 2:
return expand_multinomial(f**n, deep=False) + n*f*o
else:
g = expand_multinomial(f**(n - 1), deep=False)
return expand_mul(f*g, deep=False) + n*g*o
if base.is_number:
# Efficiently expand expressions of the form (a + b*I)**n
# where 'a' and 'b' are real numbers and 'n' is integer.
a, b = base.as_real_imag()
if a.is_Rational and b.is_Rational:
if not a.is_Integer:
if not b.is_Integer:
k = self.func(a.denominator * b.denominator, n)
a, b = a.numerator*b.denominator, a.denominator*b.numerator
else:
k = self.func(a.denominator, n)
a, b = a.numerator, a.denominator*b
elif not b.is_Integer:
k = self.func(b.denominator, n)
a, b = a*b.denominator, b.numerator
else:
k = 1
a, b, c, d = int(a), int(b), 1, 0
while n:
if n & 1:
c, d = a*c - b*d, b*c + a*d
n -= 1
a, b = a*a - b*b, 2*a*b
n //= 2
if k == 1:
return c + I*d
else:
return Integer(c)/k + I*d/k
p = other_terms
# (x+y)**3 -> x**3 + 3*x**2*y + 3*x*y**2 + y**3
# in this particular example:
# p = [x,y]; n = 3
# so now it's easy to get the correct result -- we get the
# coefficients first:
from ..ntheory import multinomial_coefficients
from ..polys import Poly
expansion_dict = multinomial_coefficients(len(p), n)
# in our example: {(3, 0): 1, (1, 2): 3, (0, 3): 1, (2, 1): 3}
# and now construct the expression.
return Poly(expansion_dict, *p).as_expr()
else:
if n == 2:
return Add(*[f*g for f in base.args for g in base.args])
else:
multi = (base**(n - 1))._eval_expand_multinomial()
assert multi.is_Add
return Add(*[f*g for f in base.args for g in multi.args])
elif (exp.is_Rational and exp.numerator < 0 and base.is_Add and
abs(exp.numerator) > exp.denominator):
return 1 / self.func(base, -exp)._eval_expand_multinomial()
elif exp.is_Add and base.is_Number:
            # n**(a + b) --> n**a * n**b, where n, a, b are Numbers
coeff, tail = Integer(1), Integer(0)
for term in exp.args:
if term.is_Number:
coeff *= self.func(base, term)
else:
tail += term
return coeff * self.func(base, tail)
else:
return result
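    # Illustrative sketch, not part of the original class: for integer
    # exponents the multinomial route gives, e.g.,
    #   ((x + y)**3).expand() -> x**3 + 3*x**2*y + 3*x*y**2 + y**3
    # and the numeric fast path computes (1 + 2*I)**3 -> -11 - 2*I by
    # repeated squaring of the real/imaginary parts.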
def as_real_imag(self, deep=True, **hints):
"""Returns real and imaginary parts of self
See Also
========
diofant.core.expr.Expr.as_real_imag
"""
from ..functions import arg, cos, sin
if self.exp.is_Integer:
exp = self.exp
re, im = self.base.as_real_imag(deep=deep)
if not im:
return self, Integer(0)
a, b = symbols('a b', cls=Dummy)
if exp >= 0:
if re.is_Number and im.is_Number:
# We can be more efficient in this case
expr = expand_multinomial(self.base**exp)
return expr.as_real_imag()
expr = ((a + b)**exp).as_poly() # a = re, b = im; expr = (a + b*I)**exp
else:
mag = re**2 + im**2
re, im = re/mag, -im/mag
if re.is_Number and im.is_Number:
# We can be more efficient in this case
expr = expand_multinomial((re + im*I)**-exp)
return expr.as_real_imag()
expr = ((a + b)**-exp).as_poly()
# Terms with even b powers will be real
r = [i for i in expr.terms() if not i[0][1] % 2]
re_part = Add(*[cc*a**aa*b**bb for (aa, bb), cc in r])
# Terms with odd b powers will be imaginary
r = [i for i in expr.terms() if i[0][1] % 4 == 1]
im_part1 = Add(*[cc*a**aa*b**bb for (aa, bb), cc in r])
r = [i for i in expr.terms() if i[0][1] % 4 == 3]
im_part3 = Add(*[cc*a**aa*b**bb for (aa, bb), cc in r])
return (re_part.subs({a: re, b: I*im}),
im_part1.subs({a: re, b: im}) + im_part3.subs({a: re, b: -im}))
elif self.exp.is_Rational:
re, im = self.base.as_real_imag(deep=deep)
if im.is_zero and self.exp is S.Half:
if re.is_nonnegative:
return self, Integer(0)
if re.is_nonpositive:
return Integer(0), (-self.base)**self.exp
# XXX: This is not totally correct since for x**(p/q) with
# x being imaginary there are actually q roots, but
# only a single one is returned from here.
r = self.func(self.func(re, 2) + self.func(im, 2), Rational(1, 2))
t = arg(re + I*im)
rp, tp = self.func(r, self.exp), t*self.exp
return rp*cos(tp), rp*sin(tp)
elif self.base is E:
from ..functions import exp
re, im = self.exp.as_real_imag()
if deep:
re = re.expand(deep, **hints)
im = im.expand(deep, **hints)
c, s = cos(im), sin(im)
return exp(re)*c, exp(re)*s
else:
from ..functions import im, re
if deep:
hints['complex'] = False
expanded = self.expand(deep, **hints)
if hints.get('ignore') != expanded:
return re(expanded), im(expanded)
else:
return re(self), im(self)
def _eval_derivative(self, s):
from ..functions import log
dbase = self.base.diff(s)
dexp = self.exp.diff(s)
return self * (dexp * log(self.base) + dbase * self.exp/self.base)
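    # Worked check, not part of the original class: the rule above encodes
    # d/ds b**e = b**e*(e'*log(b) + b'*e/b); for f = x**x it yields
    # f' = x**x*(log(x) + 1), matching d/dx exp(x*log(x)).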
def _eval_evalf(self, prec):
base, exp = self.as_base_exp()
base = base._evalf(prec)
if not exp.is_Integer:
exp = exp._evalf(prec)
return self.func(base, exp)
def _eval_is_polynomial(self, syms):
if self.exp.has(*syms):
return False
if self.base.has(*syms):
return bool(self.base._eval_is_polynomial(syms) and
self.exp.is_Integer and (self.exp >= 0))
else:
return True
def _eval_is_rational(self):
p = self.func(*self.as_base_exp()) # in case it's unevaluated
if not p.is_Pow:
return p.is_rational
b, e = p.base, p.exp
if e.is_Rational and b.is_Rational:
# we didn't check that e is not an Integer
# because Rational**Integer autosimplifies
return False
if e.is_integer:
if b.is_rational:
if b.is_nonzero or e.is_nonnegative:
return True
if b == e: # always rational, even for 0**0
return True
elif b.is_irrational:
if e.is_zero:
return True
if b is E:
if e.is_rational and e.is_nonzero:
return False
def _eval_is_algebraic(self):
b, e = self.base, self.exp
if b.is_zero or (b - 1).is_zero:
return True
elif b is E:
s = self.doit()
if s.func == self.func:
if e.is_nonzero:
if e.is_algebraic:
return False
elif (e/pi).is_rational:
return False
elif (e/(I*pi)).is_rational:
return True
else:
return s.is_algebraic
elif e.is_rational and e.is_nonzero:
if b.is_nonzero or e.is_nonnegative:
return b.is_algebraic
elif b.is_algebraic and e.is_algebraic:
if (b.is_nonzero and (b - 1).is_nonzero) or b.is_irrational:
return e.is_rational
def _eval_is_rational_function(self, syms):
if self.exp.has(*syms):
return False
if self.base.has(*syms):
return self.base._eval_is_rational_function(syms) and \
self.exp.is_Integer
else:
return True
def _eval_is_algebraic_expr(self, syms):
if self.exp.has(*syms):
return False
if self.base.has(*syms):
return self.base._eval_is_algebraic_expr(syms) and \
self.exp.is_Rational
else:
return True
def _eval_as_numer_denom(self):
"""Expression -> a/b -> a, b.
See Also
========
diofant.core.expr.Expr.as_numer_denom
"""
if not self.is_commutative:
return self, Integer(1)
base, exp = self.as_base_exp()
if base is S.One:
return self, Integer(1)
n, d = base.as_numer_denom()
neg_exp = exp.is_negative
if not neg_exp and not (-exp).is_negative:
neg_exp = _coeff_isneg(exp)
int_exp = exp.is_integer
# the denominator cannot be separated from the numerator if
# its sign is unknown unless the exponent is an integer, e.g.
# sqrt(a/b) != sqrt(a)/sqrt(b) when a=1 and b=-1. But if the
# denominator is negative the numerator and denominator can
# be negated and the denominator (now positive) separated.
if not (d.is_extended_real or int_exp):
n = base
d = Integer(1)
dnonpos = d.is_nonpositive
if dnonpos:
n, d = -n, -d
elif dnonpos is None and not int_exp:
n = base
d = Integer(1)
if neg_exp:
n, d = d, n
exp = -exp
if d is S.One:
return self.func(n, exp), Integer(1)
if n is S.One:
return Integer(1), self.func(d, exp)
return self.func(n, exp), self.func(d, exp)
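    # Illustrative note, not part of the original class: the sign caveat above
    # is why sqrt(1/-1) must not be split -- sqrt(-1) == I while
    # sqrt(1)/sqrt(-1) == 1/I == -I, so a denominator of unknown sign stays
    # attached to the numerator unless the exponent is an integer.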
def _matches(self, expr, repl_dict={}):
"""Helper method for match().
See Also
========
diofant.core.basic.Basic.matches
"""
expr = sympify(expr, strict=True)
# special case, pattern = 1 and expr.exp can match to 0
if expr is S.One:
d = repl_dict.copy()
d = self.exp._matches(Integer(0), d)
if d is not None:
return d
# make sure the expression to be matched is an Expr
if not isinstance(expr, Expr):
return
b, e = expr.as_base_exp()
# special case number
sb, se = self.as_base_exp()
if sb.is_Symbol and se.is_Integer and expr:
if e.is_rational:
return sb._matches(b**(e/se), repl_dict)
return sb._matches(expr**(1/se), repl_dict)
d = repl_dict.copy()
d = self.base._matches(b, d)
if d is None:
return
d = self.exp.xreplace(d)._matches(e, d)
if d is None:
return Expr._matches(self, expr, repl_dict)
return d
def _eval_nseries(self, x, n, logx):
from ..functions import arg, exp, floor, log
from ..series import Order, limit
from ..simplify import powsimp
if self.base is E:
e_series = self.exp.nseries(x, n=n, logx=logx)
if e_series.is_Order:
return 1 + e_series
e0 = limit(e_series.removeO(), x, 0)
if e0 in (-oo, oo):
return self
t = e_series - e0
exp_series = term = exp(e0)
# series of exp(e0 + t) in t
for i in range(1, n):
term *= t/i
term = term.nseries(x, n=n, logx=logx)
exp_series += term
exp_series += Order(t**n, x)
return powsimp(exp_series, deep=True, combine='exp')
elif self.exp.has(x):
return exp(self.exp*log(self.base)).nseries(x, n=n, logx=logx)
else:
b_series = self.base.nseries(x, n=n, logx=logx)
while b_series.is_Order:
n += 1
b_series = self.base.nseries(x, n=n, logx=logx)
b0 = b_series.as_leading_term(x)
t = expand_mul((b_series/b0 - 1).cancel())
if t.is_Add:
t = t.func(*[i for i in t.args if i.limit(x, 0).is_finite])
c, e = b0.as_coeff_exponent(x)
if self.exp is oo:
if e != 0:
sig = -e
else:
sig = abs(c) - 1 if c != 1 else t.removeO()
if sig.is_positive:
return oo
elif sig.is_negative:
return Integer(0)
else:
raise NotImplementedError
pow_series = term = Integer(1)
# series of (1 + t)**e in t
for i in range(1, n):
term *= (self.exp - i + 1)*t/i
term = term.nseries(x, n=n, logx=logx)
pow_series += term
factor = b0**self.exp
if t != 0 and not (self.exp.is_Integer and self.exp >= 0 and n > self.exp):
pow_series += Order(t**n, x)
# branch handling
if c.is_negative:
l = floor(arg(t.removeO()*c)/(2*pi)).limit(x, 0)
assert l.is_finite
factor *= exp(2*pi*I*self.exp*l)
pow_series = expand_mul(factor*pow_series)
return powsimp(pow_series, deep=True, combine='exp')
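    # Illustrative sketch, not part of the original class: the two main
    # branches correspond to expansions such as
    #   exp(x).nseries(x, n=3)      -> 1 + x + x**2/2 + O(x**3)
    #   sqrt(1 + x).nseries(x, n=3) -> 1 + x/2 - x**2/8 + O(x**3)
    # where the second goes through the (1 + t)**e loop above.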
def _eval_as_leading_term(self, x):
from ..functions import exp, log
from ..series import Order
if not self.exp.has(x):
return self.func(self.base.as_leading_term(x), self.exp)
elif self.base is E:
if self.exp.is_Mul:
k, arg = self.exp.as_independent(x)
else:
k, arg = Integer(1), self.exp
if arg.is_Add:
return Mul(*[exp(k*f).as_leading_term(x) for f in arg.args])
arg = self.exp.as_leading_term(x)
if Order(1, x).contains(arg):
return Integer(1)
return exp(arg)
else:
return exp(self.exp*log(self.base)).as_leading_term(x)
def _eval_rewrite_as_sin(self, base, exp):
from ..functions import sin
if self.base is E:
return sin(I*self.exp + pi/2) - I*sin(I*self.exp)
def _eval_rewrite_as_cos(self, base, exp):
from ..functions import cos
if self.base is E:
return cos(I*self.exp) + I*cos(I*self.exp + pi/2)
def _eval_rewrite_as_tanh(self, base, exp):
from ..functions import tanh
if self.base is E:
return (1 + tanh(self.exp/2))/(1 - tanh(self.exp/2))
def as_content_primitive(self, radical=False):
"""Return the tuple (R, self/R) where R is the positive Rational
extracted from self.
Examples
========
>>> sqrt(4 + 4*sqrt(2)).as_content_primitive()
(2, sqrt(1 + sqrt(2)))
>>> sqrt(3 + 3*sqrt(2)).as_content_primitive()
(1, sqrt(3)*sqrt(1 + sqrt(2)))
>>> ((2*x + 2)**2).as_content_primitive()
(4, (x + 1)**2)
>>> (4**((1 + y)/2)).as_content_primitive()
(2, 4**(y/2))
>>> (3**((1 + y)/2)).as_content_primitive()
(1, 3**((y + 1)/2))
>>> (3**((5 + y)/2)).as_content_primitive()
(9, 3**((y + 1)/2))
>>> eq = 3**(2 + 2*x)
>>> powsimp(eq) == eq
True
>>> eq.as_content_primitive()
(9, 3**(2*x))
>>> powsimp(Mul(*_))
3**(2*x + 2)
>>> eq = (2 + 2*x)**y
>>> s = expand_power_base(eq)
>>> s.is_Mul, s
(False, (2*x + 2)**y)
>>> eq.as_content_primitive()
(1, (2*(x + 1))**y)
>>> s = expand_power_base(_[1])
>>> s.is_Mul, s
(True, 2**y*(x + 1)**y)
See Also
========
diofant.core.expr.Expr.as_content_primitive
"""
b, e = self.as_base_exp()
b = _keep_coeff(*b.as_content_primitive(radical=radical))
ce, pe = e.as_content_primitive(radical=radical)
if b.is_Rational:
# e
# = ce*pe
# = ce*(h + t)
# = ce*h + ce*t
# => self
# = b**(ce*h)*b**(ce*t)
# = b**(cehp/cehq)*b**(ce*t)
# = b**(iceh+r/cehq)*b**(ce*t)
# = b**iceh*b**(r/cehq)*b**(ce*t)
# = b**iceh*b**(ce*t + r/cehq)
h, t = pe.as_coeff_Add()
if h.is_Rational:
ceh = ce*h
c = self.func(b, ceh)
r = Integer(0)
if not c.is_Rational:
iceh, r = divmod(ceh.numerator, ceh.denominator)
c = self.func(b, iceh)
return c, self.func(b, _keep_coeff(ce, t + r/ce/ceh.denominator))
e = _keep_coeff(ce, pe)
# b**e = (h*t)**e = h**e*t**e = c*m*t**e
if e.is_Rational and b.is_Mul:
h, t = b.as_content_primitive(radical=radical) # h is positive
c, m = self.func(h, e).as_coeff_Mul() # so c is positive
m, me = m.as_base_exp()
if m is S.One or me == e: # probably always true
# return the following, not return c, m*Pow(t, e)
# which would change Pow into Mul; we let diofant
# decide what to do by using the unevaluated Mul, e.g
# should it stay as sqrt(2 + 2*sqrt(5)) or become
# sqrt(2)*sqrt(1 + sqrt(5))
return c, self.func(_keep_coeff(m, t), e)
return Integer(1), self.func(b, e)
| bsd-3-clause | -1,297,017,511,015,771,400 | 35.106415 | 96 | 0.433561 | false |
openaid-IATI/deprecated-version-OIPA-v2 | iati/data/migrations/0013_auto__add_field_country_dac_region_code__add_field_country_dac_region_.py | 1 | 19581 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Country.dac_region_code'
db.add_column('data_country', 'dac_region_code',
self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),
keep_default=False)
# Adding field 'Country.dac_region_name'
db.add_column('data_country', 'dac_region_name',
self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Country.dac_region_code'
db.delete_column('data_country', 'dac_region_code')
# Deleting field 'Country.dac_region_name'
db.delete_column('data_country', 'dac_region_name')
models = {
'data.activitystatistics': {
'Meta': {'object_name': 'ActivityStatistics'},
'iati_identifier': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['data.IATIActivity']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'total_budget': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '2', 'blank': 'True'})
},
'data.activitystatustype': {
'Meta': {'object_name': 'ActivityStatusType'},
'code': ('django.db.models.fields.IntegerField', [], {'max_length': '8', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.Country']", 'null': 'True', 'blank': 'True'})
},
'data.aidtype': {
'Meta': {'object_name': 'AidType'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '5', 'primary_key': 'True'})
},
'data.budget': {
'Meta': {'object_name': 'Budget'},
'currency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.CurrencyType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'period_end': ('django.db.models.fields.DateField', [], {}),
'period_start': ('django.db.models.fields.DateField', [], {}),
'type': ('django.db.models.fields.IntegerField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '2'})
},
'data.collaborationtype': {
'Meta': {'object_name': 'CollaborationType'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '55', 'primary_key': 'True'})
},
'data.country': {
'Meta': {'object_name': 'Country'},
'dac_region_code': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'dac_region_name': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'iso': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'})
},
'data.countrystatistics': {
'Meta': {'object_name': 'CountryStatistics'},
'country': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['data.Country']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'total_activities': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'data.currencytype': {
'Meta': {'object_name': 'CurrencyType'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '3', 'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.Country']", 'null': 'True', 'blank': 'True'})
},
'data.financetype': {
'Meta': {'object_name': 'FinanceType'},
'code': ('django.db.models.fields.IntegerField', [], {'max_length': '5', 'primary_key': 'True'})
},
'data.flowtype': {
'Meta': {'object_name': 'FlowType'},
'code': ('django.db.models.fields.IntegerField', [], {'max_length': '5', 'primary_key': 'True'})
},
'data.iatiactivity': {
'Meta': {'object_name': 'IATIActivity'},
'activity_status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.ActivityStatusType']", 'null': 'True', 'blank': 'True'}),
'collaboration_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.CollaborationType']", 'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'default_aid_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.AidType']", 'null': 'True', 'blank': 'True'}),
'default_finance_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.FinanceType']", 'null': 'True', 'blank': 'True'}),
'default_flow_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.FlowType']", 'null': 'True', 'blank': 'True'}),
'default_tied_status_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.TiedAidStatusType']", 'null': 'True', 'blank': 'True'}),
'end_actual': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'end_planned': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'iati_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'reporting_organisation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.Organisation']"}),
'start_actual': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'start_planned': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'data.iatiactivitybudget': {
'Meta': {'object_name': 'IATIActivityBudget', '_ormbases': ['data.Budget']},
'budget_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['data.Budget']", 'unique': 'True', 'primary_key': 'True'}),
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"})
},
'data.iatiactivitycontact': {
'Meta': {'object_name': 'IATIActivityContact'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mailing_address': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'organisation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'person_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'telephone': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
'data.iatiactivitycountry': {
'Meta': {'object_name': 'IATIActivityCountry'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.Country']"}),
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'percentage': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'data.iatiactivitydescription': {
'Meta': {'object_name': 'IATIActivityDescription'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.Language']", 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'})
},
'data.iatiactivitydocument': {
'Meta': {'object_name': 'IATIActivityDocument'},
'format': ('django.db.models.fields.CharField', [], {'max_length': '55', 'null': 'True', 'blank': 'True'}),
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.Language']", 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'data.iatiactivitypolicymarker': {
'Meta': {'object_name': 'IATIActivityPolicyMarker'},
'code': ('django.db.models.fields.IntegerField', [], {'max_length': '5'}),
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'significance_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.SignificanceType']", 'null': 'True', 'blank': 'True'}),
'vocabulary_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.VocabularyType']", 'null': 'True', 'blank': 'True'})
},
'data.iatiactivityregion': {
'Meta': {'object_name': 'IATIActivityRegion'},
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'percentage': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.Region']"})
},
'data.iatiactivitysector': {
'Meta': {'object_name': 'IATIActivitySector'},
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sectors'", 'to': "orm['data.IATIActivity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'percentage': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'sector': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.Sector']"})
},
'data.iatiactivitytitle': {
'Meta': {'object_name': 'IATIActivityTitle'},
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.Language']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'data.iatiactivitywebsite': {
'Meta': {'object_name': 'IATIActivityWebsite'},
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'data.iatitransaction': {
'Meta': {'object_name': 'IATITransaction', '_ormbases': ['data.Transaction']},
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"}),
'transaction_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['data.Transaction']", 'unique': 'True', 'primary_key': 'True'})
},
'data.language': {
'Meta': {'object_name': 'Language'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '55'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'data.organisation': {
'Meta': {'object_name': 'Organisation'},
'date_created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'org_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'org_name_lang': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ref': ('django.db.models.fields.CharField', [], {'max_length': '25', 'primary_key': 'True'}),
'type': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'data.organisationstatistics': {
'Meta': {'object_name': 'OrganisationStatistics'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'organisation': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['data.Organisation']", 'unique': 'True'}),
'total_activities': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'data.otheridentifier': {
'Meta': {'object_name': 'OtherIdentifier'},
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner_ref': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'data.participatingorganisation': {
'Meta': {'object_name': 'ParticipatingOrganisation'},
'date_created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'org_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'org_name_lang': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ref': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'type': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'data.planneddisbursement': {
'Meta': {'object_name': 'PlannedDisbursement'},
'currency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.CurrencyType']"}),
'iati_activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.IATIActivity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'period_end': ('django.db.models.fields.DateField', [], {}),
'period_start': ('django.db.models.fields.DateField', [], {})
},
'data.region': {
'Meta': {'object_name': 'Region'},
'code': ('django.db.models.fields.IntegerField', [], {'max_length': '5', 'primary_key': 'True'})
},
'data.sector': {
'Meta': {'object_name': 'Sector'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '55', 'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'vocabulary_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.VocabularyType']", 'null': 'True', 'blank': 'True'})
},
'data.significancetype': {
'Meta': {'object_name': 'SignificanceType'},
'code': ('django.db.models.fields.IntegerField', [], {'max_length': '5', 'primary_key': 'True'})
},
'data.tiedaidstatustype': {
'Meta': {'object_name': 'TiedAidStatusType'},
'code': ('django.db.models.fields.IntegerField', [], {'max_length': '5', 'primary_key': 'True'})
},
'data.transaction': {
'Meta': {'object_name': 'Transaction'},
'aid_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.AidType']", 'null': 'True', 'blank': 'True'}),
'currency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.CurrencyType']", 'null': 'True', 'blank': 'True'}),
'disbursement_channel': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'finance_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.FinanceType']", 'null': 'True', 'blank': 'True'}),
'flow_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['data.FlowType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'provider_org': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'provider_org'", 'to': "orm['data.Organisation']"}),
'receiver_org': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'receiver_org'", 'null': 'True', 'to': "orm['data.Organisation']"}),
'tied_aid_status_type': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'transaction_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'transaction_type': ('django.db.models.fields.CharField', [], {'max_length': '55'}),
'value': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '2'}),
'value_date': ('django.db.models.fields.DateField', [], {})
},
'data.vocabularytype': {
'Meta': {'object_name': 'VocabularyType'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'primary_key': 'True'})
}
}
complete_apps = ['data'] | agpl-3.0 | 6,472,893,859,356,375,000 | 71.525926 | 182 | 0.547163 | false |
axsauze/eventsfinder | django/db/backends/postgresql_psycopg2/creation.py | 107 | 4139 | import psycopg2.extensions
from django.db.backends.creation import BaseDatabaseCreation
from django.db.backends.util import truncate_name
class DatabaseCreation(BaseDatabaseCreation):
# This dictionary maps Field objects to their associated PostgreSQL column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
data_types = {
'AutoField': 'serial',
'BooleanField': 'boolean',
'CharField': 'varchar(%(max_length)s)',
'CommaSeparatedIntegerField': 'varchar(%(max_length)s)',
'DateField': 'date',
'DateTimeField': 'timestamp with time zone',
'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
'FileField': 'varchar(%(max_length)s)',
'FilePathField': 'varchar(%(max_length)s)',
'FloatField': 'double precision',
'IntegerField': 'integer',
'BigIntegerField': 'bigint',
'IPAddressField': 'inet',
'GenericIPAddressField': 'inet',
'NullBooleanField': 'boolean',
'OneToOneField': 'integer',
'PositiveIntegerField': 'integer CHECK ("%(column)s" >= 0)',
'PositiveSmallIntegerField': 'smallint CHECK ("%(column)s" >= 0)',
'SlugField': 'varchar(%(max_length)s)',
'SmallIntegerField': 'smallint',
'TextField': 'text',
'TimeField': 'time',
}
def sql_table_creation_suffix(self):
assert self.connection.settings_dict['TEST_COLLATION'] is None, "PostgreSQL does not support collation setting at database creation time."
if self.connection.settings_dict['TEST_CHARSET']:
return "WITH ENCODING '%s'" % self.connection.settings_dict['TEST_CHARSET']
return ''
def sql_indexes_for_field(self, model, f, style):
output = []
if f.db_index or f.unique:
qn = self.connection.ops.quote_name
db_table = model._meta.db_table
tablespace = f.db_tablespace or model._meta.db_tablespace
if tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(tablespace)
if tablespace_sql:
tablespace_sql = ' ' + tablespace_sql
else:
tablespace_sql = ''
def get_index_sql(index_name, opclass=''):
return (style.SQL_KEYWORD('CREATE INDEX') + ' ' +
style.SQL_TABLE(qn(truncate_name(index_name,self.connection.ops.max_name_length()))) + ' ' +
style.SQL_KEYWORD('ON') + ' ' +
style.SQL_TABLE(qn(db_table)) + ' ' +
"(%s%s)" % (style.SQL_FIELD(qn(f.column)), opclass) +
"%s;" % tablespace_sql)
if not f.unique:
output = [get_index_sql('%s_%s' % (db_table, f.column))]
# Fields with database column types of `varchar` and `text` need
# a second index that specifies their operator class, which is
# needed when performing correct LIKE queries outside the
# C locale. See #12234.
db_type = f.db_type(connection=self.connection)
if db_type.startswith('varchar'):
output.append(get_index_sql('%s_%s_like' % (db_table, f.column),
' varchar_pattern_ops'))
elif db_type.startswith('text'):
output.append(get_index_sql('%s_%s_like' % (db_table, f.column),
' text_pattern_ops'))
return output
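    # Illustrative sketch, not part of the original class: for a hypothetical
    # model with db_table "app_book" and an indexed CharField column "title",
    # the method above emits roughly
    #   CREATE INDEX "app_book_title" ON "app_book" ("title");
    #   CREATE INDEX "app_book_title_like" ON "app_book" ("title" varchar_pattern_ops);
    # where the second index is what makes LIKE queries work outside the C locale.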
def set_autocommit(self):
self._prepare_for_test_db_ddl()
def _prepare_for_test_db_ddl(self):
"""Rollback and close the active transaction."""
self.connection.connection.rollback()
self.connection.connection.set_isolation_level(
psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
| bsd-3-clause | -4,742,521,401,117,939,000 | 46.574713 | 146 | 0.566079 | false |
Exterminus/harpia | harpia/bpGUI/matchTem.py | 2 | 8161 | # -*- coding: utf-8 -*-
# [HARPIA PROJECT]
#
#
# S2i - Intelligent Industrial Systems
# DAS - Automation and Systems Department
# UFSC - Federal University of Santa Catarina
# Copyright: 2006 - 2007 Luis Carlos Dill Junges ([email protected]), Clovis Peruchi Scotti ([email protected]),
# Guilherme Augusto Rutzen ([email protected]), Mathias Erdtmann ([email protected]) and S2i (www.s2i.das.ufsc.br)
# 2007 - 2009 Clovis Peruchi Scotti ([email protected]), S2i (www.s2i.das.ufsc.br)
#
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further information, check the COPYING file distributed with this software.
#
# ----------------------------------------------------------------------
import gtk
from harpia.GladeWindow import GladeWindow
from harpia.s2icommonproperties import S2iCommonProperties, APP, DIR
# i18n
import os
from harpia.utils.XMLUtils import XMLParser
import gettext
_ = gettext.gettext
gettext.bindtextdomain(APP, DIR)
gettext.textdomain(APP)
# ----------------------------------------------------------------------
class Properties(GladeWindow, S2iCommonProperties):
# ----------------------------------------------------------------------
def __init__(self, PropertiesXML, S2iBlockProperties):
self.m_sDataDir = os.environ['HARPIA_DATA_DIR']
filename = self.m_sDataDir + 'glade/matchTem.ui'
self.m_oPropertiesXML = PropertiesXML
self.m_oS2iBlockProperties = S2iBlockProperties
widget_list = [
'Properties',
'method',
'scaleFactor',
'BackgroundColor',
'BorderColor',
'HelpView'
]
handlers = [
'on_cancel_clicked',
'on_prop_confirm_clicked',
'on_BackColorButton_clicked',
'on_BorderColorButton_clicked'
]
top_window = 'Properties'
GladeWindow.__init__(self, filename, top_window, widget_list, handlers)
# load properties values
self.block_properties = self.m_oPropertiesXML.getTag("properties").getTag("block").getChildTags("property")
for Property in self.block_properties:
if Property.name == "scaleFactor":
self.widgets['scaleFactor'].set_value(float(Property.value));
if Property.name == "method":
if Property.value == "CV_TM_CCOEFF_NORMED":
self.widgets['method'].set_active(int(0))
if Property.value == "CV_TM_CCOEFF":
self.widgets['method'].set_active(int(1))
if Property.value == "CV_TM_CCORR_NORMED":
self.widgets['method'].set_active(int(2))
if Property.value == "CV_TM_CCORR":
self.widgets['method'].set_active(int(3))
if Property.value == "CV_TM_SQDIFF_NORMED":
self.widgets['method'].set_active(int(4))
if Property.value == "CV_TM_SQDIFF":
self.widgets['method'].set_active(int(5))
self.configure()
# load help text
# t_oS2iHelp = XMLParser(self.m_sDataDir + "help/checkCir" + _("_en.help"))
# t_oTextBuffer = gtk.TextBuffer()
# t_oTextBuffer.set_text(unicode(str(t_oS2iHelp.getTag("help").getTag("content").getTagContent())))
# self.widgets['HelpView'].set_buffer(t_oTextBuffer)
# ----------------------------------------------------------------------
def getHelp(self):
return "operação de filtragem destinada a suavizar uma imagem."
# ----------------------------------------------------------------------
def __del__(self):
pass
# ----------------------------------------------------------------------
def on_prop_confirm_clicked(self, *args):
for Property in self.block_properties:
if Property.name == "scaleFactor":
Property.value = unicode(self.widgets['scaleFactor'].get_value())
if Property.name == "method":
Active = self.widgets['method'].get_active()
if int(Active) == 0:
Property.value = unicode("CV_TM_CCOEFF_NORMED")
if int(Active) == 1:
Property.value = unicode("CV_TM_CCOEFF")
if int(Active) == 2:
Property.value = unicode("CV_TM_CCORR_NORMED")
if int(Active) == 3:
Property.value = unicode("CV_TM_CCORR")
if int(Active) == 4:
Property.value = unicode("CV_TM_SQDIFF_NORMED")
if int(Active) == 5:
Property.value = unicode("CV_TM_SQDIFF")
self.m_oS2iBlockProperties.SetPropertiesXML(self.m_oPropertiesXML)
self.m_oS2iBlockProperties.SetBorderColor(self.m_oBorderColor)
self.m_oS2iBlockProperties.SetBackColor(self.m_oBackColor)
self.widgets['Properties'].destroy()
# ----------------------------------------------------------------------
# propProperties = Properties()()
# propProperties.show( center=0 )
# ------------------------------------------------------------------------------
# Code generation
# ------------------------------------------------------------------------------
def generate(blockTemplate):
for propIter in blockTemplate.properties:
if propIter[0] == 'method':
interMethod = propIter[1]
if propIter[0] == "scaleFactor":
scaleFactor = propIter[1]
blockTemplate.imagesIO = \
'IplImage * block$$_img_i1 = NULL;\n' + \
'IplImage * block$$_img_i2 = NULL;\n' + \
'IplImage * block$$_img_t1 = NULL;\n' + \
'IplImage * block$$_img_o1 = NULL;\n'
blockTemplate.functionCall = '\nif(block$$_img_i1 && block$$_img_i2)\n' + \
'{\n' + \
' block$$_img_t1 = cvCreateImage(cvSize(block$$_img_i1->width - block$$_img_i2->width +1,block$$_img_i1->height - block$$_img_i2->height +1),32,1);\n' + \
' block$$_img_o1 = cvCreateImage(cvSize(block$$_img_i1->width - block$$_img_i2->width +1,block$$_img_i1->height - block$$_img_i2->height +1),8,1);\n' + \
' cvMatchTemplate(block$$_img_i1 , block$$_img_i2, block$$_img_t1, ' + interMethod + ');\n' + \
' cvConvertScale(block$$_img_t1,block$$_img_o1, ' + str(
10 ** -(int(float(scaleFactor)))) + ',0);\n' + \
'}\n'
blockTemplate.dealloc = 'cvReleaseImage(&block$$_img_o1);\n' + \
'cvReleaseImage(&block$$_img_t1);\n' + \
'cvReleaseImage(&block$$_img_i2);\n' + \
'cvReleaseImage(&block$$_img_i1);\n'
# ------------------------------------------------------------------------------
# Block Setup
# ------------------------------------------------------------------------------
def getBlock():
return {"Label": _("Match Template"),
"Path": {"Python": "matchTem",
"Glade": "glade/matchTem.ui",
"Xml": "xml/matchTem.xml"},
"Icon": "images/matchTem.png",
"Color": "180:180:10:150",
"InTypes": {0: "HRP_IMAGE", 1: "HRP_IMAGE"},
"OutTypes": {0: "HRP_IMAGE"},
"Description": _("Output shows the matching relation between image (input1) and template (input2)"),
"TreeGroup": _("Feature Detection")
}
| gpl-2.0 | 2,609,609,050,396,005,400 | 41.056701 | 187 | 0.51244 | false |
pschella/scipy | scipy/_lib/tests/test__util.py | 25 | 2076 | from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import assert_equal, assert_, assert_raises
from scipy._lib._util import _aligned_zeros, check_random_state
def test__aligned_zeros():
niter = 10
def check(shape, dtype, order, align):
err_msg = repr((shape, dtype, order, align))
x = _aligned_zeros(shape, dtype, order, align=align)
if align is None:
align = np.dtype(dtype).alignment
assert_equal(x.__array_interface__['data'][0] % align, 0)
if hasattr(shape, '__len__'):
assert_equal(x.shape, shape, err_msg)
else:
assert_equal(x.shape, (shape,), err_msg)
assert_equal(x.dtype, dtype)
if order == "C":
assert_(x.flags.c_contiguous, err_msg)
elif order == "F":
if x.size > 0:
# Size-0 arrays get invalid flags on Numpy 1.5
assert_(x.flags.f_contiguous, err_msg)
elif order is None:
assert_(x.flags.c_contiguous, err_msg)
else:
raise ValueError()
# try various alignments
for align in [1, 2, 3, 4, 8, 16, 32, 64, None]:
for n in [0, 1, 3, 11]:
for order in ["C", "F", None]:
for dtype in [np.uint8, np.float64]:
for shape in [n, (1, 2, 3, n)]:
for j in range(niter):
check(shape, dtype, order, align)
def test_check_random_state():
# If seed is None, return the RandomState singleton used by np.random.
# If seed is an int, return a new RandomState instance seeded with seed.
# If seed is already a RandomState instance, return it.
# Otherwise raise ValueError.
rsi = check_random_state(1)
assert_equal(type(rsi), np.random.RandomState)
rsi = check_random_state(rsi)
assert_equal(type(rsi), np.random.RandomState)
rsi = check_random_state(None)
assert_equal(type(rsi), np.random.RandomState)
assert_raises(ValueError, check_random_state, 'a')
| bsd-3-clause | -5,754,178,822,858,437,000 | 36.745455 | 76 | 0.585742 | false |
seanli9jan/tensorflow | tensorflow/contrib/memory_stats/__init__.py | 53 | 1224 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for memory statistics.
@@BytesInUse
@@BytesLimit
@@MaxBytesInUse
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.memory_stats.python.ops.memory_stats_ops import BytesInUse
from tensorflow.contrib.memory_stats.python.ops.memory_stats_ops import BytesLimit
from tensorflow.contrib.memory_stats.python.ops.memory_stats_ops import MaxBytesInUse
from tensorflow.python.util.all_util import remove_undocumented
remove_undocumented(__name__)
| apache-2.0 | 8,454,416,519,961,227,000 | 38.483871 | 85 | 0.732843 | false |
Philmod/mongo-connector | mongo_connector/doc_managers/solr_doc_manager.py | 8 | 11225 | # Copyright 2013-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Receives documents from the oplog worker threads and indexes them
into the backend.
This file is a document manager for the Solr search engine, but the intent
is that this file can be used as an example to add on different backends.
To extend this to other systems, simply implement the exact same class and
replace the method definitions with API calls for the desired backend.
"""
import re
import json
from itertools import islice
from pysolr import Solr, SolrError
from mongo_connector import errors
from mongo_connector.compat import u
from mongo_connector.constants import (DEFAULT_COMMIT_INTERVAL,
DEFAULT_MAX_BULK)
from mongo_connector.util import retry_until_ok
from mongo_connector.doc_managers import DocManagerBase, exception_wrapper
from mongo_connector.doc_managers.formatters import DocumentFlattener
# pysolr only has 1 exception: SolrError
wrap_exceptions = exception_wrapper({
SolrError: errors.OperationFailed})
ADMIN_URL = 'admin/luke?show=schema&wt=json'
decoder = json.JSONDecoder()
class DocManager(DocManagerBase):
"""The DocManager class creates a connection to the backend engine and
adds/removes documents, and in the case of rollback, searches for them.
The reason for storing id/doc pairs as opposed to doc's is so that multiple
updates to the same doc reflect the most up to date version as opposed to
multiple, slightly different versions of a doc.
"""
def __init__(self, url, auto_commit_interval=DEFAULT_COMMIT_INTERVAL,
unique_key='_id', chunk_size=DEFAULT_MAX_BULK, **kwargs):
"""Verify Solr URL and establish a connection.
"""
self.solr = Solr(url)
self.unique_key = unique_key
# pysolr does things in milliseconds
if auto_commit_interval is not None:
self.auto_commit_interval = auto_commit_interval * 1000
else:
self.auto_commit_interval = None
self.chunk_size = chunk_size
self.field_list = []
self._build_fields()
self._formatter = DocumentFlattener()
def _parse_fields(self, result, field_name):
""" If Schema access, parse fields and build respective lists
"""
field_list = []
for key, value in result.get('schema', {}).get(field_name, {}).items():
if key not in field_list:
field_list.append(key)
return field_list
@wrap_exceptions
def _build_fields(self):
""" Builds a list of valid fields
"""
declared_fields = self.solr._send_request('get', ADMIN_URL)
result = decoder.decode(declared_fields)
self.field_list = self._parse_fields(result, 'fields')
# Build regular expressions to match dynamic fields.
# dynamic field names may have exactly one wildcard, either at
# the beginning or the end of the name
self._dynamic_field_regexes = []
for wc_pattern in self._parse_fields(result, 'dynamicFields'):
if wc_pattern[0] == "*":
self._dynamic_field_regexes.append(
re.compile(".*%s\Z" % wc_pattern[1:]))
elif wc_pattern[-1] == "*":
self._dynamic_field_regexes.append(
re.compile("\A%s.*" % wc_pattern[:-1]))
def _clean_doc(self, doc):
"""Reformats the given document before insertion into Solr.
This method reformats the document in the following ways:
- removes extraneous fields that aren't defined in schema.xml
- unwinds arrays in order to find and later flatten sub-documents
- flattens the document so that there are no sub-documents, and every
value is associated with its dot-separated path of keys
An example:
{"a": 2,
"b": {
"c": {
"d": 5
}
},
"e": [6, 7, 8]
}
becomes:
{"a": 2, "b.c.d": 5, "e.0": 6, "e.1": 7, "e.2": 8}
"""
# Translate the _id field to whatever unique key we're using.
# _id may not exist in the doc, if we retrieved it from Solr
# as part of update.
if '_id' in doc:
doc[self.unique_key] = u(doc.pop("_id"))
# SOLR cannot index fields within sub-documents, so flatten documents
# with the dot-separated path to each value as the respective key
flat_doc = self._formatter.format_document(doc)
# Only include fields that are explicitly provided in the
# schema or match one of the dynamic field patterns, if
# we were able to retrieve the schema
if len(self.field_list) + len(self._dynamic_field_regexes) > 0:
def include_field(field):
return field in self.field_list or any(
regex.match(field) for regex in self._dynamic_field_regexes
)
return dict((k, v) for k, v in flat_doc.items() if include_field(k))
return flat_doc
def stop(self):
""" Stops the instance
"""
pass
def apply_update(self, doc, update_spec):
"""Override DocManagerBase.apply_update to have flat documents."""
# Replace a whole document
        if '$set' not in update_spec and '$unset' not in update_spec:
# update spec contains the new document
update_spec['_ts'] = doc['_ts']
update_spec['ns'] = doc['ns']
update_spec['_id'] = doc['_id']
return update_spec
for to_set in update_spec.get("$set", []):
value = update_spec['$set'][to_set]
# Find dotted-path to the value, remove that key from doc, then
# put value at key:
keys_to_pop = []
for key in doc:
if key.startswith(to_set):
if key == to_set or key[len(to_set)] == '.':
keys_to_pop.append(key)
for key in keys_to_pop:
doc.pop(key)
doc[to_set] = value
for to_unset in update_spec.get("$unset", []):
# MongoDB < 2.5.2 reports $unset for fields that don't exist within
# the document being updated.
keys_to_pop = []
for key in doc:
if key.startswith(to_unset):
if key == to_unset or key[len(to_unset)] == '.':
keys_to_pop.append(key)
for key in keys_to_pop:
doc.pop(key)
return doc
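# Worked example of the prefix handling above (values are hypothetical): with
# a flattened doc {"a.b": 1, "a.c": 2, "ab": 3} and
# update_spec = {"$set": {"a": {"d": 4}}}, the keys "a.b" and "a.c" are popped
# ("ab" survives because the character after the prefix must be "."), and the
# result is {"ab": 3, "a": {"d": 4}}.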
@wrap_exceptions
def update(self, doc, update_spec):
"""Apply updates given in update_spec to the document whose id
matches that of doc.
"""
# Commit outstanding changes so that the document to be updated is the
# same version to which the changes apply.
self.commit()
query = "%s:%s" % (self.unique_key, u(doc['_id']))
results = self.solr.search(query)
if not len(results):
# Document may not be retrievable yet
self.commit()
results = self.solr.search(query)
# Results is an iterable containing only 1 result
for doc in results:
updated = self.apply_update(doc, update_spec)
# A _version_ of 0 will always apply the update
updated['_version_'] = 0
self.upsert(updated)
return updated
@wrap_exceptions
def upsert(self, doc):
"""Update or insert a document into Solr
This method should call whatever add/insert/update method exists for
the backend engine and add the document in there. The input will
always be one mongo document, represented as a Python dictionary.
"""
if self.auto_commit_interval is not None:
self.solr.add([self._clean_doc(doc)],
commit=(self.auto_commit_interval == 0),
commitWithin=u(self.auto_commit_interval))
else:
self.solr.add([self._clean_doc(doc)], commit=False)
@wrap_exceptions
def bulk_upsert(self, docs):
"""Update or insert multiple documents into Solr
docs may be any iterable
"""
if self.auto_commit_interval is not None:
add_kwargs = {
"commit": (self.auto_commit_interval == 0),
"commitWithin": str(self.auto_commit_interval)
}
else:
add_kwargs = {"commit": False}
cleaned = (self._clean_doc(d) for d in docs)
if self.chunk_size > 0:
batch = list(next(cleaned) for i in range(self.chunk_size))
while batch:
self.solr.add(batch, **add_kwargs)
batch = list(next(cleaned)
for i in range(self.chunk_size))
else:
self.solr.add(cleaned, **add_kwargs)
@wrap_exceptions
def remove(self, doc):
"""Removes documents from Solr
The input is a python dictionary that represents a mongo document.
"""
self.solr.delete(id=u(doc["_id"]),
commit=(self.auto_commit_interval == 0))
@wrap_exceptions
def _remove(self):
"""Removes everything
"""
self.solr.delete(q='*:*', commit=(self.auto_commit_interval == 0))
@wrap_exceptions
def _stream_search(self, query):
"""Helper method for iterating over Solr search results."""
for doc in self.solr.search(query, rows=100000000):
if self.unique_key != "_id":
doc["_id"] = doc.pop(self.unique_key)
yield doc
@wrap_exceptions
def search(self, start_ts, end_ts):
"""Called to query Solr for documents in a time range."""
query = '_ts: [%s TO %s]' % (start_ts, end_ts)
return self._stream_search(query)
@wrap_exceptions
def _search(self, query):
"""For test purposes only. Performs search on Solr with given query
Does not have to be implemented.
"""
return self._stream_search(query)
def commit(self):
"""This function is used to force a commit.
"""
retry_until_ok(self.solr.commit)
@wrap_exceptions
def get_last_doc(self):
"""Returns the last document stored in the Solr engine.
"""
# Search everything, sort by descending timestamp, return one row.
try:
result = self.solr.search('*:*', sort='_ts desc', rows=1)
except ValueError:
return None
for r in result:
r['_id'] = r.pop(self.unique_key)
return r
| apache-2.0 | 7,439,807,118,916,180,000 | 36.667785 | 80 | 0.588686 | false |
Srisai85/scipy | scipy/linalg/tests/test_lapack.py | 28 | 17002 | #!/usr/bin/env python
#
# Created by: Pearu Peterson, September 2002
#
from __future__ import division, print_function, absolute_import
from numpy.testing import TestCase, run_module_suite, assert_equal, \
assert_array_almost_equal, assert_, assert_raises, assert_allclose, \
assert_almost_equal
import numpy as np
from scipy.linalg import _flapack as flapack
from scipy.linalg import inv
from scipy.linalg import svd
try:
from scipy.linalg import _clapack as clapack
except ImportError:
clapack = None
from scipy.linalg.lapack import get_lapack_funcs
from scipy.linalg.blas import get_blas_funcs
REAL_DTYPES = [np.float32, np.float64]
COMPLEX_DTYPES = [np.complex64, np.complex128]
DTYPES = REAL_DTYPES + COMPLEX_DTYPES
class TestFlapackSimple(TestCase):
def test_gebal(self):
a = [[1,2,3],[4,5,6],[7,8,9]]
a1 = [[1,0,0,3e-4],
[4,0,0,2e-3],
[7,1,0,0],
[0,1,0,0]]
for p in 'sdzc':
f = getattr(flapack,p+'gebal',None)
if f is None:
continue
ba,lo,hi,pivscale,info = f(a)
assert_(not info,repr(info))
assert_array_almost_equal(ba,a)
assert_equal((lo,hi),(0,len(a[0])-1))
assert_array_almost_equal(pivscale, np.ones(len(a)))
ba,lo,hi,pivscale,info = f(a1,permute=1,scale=1)
assert_(not info,repr(info))
# print a1
# print ba,lo,hi,pivscale
def test_gehrd(self):
a = [[-149, -50,-154],
[537, 180, 546],
[-27, -9, -25]]
for p in 'd':
f = getattr(flapack,p+'gehrd',None)
if f is None:
continue
ht,tau,info = f(a)
assert_(not info,repr(info))
def test_trsyl(self):
a = np.array([[1, 2], [0, 4]])
b = np.array([[5, 6], [0, 8]])
c = np.array([[9, 10], [11, 12]])
trans = 'T'
# Test single and double implementations, including most
# of the options
for dtype in 'fdFD':
a1, b1, c1 = a.astype(dtype), b.astype(dtype), c.astype(dtype)
trsyl, = get_lapack_funcs(('trsyl',), (a1,))
if dtype.isupper(): # is complex dtype
a1[0] += 1j
trans = 'C'
x, scale, info = trsyl(a1, b1, c1)
assert_array_almost_equal(np.dot(a1, x) + np.dot(x, b1), scale * c1)
x, scale, info = trsyl(a1, b1, c1, trana=trans, tranb=trans)
assert_array_almost_equal(np.dot(a1.conjugate().T, x) + np.dot(x, b1.conjugate().T),
scale * c1, decimal=4)
x, scale, info = trsyl(a1, b1, c1, isgn=-1)
assert_array_almost_equal(np.dot(a1, x) - np.dot(x, b1), scale * c1, decimal=4)
def test_lange(self):
a = np.array([
[-149, -50,-154],
[537, 180, 546],
[-27, -9, -25]])
for dtype in 'fdFD':
for norm in 'Mm1OoIiFfEe':
a1 = a.astype(dtype)
if dtype.isupper():
# is complex dtype
a1[0,0] += 1j
lange, = get_lapack_funcs(('lange',), (a1,))
value = lange(norm, a1)
if norm in 'FfEe':
if dtype in 'Ff':
decimal = 3
else:
decimal = 7
ref = np.sqrt(np.sum(np.square(np.abs(a1))))
assert_almost_equal(value, ref, decimal)
else:
if norm in 'Mm':
ref = np.max(np.abs(a1))
elif norm in '1Oo':
ref = np.max(np.sum(np.abs(a1), axis=0))
elif norm in 'Ii':
ref = np.max(np.sum(np.abs(a1), axis=1))
assert_equal(value, ref)
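# Worked reference values for the real-valued case of the matrix above, as a
# sanity check: max-abs ('M') = 546, one-norm ('1', largest column sum of |a|)
# = 154 + 546 + 25 = 725, infinity-norm ('I', largest row sum) =
# 537 + 180 + 546 = 1263, and the Frobenius norm ('F') =
# sqrt(149**2 + 50**2 + 154**2 + 537**2 + 180**2 + 546**2 + 27**2 + 9**2 + 25**2),
# which is roughly 817.8.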
class TestLapack(TestCase):
def test_flapack(self):
if hasattr(flapack,'empty_module'):
# flapack module is empty
pass
def test_clapack(self):
if hasattr(clapack,'empty_module'):
# clapack module is empty
pass
class TestLeastSquaresSolvers(TestCase):
def test_gelsd(self):
for dtype in REAL_DTYPES:
a1 = np.array([[1.0,2.0],
[4.0,5.0],
[7.0,8.0]], dtype=dtype)
b1 = np.array([16.0, 17.0, 20.0], dtype=dtype)
gelsd, gelsd_lwork = get_lapack_funcs(('gelsd','gelsd_lwork'),
(a1, b1))
m, n = a1.shape
if len(b1.shape) == 2:
nrhs = b1.shape[1]
else:
nrhs = 1
# Request of sizes
work,iwork,info = gelsd_lwork(m,n,nrhs,-1)
lwork = int(np.real(work))
iwork_size = iwork
x, s, rank, info = gelsd(a1, b1, lwork, iwork_size,
-1, False, False)
assert_allclose(x[:-1], np.array([-14.333333333333323,
14.999999999999991], dtype=dtype),
rtol=25*np.finfo(dtype).eps)
assert_allclose(s, np.array([12.596017180511966,
0.583396253199685], dtype=dtype),
rtol=25*np.finfo(dtype).eps)
for dtype in COMPLEX_DTYPES:
a1 = np.array([[1.0+4.0j,2.0],
[4.0+0.5j,5.0-3.0j],
[7.0-2.0j,8.0+0.7j]], dtype=dtype)
b1 = np.array([16.0, 17.0+2.0j, 20.0-4.0j], dtype=dtype)
gelsd, gelsd_lwork = get_lapack_funcs(('gelsd','gelsd_lwork'),
(a1, b1))
m, n = a1.shape
if len(b1.shape) == 2:
nrhs = b1.shape[1]
else:
nrhs = 1
# Request of sizes
work, rwork, iwork, info = gelsd_lwork(m,n,nrhs,-1)
lwork = int(np.real(work))
rwork_size = int(rwork)
iwork_size = iwork
x, s, rank, info = gelsd(a1, b1, lwork, rwork_size, iwork_size,
-1, False, False)
assert_allclose(x[:-1],
np.array([1.161753632288328-1.901075709391912j,
1.735882340522193+1.521240901196909j],
dtype=dtype), rtol=25*np.finfo(dtype).eps)
assert_allclose(s,
np.array([13.035514762572043, 4.337666985231382],
dtype=dtype), rtol=25*np.finfo(dtype).eps)
def test_gelss(self):
for dtype in REAL_DTYPES:
a1 = np.array([[1.0,2.0],
[4.0,5.0],
[7.0,8.0]], dtype=dtype)
b1 = np.array([16.0, 17.0, 20.0], dtype=dtype)
gelss, gelss_lwork = get_lapack_funcs(('gelss','gelss_lwork'),
(a1, b1))
m, n = a1.shape
if len(b1.shape) == 2:
nrhs = b1.shape[1]
else:
nrhs = 1
# Request of sizes
work,info = gelss_lwork(m,n,nrhs,-1)
lwork = int(np.real(work))
v,x,s,rank,work,info = gelss(a1, b1,-1,lwork, False, False)
assert_allclose(x[:-1], np.array([-14.333333333333323,
14.999999999999991], dtype=dtype),
rtol=25*np.finfo(dtype).eps)
assert_allclose(s, np.array([12.596017180511966,
0.583396253199685], dtype=dtype),
rtol=25*np.finfo(dtype).eps)
for dtype in COMPLEX_DTYPES:
a1 = np.array([[1.0+4.0j,2.0],
[4.0+0.5j,5.0-3.0j],
[7.0-2.0j,8.0+0.7j]], dtype=dtype)
b1 = np.array([16.0, 17.0+2.0j, 20.0-4.0j], dtype=dtype)
gelss, gelss_lwork = get_lapack_funcs(('gelss','gelss_lwork'),
(a1, b1))
m, n = a1.shape
if len(b1.shape) == 2:
nrhs = b1.shape[1]
else:
nrhs = 1
# Request of sizes
work,info = gelss_lwork(m,n,nrhs,-1)
lwork = int(np.real(work))
v,x,s,rank,work,info = gelss(a1, b1,-1,lwork, False, False)
assert_allclose(x[:-1],
np.array([1.161753632288328-1.901075709391912j,
1.735882340522193+1.521240901196909j],
dtype=dtype), rtol=25*np.finfo(dtype).eps)
assert_allclose(s, np.array([13.035514762572043,
4.337666985231382], dtype=dtype),
rtol=25*np.finfo(dtype).eps)
def test_gelsy(self):
for dtype in REAL_DTYPES:
a1 = np.array([[1.0,2.0],
[4.0,5.0],
[7.0,8.0]], dtype=dtype)
b1 = np.array([16.0, 17.0, 20.0], dtype=dtype)
gelsy, gelsy_lwork = get_lapack_funcs(('gelsy','gelsy_lwork'), (a1, b1))
m, n = a1.shape
if len(b1.shape) == 2:
nrhs = b1.shape[1]
else:
nrhs = 1
# Request of sizes
work, info = gelsy_lwork(m,n,nrhs,10*np.finfo(dtype).eps)
lwork = int(np.real(work))
jptv = np.zeros((a1.shape[1],1), dtype=np.int32)
v, x, j, rank, info = gelsy(a1, b1, jptv, np.finfo(dtype).eps,
lwork, False, False)
assert_allclose(x[:-1], np.array([-14.333333333333323,
14.999999999999991], dtype=dtype),
rtol=25*np.finfo(dtype).eps)
for dtype in COMPLEX_DTYPES:
a1 = np.array([[1.0+4.0j,2.0],
[4.0+0.5j,5.0-3.0j],
[7.0-2.0j,8.0+0.7j]], dtype=dtype)
b1 = np.array([16.0, 17.0+2.0j, 20.0-4.0j], dtype=dtype)
gelsy, gelsy_lwork = get_lapack_funcs(('gelsy','gelsy_lwork'), (a1, b1))
m, n = a1.shape
if len(b1.shape) == 2:
nrhs = b1.shape[1]
else:
nrhs = 1
# Request of sizes
work, info = gelsy_lwork(m,n,nrhs,10*np.finfo(dtype).eps)
lwork = int(np.real(work))
jptv = np.zeros((a1.shape[1],1), dtype=np.int32)
v, x, j, rank, info = gelsy(a1, b1, jptv, np.finfo(dtype).eps,
lwork, False, False)
assert_allclose(x[:-1],
np.array([1.161753632288328-1.901075709391912j,
1.735882340522193+1.521240901196909j],
dtype=dtype), rtol=25*np.finfo(dtype).eps)
class TestRegression(TestCase):
def test_ticket_1645(self):
# Check that RQ routines have correct lwork
for dtype in DTYPES:
a = np.zeros((300, 2), dtype=dtype)
gerqf, = get_lapack_funcs(['gerqf'], [a])
assert_raises(Exception, gerqf, a, lwork=2)
rq, tau, work, info = gerqf(a)
if dtype in REAL_DTYPES:
orgrq, = get_lapack_funcs(['orgrq'], [a])
assert_raises(Exception, orgrq, rq[-2:], tau, lwork=1)
orgrq(rq[-2:], tau, lwork=2)
elif dtype in COMPLEX_DTYPES:
ungrq, = get_lapack_funcs(['ungrq'], [a])
assert_raises(Exception, ungrq, rq[-2:], tau, lwork=1)
ungrq(rq[-2:], tau, lwork=2)
class TestDpotr(TestCase):
def test_gh_2691(self):
# 'lower' argument of dportf/dpotri
for lower in [True, False]:
for clean in [True, False]:
np.random.seed(42)
x = np.random.normal(size=(3, 3))
a = x.dot(x.T)
dpotrf, dpotri = get_lapack_funcs(("potrf", "potri"), (a, ))
c, info = dpotrf(a, lower, clean=clean)
dpt = dpotri(c, lower)[0]
if lower:
assert_allclose(np.tril(dpt), np.tril(inv(a)))
else:
assert_allclose(np.triu(dpt), np.triu(inv(a)))
class TestDlasd4(TestCase):
def test_sing_val_update(self):
sigmas = np.array([4., 3., 2., 0])
m_vec = np.array([3.12, 5.7, -4.8, -2.2])
M = np.hstack((np.vstack((np.diag(sigmas[0:-1]),
np.zeros((1,len(m_vec) - 1)))), m_vec[:, np.newaxis]))
SM = svd(M, full_matrices=False, compute_uv=False, overwrite_a=False,
check_finite=False)
it_len = len(sigmas)
sgm = np.concatenate((sigmas[::-1], (sigmas[0] +
it_len*np.sqrt(np.sum(np.power(m_vec,2))),)))
mvc = np.concatenate((m_vec[::-1], (0,)))
lasd4 = get_lapack_funcs('lasd4',(sigmas,))
roots = []
for i in range(0, it_len):
res = lasd4(i, sgm, mvc)
roots.append(res[1])
assert_((res[3] <= 0),"LAPACK root finding dlasd4 failed to find \
the singular value %i" % i)
roots = np.array(roots)[::-1]
assert_(not np.any(np.isnan(roots)), "There are NaN roots")
assert_allclose(SM, roots, atol=100*np.finfo(np.float64).eps,
rtol=100*np.finfo(np.float64).eps)
def test_lartg():
for dtype in 'fdFD':
lartg = get_lapack_funcs('lartg', dtype=dtype)
f = np.array(3, dtype)
g = np.array(4, dtype)
if np.iscomplexobj(g):
g *= 1j
cs, sn, r = lartg(f, g)
assert_allclose(cs, 3.0/5.0)
assert_allclose(r, 5.0)
if np.iscomplexobj(g):
assert_allclose(sn, -4.0j/5.0)
assert_(type(r) == complex)
assert_(type(cs) == float)
else:
assert_allclose(sn, 4.0/5.0)
def test_rot():
# srot, drot from blas and crot and zrot from lapack.
for dtype in 'fdFD':
c = 0.6
s = 0.8
u = np.ones(4, dtype) * 3
v = np.ones(4, dtype) * 4
atol = 10**-(np.finfo(dtype).precision-1)
if dtype in 'fd':
rot = get_blas_funcs('rot', dtype=dtype)
f = 4
else:
rot = get_lapack_funcs('rot', dtype=dtype)
s *= -1j
v *= 1j
f = 4j
assert_allclose(rot(u, v, c, s), [[5,5,5,5],[0,0,0,0]], atol=atol)
assert_allclose(rot(u, v, c, s, n=2), [[5,5,3,3],[0,0,f,f]], atol=atol)
assert_allclose(rot(u, v, c, s, offx=2,offy=2), [[3,3,5,5],[f,f,0,0]], atol=atol)
assert_allclose(rot(u, v, c, s, incx=2, offy=2, n=2), [[5,3,5,3],[f,f,0,0]], atol=atol)
assert_allclose(rot(u, v, c, s, offx=2, incy=2, n=2), [[3,3,5,5],[0,f,0,f]], atol=atol)
assert_allclose(rot(u, v, c, s, offx=2, incx=2, offy=2, incy=2, n=1), [[3,3,5,3],[f,f,0,f]], atol=atol)
assert_allclose(rot(u, v, c, s, incx=-2, incy=-2, n=2), [[5,3,5,3],[0,f,0,f]], atol=atol)
a, b = rot(u, v, c, s, overwrite_x=1, overwrite_y=1)
assert_(a is u)
assert_(b is v)
assert_allclose(a, [5,5,5,5], atol=atol)
assert_allclose(b, [0,0,0,0], atol=atol)
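# Worked check for the real case above: with c = 0.6, s = 0.8 and components
# x = 3, y = 4, the plane rotation gives c*x + s*y = 1.8 + 3.2 = 5 and
# -s*x + c*y = -2.4 + 2.4 = 0, matching the [5, 5, 5, 5] / [0, 0, 0, 0]
# pattern asserted above.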
def test_larfg_larf():
np.random.seed(1234)
a0 = np.random.random((4,4))
a0 = a0.T.dot(a0)
a0j = np.random.random((4,4)) + 1j*np.random.random((4,4))
a0j = a0j.T.conj().dot(a0j)
# Our test here will be to do one step of reducing a Hermitian matrix to
# tridiagonal form using Householder transforms.
for dtype in 'fdFD':
larfg, larf = get_lapack_funcs(['larfg', 'larf'], dtype=dtype)
if dtype in 'FD':
a = a0j.copy()
else:
a = a0.copy()
# generate a householder transform to clear a[2:,0]
alpha, x, tau = larfg(a.shape[0]-1, a[1,0], a[2:,0])
# create expected output
expected = np.zeros_like(a[:,0])
expected[0] = a[0,0]
expected[1] = alpha
# assemble householder vector
v = np.zeros_like(a[1:,0])
v[0] = 1.0
v[1:] = x
# apply transform from the left
a[1:,:] = larf(v, tau.conjugate(), a[1:,:], np.zeros(a.shape[1]))
# apply transform from the right
a[:,1:] = larf(v, tau, a[:,1:], np.zeros(a.shape[0]), side='R')
assert_allclose(a[:,0], expected, atol=1e-5)
assert_allclose(a[0,:], expected, atol=1e-5)
if __name__ == "__main__":
run_module_suite()
| bsd-3-clause | 5,038,684,073,976,007,000 | 34.945032 | 111 | 0.463416 | false |
edx/edx-analytics-dashboard | analytics_dashboard/learner_analytics_api/v0/views.py | 1 | 6267 | from requests.exceptions import ConnectTimeout
from rest_framework.exceptions import PermissionDenied
from rest_framework.generics import RetrieveAPIView
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from .clients import LearnerAPIClient
from .permissions import HasCourseAccessPermission
from .renderers import TextRenderer
# TODO: Consider caching responses from the data api when working on AN-6157
class BaseLearnerApiView(RetrieveAPIView):
permission_classes = (IsAuthenticated, HasCourseAccessPermission,)
# Serialize the Learner Analytics API response to JSON, by default.
serializer_type = 'json'
# Do not return the HTTP headers from the Data API, by default.
# This will be further investigated in AN-6928.
include_headers = False
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.client = LearnerAPIClient(serializer_type=self.serializer_type)
def get_queryset(self):
"""
DRF requires that we override this method. Since we don't actually use
querysets/django models in this API, this method doesn't have to return
anything.
"""
@property
def course_id(self):
"""
Gets the course_id either from the URL or the querystring parameters.
"""
course_id = getattr(self.request, 'course_id')
if not course_id:
course_id = self.request.query_params.get('course_id')
return course_id
def get(self, request, *args, **kwargs):
"""
Return the response from the Data API.
"""
api_response = self.get_api_response(request, *args, **kwargs)
response_kwargs = dict(
data=api_response.serialized_content,
status=api_response.status_code,
)
if self.include_headers:
response_kwargs['headers'] = api_response.headers
return Response(**response_kwargs)
def get_api_response(self, request, *args, **kwargs):
"""
Fetch the response from the API.
Must be implemented by subclasses.
"""
raise NotImplementedError('Override this method to return the Learner Analytics API response for this view.')
def handle_exception(self, exc):
"""
Handles timeouts raised by the API client by returning an HTTP
504.
"""
if isinstance(exc, ConnectTimeout):
return Response(
data={'developer_message': 'Learner Analytics API timed out.', 'error_code': 'analytics_api_timeout'},
status=504
)
return super().handle_exception(exc)
class DownloadLearnerApiViewMixin:
"""
Requests text/csv data from the Learner Analytics API, and ensures that the REST framework returns it unparsed,
including the response headers.
"""
include_headers = True
content_type = 'text/csv'
serializer_type = 'text'
def get_api_response(self, request, **kwargs):
"""
Sets the HTTP_ACCEPT header on the request to tell the Learner Analytics API which format to return its data in.
And tells the REST framework to render as text. NB: parent class must also define get_api_response()
"""
request.META['Accept'] = self.content_type
request.accepted_renderer = TextRenderer()
return super().get_api_response(request, **kwargs)
class NotFoundLearnerApiViewMixin:
"""
Returns 404s rather than 403s when PermissionDenied exceptions are raised.
"""
@property
def not_found_developer_message(self):
raise NotImplementedError('Override this attribute to define the developer message returned with 404s.')
@property
def not_found_error_code(self):
raise NotImplementedError('Override this attribute to define the error_code string returned with 404s.')
def handle_exception(self, exc):
if isinstance(exc, PermissionDenied):
return Response(
data={'developer_message': self.not_found_developer_message, 'error_code': self.not_found_error_code},
status=404
)
return super().handle_exception(exc)
class LearnerDetailView(NotFoundLearnerApiViewMixin, BaseLearnerApiView):
"""
Forwards requests to the Learner Analytics API's Learner Detail endpoint.
"""
not_found_error_code = 'no_learner_for_course'
@property
def not_found_developer_message(self):
message = 'Learner {} not found'.format(self.kwargs.get('username', ''))
message += f'for course {self.course_id}.' if self.course_id else '.'
return message
def get_api_response(self, request, username, **kwargs):
return self.client.learners(username).get(**request.query_params)
class LearnerListView(BaseLearnerApiView):
"""
Forwards requests to the Learner Analytics API's Learner List endpoint.
"""
def get_api_response(self, request, **kwargs):
return self.client.learners.get(**request.query_params)
class LearnerListCSV(DownloadLearnerApiViewMixin, LearnerListView):
"""
Forwards text/csv requests to the Learner Analytics API's Learner List endpoint,
and returns a simple text response.
"""
class EngagementTimelinesView(NotFoundLearnerApiViewMixin, BaseLearnerApiView):
"""
Forwards requests to the Learner Analytics API's Engagement Timeline
endpoint.
"""
not_found_error_code = 'no_learner_engagement_timeline'
@property
def not_found_developer_message(self):
message = 'Learner {} engagement timeline not found'.format(self.kwargs.get('username', ''))
message += f'for course {self.course_id}.' if self.course_id else '.'
return message
def get_api_response(self, request, username, **kwargs):
return self.client.engagement_timelines(username).get(**request.query_params)
class CourseLearnerMetadataView(BaseLearnerApiView):
"""
Forwards requests to the Learner Analytics API's Course Metadata endpoint.
"""
def get_api_response(self, request, course_id, **kwargs):
return self.client.course_learner_metadata(course_id).get(**request.query_params)
| agpl-3.0 | -2,609,789,061,055,679,500 | 35.436047 | 120 | 0.676081 | false |
schlueter/ansible | lib/ansible/modules/commands/telnet.py | 33 | 2576 | # this is a virtual module that is entirely implemented server side
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: telnet
short_description: Executes a low-down and dirty telnet command
version_added: 2.4
description:
- Executes a low-down and dirty telnet command, not going through the module subsystem.
- This is mostly to be used for enabling ssh on devices that only have telnet enabled by default.
options:
command:
description:
- List of commands to be executed in the telnet session.
required: True
aliases: ['commands']
host:
description:
- The host/target on which to execute the command
required: False
default: remote_addr
user:
description:
- The user for login
required: False
default: remote_user
password:
description:
- The password for login
port:
description:
- Remote port to use
default: 23
timeout:
description:
- timeout for remote operations
default: 120
prompts:
description:
- List of prompts expected before sending next command
required: False
default: ['$']
login_prompt:
description:
- Login or username prompt to expect
required: False
default: 'login: '
password_prompt:
description:
- Password prompt to expect
required: False
default: 'Password: '
pause:
description:
- Seconds to pause between each command issued
required: False
default: 1
notes:
- The C(environment) keyword does not work with this task
author:
- Ansible Core Team
'''
EXAMPLES = '''
- name: send configuration commands to IOS
telnet:
user: cisco
password: cisco
login_prompt: "Username: "
prompts:
- "[>|#]"
command:
- terminal length 0
- configure terminal
- hostname ios01
- name: run show commands
telnet:
user: cisco
password: cisco
login_prompt: "Username: "
prompts:
- "[>|#]"
command:
- terminal length 0
- show version
'''
RETURN = '''
output:
description: output of each command is an element in this list
type: list
returned: always
sample: [ 'success', 'success', '', 'warning .. something' ]
'''
| gpl-3.0 | -6,912,494,424,368,098,000 | 23.769231 | 102 | 0.643634 | false |
wshallum/ansible | lib/ansible/modules/network/netvisor/pn_vrouterbgp.py | 29 | 15078 | #!/usr/bin/python
""" PN-CLI vrouter-bgp-add/vrouter-bgp-remove/vrouter-bgp-modify """
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import shlex
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = """
---
module: pn_vrouterbgp
author: "Pluribus Networks (@amitsi)"
version_added: "2.2"
version: 1.0
short_description: CLI command to add/remove/modify vrouter-bgp.
description:
- Execute vrouter-bgp-add, vrouter-bgp-remove, vrouter-bgp-modify command.
- Each fabric, cluster, standalone switch, or virtual network (VNET) can
provide its tenants with a vRouter service that forwards traffic between
networks and implements Layer 4 protocols.
options:
pn_cliusername:
description:
- Provide login username if user is not root.
required: False
pn_clipassword:
description:
- Provide login password if user is not root.
required: False
pn_cliswitch:
description:
- Target switch(es) to run the cli on.
required: False
state:
description:
- State the action to perform. Use 'present' to add bgp,
'absent' to remove bgp and 'update' to modify bgp.
required: True
choices: ['present', 'absent', 'update']
pn_vrouter_name:
description:
- Specify a name for the vRouter service.
required: True
pn_neighbor:
description:
- Specify a neighbor IP address to use for BGP.
- Required for vrouter-bgp-add.
pn_remote_as:
description:
- Specify the remote Autonomous System(AS) number. This value is between
1 and 4294967295.
- Required for vrouter-bgp-add.
pn_next_hop_self:
description:
- Specify if the next-hop is the same router or not.
pn_password:
description:
- Specify a password, if desired.
pn_ebgp:
description:
- Specify a value for external BGP to accept or attempt BGP connections
to external peers, not directly connected, on the network. This is a
value between 1 and 255.
pn_prefix_listin:
description:
- Specify the prefix list to filter traffic inbound.
pn_prefix_listout:
description:
- Specify the prefix list to filter traffic outbound.
pn_route_reflector:
description:
- Specify if a route reflector client is used.
pn_override_capability:
description:
- Specify if you want to override capability.
pn_soft_reconfig:
description:
- Specify if you want a soft reconfiguration of inbound traffic.
pn_max_prefix:
description:
- Specify the maximum number of prefixes.
pn_max_prefix_warn:
description:
- Specify if you want a warning message when the maximum number of
prefixes is exceeded.
pn_bfd:
description:
- Specify if you want BFD protocol support for fault detection.
pn_multiprotocol:
description:
- Specify a multi-protocol for BGP.
choices: ['ipv4-unicast', 'ipv6-unicast']
pn_weight:
description:
- Specify a default weight value between 0 and 65535 for the neighbor
routes.
pn_default_originate:
description:
- Specify if you want announce default routes to the neighbor or not.
pn_keepalive:
description:
- Specify BGP neighbor keepalive interval in seconds.
pn_holdtime:
description:
- Specify BGP neighbor holdtime in seconds.
pn_route_mapin:
description:
- Specify inbound route map for neighbor.
pn_route_mapout:
description:
- Specify outbound route map for neighbor.
"""
EXAMPLES = """
- name: add vrouter-bgp
pn_vrouterbgp:
state: 'present'
pn_vrouter_name: 'ansible-vrouter'
pn_neighbor: 104.104.104.1
pn_remote_as: 1800
- name: remove vrouter-bgp
pn_vrouterbgp:
state: 'absent'
pn_vrouter_name: 'ansible-vrouter'
"""
RETURN = """
command:
description: The CLI command run on the target node(s).
stdout:
description: The set of responses from the vrouterbpg command.
returned: always
type: list
stderr:
description: The set of error responses from the vrouterbgp command.
returned: on error
type: list
changed:
description: Indicates whether the CLI caused changes on the target.
returned: always
type: bool
"""
VROUTER_EXISTS = None
NEIGHBOR_EXISTS = None
def pn_cli(module):
"""
This method is to generate the cli portion to launch the Netvisor cli.
It parses the username, password, switch parameters from module.
:param module: The Ansible module to fetch username, password and switch
:return: returns the cli string for further processing
"""
username = module.params['pn_cliusername']
password = module.params['pn_clipassword']
cliswitch = module.params['pn_cliswitch']
if username and password:
cli = '/usr/bin/cli --quiet --user %s:%s ' % (username, password)
else:
cli = '/usr/bin/cli --quiet '
if cliswitch == 'local':
cli += ' switch-local '
else:
cli += ' switch ' + cliswitch
return cli
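# Illustrative result of pn_cli() (credentials are hypothetical): with
# pn_cliusername='network-admin', pn_clipassword='admin' and the default
# pn_cliswitch='local', the returned string is
# '/usr/bin/cli --quiet --user network-admin:admin  switch-local '.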
def check_cli(module, cli):
"""
This method checks if vRouter exists on the target node.
This method also checks for idempotency using the vrouter-bgp-show command.
If the given vRouter exists, return VROUTER_EXISTS as True else False.
If a BGP neighbor with the given ip exists on the given vRouter,
return NEIGHBOR_EXISTS as True else False.
:param module: The Ansible module to fetch input parameters
:param cli: The CLI string
:return Global Booleans: VROUTER_EXISTS, NEIGHBOR_EXISTS
"""
vrouter_name = module.params['pn_vrouter_name']
neighbor = module.params['pn_neighbor']
# Global flags
global VROUTER_EXISTS, NEIGHBOR_EXISTS
# Check for vRouter
check_vrouter = cli + ' vrouter-show format name no-show-headers '
check_vrouter = shlex.split(check_vrouter)
out = module.run_command(check_vrouter)[1]
out = out.split()
if vrouter_name in out:
VROUTER_EXISTS = True
else:
VROUTER_EXISTS = False
# Check for BGP neighbors
show = cli + ' vrouter-bgp-show vrouter-name %s ' % vrouter_name
show += 'format neighbor no-show-headers'
show = shlex.split(show)
out = module.run_command(show)[1]
out = out.split()
if neighbor in out:
NEIGHBOR_EXISTS = True
else:
NEIGHBOR_EXISTS = False
def run_cli(module, cli):
"""
This method executes the cli command on the target node(s) and returns the
output. The module then exits based on the output.
:param cli: the complete cli string to be executed on the target node(s).
:param module: The Ansible module to fetch command
"""
cliswitch = module.params['pn_cliswitch']
state = module.params['state']
command = get_command_from_state(state)
cmd = shlex.split(cli)
# 'out' contains the output
# 'err' contains the error messages
result, out, err = module.run_command(cmd)
print_cli = cli.split(cliswitch)[1]
# Response in JSON format
if result != 0:
module.exit_json(
command=print_cli,
stderr=err.strip(),
msg="%s operation failed" % command,
changed=False
)
if out:
module.exit_json(
command=print_cli,
stdout=out.strip(),
msg="%s operation completed" % command,
changed=True
)
else:
module.exit_json(
command=print_cli,
msg="%s operation completed" % command,
changed=True
)
def get_command_from_state(state):
"""
This method gets appropriate command name for the state specified. It
returns the command name for the specified state.
:param state: The state for which the respective command name is required.
"""
command = None
if state == 'present':
command = 'vrouter-bgp-add'
if state == 'absent':
command = 'vrouter-bgp-remove'
if state == 'update':
command = 'vrouter-bgp-modify'
return command
def main():
""" This portion is for arguments parsing """
module = AnsibleModule(
argument_spec=dict(
pn_cliusername=dict(required=False, type='str'),
pn_clipassword=dict(required=False, type='str', no_log=True),
pn_cliswitch=dict(required=False, type='str', default='local'),
state=dict(required=True, type='str',
choices=['present', 'absent', 'update']),
pn_vrouter_name=dict(required=True, type='str'),
pn_neighbor=dict(type='str'),
pn_remote_as=dict(type='str'),
pn_next_hop_self=dict(type='bool'),
pn_password=dict(type='str', no_log=True),
pn_ebgp=dict(type='int'),
pn_prefix_listin=dict(type='str'),
pn_prefix_listout=dict(type='str'),
pn_route_reflector=dict(type='bool'),
pn_override_capability=dict(type='bool'),
pn_soft_reconfig=dict(type='bool'),
pn_max_prefix=dict(type='int'),
pn_max_prefix_warn=dict(type='bool'),
pn_bfd=dict(type='bool'),
pn_multiprotocol=dict(type='str',
choices=['ipv4-unicast', 'ipv6-unicast']),
pn_weight=dict(type='int'),
pn_default_originate=dict(type='bool'),
pn_keepalive=dict(type='str'),
pn_holdtime=dict(type='str'),
pn_route_mapin=dict(type='str'),
pn_route_mapout=dict(type='str')
),
required_if=(
["state", "present",
["pn_vrouter_name", "pn_neighbor", "pn_remote_as"]],
["state", "absent",
["pn_vrouter_name", "pn_neighbor"]],
["state", "update",
["pn_vrouter_name", "pn_neighbor"]]
)
)
# Accessing the arguments
state = module.params['state']
vrouter_name = module.params['pn_vrouter_name']
neighbor = module.params['pn_neighbor']
remote_as = module.params['pn_remote_as']
next_hop_self = module.params['pn_next_hop_self']
password = module.params['pn_password']
ebgp = module.params['pn_ebgp']
prefix_listin = module.params['pn_prefix_listin']
prefix_listout = module.params['pn_prefix_listout']
route_reflector = module.params['pn_route_reflector']
override_capability = module.params['pn_override_capability']
soft_reconfig = module.params['pn_soft_reconfig']
max_prefix = module.params['pn_max_prefix']
max_prefix_warn = module.params['pn_max_prefix_warn']
bfd = module.params['pn_bfd']
multiprotocol = module.params['pn_multiprotocol']
weight = module.params['pn_weight']
default_originate = module.params['pn_default_originate']
keepalive = module.params['pn_keepalive']
holdtime = module.params['pn_holdtime']
route_mapin = module.params['pn_route_mapin']
route_mapout = module.params['pn_route_mapout']
# Building the CLI command string
cli = pn_cli(module)
command = get_command_from_state(state)
if command == 'vrouter-bgp-remove':
check_cli(module, cli)
if VROUTER_EXISTS is False:
module.exit_json(
skipped=True,
msg='vRouter %s does not exist' % vrouter_name
)
if NEIGHBOR_EXISTS is False:
module.exit_json(
skipped=True,
msg=('BGP neighbor with IP %s does not exist on %s'
% (neighbor, vrouter_name))
)
cli += (' %s vrouter-name %s neighbor %s '
% (command, vrouter_name, neighbor))
else:
if command == 'vrouter-bgp-add':
check_cli(module, cli)
if VROUTER_EXISTS is False:
module.exit_json(
skipped=True,
msg='vRouter %s does not exist' % vrouter_name
)
if NEIGHBOR_EXISTS is True:
module.exit_json(
skipped=True,
msg=('BGP neighbor with IP %s already exists on %s'
% (neighbor, vrouter_name))
)
cli += (' %s vrouter-name %s neighbor %s '
% (command, vrouter_name, neighbor))
if remote_as:
cli += ' remote-as ' + str(remote_as)
if next_hop_self is True:
cli += ' next-hop-self '
if next_hop_self is False:
cli += ' no-next-hop-self '
if password:
cli += ' password ' + password
if ebgp:
cli += ' ebgp-multihop ' + str(ebgp)
if prefix_listin:
cli += ' prefix-list-in ' + prefix_listin
if prefix_listout:
cli += ' prefix-list-out ' + prefix_listout
if route_reflector is True:
cli += ' route-reflector-client '
if route_reflector is False:
cli += ' no-route-reflector-client '
if override_capability is True:
cli += ' override-capability '
if override_capability is False:
cli += ' no-override-capability '
if soft_reconfig is True:
cli += ' soft-reconfig-inbound '
if soft_reconfig is False:
cli += ' no-soft-reconfig-inbound '
if max_prefix:
cli += ' max-prefix ' + str(max_prefix)
if max_prefix_warn is True:
cli += ' max-prefix-warn-only '
if max_prefix_warn is False:
cli += ' no-max-prefix-warn-only '
if bfd is True:
cli += ' bfd '
if bfd is False:
cli += ' no-bfd '
if multiprotocol:
cli += ' multi-protocol ' + multiprotocol
if weight:
cli += ' weight ' + str(weight)
if default_originate is True:
cli += ' default-originate '
if default_originate is False:
cli += ' no-default-originate '
if keepalive:
cli += ' neighbor-keepalive-interval ' + keepalive
if holdtime:
cli += ' neighbor-holdtime ' + holdtime
if route_mapin:
cli += ' route-map-in ' + route_mapin
if route_mapout:
cli += ' route-map-out ' + route_mapout
run_cli(module, cli)
# Ansible boiler-plate
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
main()
| gpl-3.0 | -9,021,961,218,522,632,000 | 30.810127 | 79 | 0.610359 | false |
therandomcode/Fanalytics | lib/oauth2client/service_account.py | 52 | 5038 | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A service account credentials class.
This credentials class is implemented on top of the rsa library.
"""
import base64
import time
from pyasn1.codec.ber import decoder
from pyasn1_modules.rfc5208 import PrivateKeyInfo
import rsa
from oauth2client import GOOGLE_REVOKE_URI
from oauth2client import GOOGLE_TOKEN_URI
from oauth2client._helpers import _json_encode
from oauth2client._helpers import _to_bytes
from oauth2client._helpers import _urlsafe_b64encode
from oauth2client import util
from oauth2client.client import AssertionCredentials
class _ServiceAccountCredentials(AssertionCredentials):
"""Class representing a service account (signed JWT) credential."""
MAX_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
def __init__(self, service_account_id, service_account_email,
private_key_id, private_key_pkcs8_text, scopes,
user_agent=None, token_uri=GOOGLE_TOKEN_URI,
revoke_uri=GOOGLE_REVOKE_URI, **kwargs):
super(_ServiceAccountCredentials, self).__init__(
None, user_agent=user_agent, token_uri=token_uri,
revoke_uri=revoke_uri)
self._service_account_id = service_account_id
self._service_account_email = service_account_email
self._private_key_id = private_key_id
self._private_key = _get_private_key(private_key_pkcs8_text)
self._private_key_pkcs8_text = private_key_pkcs8_text
self._scopes = util.scopes_to_string(scopes)
self._user_agent = user_agent
self._token_uri = token_uri
self._revoke_uri = revoke_uri
self._kwargs = kwargs
def _generate_assertion(self):
"""Generate the assertion that will be used in the request."""
header = {
'alg': 'RS256',
'typ': 'JWT',
'kid': self._private_key_id
}
now = int(time.time())
payload = {
'aud': self._token_uri,
'scope': self._scopes,
'iat': now,
'exp': now + _ServiceAccountCredentials.MAX_TOKEN_LIFETIME_SECS,
'iss': self._service_account_email
}
payload.update(self._kwargs)
first_segment = _urlsafe_b64encode(_json_encode(header))
second_segment = _urlsafe_b64encode(_json_encode(payload))
assertion_input = first_segment + b'.' + second_segment
# Sign the assertion.
rsa_bytes = rsa.pkcs1.sign(assertion_input, self._private_key,
'SHA-256')
signature = base64.urlsafe_b64encode(rsa_bytes).rstrip(b'=')
return assertion_input + b'.' + signature
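# Illustrative shape of the signed assertion (all values hypothetical): the
# return value is base64url(header) + b'.' + base64url(payload) + b'.' +
# base64url(signature), where the decoded payload looks roughly like
#   {"aud": "https://accounts.google.com/o/oauth2/token",
#    "scope": "https://www.googleapis.com/auth/devstorage.read_only",
#    "iat": 1400000000, "exp": 1400003600,
#    "iss": "account-1@developer.gserviceaccount.com"}
# with exp = iat + MAX_TOKEN_LIFETIME_SECS (3600 seconds).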
def sign_blob(self, blob):
# Ensure that it is bytes
blob = _to_bytes(blob, encoding='utf-8')
return (self._private_key_id,
rsa.pkcs1.sign(blob, self._private_key, 'SHA-256'))
@property
def service_account_email(self):
return self._service_account_email
@property
def serialization_data(self):
return {
'type': 'service_account',
'client_id': self._service_account_id,
'client_email': self._service_account_email,
'private_key_id': self._private_key_id,
'private_key': self._private_key_pkcs8_text
}
def create_scoped_required(self):
return not self._scopes
def create_scoped(self, scopes):
return _ServiceAccountCredentials(self._service_account_id,
self._service_account_email,
self._private_key_id,
self._private_key_pkcs8_text,
scopes,
user_agent=self._user_agent,
token_uri=self._token_uri,
revoke_uri=self._revoke_uri,
**self._kwargs)
def _get_private_key(private_key_pkcs8_text):
"""Get an RSA private key object from a pkcs8 representation."""
private_key_pkcs8_text = _to_bytes(private_key_pkcs8_text)
der = rsa.pem.load_pem(private_key_pkcs8_text, 'PRIVATE KEY')
asn1_private_key, _ = decoder.decode(der, asn1Spec=PrivateKeyInfo())
return rsa.PrivateKey.load_pkcs1(
asn1_private_key.getComponentByName('privateKey').asOctets(),
format='DER')
| apache-2.0 | 4,068,466,516,727,695,400 | 36.879699 | 76 | 0.611552 | false |
donald-pinckney/EM-Simulator | EM Sim/EM SimContent/Lib/lib2to3/pgen2/token.py | 353 | 1244 | #! /usr/bin/env python
"""Token constants (from "token.h")."""
# Taken from Python (r53757) and modified to include some tokens
# originally monkeypatched in by pgen2.tokenize
#--start constants--
ENDMARKER = 0
NAME = 1
NUMBER = 2
STRING = 3
NEWLINE = 4
INDENT = 5
DEDENT = 6
LPAR = 7
RPAR = 8
LSQB = 9
RSQB = 10
COLON = 11
COMMA = 12
SEMI = 13
PLUS = 14
MINUS = 15
STAR = 16
SLASH = 17
VBAR = 18
AMPER = 19
LESS = 20
GREATER = 21
EQUAL = 22
DOT = 23
PERCENT = 24
BACKQUOTE = 25
LBRACE = 26
RBRACE = 27
EQEQUAL = 28
NOTEQUAL = 29
LESSEQUAL = 30
GREATEREQUAL = 31
TILDE = 32
CIRCUMFLEX = 33
LEFTSHIFT = 34
RIGHTSHIFT = 35
DOUBLESTAR = 36
PLUSEQUAL = 37
MINEQUAL = 38
STAREQUAL = 39
SLASHEQUAL = 40
PERCENTEQUAL = 41
AMPEREQUAL = 42
VBAREQUAL = 43
CIRCUMFLEXEQUAL = 44
LEFTSHIFTEQUAL = 45
RIGHTSHIFTEQUAL = 46
DOUBLESTAREQUAL = 47
DOUBLESLASH = 48
DOUBLESLASHEQUAL = 49
AT = 50
OP = 51
COMMENT = 52
NL = 53
RARROW = 54
ERRORTOKEN = 55
N_TOKENS = 56
NT_OFFSET = 256
#--end constants--
tok_name = {}
for _name, _value in globals().items():
if type(_value) is type(0):
tok_name[_value] = _name
def ISTERMINAL(x):
return x < NT_OFFSET
def ISNONTERMINAL(x):
return x >= NT_OFFSET
def ISEOF(x):
return x == ENDMARKER
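# Minimal usage sketch (illustrative only):
#   >>> tok_name[NAME]
#   'NAME'
#   >>> ISTERMINAL(NAME), ISNONTERMINAL(NT_OFFSET), ISEOF(ENDMARKER)
#   (True, True, True)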
| apache-2.0 | -4,524,195,969,985,903,600 | 14.170732 | 65 | 0.676045 | false |
wbinventor/openmc | openmc/data/kalbach_mann.py | 1 | 14224 | from collections.abc import Iterable
from numbers import Real, Integral
from warnings import warn
import numpy as np
import openmc.checkvalue as cv
from openmc.stats import Tabular, Univariate, Discrete, Mixture
from .function import Tabulated1D, INTERPOLATION_SCHEME
from .angle_energy import AngleEnergy
from .data import EV_PER_MEV
from .endf import get_list_record, get_tab2_record
class KalbachMann(AngleEnergy):
"""Kalbach-Mann distribution
Parameters
----------
breakpoints : Iterable of int
Breakpoints defining interpolation regions
interpolation : Iterable of int
Interpolation codes
energy : Iterable of float
Incoming energies at which distributions exist
energy_out : Iterable of openmc.stats.Univariate
Distribution of outgoing energies corresponding to each incoming energy
precompound : Iterable of openmc.data.Tabulated1D
Precompound factor 'r' as a function of outgoing energy for each
incoming energy
slope : Iterable of openmc.data.Tabulated1D
Kalbach-Chadwick angular distribution slope value 'a' as a function of
outgoing energy for each incoming energy
Attributes
----------
breakpoints : Iterable of int
Breakpoints defining interpolation regions
interpolation : Iterable of int
Interpolation codes
energy : Iterable of float
Incoming energies at which distributions exist
energy_out : Iterable of openmc.stats.Univariate
Distribution of outgoing energies corresponding to each incoming energy
precompound : Iterable of openmc.data.Tabulated1D
Precompound factor 'r' as a function of outgoing energy for each
incoming energy
slope : Iterable of openmc.data.Tabulated1D
Kalbach-Chadwick angular distribution slope value 'a' as a function of
outgoing energy for each incoming energy
"""
def __init__(self, breakpoints, interpolation, energy, energy_out,
precompound, slope):
super().__init__()
self.breakpoints = breakpoints
self.interpolation = interpolation
self.energy = energy
self.energy_out = energy_out
self.precompound = precompound
self.slope = slope
@property
def breakpoints(self):
return self._breakpoints
@property
def interpolation(self):
return self._interpolation
@property
def energy(self):
return self._energy
@property
def energy_out(self):
return self._energy_out
@property
def precompound(self):
return self._precompound
@property
def slope(self):
return self._slope
@breakpoints.setter
def breakpoints(self, breakpoints):
cv.check_type('Kalbach-Mann breakpoints', breakpoints,
Iterable, Integral)
self._breakpoints = breakpoints
@interpolation.setter
def interpolation(self, interpolation):
cv.check_type('Kalbach-Mann interpolation', interpolation,
Iterable, Integral)
self._interpolation = interpolation
@energy.setter
def energy(self, energy):
cv.check_type('Kalbach-Mann incoming energy', energy,
Iterable, Real)
self._energy = energy
@energy_out.setter
def energy_out(self, energy_out):
cv.check_type('Kalbach-Mann distributions', energy_out,
Iterable, Univariate)
self._energy_out = energy_out
@precompound.setter
def precompound(self, precompound):
cv.check_type('Kalbach-Mann precompound factor', precompound,
Iterable, Tabulated1D)
self._precompound = precompound
@slope.setter
def slope(self, slope):
cv.check_type('Kalbach-Mann slope', slope, Iterable, Tabulated1D)
self._slope = slope
def to_hdf5(self, group):
"""Write distribution to an HDF5 group
Parameters
----------
group : h5py.Group
HDF5 group to write to
"""
group.attrs['type'] = np.string_('kalbach-mann')
dset = group.create_dataset('energy', data=self.energy)
dset.attrs['interpolation'] = np.vstack((self.breakpoints,
self.interpolation))
# Determine total number of (E,p,r,a) tuples and create array
n_tuple = sum(len(d) for d in self.energy_out)
distribution = np.empty((5, n_tuple))
# Create array for offsets
offsets = np.empty(len(self.energy_out), dtype=int)
interpolation = np.empty(len(self.energy_out), dtype=int)
n_discrete_lines = np.empty(len(self.energy_out), dtype=int)
j = 0
# Populate offsets and distribution array
for i, (eout, km_r, km_a) in enumerate(zip(
self.energy_out, self.precompound, self.slope)):
n = len(eout)
offsets[i] = j
if isinstance(eout, Mixture):
discrete, continuous = eout.distribution
n_discrete_lines[i] = m = len(discrete)
interpolation[i] = 1 if continuous.interpolation == 'histogram' else 2
distribution[0, j:j+m] = discrete.x
distribution[1, j:j+m] = discrete.p
distribution[2, j:j+m] = discrete.c
distribution[0, j+m:j+n] = continuous.x
distribution[1, j+m:j+n] = continuous.p
distribution[2, j+m:j+n] = continuous.c
else:
if isinstance(eout, Tabular):
n_discrete_lines[i] = 0
interpolation[i] = 1 if eout.interpolation == 'histogram' else 2
elif isinstance(eout, Discrete):
n_discrete_lines[i] = n
interpolation[i] = 1
distribution[0, j:j+n] = eout.x
distribution[1, j:j+n] = eout.p
distribution[2, j:j+n] = eout.c
distribution[3, j:j+n] = km_r.y
distribution[4, j:j+n] = km_a.y
j += n
# Create dataset for distributions
dset = group.create_dataset('distribution', data=distribution)
# Write interpolation as attribute
dset.attrs['offsets'] = offsets
dset.attrs['interpolation'] = interpolation
dset.attrs['n_discrete_lines'] = n_discrete_lines
@classmethod
def from_hdf5(cls, group):
"""Generate Kalbach-Mann distribution from HDF5 data
Parameters
----------
group : h5py.Group
HDF5 group to read from
Returns
-------
openmc.data.KalbachMann
Kalbach-Mann energy distribution
"""
interp_data = group['energy'].attrs['interpolation']
energy_breakpoints = interp_data[0, :]
energy_interpolation = interp_data[1, :]
energy = group['energy'][()]
data = group['distribution']
offsets = data.attrs['offsets']
interpolation = data.attrs['interpolation']
n_discrete_lines = data.attrs['n_discrete_lines']
energy_out = []
precompound = []
slope = []
n_energy = len(energy)
for i in range(n_energy):
# Determine length of outgoing energy distribution and number of
# discrete lines
j = offsets[i]
if i < n_energy - 1:
n = offsets[i+1] - j
else:
n = data.shape[1] - j
m = n_discrete_lines[i]
# Create discrete distribution if lines are present
if m > 0:
eout_discrete = Discrete(data[0, j:j+m], data[1, j:j+m])
eout_discrete.c = data[2, j:j+m]
p_discrete = eout_discrete.c[-1]
# Create continuous distribution
if m < n:
interp = INTERPOLATION_SCHEME[interpolation[i]]
eout_continuous = Tabular(data[0, j+m:j+n], data[1, j+m:j+n], interp)
eout_continuous.c = data[2, j+m:j+n]
# If both continuous and discrete are present, create a mixture
# distribution
if m == 0:
eout_i = eout_continuous
elif m == n:
eout_i = eout_discrete
else:
eout_i = Mixture([p_discrete, 1. - p_discrete],
[eout_discrete, eout_continuous])
km_r = Tabulated1D(data[0, j:j+n], data[3, j:j+n])
km_a = Tabulated1D(data[0, j:j+n], data[4, j:j+n])
energy_out.append(eout_i)
precompound.append(km_r)
slope.append(km_a)
return cls(energy_breakpoints, energy_interpolation,
energy, energy_out, precompound, slope)
@classmethod
def from_ace(cls, ace, idx, ldis):
"""Generate Kalbach-Mann energy-angle distribution from ACE data
Parameters
----------
ace : openmc.data.ace.Table
ACE table to read from
idx : int
Index in XSS array of the start of the energy distribution data
(LDIS + LOCC - 1)
ldis : int
Index in XSS array of the start of the energy distribution block
(e.g. JXS[11])
Returns
-------
openmc.data.KalbachMann
Kalbach-Mann energy-angle distribution
"""
# Read number of interpolation regions and incoming energies
n_regions = int(ace.xss[idx])
n_energy_in = int(ace.xss[idx + 1 + 2*n_regions])
# Get interpolation information
idx += 1
if n_regions > 0:
breakpoints = ace.xss[idx:idx + n_regions].astype(int)
interpolation = ace.xss[idx + n_regions:idx + 2*n_regions].astype(int)
else:
breakpoints = np.array([n_energy_in])
interpolation = np.array([2])
# Incoming energies at which distributions exist
idx += 2*n_regions + 1
energy = ace.xss[idx:idx + n_energy_in]*EV_PER_MEV
# Location of distributions
idx += n_energy_in
loc_dist = ace.xss[idx:idx + n_energy_in].astype(int)
# Initialize variables
energy_out = []
km_r = []
km_a = []
# Read each outgoing energy distribution
for i in range(n_energy_in):
idx = ldis + loc_dist[i] - 1
# intt = interpolation scheme (1=hist, 2=lin-lin)
INTTp = int(ace.xss[idx])
intt = INTTp % 10
n_discrete_lines = (INTTp - intt)//10
if intt not in (1, 2):
warn("Interpolation scheme for continuous tabular distribution "
"is not histogram or linear-linear.")
intt = 2
n_energy_out = int(ace.xss[idx + 1])
data = ace.xss[idx + 2:idx + 2 + 5*n_energy_out].copy()
data.shape = (5, n_energy_out)
data[0,:] *= EV_PER_MEV
# Create continuous distribution
eout_continuous = Tabular(data[0][n_discrete_lines:],
data[1][n_discrete_lines:]/EV_PER_MEV,
INTERPOLATION_SCHEME[intt],
ignore_negative=True)
eout_continuous.c = data[2][n_discrete_lines:]
if np.any(data[1][n_discrete_lines:] < 0.0):
warn("Kalbach-Mann energy distribution has negative "
"probabilities.")
# If discrete lines are present, create a mixture distribution
if n_discrete_lines > 0:
eout_discrete = Discrete(data[0][:n_discrete_lines],
data[1][:n_discrete_lines])
eout_discrete.c = data[2][:n_discrete_lines]
if n_discrete_lines == n_energy_out:
eout_i = eout_discrete
else:
p_discrete = min(sum(eout_discrete.p), 1.0)
eout_i = Mixture([p_discrete, 1. - p_discrete],
[eout_discrete, eout_continuous])
else:
eout_i = eout_continuous
energy_out.append(eout_i)
km_r.append(Tabulated1D(data[0], data[3]))
km_a.append(Tabulated1D(data[0], data[4]))
return cls(breakpoints, interpolation, energy, energy_out, km_r, km_a)
@classmethod
def from_endf(cls, file_obj):
"""Generate Kalbach-Mann distribution from an ENDF evaluation
Parameters
----------
file_obj : file-like object
ENDF file positioned at the start of the Kalbach-Mann distribution
Returns
-------
openmc.data.KalbachMann
Kalbach-Mann energy-angle distribution
"""
params, tab2 = get_tab2_record(file_obj)
lep = params[3]
ne = params[5]
energy = np.zeros(ne)
n_discrete_energies = np.zeros(ne, dtype=int)
energy_out = []
precompound = []
slope = []
for i in range(ne):
items, values = get_list_record(file_obj)
energy[i] = items[1]
n_discrete_energies[i] = items[2]
# TODO: split out discrete energies
n_angle = items[3]
n_energy_out = items[5]
values = np.asarray(values)
values.shape = (n_energy_out, n_angle + 2)
# Outgoing energy distribution at the i-th incoming energy
eout_i = values[:,0]
eout_p_i = values[:,1]
energy_out_i = Tabular(eout_i, eout_p_i, INTERPOLATION_SCHEME[lep])
energy_out.append(energy_out_i)
# Precompound and slope factors for Kalbach-Mann
r_i = values[:,2]
if n_angle == 2:
a_i = values[:,3]
else:
a_i = np.zeros_like(r_i)
precompound.append(Tabulated1D(eout_i, r_i))
slope.append(Tabulated1D(eout_i, a_i))
return cls(tab2.breakpoints, tab2.interpolation, energy,
energy_out, precompound, slope)
| mit | 6,583,787,950,731,069,000 | 34.383085 | 86 | 0.559618 | false |
bclau/nova | nova/tests/integrated/test_servers.py | 8 | 19688 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import time
import zlib
from nova import context
from nova.openstack.common import log as logging
from nova.openstack.common import timeutils
from nova.tests import fake_network
from nova.tests.integrated.api import client
from nova.tests.integrated import integrated_helpers
import nova.virt.fake
LOG = logging.getLogger(__name__)
class ServersTest(integrated_helpers._IntegratedTestBase):
_api_version = 'v2'
_force_delete_parameter = 'forceDelete'
_image_ref_parameter = 'imageRef'
_flavor_ref_parameter = 'flavorRef'
_access_ipv4_parameter = 'accessIPv4'
_access_ipv6_parameter = 'accessIPv6'
_return_resv_id_parameter = 'return_reservation_id'
_min_count_parameter = 'min_count'
def setUp(self):
super(ServersTest, self).setUp()
self.conductor = self.start_service(
'conductor', manager='nova.conductor.manager.ConductorManager')
def _wait_for_state_change(self, server, from_status):
for i in xrange(0, 50):
server = self.api.get_server(server['id'])
if server['status'] != from_status:
break
time.sleep(.1)
return server
def _restart_compute_service(self, *args, **kwargs):
"""restart compute service. NOTE: fake driver forgets all instances."""
self.compute.kill()
self.compute = self.start_service('compute', *args, **kwargs)
def test_get_servers(self):
# Simple check that listing servers works.
servers = self.api.get_servers()
for server in servers:
LOG.debug("server: %s" % server)
def test_create_server_with_error(self):
# Create a server which will enter error state.
fake_network.set_stub_network_methods(self.stubs)
def throw_error(*_):
raise Exception()
self.stubs.Set(nova.virt.fake.FakeDriver, 'spawn', throw_error)
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({"server": server})
created_server_id = created_server['id']
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
found_server = self._wait_for_state_change(found_server, 'BUILD')
self.assertEqual('ERROR', found_server['status'])
self._delete_server(created_server_id)
def test_create_and_delete_server(self):
# Creates and deletes a server.
fake_network.set_stub_network_methods(self.stubs)
# Create server
# Build the server data gradually, checking errors along the way
server = {}
good_server = self._build_minimal_create_server_request()
post = {'server': server}
# Without an imageRef, this throws 500.
# TODO(justinsb): Check whatever the spec says should be thrown here
self.assertRaises(client.OpenStackApiException,
self.api.post_server, post)
# With an invalid imageRef, this throws 500.
server[self._image_ref_parameter] = self.get_invalid_image()
# TODO(justinsb): Check whatever the spec says should be thrown here
self.assertRaises(client.OpenStackApiException,
self.api.post_server, post)
# Add a valid imageRef
server[self._image_ref_parameter] = good_server.get(
self._image_ref_parameter)
# Without flavorRef, this throws 500
# TODO(justinsb): Check whatever the spec says should be thrown here
self.assertRaises(client.OpenStackApiException,
self.api.post_server, post)
server[self._flavor_ref_parameter] = good_server.get(
self._flavor_ref_parameter)
# Without a name, this throws 500
# TODO(justinsb): Check whatever the spec says should be thrown here
self.assertRaises(client.OpenStackApiException,
self.api.post_server, post)
# Set a valid server name
server['name'] = good_server['name']
created_server = self.api.post_server(post)
LOG.debug("created_server: %s" % created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# Check it's there
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
# It should also be in the all-servers list
servers = self.api.get_servers()
server_ids = [s['id'] for s in servers]
self.assertTrue(created_server_id in server_ids)
found_server = self._wait_for_state_change(found_server, 'BUILD')
# It should be available...
# TODO(justinsb): Mock doesn't yet do this...
self.assertEqual('ACTIVE', found_server['status'])
servers = self.api.get_servers(detail=True)
for server in servers:
self.assertTrue("image" in server)
self.assertTrue("flavor" in server)
self._delete_server(created_server_id)
def _force_reclaim(self):
# Make sure that compute manager thinks the instance is
# old enough to be expired
the_past = timeutils.utcnow() + datetime.timedelta(hours=1)
timeutils.set_time_override(override_time=the_past)
ctxt = context.get_admin_context()
self.compute._reclaim_queued_deletes(ctxt)
def test_deferred_delete(self):
# Creates, deletes and waits for server to be reclaimed.
self.flags(reclaim_instance_interval=1)
fake_network.set_stub_network_methods(self.stubs)
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({'server': server})
LOG.debug("created_server: %s" % created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# Wait for it to finish being created
found_server = self._wait_for_state_change(created_server, 'BUILD')
# It should be available...
self.assertEqual('ACTIVE', found_server['status'])
# Cannot restore unless instance is deleted
self.assertRaises(client.OpenStackApiException,
self.api.post_server_action, created_server_id,
{'restore': {}})
# Cannot forceDelete unless instance is deleted
self.assertRaises(client.OpenStackApiException,
self.api.post_server_action, created_server_id,
{'forceDelete': {}})
# Delete the server
self.api.delete_server(created_server_id)
# Wait for queued deletion
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('SOFT_DELETED', found_server['status'])
self._force_reclaim()
# Wait for real deletion
self._wait_for_deletion(created_server_id)
def test_deferred_delete_restore(self):
# Creates, deletes and restores a server.
self.flags(reclaim_instance_interval=3600)
fake_network.set_stub_network_methods(self.stubs)
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({'server': server})
LOG.debug("created_server: %s" % created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# Wait for it to finish being created
found_server = self._wait_for_state_change(created_server, 'BUILD')
# It should be available...
self.assertEqual('ACTIVE', found_server['status'])
# Delete the server
self.api.delete_server(created_server_id)
# Wait for queued deletion
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('SOFT_DELETED', found_server['status'])
# Restore server
self.api.post_server_action(created_server_id, {'restore': {}})
# Wait for server to become active again
found_server = self._wait_for_state_change(found_server, 'DELETED')
self.assertEqual('ACTIVE', found_server['status'])
def test_deferred_delete_force(self):
# Creates, deletes and force deletes a server.
self.flags(reclaim_instance_interval=3600)
fake_network.set_stub_network_methods(self.stubs)
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({'server': server})
LOG.debug("created_server: %s" % created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# Wait for it to finish being created
found_server = self._wait_for_state_change(created_server, 'BUILD')
# It should be available...
self.assertEqual('ACTIVE', found_server['status'])
# Delete the server
self.api.delete_server(created_server_id)
# Wait for queued deletion
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('SOFT_DELETED', found_server['status'])
# Force delete server
self.api.post_server_action(created_server_id,
{self._force_delete_parameter: {}})
# Wait for real deletion
self._wait_for_deletion(created_server_id)
def _wait_for_deletion(self, server_id):
# Wait (briefly) for deletion
for _retries in range(50):
try:
found_server = self.api.get_server(server_id)
except client.OpenStackApiNotFoundException:
found_server = None
LOG.debug("Got 404, proceeding")
break
LOG.debug("Found_server=%s" % found_server)
# TODO(justinsb): Mock doesn't yet do accurate state changes
#if found_server['status'] != 'deleting':
# break
time.sleep(.1)
# Should be gone
self.assertFalse(found_server)
def _delete_server(self, server_id):
# Delete the server
self.api.delete_server(server_id)
self._wait_for_deletion(server_id)
def test_create_server_with_metadata(self):
# Creates a server with metadata.
fake_network.set_stub_network_methods(self.stubs)
# Build the server data gradually, checking errors along the way
server = self._build_minimal_create_server_request()
metadata = {}
for i in range(30):
metadata['key_%s' % i] = 'value_%s' % i
server['metadata'] = metadata
post = {'server': server}
created_server = self.api.post_server(post)
LOG.debug("created_server: %s" % created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
self.assertEqual(metadata, found_server.get('metadata'))
# The server should also be in the all-servers details list
servers = self.api.get_servers(detail=True)
server_map = dict((server['id'], server) for server in servers)
found_server = server_map.get(created_server_id)
self.assertTrue(found_server)
# Details do include metadata
self.assertEqual(metadata, found_server.get('metadata'))
# The server should also be in the all-servers summary list
servers = self.api.get_servers(detail=False)
server_map = dict((server['id'], server) for server in servers)
found_server = server_map.get(created_server_id)
self.assertTrue(found_server)
# Summary should not include metadata
self.assertFalse(found_server.get('metadata'))
# Cleanup
self._delete_server(created_server_id)
def test_create_and_rebuild_server(self):
# Rebuild a server with metadata.
fake_network.set_stub_network_methods(self.stubs)
        # create a server which initially has no metadata
server = self._build_minimal_create_server_request()
server_post = {'server': server}
metadata = {}
for i in range(30):
metadata['key_%s' % i] = 'value_%s' % i
server_post['server']['metadata'] = metadata
created_server = self.api.post_server(server_post)
LOG.debug("created_server: %s" % created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
created_server = self._wait_for_state_change(created_server, 'BUILD')
# rebuild the server with metadata and other server attributes
post = {}
post['rebuild'] = {
self._image_ref_parameter: "76fa36fc-c930-4bf3-8c8a-ea2a2420deb6",
"name": "blah",
self._access_ipv4_parameter: "172.19.0.2",
self._access_ipv6_parameter: "fe80::2",
"metadata": {'some': 'thing'},
}
self.api.post_server_action(created_server_id, post)
LOG.debug("rebuilt server: %s" % created_server)
self.assertTrue(created_server['id'])
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
self.assertEqual({'some': 'thing'}, found_server.get('metadata'))
self.assertEqual('blah', found_server.get('name'))
self.assertEqual(post['rebuild'][self._image_ref_parameter],
found_server.get('image')['id'])
self.assertEqual('172.19.0.2',
found_server[self._access_ipv4_parameter])
self.assertEqual('fe80::2', found_server[self._access_ipv6_parameter])
# rebuild the server with empty metadata and nothing else
post = {}
post['rebuild'] = {
self._image_ref_parameter: "76fa36fc-c930-4bf3-8c8a-ea2a2420deb6",
"metadata": {},
}
self.api.post_server_action(created_server_id, post)
LOG.debug("rebuilt server: %s" % created_server)
self.assertTrue(created_server['id'])
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
self.assertEqual({}, found_server.get('metadata'))
self.assertEqual('blah', found_server.get('name'))
self.assertEqual(post['rebuild'][self._image_ref_parameter],
found_server.get('image')['id'])
self.assertEqual('172.19.0.2',
found_server[self._access_ipv4_parameter])
self.assertEqual('fe80::2', found_server[self._access_ipv6_parameter])
# Cleanup
self._delete_server(created_server_id)
def test_rename_server(self):
# Test building and renaming a server.
fake_network.set_stub_network_methods(self.stubs)
# Create a server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({'server': server})
LOG.debug("created_server: %s" % created_server)
server_id = created_server['id']
self.assertTrue(server_id)
# Rename the server to 'new-name'
self.api.put_server(server_id, {'server': {'name': 'new-name'}})
# Check the name of the server
created_server = self.api.get_server(server_id)
self.assertEqual(created_server['name'], 'new-name')
# Cleanup
self._delete_server(server_id)
def test_create_multiple_servers(self):
# Creates multiple servers and checks for reservation_id.
        # Create 2 servers, setting 'return_reservation_id', which should
# return a reservation_id
server = self._build_minimal_create_server_request()
server[self._min_count_parameter] = 2
server[self._return_resv_id_parameter] = True
post = {'server': server}
response = self.api.post_server(post)
self.assertIn('reservation_id', response)
reservation_id = response['reservation_id']
self.assertNotIn(reservation_id, ['', None])
# Create 1 more server, which should not return a reservation_id
server = self._build_minimal_create_server_request()
post = {'server': server}
created_server = self.api.post_server(post)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# lookup servers created by the first request.
servers = self.api.get_servers(detail=True,
search_opts={'reservation_id': reservation_id})
server_map = dict((server['id'], server) for server in servers)
found_server = server_map.get(created_server_id)
# The server from the 2nd request should not be there.
self.assertEqual(found_server, None)
# Should have found 2 servers.
self.assertEqual(len(server_map), 2)
# Cleanup
self._delete_server(created_server_id)
for server_id in server_map.iterkeys():
self._delete_server(server_id)
def test_create_server_with_injected_files(self):
# Creates a server with injected_files.
fake_network.set_stub_network_methods(self.stubs)
personality = []
# Inject a text file
data = 'Hello, World!'
personality.append({
'path': '/helloworld.txt',
'contents': data.encode('base64'),
})
# Inject a binary file
data = zlib.compress('Hello, World!')
personality.append({
'path': '/helloworld.zip',
'contents': data.encode('base64'),
})
# Create server
server = self._build_minimal_create_server_request()
server['personality'] = personality
post = {'server': server}
created_server = self.api.post_server(post)
LOG.debug("created_server: %s" % created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# Check it's there
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
found_server = self._wait_for_state_change(found_server, 'BUILD')
self.assertEqual('ACTIVE', found_server['status'])
# Cleanup
self._delete_server(created_server_id)
class ServersTestV3(client.TestOpenStackClientV3Mixin, ServersTest):
_force_delete_parameter = 'force_delete'
_api_version = 'v3'
_image_ref_parameter = 'image_ref'
_flavor_ref_parameter = 'flavor_ref'
_access_ipv4_parameter = 'access_ip_v4'
_access_ipv6_parameter = 'access_ip_v6'
_return_resv_id_parameter = 'os-multiple-create:return_reservation_id'
_min_count_parameter = 'os-multiple-create:min_count'
| apache-2.0 | -1,159,182,078,609,849,900 | 37.303502 | 79 | 0.625203 | false |
havard024/prego | sales/api/tests.py | 3 | 17262 | # encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of Treeio.
# License www.tree.io/license
#-*- coding: utf-8 -*-
import json
from django.test import TestCase
from django.test.client import Client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User as DjangoUser
from treeio.core.models import User, Group, Perspective, ModuleSetting, Object
from treeio.identities.models import Contact, ContactType
from treeio.sales.models import SaleOrder, Product, OrderedProduct, Subscription, \
SaleStatus, SaleSource, Lead, Opportunity
from treeio.finance.models import Currency
class SalesAPITest(TestCase):
"Sales functional tests for views"
username = "api_test"
password = "api_password"
prepared = False
authentication_headers = {"CONTENT_TYPE": "application/json",
"HTTP_AUTHORIZATION": "Basic YXBpX3Rlc3Q6YXBpX3Bhc3N3b3Jk"}
content_type = 'application/json'
def setUp(self):
"Initial Setup"
if not self.prepared:
# Clean up first
Object.objects.all().delete()
User.objects.all().delete()
# Create objects
try:
self.group = Group.objects.get(name='test')
except Group.DoesNotExist:
Group.objects.all().delete()
self.group = Group(name='test')
self.group.save()
try:
self.user = DjangoUser.objects.get(username=self.username)
self.user.set_password(self.password)
try:
self.profile = self.user.get_profile()
except Exception:
User.objects.all().delete()
self.user = DjangoUser(username=self.username, password='')
self.user.set_password(self.password)
self.user.save()
except DjangoUser.DoesNotExist:
User.objects.all().delete()
self.user = DjangoUser(username=self.username, password='')
self.user.set_password(self.password)
self.user.save()
try:
perspective = Perspective.objects.get(name='default')
except Perspective.DoesNotExist:
Perspective.objects.all().delete()
perspective = Perspective(name='default')
perspective.set_default_user()
perspective.save()
ModuleSetting.set('default_perspective', perspective.id)
self.contact_type = ContactType()
self.contact_type.slug = 'machine'
self.contact_type.name = 'machine'
self.contact_type.save()
self.contact = Contact()
self.contact.contact_type = self.contact_type
self.contact.set_default_user()
self.contact.save()
self.assertNotEquals(self.contact.id, None)
self.status = SaleStatus()
self.status.active = True
self.status.use_sales = True
self.status.use_leads = True
self.status.use_opportunities = True
self.status.set_default_user()
self.status.save()
self.assertNotEquals(self.status.id, None)
self.currency = Currency(code="GBP",
name="Pounds",
symbol="L",
is_default=True)
self.currency.save()
self.source = SaleSource()
self.source.active = True
self.source.save()
self.source.set_user(self.user)
self.assertNotEquals(self.source.id, None)
self.product = Product(name="Test")
self.product.product_type = 'service'
self.product.active = True
self.product.sell_price = 10
self.product.buy_price = 100
self.product.set_default_user()
self.product.save()
self.assertNotEquals(self.product.id, None)
self.subscription = Subscription()
self.subscription.client = self.contact
self.subscription.set_default_user()
self.subscription.save()
self.assertNotEquals(self.subscription.id, None)
self.lead = Lead()
self.lead.contact_method = 'email'
self.lead.status = self.status
self.lead.contact = self.contact
self.lead.set_default_user()
self.lead.save()
self.assertNotEquals(self.lead.id, None)
self.opportunity = Opportunity()
self.opportunity.lead = self.lead
self.opportunity.contact = self.contact
self.opportunity.status = self.status
self.opportunity.amount = 100
self.opportunity.amount_currency = self.currency
self.opportunity.amount_display = 120
self.opportunity.set_default_user()
self.opportunity.save()
self.assertNotEquals(self.opportunity.id, None)
self.order = SaleOrder(reference="Test")
self.order.opportunity = self.opportunity
self.order.status = self.status
self.order.source = self.source
self.order.currency = self.currency
self.order.total = 0
self.order.total_display = 0
self.order.set_default_user()
self.order.save()
self.assertNotEquals(self.order.id, None)
self.ordered_product = OrderedProduct()
self.ordered_product.product = self.product
self.ordered_product.order = self.order
self.ordered_product.rate = 0
self.ordered_product.subscription = self.subscription
self.ordered_product.set_default_user()
self.ordered_product.save()
self.assertNotEquals(self.ordered_product.id, None)
self.client = Client()
self.prepared = True
def test_unauthenticated_access(self):
"Test index page at /sales/statuses"
response = self.client.get('/api/sales/statuses')
# Redirects as unauthenticated
self.assertEquals(response.status_code, 401)
def test_get_statuses_list(self):
""" Test index page api/sales/status """
response = self.client.get(
path=reverse('api_sales_status'), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
def test_get_status(self):
response = self.client.get(path=reverse('api_sales_status', kwargs={
'object_ptr': self.status.id}), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
def test_update_status(self):
updates = {"name": "Close_API", "active": True, "details": "api test details",
"use_leads": True, "use_opportunities": True, "hidden": False}
response = self.client.put(path=reverse('api_sales_status', kwargs={'object_ptr': self.status.id}),
content_type=self.content_type, data=json.dumps(updates), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(data['name'], updates['name'])
self.assertEquals(data['active'], updates['active'])
self.assertEquals(data['details'], updates['details'])
self.assertEquals(data['use_leads'], updates['use_leads'])
self.assertEquals(
data['use_opportunities'], updates['use_opportunities'])
self.assertEquals(data['hidden'], updates['hidden'])
def test_get_products_list(self):
""" Test index page api/sales/products """
response = self.client.get(
path=reverse('api_sales_products'), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
def test_get_product(self):
response = self.client.get(path=reverse('api_sales_products', kwargs={
'object_ptr': self.product.id}), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
def test_update_product(self):
updates = {"name": "API product", "parent": None, "product_type": "service", "code": "api_test_code",
"buy_price": '100.05', "sell_price": '10.5', "active": True, "runout_action": "ignore", "details": "api details"}
response = self.client.put(path=reverse('api_sales_products', kwargs={'object_ptr': self.product.id}),
content_type=self.content_type, data=json.dumps(updates), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(data['name'], updates['name'])
self.assertEquals(data['product_type'], updates['product_type'])
self.assertEquals(data['code'], updates['code'])
self.assertEquals(data['buy_price'], updates['buy_price'])
self.assertEquals(data['sell_price'], updates['sell_price'])
self.assertEquals(data['active'], updates['active'])
self.assertEquals(data['runout_action'], updates['runout_action'])
self.assertEquals(data['details'], updates['details'])
def test_get_sources_list(self):
""" Test index page api/sales/sources """
response = self.client.get(
path=reverse('api_sales_sources'), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
def test_get_source(self):
response = self.client.get(path=reverse('api_sales_sources', kwargs={
'object_ptr': self.source.id}), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
def test_update_source(self):
updates = {
"name": "Api source", "active": True, "details": "api details"}
response = self.client.put(path=reverse('api_sales_sources', kwargs={'object_ptr': self.source.id}),
content_type=self.content_type, data=json.dumps(updates), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(data['name'], updates['name'])
self.assertEquals(data['active'], updates['active'])
self.assertEquals(data['details'], updates['details'])
#
def test_get_leads_list(self):
""" Test index page api/sales/leads """
response = self.client.get(
path=reverse('api_sales_leads'), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
def test_get_lead(self):
response = self.client.get(path=reverse(
'api_sales_leads', kwargs={'object_ptr': self.lead.id}), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
def test_update_lead(self):
updates = {"status": self.status.id, "contact_method": "email", "contact": self.contact.id,
"products_interested": [self.product.id], "source": self.source.id, 'details': 'Api details'}
response = self.client.put(path=reverse('api_sales_leads', kwargs={'object_ptr': self.lead.id}),
content_type=self.content_type, data=json.dumps(updates), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(data['status']['id'], updates['status'])
self.assertEquals(data['contact_method'], updates['contact_method'])
self.assertEquals(data['contact']['id'], updates['contact'])
for i, product in enumerate(data['products_interested']):
self.assertEquals(product['id'], updates['products_interested'][i])
self.assertEquals(data['source']['id'], updates['source'])
self.assertEquals(data['details'], updates['details'])
def test_get_opportunities_list(self):
""" Test index page api/sales/opportunities """
response = self.client.get(
path=reverse('api_sales_opportunities'), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
def test_get_opportunity(self):
response = self.client.get(path=reverse('api_sales_opportunities', kwargs={
'object_ptr': self.opportunity.id}), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
def test_update_opportunity(self):
updates = {"status": self.status.id, "products_interested": [self.product.id], "contact": self.contact.id,
"amount_display": 3000.56, "amount_currency": self.currency.id, "details": "API DETAILS"}
response = self.client.put(path=reverse('api_sales_opportunities', kwargs={'object_ptr': self.opportunity.id}),
content_type=self.content_type, data=json.dumps(updates), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(data['status']['id'], updates['status'])
self.assertEquals(data['contact']['id'], updates['contact'])
for i, product in enumerate(data['products_interested']):
self.assertEquals(product['id'], updates['products_interested'][i])
self.assertEquals(
data['amount_currency']['id'], updates['amount_currency'])
self.assertEquals(data['details'], updates['details'])
def test_get_orders_list(self):
""" Test index page api/sales/orders """
response = self.client.get(
path=reverse('api_sales_orders'), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
def test_get_order(self):
response = self.client.get(path=reverse(
'api_sales_orders', kwargs={'object_ptr': self.order.id}), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
def test_update_order(self):
updates = {"datetime": "2011-04-11 12:01:15", "status": self.status.id,
"source": self.source.id, "details": "api details"}
response = self.client.put(path=reverse('api_sales_orders', kwargs={'object_ptr': self.order.id}),
content_type=self.content_type, data=json.dumps(updates), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(data['status']['id'], updates['status'])
self.assertEquals(data['source']['id'], updates['source'])
self.assertEquals(data['details'], updates['details'])
def test_get_subscriptions_list(self):
""" Test index page api/sales/subscriptions"""
response = self.client.get(
path=reverse('api_sales_subscriptions'), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
def test_get_subscription(self):
response = self.client.get(path=reverse('api_sales_subscriptions', kwargs={
'object_ptr': self.subscription.id}), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
def test_update_subscription(self):
updates = {"product": self.product.id, "start": "2011-06-30",
"cycle_period": "daily", "active": True, "details": "api details"}
response = self.client.put(path=reverse('api_sales_subscriptions', kwargs={'object_ptr': self.subscription.id}),
content_type=self.content_type, data=json.dumps(updates), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(data['product']['id'], updates['product'])
self.assertEquals(data['cycle_period'], updates['cycle_period'])
self.assertEquals(data['active'], updates['active'])
self.assertEquals(data['details'], updates['details'])
def test_get_ordered_product(self):
response = self.client.get(path=reverse('api_sales_ordered_products', kwargs={
'object_ptr': self.ordered_product.id}), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
def test_update_ordered_product(self):
updates = {
"discount": '10.0', "product": self.product.id, "quantity": '10'}
response = self.client.put(path=reverse('api_sales_ordered_products', kwargs={'object_ptr': self.ordered_product.id}),
content_type=self.content_type, data=json.dumps(updates), **self.authentication_headers)
self.assertEquals(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(data['product']['id'], updates['product'])
self.assertEquals(data['discount'], updates['discount'])
self.assertEquals(data['quantity'], updates['quantity'])
| mit | 4,903,779,124,763,055,000 | 46.293151 | 132 | 0.609605 | false |
iwoca/django-deep-collector | tests/test_serializer.py | 1 | 1514 | from copy import copy
import json
from django.test import TestCase
from .factories import ChildModelFactory
from deep_collector.compat.serializers import MultiModelInheritanceSerializer
class TestMultiModelInheritanceSerializer(TestCase):
def test_that_parent_model_fields_are_in_serializated_object_if_parent_is_not_abstract(self):
child_model = ChildModelFactory.create()
serializer = MultiModelInheritanceSerializer()
json_objects = serializer.serialize([child_model])
child_model_dict = json.loads(json_objects)[0]
serialized_fields = child_model_dict['fields'].keys()
self.assertIn('child_field', serialized_fields)
self.assertIn('o2o', serialized_fields)
self.assertIn('fkey', serialized_fields)
def test_that_we_dont_alter_model_class_meta_after_serialization(self):
child_model = ChildModelFactory.create()
local_fields_before = copy(child_model._meta.concrete_model._meta.local_fields)
local_m2m_fields_before = copy(child_model._meta.concrete_model._meta.local_many_to_many)
serializer = MultiModelInheritanceSerializer()
serializer.serialize([child_model])
local_fields_after = copy(child_model._meta.concrete_model._meta.local_fields)
local_m2m_fields_after = copy(child_model._meta.concrete_model._meta.local_many_to_many)
self.assertEqual(local_fields_before, local_fields_after)
self.assertEqual(local_m2m_fields_before, local_m2m_fields_after)
| bsd-3-clause | -4,081,760,577,509,073,400 | 42.257143 | 97 | 0.729194 | false |
garyjyao1/ansible | lib/ansible/plugins/connection/docker.py | 15 | 8622 | # Based on the chroot connection plugin by Maykel Moya
#
# Connection plugin for configuring docker containers
# (c) 2014, Lorin Hochstein
# (c) 2015, Leendert Brouwer
# (c) 2015, Toshio Kuratomi <[email protected]>
#
# Maintainer: Leendert Brouwer (https://github.com/objectified)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import distutils.spawn
import os
import os.path
import pipes
import subprocess
import re
from distutils.version import LooseVersion
import ansible.constants as C
from ansible.errors import AnsibleError, AnsibleFileNotFound
from ansible.plugins.connection import ConnectionBase
BUFSIZE = 65536
class Connection(ConnectionBase):
''' Local docker based connections '''
transport = 'docker'
has_pipelining = True
# su currently has an undiagnosed issue with calculating the file
# checksums (so copy, for instance, doesn't work right)
# Have to look into that before re-enabling this
become_methods = frozenset(C.BECOME_METHODS).difference(('su',))
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
# Note: docker supports running as non-root in some configurations.
# (For instance, setting the UNIX socket file to be readable and
# writable by a specific UNIX group and then putting users into that
# group). Therefore we don't check that the user is root when using
# this connection. But if the user is getting a permission denied
# error it probably means that docker on their system is only
# configured to be connected to by root and they are not running as
# root.
if 'docker_command' in kwargs:
self.docker_cmd = kwargs['docker_command']
else:
self.docker_cmd = distutils.spawn.find_executable('docker')
if not self.docker_cmd:
raise AnsibleError("docker command not found in PATH")
self.can_copy_bothways = False
docker_version = self._get_docker_version()
if LooseVersion(docker_version) < LooseVersion('1.3'):
raise AnsibleError('docker connection type requires docker 1.3 or higher')
if LooseVersion(docker_version) >= LooseVersion('1.8.0'):
self.can_copy_bothways = True
@staticmethod
def _sanitize_version(version):
return re.sub('[^0-9a-zA-Z\.]', '', version)
def _get_docker_version(self):
cmd = [self.docker_cmd, 'version']
cmd_output = subprocess.check_output(cmd)
for line in cmd_output.split('\n'):
if line.startswith('Server version:'): # old docker versions
return self._sanitize_version(line.split()[2])
# no result yet, must be newer Docker version
new_docker_cmd = [
self.docker_cmd,
'version', '--format', "'{{.Server.Version}}'"
]
cmd_output = subprocess.check_output(new_docker_cmd)
return self._sanitize_version(cmd_output)
def _connect(self, port=None):
""" Connect to the container. Nothing to do """
super(Connection, self)._connect()
if not self._connected:
self._display.vvv("ESTABLISH DOCKER CONNECTION FOR USER: {0}".format(
self._play_context.remote_user, host=self._play_context.remote_addr)
)
self._connected = True
def exec_command(self, cmd, in_data=None, sudoable=False):
""" Run a command on the docker host """
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
executable = C.DEFAULT_EXECUTABLE.split()[0] if C.DEFAULT_EXECUTABLE else '/bin/sh'
# -i is needed to keep stdin open which allows pipelining to work
local_cmd = [self.docker_cmd, "exec", '-i', self._play_context.remote_addr, executable, '-c', cmd]
self._display.vvv("EXEC %s" % (local_cmd), host=self._play_context.remote_addr)
p = subprocess.Popen(local_cmd, shell=False, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate(in_data)
return (p.returncode, stdout, stderr)
def _prefix_login_path(self, remote_path):
''' Make sure that we put files into a standard path
If a path is relative, then we need to choose where to put it.
ssh chooses $HOME but we aren't guaranteed that a home dir will
exist in any given chroot. So for now we're choosing "/" instead.
This also happens to be the former default.
Can revisit using $HOME instead if it's a problem
'''
if not remote_path.startswith(os.path.sep):
remote_path = os.path.join(os.path.sep, remote_path)
return os.path.normpath(remote_path)
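        # A small worked example of the normalisation described above
        # (illustrative only): a relative path such as 'tmp/foo' becomes
        # '/tmp/foo', while an absolute path like '/etc/hosts' is unchanged.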
def put_file(self, in_path, out_path):
""" Transfer a file from local to docker container """
super(Connection, self).put_file(in_path, out_path)
self._display.vvv("PUT %s TO %s" % (in_path, out_path), host=self._play_context.remote_addr)
out_path = self._prefix_login_path(out_path)
if not os.path.exists(in_path):
raise AnsibleFileNotFound(
"file or module does not exist: %s" % in_path)
if self.can_copy_bothways:
# only docker >= 1.8.1 can do this natively
args = [ self.docker_cmd, "cp", in_path, "%s:%s" % (self._play_context.remote_addr, out_path) ]
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
else:
out_path = pipes.quote(out_path)
# Older docker doesn't have native support for copying files into
# running containers, so we use docker exec to implement this
executable = C.DEFAULT_EXECUTABLE.split()[0] if C.DEFAULT_EXECUTABLE else '/bin/sh'
args = [self.docker_cmd, "exec", "-i", self._play_context.remote_addr, executable, "-c",
"dd of={0} bs={1}".format(out_path, BUFSIZE)]
with open(in_path, 'rb') as in_file:
try:
p = subprocess.Popen(args, stdin=in_file,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except OSError:
raise AnsibleError("docker connection with docker < 1.8.1 requires dd command in the chroot")
stdout, stderr = p.communicate()
if p.returncode != 0:
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
def fetch_file(self, in_path, out_path):
""" Fetch a file from container to local. """
super(Connection, self).fetch_file(in_path, out_path)
self._display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self._play_context.remote_addr)
in_path = self._prefix_login_path(in_path)
# out_path is the final file path, but docker takes a directory, not a
# file path
out_dir = os.path.dirname(out_path)
args = [self.docker_cmd, "cp", "%s:%s" % (self._play_context.remote_addr, in_path), out_dir]
p = subprocess.Popen(args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.communicate()
# Rename if needed
actual_out_path = os.path.join(out_dir, os.path.basename(in_path))
if actual_out_path != out_path:
os.rename(actual_out_path, out_path)
def close(self):
""" Terminate the connection. Nothing to do for Docker"""
super(Connection, self).close()
self._connected = False
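# A minimal sketch of selecting this connection plugin from an Ansible
# inventory entry (illustrative only; the container name "web1" is an
# assumption, not something defined in this file):
#
#   web1 ansible_connection=docker ansible_host=web1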
| gpl-3.0 | -5,472,440,659,272,386,000 | 42.326633 | 121 | 0.63338 | false |
c0defreak/python-for-android | python3-alpha/extra_modules/gdata/geo/__init__.py | 45 | 6005 | # -*-*- encoding: utf-8 -*-*-
#
# This is gdata.photos.geo, implementing geological positioning in gdata structures
#
# $Id: __init__.py 81 2007-10-03 14:41:42Z havard.gulldahl $
#
# Copyright 2007 Håvard Gulldahl
# Portions copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Picasa Web Albums uses the georss and gml namespaces for
elements defined in the GeoRSS and Geography Markup Language specifications.
Specifically, Picasa Web Albums uses the following elements:
georss:where
gml:Point
gml:pos
http://code.google.com/apis/picasaweb/reference.html#georss_reference
Picasa Web Albums also accepts geographic-location data in two other formats:
W3C format and plain-GeoRSS (without GML) format.
"""
#
#Over the wire, the Picasa Web Albums only accepts and sends the
#elements mentioned above, but this module will let you seamlessly convert
#between the different formats (TODO 2007-10-18 hg)
__author__ = '[email protected]'# (Håvard Gulldahl)' #BUG: api chokes on non-ascii chars in __author__
__license__ = 'Apache License v2'
import atom
import gdata
GEO_NAMESPACE = 'http://www.w3.org/2003/01/geo/wgs84_pos#'
GML_NAMESPACE = 'http://www.opengis.net/gml'
GEORSS_NAMESPACE = 'http://www.georss.org/georss'
class GeoBaseElement(atom.AtomBase):
"""Base class for elements.
To add new elements, you only need to add the element tag name to self._tag
and the namespace to self._namespace
"""
_tag = ''
_namespace = GML_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
def __init__(self, name=None, extension_elements=None,
extension_attributes=None, text=None):
self.name = name
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
class Pos(GeoBaseElement):
"""(string) Specifies a latitude and longitude, separated by a space,
e.g. `35.669998 139.770004'"""
_tag = 'pos'
def PosFromString(xml_string):
return atom.CreateClassFromXMLString(Pos, xml_string)
class Point(GeoBaseElement):
"""(container) Specifies a particular geographical point, by means of
a <gml:pos> element."""
_tag = 'Point'
_children = atom.AtomBase._children.copy()
_children['{%s}pos' % GML_NAMESPACE] = ('pos', Pos)
def __init__(self, pos=None, extension_elements=None, extension_attributes=None, text=None):
GeoBaseElement.__init__(self, extension_elements=extension_elements,
extension_attributes=extension_attributes,
text=text)
if pos is None:
pos = Pos()
self.pos=pos
def PointFromString(xml_string):
return atom.CreateClassFromXMLString(Point, xml_string)
class Where(GeoBaseElement):
"""(container) Specifies a geographical location or region.
A container element, containing a single <gml:Point> element.
(Not to be confused with <gd:where>.)
Note that the (only) child attribute, .Point, is title-cased.
This reflects the names of elements in the xml stream
(principle of least surprise).
As a convenience, you can get a tuple of (lat, lon) with Where.location(),
and set the same data with Where.setLocation( (lat, lon) ).
Similarly, there are methods to set and get only latitude and longitude.
"""
_tag = 'where'
_namespace = GEORSS_NAMESPACE
_children = atom.AtomBase._children.copy()
_children['{%s}Point' % GML_NAMESPACE] = ('Point', Point)
def __init__(self, point=None, extension_elements=None, extension_attributes=None, text=None):
GeoBaseElement.__init__(self, extension_elements=extension_elements,
extension_attributes=extension_attributes,
text=text)
if point is None:
point = Point()
self.Point=point
def location(self):
"(float, float) Return Where.Point.pos.text as a (lat,lon) tuple"
try:
return tuple([float(z) for z in self.Point.pos.text.split(' ')])
except AttributeError:
return tuple()
def set_location(self, latlon):
"""(bool) Set Where.Point.pos.text from a (lat,lon) tuple.
Arguments:
lat (float): The latitude in degrees, from -90.0 to 90.0
lon (float): The longitude in degrees, from -180.0 to 180.0
Returns True on success.
"""
assert(isinstance(latlon[0], float))
assert(isinstance(latlon[1], float))
try:
self.Point.pos.text = "%s %s" % (latlon[0], latlon[1])
return True
except AttributeError:
return False
def latitude(self):
"(float) Get the latitude value of the geo-tag. See also .location()"
lat, lon = self.location()
return lat
def longitude(self):
"(float) Get the longtitude value of the geo-tag. See also .location()"
lat, lon = self.location()
return lon
longtitude = longitude
def set_latitude(self, lat):
"""(bool) Set the latitude value of the geo-tag.
Args:
lat (float): The new latitude value
See also .set_location()
"""
    _lat, lon = self.location()
    return self.set_location((lat, lon))
def set_longitude(self, lon):
"""(bool) Set the longtitude value of the geo-tag.
Args:
lat (float): The new latitude value
See also .set_location()
"""
lat, _lon = self.location()
return self.set_location(lat, lon)
set_longtitude = set_longitude
def WhereFromString(xml_string):
return atom.CreateClassFromXMLString(Where, xml_string)
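# A minimal usage sketch of the classes above (illustrative only; it assumes
# the surrounding atom/gdata packages are importable and that ToString() is
# inherited from atom.AtomBase as usual):
#
#   where = Where()
#   where.set_location((35.669998, 139.770004))
#   lat, lon = where.location()
#   roundtrip = WhereFromString(where.ToString())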
| apache-2.0 | -141,531,355,547,288,300 | 31.448649 | 103 | 0.686657 | false |
ycaihua/kbengine | kbe/res/scripts/common/Lib/encodings/rot_13.py | 155 | 2428 | #!/usr/bin/env python
""" Python Character Mapping Codec for ROT13.
This codec de/encodes from str to str.
Written by Marc-Andre Lemburg ([email protected]).
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self, input, errors='strict'):
return (input.translate(rot13_map), len(input))
def decode(self, input, errors='strict'):
return (input.translate(rot13_map), len(input))
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return input.translate(rot13_map)
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return input.translate(rot13_map)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='rot-13',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamwriter=StreamWriter,
streamreader=StreamReader,
_is_text_encoding=False,
)
### Map
rot13_map = codecs.make_identity_dict(range(256))
rot13_map.update({
0x0041: 0x004e,
0x0042: 0x004f,
0x0043: 0x0050,
0x0044: 0x0051,
0x0045: 0x0052,
0x0046: 0x0053,
0x0047: 0x0054,
0x0048: 0x0055,
0x0049: 0x0056,
0x004a: 0x0057,
0x004b: 0x0058,
0x004c: 0x0059,
0x004d: 0x005a,
0x004e: 0x0041,
0x004f: 0x0042,
0x0050: 0x0043,
0x0051: 0x0044,
0x0052: 0x0045,
0x0053: 0x0046,
0x0054: 0x0047,
0x0055: 0x0048,
0x0056: 0x0049,
0x0057: 0x004a,
0x0058: 0x004b,
0x0059: 0x004c,
0x005a: 0x004d,
0x0061: 0x006e,
0x0062: 0x006f,
0x0063: 0x0070,
0x0064: 0x0071,
0x0065: 0x0072,
0x0066: 0x0073,
0x0067: 0x0074,
0x0068: 0x0075,
0x0069: 0x0076,
0x006a: 0x0077,
0x006b: 0x0078,
0x006c: 0x0079,
0x006d: 0x007a,
0x006e: 0x0061,
0x006f: 0x0062,
0x0070: 0x0063,
0x0071: 0x0064,
0x0072: 0x0065,
0x0073: 0x0066,
0x0074: 0x0067,
0x0075: 0x0068,
0x0076: 0x0069,
0x0077: 0x006a,
0x0078: 0x006b,
0x0079: 0x006c,
0x007a: 0x006d,
})
### Filter API
def rot13(infile, outfile):
outfile.write(codecs.encode(infile.read(), 'rot-13'))
if __name__ == '__main__':
import sys
rot13(sys.stdin, sys.stdout)
| lgpl-3.0 | 6,333,234,198,888,007,000 | 20.486726 | 57 | 0.663097 | false |
woutersmet/Molmodsummer | lib/molmod/io/dlpoly.py | 1 | 9363 | # MolMod is a collection of molecular modelling tools for python.
# Copyright (C) 2007 - 2008 Toon Verstraelen <[email protected]>
#
# This file is part of MolMod.
#
# MolMod is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# MolMod is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
from molmod.units import ps, amu, A, atm, deg
from molmod.io.common import slice_match
import numpy
__all__ = ["Error", "HistoryReader", "OutputReader"]
class Error(Exception):
pass
class HistoryReader(object):
def __init__(self, filename, sub=slice(None), pos_unit=A, vel_unit=A/ps, frc_unit=amu*A/ps**2, time_unit=ps, mass_unit=amu):
self._f = file(filename)
self._sub = sub
self.pos_unit = pos_unit
self.vel_unit = vel_unit
self.frc_unit = frc_unit
self.time_unit = time_unit
self.mass_unit = mass_unit
try:
self.header = self._f.next()[:-1]
integers = tuple(int(word) for word in self._f.next().split())
if len(integers) != 3:
raise Error("Second line must contain three integers.")
self.keytrj, self.imcon, self.num_atoms = integers
except StopIteration:
raise Error("File is too short. Could not read header.")
except ValueError:
raise Error("Second line must contain three integers.")
self._counter = 1
self._frame_size = 4 + self.num_atoms*(self.keytrj+2)
def __del__(self):
self._f.close()
def __iter__(self):
return self
def next(self):
# auxiliary read function
def read_three(msg):
# read three words as floating point numbers
line = self._f.next()
try:
return [float(line[:12]), float(line[12:24]), float(line[24:])]
except ValueError:
raise Error(msg)
# skip frames as requested
while not slice_match(self._sub, self._counter):
for i in xrange(self._frame_size):
self._f.next()
self._counter += 1
frame = {}
# read the frame header line
words = self._f.next().split()
if len(words) != 6:
raise Error("The first line of each time frame must contain 6 words. (%i'th frame)" % self._counter)
if words[0] != "timestep":
raise Error("The first word of the first line of each time frame must be 'timestep'. (%i'th frame)" % self._counter)
try:
step = int(words[1])
frame["step"] = step
if int(words[2]) != self.num_atoms:
raise Error("The number of atoms has changed. (%i'th frame, %i'th step)" % (self._counter, step))
if int(words[3]) != self.keytrj:
raise Error("keytrj has changed. (%i'th frame, %i'th step)" % (self._counter, step))
if int(words[4]) != self.imcon:
raise Error("imcon has changed. (%i'th frame, %i'th step)" % (self._counter, step))
frame["timestep"] = float(words[5])*self.time_unit
frame["time"] = frame["timestep"]*step # this is ugly, or wait ... dlpoly is a bit ugly. we are not to blame!
except ValueError:
raise Error("Could not convert all numbers on the first line of the current time frame. (%i'th frame)" % self._counter)
# the three cell lines
cell = numpy.zeros((3,3), float)
frame["cell"] = cell
cell_msg = "The cell lines must consist of three floating point values. (%i'th frame, %i'th step)" % (self._counter, step)
for i in xrange(3):
cell[:,i] = read_three(cell_msg)
cell *= self.pos_unit
# the atoms
symbols = []
frame["symbols"] = symbols
masses = numpy.zeros(self.num_atoms, float)
frame["masses"] = masses
charges = numpy.zeros(self.num_atoms, float)
frame["charges"] = charges
pos = numpy.zeros((self.num_atoms,3), float)
frame["pos"] = pos
if self.keytrj > 0:
vel = numpy.zeros((self.num_atoms,3), float)
frame["vel"] = vel
if self.keytrj > 1:
frc = numpy.zeros((self.num_atoms,3), float)
frame["frc"] = frc
for i in xrange(self.num_atoms):
# the atom header line
words = self._f.next().split()
if len(words) != 4:
raise Error("The atom header line must contain 4 words. (%i'th frame, %i'th step, %i'th atom)" % (self._counter, step, i+1))
symbols.append(words[0])
try:
masses[i] = float(words[2])*self.mass_unit
charges[i] = float(words[3])
except ValueError:
raise Error("The numbers in the atom header line could not be interpreted.")
# the pos line
pos_msg = "The position lines must consist of three floating point values. (%i'th frame, %i'th step, %i'th atom)" % (self._counter, step, i+1)
pos[i] = read_three(pos_msg)
if self.keytrj > 0:
vel_msg = "The velocity lines must consist of three floating point values. (%i'th frame, %i'th step, %i'th atom)" % (self._counter, step, i+1)
vel[i] = read_three(vel_msg)
if self.keytrj > 1:
frc_msg = "The force lines must consist of three floating point values. (%i'th frame, %i'th step, %i'th atom)" % (self._counter, step, i+1)
frc[i] = read_three(frc_msg)
pos *= self.pos_unit # convert to au
if self.keytrj > 0:
vel *= self.vel_unit # convert to au
if self.keytrj > 1:
frc *= self.frc_unit # convert to au
# done
self._counter += 1
return frame
class OutputReader(object):
_marker = " " + "-"*130
def __init__(self, filename, sub=slice(None), skip_equi_period=True, pos_unit=A, time_unit=ps, angle_unit=deg, e_unit=amu/(A/ps)**2):
self._f = file(filename)
self._sub = sub
self.skip_equi_period = skip_equi_period
self._counter = 1
self._conv = [
1, e_unit, 1, e_unit, e_unit, e_unit, e_unit, e_unit, e_unit, e_unit,
time_unit, e_unit, 1, e_unit, e_unit, e_unit, e_unit, e_unit, e_unit, e_unit,
1, pos_unit**3, 1, e_unit, e_unit, angle_unit, angle_unit, angle_unit, e_unit, 1000*atm,
]
self.last_step = None
# find the line that gives the number of equilibration steps:
try:
while True:
line = self._f.next()
if line.startswith(" equilibration period"):
self.equi_period = int(line[30:])
break
except StopIteration:
raise Error("DL_POLY OUTPUT file is too short. Could not find line with the number of equilibration steps.")
except ValueError:
raise Error("Could not read the number of equilibration steps. (expecting an integer)")
def __del__(self):
self._f.close()
def __iter__(self):
return self
def next(self):
def goto_next_frame():
marked = False
while True:
line = self._f.next()[:-1]
if marked and len(line) > 0 and not line.startswith(" --------"):
try:
step = int(line[:10])
return step, line
except ValueError:
pass
marked = (len(line) == 131 and line == self._marker)
while True:
step, line = goto_next_frame()
if (not self.skip_equi_period or step >= self.equi_period) and \
step != self.last_step:
break
# skip frames as requested
while not slice_match(self._sub, self._counter):
step, line = goto_next_frame()
self._counter += 1
# now really read these three lines
try:
row = [step]
for i in xrange(9):
row.append(float(line[10+i*12:10+(i+1)*12]))
line = self._f.next()[:-1]
row.append(float(line[:10]))
for i in xrange(9):
row.append(float(line[10+i*12:10+(i+1)*12]))
line = self._f.next()[:-1]
row.append(float(line[:10]))
for i in xrange(9):
row.append(float(line[10+i*12:10+(i+1)*12]))
except ValueError:
raise Error("Some numbers in the output file could not be read. (expecting floating point numbers)")
# convert all the numbers to atomic units
for i in xrange(30):
row[i] *= self._conv[i]
# done
self.last_step = step
return row
| gpl-3.0 | 5,444,467,297,642,824,000 | 39.184549 | 158 | 0.548756 | false |
Empeeric/dirometer | django/views/csrf.py | 289 | 3834 | from django.http import HttpResponseForbidden
from django.template import Context, Template
from django.conf import settings
# We include the template inline since we need to be able to reliably display
# this error message, especially for the sake of developers, and there isn't any
# other way of making it available independent of what is in the settings file.
CSRF_FAILRE_TEMPLATE = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="robots" content="NONE,NOARCHIVE">
<title>403 Forbidden</title>
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; background:#eee; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; margin-bottom:.4em; }
h1 span { font-size:60%; color:#666; font-weight:normal; }
#info { background:#f6f6f6; }
#info ul { margin: 0.5em 4em; }
#info p, #summary p { padding-top:10px; }
#summary { background: #ffc; }
#explanation { background:#eee; border-bottom: 0px none; }
</style>
</head>
<body>
<div id="summary">
<h1>Forbidden <span>(403)</span></h1>
<p>CSRF verification failed. Request aborted.</p>
{% if no_referer %}
<p>You are seeing this message because this HTTPS site requires a 'Referer
header' to be sent by your Web browser, but none was sent. This header is
required for security reasons, to ensure that your browser is not being
hijacked by third parties.</p>
<p>If you have configured your browser to disable 'Referer' headers, please
re-enable them, at least for this site, or for HTTPS connections, or for
'same-origin' requests.</p>
{% endif %}
</div>
{% if DEBUG %}
<div id="info">
<h2>Help</h2>
{% if reason %}
<p>Reason given for failure:</p>
<pre>
{{ reason }}
</pre>
{% endif %}
<p>In general, this can occur when there is a genuine Cross Site Request Forgery, or when
<a
href='http://docs.djangoproject.com/en/dev/ref/contrib/csrf/#ref-contrib-csrf'>Django's
CSRF mechanism</a> has not been used correctly. For POST forms, you need to
ensure:</p>
<ul>
<li>The view function uses <a
href='http://docs.djangoproject.com/en/dev/ref/templates/api/#subclassing-context-requestcontext'><code>RequestContext</code></a>
for the template, instead of <code>Context</code>.</li>
<li>In the template, there is a <code>{% templatetag openblock %} csrf_token
{% templatetag closeblock %}</code> template tag inside each POST form that
targets an internal URL.</li>
<li>If you are not using <code>CsrfViewMiddleware</code>, then you must use
<code>csrf_protect</code> on any views that use the <code>csrf_token</code>
template tag, as well as those that accept the POST data.</li>
</ul>
<p>You're seeing the help section of this page because you have <code>DEBUG =
True</code> in your Django settings file. Change that to <code>False</code>,
and only the initial error message will be displayed. </p>
<p>You can customize this page using the CSRF_FAILURE_VIEW setting.</p>
</div>
{% else %}
<div id="explanation">
<p><small>More information is available with DEBUG=True.</small></p>
</div>
{% endif %}
</body>
</html>
"""
def csrf_failure(request, reason=""):
"""
Default view used when request fails CSRF protection
"""
from django.middleware.csrf import REASON_NO_REFERER
t = Template(CSRF_FAILRE_TEMPLATE)
c = Context({'DEBUG': settings.DEBUG,
'reason': reason,
'no_referer': reason == REASON_NO_REFERER
})
return HttpResponseForbidden(t.render(c), mimetype='text/html')
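# A minimal sketch of wiring up a custom failure view through the
# CSRF_FAILURE_VIEW setting mentioned in the template above (the module path
# 'myproject.views' is only an illustration):
#
#   # settings.py
#   CSRF_FAILURE_VIEW = 'myproject.views.csrf_failure'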
| mit | -5,320,599,471,362,799,000 | 36.588235 | 133 | 0.675013 | false |
lucalianas/openmicroscopy | components/tools/OmeroPy/src/omero/gateway/scripts/testdb_create.py | 9 | 9835 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Library for gateway tests
Copyright 2009 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import omero
from omero.rtypes import rstring
from omero.gateway.scripts import dbhelpers
dbhelpers.USERS = {
'user': dbhelpers.UserEntry(
'weblitz_test_user', 'foobar', 'User', 'Weblitz'),
'author': dbhelpers.UserEntry(
'weblitz_test_author', 'foobar', 'Author', 'Weblitz'),
}
dbhelpers.PROJECTS = {
'testpr1': dbhelpers.ProjectEntry('weblitz_test_priv_project', 'author'),
'testpr2': dbhelpers.ProjectEntry('weblitz_test_priv_project2', 'author'),
}
dbhelpers.DATASETS = {
'testds1': dbhelpers.DatasetEntry('weblitz_test_priv_dataset', 'testpr1'),
'testds2': dbhelpers.DatasetEntry('weblitz_test_priv_dataset2', 'testpr1'),
'testds3': dbhelpers.DatasetEntry('weblitz_test_priv_dataset3', 'testpr2'),
}
dbhelpers.IMAGES = {
'testimg1': dbhelpers.ImageEntry(
'weblitz_test_priv_image', 'CHOBI_d3d.dv', 'testds1'),
'testimg2': dbhelpers.ImageEntry(
'weblitz_test_priv_image2', 'CHOBI_d3d.dv', 'testds1'),
'tinyimg': dbhelpers.ImageEntry(
'weblitz_test_priv_image_tiny', 'tinyTest.d3d.dv', 'testds1'),
'badimg': dbhelpers.ImageEntry(
'weblitz_test_priv_image_bad', False, 'testds1'),
'tinyimg2': dbhelpers.ImageEntry(
'weblitz_test_priv_image_tiny2', 'tinyTest.d3d.dv', 'testds2'),
'tinyimg3': dbhelpers.ImageEntry(
'weblitz_test_priv_image_tiny3', 'tinyTest.d3d.dv', 'testds3'),
'bigimg': dbhelpers.ImageEntry(
'weblitz_test_priv_image_big', 'big.tiff', 'testds3'),
}
class TestDBHelper(object):
def setUp(self, skipTestDB=False, skipTestImages=True):
self.tmpfiles = []
self._has_connected = False
self._last_login = None
self.doDisconnect()
self.USER = dbhelpers.USERS['user']
self.AUTHOR = dbhelpers.USERS['author']
self.ADMIN = dbhelpers.ROOT
gateway = omero.client_wrapper()
try:
rp = gateway.getProperty('omero.rootpass')
if rp:
dbhelpers.ROOT.passwd = rp
finally:
gateway.seppuku()
self.prepTestDB(onlyUsers=skipTestDB, skipImages=skipTestImages)
self.doDisconnect()
def doConnect(self):
if not self._has_connected:
self.gateway.connect()
self._has_connected = True
assert self.gateway.isConnected(), 'Can not connect'
assert self.gateway.keepAlive(
), 'Could not send keepAlive to connection'
self.gateway.setGroupForSession(
self.gateway.getEventContext().memberOfGroups[0])
def doDisconnect(self):
if self._has_connected and self.gateway:
self.doConnect()
self.gateway.seppuku()
assert not self.gateway.isConnected(), 'Can not disconnect'
self.gateway = None
self._has_connected = False
self._last_login = None
def doLogin(self, user=None, groupname=None):
l = (user, groupname)
if self._has_connected and self._last_login == l:
return self.doConnect()
self.doDisconnect()
if user:
self.gateway = dbhelpers.login(user, groupname)
else:
self.gateway = dbhelpers.loginAsPublic()
self.doConnect()
self._last_login = l
def loginAsAdmin(self):
self.doLogin(self.ADMIN)
def loginAsAuthor(self):
self.doLogin(self.AUTHOR)
def loginAsUser(self):
self.doLogin(self.USER)
def loginAsPublic(self):
self.doLogin()
def tearDown(self):
try:
if self.gateway is not None:
self.gateway.seppuku()
finally:
failure = False
for tmpfile in self.tmpfiles:
try:
tmpfile.close()
except:
print "Error closing:" + tmpfile
if failure:
raise Exception("Exception on client.closeSession")
def getTestProject(self):
return dbhelpers.getProject(self.gateway, 'testpr1')
def getTestProject2(self):
return dbhelpers.getProject(self.gateway, 'testpr2')
def getTestDataset(self, project=None):
return dbhelpers.getDataset(self.gateway, 'testds1', project)
def getTestDataset2(self, project=None):
return dbhelpers.getDataset(self.gateway, 'testds2', project)
def getTestImage(self, dataset=None, autocreate=False):
return dbhelpers.getImage(self.gateway, 'testimg1', forceds=dataset,
autocreate=autocreate)
def getTestImage2(self, dataset=None):
return dbhelpers.getImage(self.gateway, 'testimg2', dataset)
def getBadTestImage(self, dataset=None, autocreate=False):
return dbhelpers.getImage(self.gateway, 'badimg', forceds=dataset,
autocreate=autocreate)
def getTinyTestImage(self, dataset=None, autocreate=False):
return dbhelpers.getImage(self.gateway, 'tinyimg', forceds=dataset,
autocreate=autocreate)
def getTinyTestImage2(self, dataset=None, autocreate=False):
return dbhelpers.getImage(self.gateway, 'tinyimg2', forceds=dataset,
autocreate=autocreate)
def getTinyTestImage3(self, dataset=None, autocreate=False):
return dbhelpers.getImage(self.gateway, 'tinyimg3', forceds=dataset,
autocreate=autocreate)
def getBigTestImage(self, dataset=None, autocreate=False):
return dbhelpers.getImage(self.gateway, 'bigimg', forceds=dataset,
autocreate=autocreate)
def prepTestDB(self, onlyUsers=False, skipImages=True):
dbhelpers.bootstrap(onlyUsers=onlyUsers, skipImages=skipImages)
def waitOnCmd(self, client, handle):
callback = omero.callbacks.CmdCallbackI(client, handle)
callback.loop(10, 500) # throws on timeout
rsp = callback.getResponse()
assert isinstance(rsp, omero.cmd.OK)
return callback
def createPDTree(self, project=None, dataset=None):
"""
Create/link a Project and/or Dataset (link them if both are specified)
Existing objects can be parsed as an omero.model object(s) or blitz
Wrapper objects. Otherwise new objects will be created with name
str(project) or str(dataset). If project OR dataset is specified, the
ProjectWrapper or DatasetWrapper is returned. If both project and
dataset are specified, they will be linked and the PD-link is returned
as a BlitzObjectWrapper.
@param project: omero.model.ProjectDatasetLinkI
OR omero.gateway.ProjectWrapper
or name (string)
@param dataset: omero.model.DatasetI
OR omero.gateway.DatasetWrapper
or name (string)
"""
dsId = ds = None
prId = pr = None
returnVal = None
if dataset is not None:
try:
dsId = dataset.id
dsId = dsId.val
except:
ds = omero.model.DatasetI()
ds.name = rstring(str(dataset))
ds = self.gateway.getUpdateService().saveAndReturnObject(ds)
returnVal = omero.gateway.DatasetWrapper(self.gateway, ds)
dsId = ds.id.val
if project is not None:
try:
prId = project.id
prId = prId.val
except:
pr = omero.model.ProjectI()
pr.name = rstring(str(project))
pr = self.gateway.getUpdateService().saveAndReturnObject(pr)
returnVal = omero.gateway.ProjectWrapper(self.gateway, pr)
prId = pr.id.val
if dsId and prId:
link = omero.model.ProjectDatasetLinkI()
link.setParent(omero.model.ProjectI(prId, False))
link.setChild(omero.model.DatasetI(dsId, False))
link = self.gateway.getUpdateService().saveAndReturnObject(link)
returnVal = omero.gateway.BlitzObjectWrapper(self.gateway, link)
return returnVal
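    # A minimal usage sketch (names are illustrative, not part of the helpers):
    #
    #     pd_link = self.createPDTree(project="my project", dataset="my dataset")
    #     ds_wrapper = self.createPDTree(dataset="orphan dataset")
    #
    # The first call creates both objects, links them and returns the link as a
    # BlitzObjectWrapper; the second returns a DatasetWrapper for the new dataset.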
def createTestImage(self, imageName="testImage", dataset=None, sizeX=16,
sizeY=16, sizeZ=1, sizeC=1, sizeT=1):
"""
        Creates a test image of the required dimensions, where each pixel
        value is set to its x coordinate (see planeGen below). If dataset
        (object, wrapper or ID) is specified, the new image will be linked
        to it.
@param dataset: omero.model.DatasetI
OR DatasetWrapper
OR dataset ID
"""
from numpy import fromfunction, int16
def f(x, y):
return x
def planeGen():
for p in range(sizeZ * sizeC * sizeT):
yield fromfunction(f, (sizeY, sizeX), dtype=int16)
ds = None
if dataset is not None:
if hasattr(dataset, "_obj"):
dataset = dataset._obj
if isinstance(dataset, omero.model.DatasetI):
ds = dataset
else:
try:
dsId = long(dataset)
ds = omero.model.DatasetI(dsId, False)
except:
pass
image = self.gateway.createImageFromNumpySeq(
planeGen(), imageName, sizeZ=sizeZ, sizeC=sizeC, sizeT=sizeT,
dataset=ds)
return image
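    # Example (a sketch; assumes a logged-in gateway from doLogin()):
    #
    #     self.loginAsAuthor()
    #     ds = self.createPDTree(dataset="tmp_dataset")
    #     img = self.createTestImage(imageName="tiny", dataset=ds,
    #                                sizeX=32, sizeY=32, sizeC=2)
    #
    # createImageFromNumpySeq() returns a wrapper for the newly created image.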
| gpl-2.0 | -877,442,211,466,764,400 | 35.835206 | 79 | 0.59939 | false |
blacklin/kbengine | kbe/src/lib/python/Lib/test/test_pickle.py | 72 | 4522 | import pickle
import io
import collections
from test import support
from test.pickletester import AbstractPickleTests
from test.pickletester import AbstractPickleModuleTests
from test.pickletester import AbstractPersistentPicklerTests
from test.pickletester import AbstractPicklerUnpicklerObjectTests
from test.pickletester import AbstractDispatchTableTests
from test.pickletester import BigmemPickleTests
try:
import _pickle
has_c_implementation = True
except ImportError:
has_c_implementation = False
class PickleTests(AbstractPickleModuleTests):
pass
class PyPicklerTests(AbstractPickleTests):
pickler = pickle._Pickler
unpickler = pickle._Unpickler
def dumps(self, arg, proto=None):
f = io.BytesIO()
p = self.pickler(f, proto)
p.dump(arg)
f.seek(0)
return bytes(f.read())
def loads(self, buf, **kwds):
f = io.BytesIO(buf)
u = self.unpickler(f, **kwds)
return u.load()
class InMemoryPickleTests(AbstractPickleTests, BigmemPickleTests):
pickler = pickle._Pickler
unpickler = pickle._Unpickler
def dumps(self, arg, protocol=None):
return pickle.dumps(arg, protocol)
def loads(self, buf, **kwds):
return pickle.loads(buf, **kwds)
class PyPersPicklerTests(AbstractPersistentPicklerTests):
pickler = pickle._Pickler
unpickler = pickle._Unpickler
def dumps(self, arg, proto=None):
class PersPickler(self.pickler):
def persistent_id(subself, obj):
return self.persistent_id(obj)
f = io.BytesIO()
p = PersPickler(f, proto)
p.dump(arg)
f.seek(0)
return f.read()
def loads(self, buf, **kwds):
class PersUnpickler(self.unpickler):
def persistent_load(subself, obj):
return self.persistent_load(obj)
f = io.BytesIO(buf)
u = PersUnpickler(f, **kwds)
return u.load()
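# Rough illustration of the persistent-id protocol these tests exercise
# (standalone sketch, not used by the test classes above):
#
#     DB = {}  # pretend external store
#
#     class DbPickler(pickle.Pickler):
#         def persistent_id(self, obj):
#             if isinstance(obj, dict) and obj.get('_external'):
#                 DB['key'] = obj
#                 return 'key'
#             return None
#
#     class DbUnpickler(pickle.Unpickler):
#         def persistent_load(self, pid):
#             return DB[pid]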
class PyPicklerUnpicklerObjectTests(AbstractPicklerUnpicklerObjectTests):
pickler_class = pickle._Pickler
unpickler_class = pickle._Unpickler
class PyDispatchTableTests(AbstractDispatchTableTests):
pickler_class = pickle._Pickler
def get_dispatch_table(self):
return pickle.dispatch_table.copy()
class PyChainDispatchTableTests(AbstractDispatchTableTests):
pickler_class = pickle._Pickler
def get_dispatch_table(self):
return collections.ChainMap({}, pickle.dispatch_table)
if has_c_implementation:
class CPicklerTests(PyPicklerTests):
pickler = _pickle.Pickler
unpickler = _pickle.Unpickler
class CPersPicklerTests(PyPersPicklerTests):
pickler = _pickle.Pickler
unpickler = _pickle.Unpickler
class CDumpPickle_LoadPickle(PyPicklerTests):
pickler = _pickle.Pickler
unpickler = pickle._Unpickler
class DumpPickle_CLoadPickle(PyPicklerTests):
pickler = pickle._Pickler
unpickler = _pickle.Unpickler
class CPicklerUnpicklerObjectTests(AbstractPicklerUnpicklerObjectTests):
pickler_class = _pickle.Pickler
unpickler_class = _pickle.Unpickler
def test_issue18339(self):
unpickler = self.unpickler_class(io.BytesIO())
with self.assertRaises(TypeError):
unpickler.memo = object
# used to cause a segfault
with self.assertRaises(ValueError):
unpickler.memo = {-1: None}
unpickler.memo = {1: None}
class CDispatchTableTests(AbstractDispatchTableTests):
pickler_class = pickle.Pickler
def get_dispatch_table(self):
return pickle.dispatch_table.copy()
class CChainDispatchTableTests(AbstractDispatchTableTests):
pickler_class = pickle.Pickler
def get_dispatch_table(self):
return collections.ChainMap({}, pickle.dispatch_table)
def test_main():
tests = [PickleTests, PyPicklerTests, PyPersPicklerTests,
PyDispatchTableTests, PyChainDispatchTableTests]
if has_c_implementation:
tests.extend([CPicklerTests, CPersPicklerTests,
CDumpPickle_LoadPickle, DumpPickle_CLoadPickle,
PyPicklerUnpicklerObjectTests,
CPicklerUnpicklerObjectTests,
CDispatchTableTests, CChainDispatchTableTests,
InMemoryPickleTests])
support.run_unittest(*tests)
support.run_doctest(pickle)
if __name__ == "__main__":
test_main()
| lgpl-3.0 | 3,059,946,087,810,033,000 | 27.987179 | 76 | 0.671827 | false |
mottosso/pyblish-magenta | pyblish_magenta/vendor/cquery/vendor/nose/result.py | 68 | 6711 | """
Test Result
-----------
Provides a TextTestResult that extends unittest's _TextTestResult to
provide support for error classes (such as the builtin skip and
deprecated classes), and hooks for plugins to take over or extend
reporting.
"""
import logging
try:
# 2.7+
from unittest.runner import _TextTestResult
except ImportError:
from unittest import _TextTestResult
from nose.config import Config
from nose.util import isclass, ln as _ln # backwards compat
log = logging.getLogger('nose.result')
def _exception_detail(exc):
# this is what stdlib module traceback does
try:
return str(exc)
except:
return '<unprintable %s object>' % type(exc).__name__
class TextTestResult(_TextTestResult):
"""Text test result that extends unittest's default test result
support for a configurable set of errorClasses (eg, Skip,
Deprecated, TODO) that extend the errors/failures/success triad.
"""
def __init__(self, stream, descriptions, verbosity, config=None,
errorClasses=None):
if errorClasses is None:
errorClasses = {}
self.errorClasses = errorClasses
if config is None:
config = Config()
self.config = config
_TextTestResult.__init__(self, stream, descriptions, verbosity)
def addSkip(self, test, reason):
# 2.7 skip compat
from nose.plugins.skip import SkipTest
if SkipTest in self.errorClasses:
storage, label, isfail = self.errorClasses[SkipTest]
storage.append((test, reason))
self.printLabel(label, (SkipTest, reason, None))
def addError(self, test, err):
"""Overrides normal addError to add support for
errorClasses. If the exception is a registered class, the
error will be added to the list for that class, not errors.
"""
ec, ev, tb = err
try:
exc_info = self._exc_info_to_string(err, test)
except TypeError:
# 2.3 compat
exc_info = self._exc_info_to_string(err)
for cls, (storage, label, isfail) in self.errorClasses.items():
#if 'Skip' in cls.__name__ or 'Skip' in ec.__name__:
# from nose.tools import set_trace
# set_trace()
if isclass(ec) and issubclass(ec, cls):
if isfail:
test.passed = False
storage.append((test, exc_info))
self.printLabel(label, err)
return
self.errors.append((test, exc_info))
test.passed = False
self.printLabel('ERROR')
# override to bypass changes in 2.7
def getDescription(self, test):
if self.descriptions:
return test.shortDescription() or str(test)
else:
return str(test)
def printLabel(self, label, err=None):
# Might get patched into a streamless result
stream = getattr(self, 'stream', None)
if stream is not None:
if self.showAll:
message = [label]
if err:
detail = _exception_detail(err[1])
if detail:
message.append(detail)
stream.writeln(": ".join(message))
elif self.dots:
stream.write(label[:1])
def printErrors(self):
"""Overrides to print all errorClasses errors as well.
"""
_TextTestResult.printErrors(self)
for cls in self.errorClasses.keys():
storage, label, isfail = self.errorClasses[cls]
if isfail:
self.printErrorList(label, storage)
# Might get patched into a result with no config
if hasattr(self, 'config'):
self.config.plugins.report(self.stream)
def printSummary(self, start, stop):
"""Called by the test runner to print the final summary of test
run results.
"""
write = self.stream.write
writeln = self.stream.writeln
taken = float(stop - start)
run = self.testsRun
plural = run != 1 and "s" or ""
writeln(self.separator2)
writeln("Ran %s test%s in %.3fs" % (run, plural, taken))
writeln()
summary = {}
eckeys = self.errorClasses.keys()
for cls in eckeys:
storage, label, isfail = self.errorClasses[cls]
count = len(storage)
if not count:
continue
summary[label] = count
if len(self.failures):
summary['failures'] = len(self.failures)
if len(self.errors):
summary['errors'] = len(self.errors)
if not self.wasSuccessful():
write("FAILED")
else:
write("OK")
items = summary.items()
if items:
items.sort()
write(" (")
write(", ".join(["%s=%s" % (label, count) for
label, count in items]))
writeln(")")
else:
writeln()
def wasSuccessful(self):
"""Overrides to check that there are no errors in errorClasses
lists that are marked as errors and should cause a run to
fail.
"""
if self.errors or self.failures:
return False
for cls in self.errorClasses.keys():
storage, label, isfail = self.errorClasses[cls]
if not isfail:
continue
if storage:
return False
return True
def _addError(self, test, err):
try:
exc_info = self._exc_info_to_string(err, test)
except TypeError:
# 2.3: does not take test arg
exc_info = self._exc_info_to_string(err)
self.errors.append((test, exc_info))
if self.showAll:
self.stream.write('ERROR')
elif self.dots:
self.stream.write('E')
def _exc_info_to_string(self, err, test=None):
# 2.7 skip compat
from nose.plugins.skip import SkipTest
if isclass(err[0]) and issubclass(err[0], SkipTest):
return str(err[1])
# 2.3/2.4 -- 2.4 passes test, 2.3 does not
try:
return _TextTestResult._exc_info_to_string(self, err, test)
except TypeError:
# 2.3: does not take test arg
return _TextTestResult._exc_info_to_string(self, err)
def ln(*arg, **kw):
from warnings import warn
warn("ln() has moved to nose.util from nose.result and will be removed "
"from nose.result in a future release. Please update your imports ",
DeprecationWarning)
return _ln(*arg, **kw)
| lgpl-3.0 | 118,256,144,930,173,840 | 32.555 | 77 | 0.567874 | false |
ivankelly/Rhythmbox-Spotify-Plugin | plugins/rb/__init__.py | 2 | 3597 | # -*- Mode: python; coding: utf-8; tab-width: 8; indent-tabs-mode: t; -*-
#
# Copyright 2006, James Livingston <[email protected]>
# Copyright 2006, Ed Catmur <[email protected]>
# Copyright 2007, Jonathan Matthew
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# The Rhythmbox authors hereby grant permission for non-GPL compatible
# GStreamer plugins to be used and distributed together with GStreamer
# and Rhythmbox. This permission is above and beyond the permissions granted
# by the GPL license by which Rhythmbox is covered. If you modify this code
# you may extend this exception to your version of the code, but you are not
# obligated to do so. If you do not wish to do so, delete this exception
# statement from your version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
import os.path
import os
import gtk
# rb classes
from Loader import Loader
from Loader import ChunkLoader
from Loader import UpdateCheck
from Coroutine import Coroutine
#def _excepthandler (exc_class, exc_inst, trace):
# import sys
# # print out stuff ignoring our debug redirect
# sys.__excepthook__ (exc_class, exc_inst, trace)
def try_load_icon(theme, icon, size, flags):
try:
return theme.load_icon(icon, size, flags)
except:
return None
def append_plugin_source_path(theme, iconpath):
# check for a Makefile.am in the dir the file was loaded from
fr = sys._getframe(1)
co = fr.f_code
filename = co.co_filename
# and if found, append the icon path
dir = filename[:filename.rfind(os.sep)]
if os.path.exists(dir + "/Makefile.am"):
plugindir = dir[:dir.rfind(os.sep)]
icondir = plugindir + iconpath
theme.append_search_path(icondir)
def show_uri(uri):
# use gtk_show_uri if available, otherwise use gnome-vfs
if hasattr(gtk, 'show_uri'):
gtk.show_uri(gtk.gdk.Screen(), uri, 0)
else:
import gnomevfs
gnomevfs.url_show(uri)
class _rbdebugfile:
def __init__(self, fn):
self.fn = fn
def write(self, str):
if str == '\n':
return
import rb
fr = sys._getframe(1)
co = fr.f_code
filename = co.co_filename
# strip off the cwd, for if running uninstalled
cwd = os.getcwd()
if cwd[-1] != os.sep:
cwd += os.sep
if filename[:len(cwd)] == cwd:
filename = filename[len(cwd):]
# add the class name to the method, if 'self' exists
methodname = co.co_name
if fr.f_locals.has_key('self'):
methodname = '%s.%s' % (fr.f_locals['self'].__class__.__name__, methodname)
rb._debug (methodname, filename, co.co_firstlineno + fr.f_lineno, True, str)
def close(self): pass
def flush(self): pass
def fileno(self): return self.fn
def isatty(self): return 0
def read(self, a): return ''
def readline(self): return ''
def readlines(self): return []
writelines = write
def seek(self, a): raise IOError, (29, 'Illegal seek')
def tell(self): raise IOError, (29, 'Illegal seek')
truncate = tell
sys.stdout = _rbdebugfile(sys.stdout.fileno())
#sys.excepthook = _excepthandler
| gpl-2.0 | -6,092,515,768,749,965,000 | 30.008621 | 79 | 0.70392 | false |
Intel-bigdata/s3-tests | s3tests/functional/__init__.py | 3 | 12312 | import ConfigParser
import boto.exception
import boto.s3.connection
import bunch
import itertools
import os
import random
import string
from .utils import region_sync_meta
s3 = bunch.Bunch()
config = bunch.Bunch()
targets = bunch.Bunch()
# this will be assigned by setup()
prefix = None
calling_formats = dict(
ordinary=boto.s3.connection.OrdinaryCallingFormat(),
subdomain=boto.s3.connection.SubdomainCallingFormat(),
vhost=boto.s3.connection.VHostCallingFormat(),
)
def get_prefix():
assert prefix is not None
return prefix
def is_slow_backend():
return slow_backend
def choose_bucket_prefix(template, max_len=30):
"""
Choose a prefix for our test buckets, so they're easy to identify.
Use template and feed it more and more random filler, until it's
as long as possible but still below max_len.
"""
rand = ''.join(
random.choice(string.ascii_lowercase + string.digits)
for c in range(255)
)
while rand:
s = template.format(random=rand)
if len(s) <= max_len:
return s
rand = rand[:-1]
raise RuntimeError(
'Bucket prefix template is impossible to fulfill: {template!r}'.format(
template=template,
),
)
def nuke_prefixed_buckets_on_conn(prefix, name, conn):
print 'Cleaning buckets from connection {name} prefix {prefix!r}.'.format(
name=name,
prefix=prefix,
)
for bucket in conn.get_all_buckets():
print 'prefix=',prefix
if bucket.name.startswith(prefix):
print 'Cleaning bucket {bucket}'.format(bucket=bucket)
success = False
for i in xrange(2):
try:
try:
iterator = iter(bucket.list_versions())
# peek into iterator to issue list operation
try:
keys = itertools.chain([next(iterator)], iterator)
except StopIteration:
keys = [] # empty iterator
except boto.exception.S3ResponseError as e:
# some S3 implementations do not support object
# versioning - fall back to listing without versions
if e.error_code != 'NotImplemented':
raise e
keys = bucket.list();
for key in keys:
print 'Cleaning bucket {bucket} key {key}'.format(
bucket=bucket,
key=key,
)
# key.set_canned_acl('private')
bucket.delete_key(key.name, version_id = key.version_id)
bucket.delete()
success = True
except boto.exception.S3ResponseError as e:
if e.error_code != 'AccessDenied':
print 'GOT UNWANTED ERROR', e.error_code
raise
# seems like we don't have permissions set appropriately, we'll
# modify permissions and retry
pass
if success:
return
bucket.set_canned_acl('private')
def nuke_prefixed_buckets(prefix):
# If no regions are specified, use the simple method
if targets.main.master == None:
for name, conn in s3.items():
print 'Deleting buckets on {name}'.format(name=name)
nuke_prefixed_buckets_on_conn(prefix, name, conn)
else:
# First, delete all buckets on the master connection
for name, conn in s3.items():
if conn == targets.main.master.connection:
print 'Deleting buckets on {name} (master)'.format(name=name)
nuke_prefixed_buckets_on_conn(prefix, name, conn)
# Then sync to propagate deletes to secondaries
region_sync_meta(targets.main, targets.main.master.connection)
print 'region-sync in nuke_prefixed_buckets'
# Now delete remaining buckets on any other connection
for name, conn in s3.items():
if conn != targets.main.master.connection:
print 'Deleting buckets on {name} (non-master)'.format(name=name)
nuke_prefixed_buckets_on_conn(prefix, name, conn)
print 'Done with cleanup of test buckets.'
class TargetConfig:
def __init__(self, cfg, section):
self.port = None
self.api_name = ''
self.is_master = False
self.is_secure = False
self.sync_agent_addr = None
self.sync_agent_port = 0
self.sync_meta_wait = 0
try:
self.api_name = cfg.get(section, 'api_name')
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
pass
try:
self.port = cfg.getint(section, 'port')
except ConfigParser.NoOptionError:
pass
try:
self.host=cfg.get(section, 'host')
except ConfigParser.NoOptionError:
raise RuntimeError(
'host not specified for section {s}'.format(s=section)
)
try:
self.is_master=cfg.getboolean(section, 'is_master')
except ConfigParser.NoOptionError:
pass
try:
self.is_secure=cfg.getboolean(section, 'is_secure')
except ConfigParser.NoOptionError:
pass
try:
raw_calling_format = cfg.get(section, 'calling_format')
except ConfigParser.NoOptionError:
raw_calling_format = 'ordinary'
try:
self.sync_agent_addr = cfg.get(section, 'sync_agent_addr')
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
pass
try:
self.sync_agent_port = cfg.getint(section, 'sync_agent_port')
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
pass
try:
self.sync_meta_wait = cfg.getint(section, 'sync_meta_wait')
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
pass
try:
self.calling_format = calling_formats[raw_calling_format]
except KeyError:
raise RuntimeError(
'calling_format unknown: %r' % raw_calling_format
)
class TargetConnection:
def __init__(self, conf, conn):
self.conf = conf
self.connection = conn
class RegionsInfo:
def __init__(self):
self.m = bunch.Bunch()
self.master = None
self.secondaries = []
def add(self, name, region_config):
self.m[name] = region_config
if (region_config.is_master):
if not self.master is None:
raise RuntimeError(
'multiple regions defined as master'
)
self.master = region_config
else:
self.secondaries.append(region_config)
def get(self, name):
return self.m[name]
    def get(self):
        # NOTE: this no-argument get() shadows the single-argument version
        # defined above; callers in this module only use this form.
        return self.m
def iteritems(self):
return self.m.iteritems()
regions = RegionsInfo()
class RegionsConn:
def __init__(self):
self.m = bunch.Bunch()
self.default = None
self.master = None
self.secondaries = []
def iteritems(self):
return self.m.iteritems()
def set_default(self, conn):
self.default = conn
def add(self, name, conn):
self.m[name] = conn
if not self.default:
self.default = conn
if (conn.conf.is_master):
self.master = conn
else:
self.secondaries.append(conn)
# nosetests --processes=N with N>1 is safe
_multiprocess_can_split_ = True
def setup():
cfg = ConfigParser.RawConfigParser()
try:
path = os.environ['S3TEST_CONF']
except KeyError:
raise RuntimeError(
'To run tests, point environment '
+ 'variable S3TEST_CONF to a config file.',
)
with file(path) as f:
cfg.readfp(f)
global prefix
global targets
global slow_backend
try:
template = cfg.get('fixtures', 'bucket prefix')
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
template = 'test-{random}-'
prefix = choose_bucket_prefix(template=template)
try:
slow_backend = cfg.getboolean('fixtures', 'slow backend')
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
slow_backend = False
# pull the default_region out, if it exists
try:
default_region = cfg.get('fixtures', 'default_region')
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
default_region = None
s3.clear()
config.clear()
for section in cfg.sections():
try:
(type_, name) = section.split(None, 1)
except ValueError:
continue
if type_ != 'region':
continue
regions.add(name, TargetConfig(cfg, section))
for section in cfg.sections():
try:
(type_, name) = section.split(None, 1)
except ValueError:
continue
if type_ != 's3':
continue
if len(regions.get()) == 0:
regions.add("default", TargetConfig(cfg, section))
config[name] = bunch.Bunch()
for var in [
'user_id',
'display_name',
'email',
]:
try:
config[name][var] = cfg.get(section, var)
except ConfigParser.NoOptionError:
pass
targets[name] = RegionsConn()
for (k, conf) in regions.iteritems():
conn = boto.s3.connection.S3Connection(
aws_access_key_id=cfg.get(section, 'access_key'),
aws_secret_access_key=cfg.get(section, 'secret_key'),
is_secure=conf.is_secure,
port=conf.port,
host=conf.host,
# TODO test vhost calling format
calling_format=conf.calling_format,
)
temp_targetConn = TargetConnection(conf, conn)
targets[name].add(k, temp_targetConn)
# Explicitly test for and set the default region, if specified.
# If it was not specified, use the 'is_master' flag to set it.
if default_region:
if default_region == name:
targets[name].set_default(temp_targetConn)
elif conf.is_master:
targets[name].set_default(temp_targetConn)
s3[name] = targets[name].default.connection
# WARNING! we actively delete all buckets we see with the prefix
# we've chosen! Choose your prefix with care, and don't reuse
# credentials!
# We also assume nobody else is going to use buckets with that
# prefix. This is racy but given enough randomness, should not
# really fail.
nuke_prefixed_buckets(prefix=prefix)
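# A sketch of the kind of S3TEST_CONF file setup() expects (values invented):
#
#     [fixtures]
#     bucket prefix = test-{random}-
#
#     [region main]
#     api_name = default-placement
#     host = s3.example.com
#     port = 8000
#     is_master = true
#     is_secure = false
#
#     [s3 main]
#     access_key = AKIAEXAMPLE
#     secret_key = examplesecret
#     user_id = 1234
#     display_name = tester
#     email = [email protected]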
def teardown():
# remove our buckets here also, to avoid littering
nuke_prefixed_buckets(prefix=prefix)
bucket_counter = itertools.count(1)
def get_new_bucket_name():
"""
Get a bucket name that probably does not exist.
We make every attempt to use a unique random prefix, so if a
bucket by this name happens to exist, it's ok if tests give
false negatives.
"""
name = '{prefix}{num}'.format(
prefix=prefix,
num=next(bucket_counter),
)
return name
def get_new_bucket(target=None, name=None, headers=None):
"""
Get a bucket that exists and is empty.
Always recreates a bucket from scratch. This is useful to also
reset ACLs and such.
"""
if target is None:
target = targets.main.default
connection = target.connection
if name is None:
name = get_new_bucket_name()
# the only way for this to fail with a pre-existing bucket is if
# someone raced us between setup nuke_prefixed_buckets and here;
# ignore that as astronomically unlikely
bucket = connection.create_bucket(name, location=target.conf.api_name, headers=headers)
return bucket
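# Typical use from a test (sketch; standard boto key API assumed):
#
#     bucket = get_new_bucket()
#     key = bucket.new_key('hello.txt')
#     key.set_contents_from_string('hello')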
| mit | 4,699,205,822,177,889,000 | 30.248731 | 91 | 0.574643 | false |
imtapps/django-imt-fork | django/db/backends/__init__.py | 52 | 38612 | from django.db.utils import DatabaseError
try:
from django.utils.six.moves import _thread as thread
except ImportError:
from django.utils.six.moves import _dummy_thread as thread
from contextlib import contextmanager
from django.conf import settings
from django.db import DEFAULT_DB_ALIAS
from django.db.backends import util
from django.db.transaction import TransactionManagementError
from django.utils.functional import cached_property
from django.utils.importlib import import_module
from django.utils import six
from django.utils.timezone import is_aware
class BaseDatabaseWrapper(object):
"""
Represents a database connection.
"""
ops = None
vendor = 'unknown'
def __init__(self, settings_dict, alias=DEFAULT_DB_ALIAS,
allow_thread_sharing=False):
# `settings_dict` should be a dictionary containing keys such as
# NAME, USER, etc. It's called `settings_dict` instead of `settings`
# to disambiguate it from Django settings modules.
self.connection = None
self.queries = []
self.settings_dict = settings_dict
self.alias = alias
self.use_debug_cursor = None
# Transaction related attributes
self.transaction_state = []
self.savepoint_state = 0
self._dirty = None
self._thread_ident = thread.get_ident()
self.allow_thread_sharing = allow_thread_sharing
def __eq__(self, other):
return self.alias == other.alias
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.alias)
def _commit(self):
if self.connection is not None:
return self.connection.commit()
def _rollback(self):
if self.connection is not None:
return self.connection.rollback()
def _enter_transaction_management(self, managed):
"""
A hook for backend-specific changes required when entering manual
transaction handling.
"""
pass
def _leave_transaction_management(self, managed):
"""
A hook for backend-specific changes required when leaving manual
transaction handling. Will usually be implemented only when
_enter_transaction_management() is also required.
"""
pass
def _savepoint(self, sid):
if not self.features.uses_savepoints:
return
self.cursor().execute(self.ops.savepoint_create_sql(sid))
def _savepoint_rollback(self, sid):
if not self.features.uses_savepoints:
return
self.cursor().execute(self.ops.savepoint_rollback_sql(sid))
def _savepoint_commit(self, sid):
if not self.features.uses_savepoints:
return
self.cursor().execute(self.ops.savepoint_commit_sql(sid))
def abort(self):
"""
Roll back any ongoing transaction and clean the transaction state
stack.
"""
if self._dirty:
self._rollback()
self._dirty = False
while self.transaction_state:
self.leave_transaction_management()
def enter_transaction_management(self, managed=True):
"""
Enters transaction management for a running thread. It must be balanced with
the appropriate leave_transaction_management call, since the actual state is
managed as a stack.
The state and dirty flag are carried over from the surrounding block or
from the settings, if there is no surrounding block (dirty is always false
when no current block is running).
"""
if self.transaction_state:
self.transaction_state.append(self.transaction_state[-1])
else:
self.transaction_state.append(settings.TRANSACTIONS_MANAGED)
if self._dirty is None:
self._dirty = False
self._enter_transaction_management(managed)
def leave_transaction_management(self):
"""
Leaves transaction management for a running thread. A dirty flag is carried
over to the surrounding block, as a commit will commit all changes, even
those from outside. (Commits are on connection level.)
"""
if self.transaction_state:
del self.transaction_state[-1]
else:
raise TransactionManagementError(
"This code isn't under transaction management")
# We will pass the next status (after leaving the previous state
# behind) to subclass hook.
self._leave_transaction_management(self.is_managed())
if self._dirty:
self.rollback()
raise TransactionManagementError(
"Transaction managed block ended with pending COMMIT/ROLLBACK")
self._dirty = False
def validate_thread_sharing(self):
"""
Validates that the connection isn't accessed by another thread than the
one which originally created it, unless the connection was explicitly
authorized to be shared between threads (via the `allow_thread_sharing`
property). Raises an exception if the validation fails.
"""
if (not self.allow_thread_sharing
and self._thread_ident != thread.get_ident()):
raise DatabaseError("DatabaseWrapper objects created in a "
"thread can only be used in that same thread. The object "
"with alias '%s' was created in thread id %s and this is "
"thread id %s."
% (self.alias, self._thread_ident, thread.get_ident()))
def is_dirty(self):
"""
Returns True if the current transaction requires a commit for changes to
happen.
"""
return self._dirty
def set_dirty(self):
"""
        Sets a dirty flag for the current thread and code streak. This can be
        used in a managed block of code to decide whether there are open
        changes waiting for commit.
"""
if self._dirty is not None:
self._dirty = True
else:
raise TransactionManagementError("This code isn't under transaction "
"management")
def set_clean(self):
"""
        Resets a dirty flag for the current thread and code streak. This can be
        used in a managed block of code to decide whether a commit or rollback
        should happen.
"""
if self._dirty is not None:
self._dirty = False
else:
raise TransactionManagementError("This code isn't under transaction management")
self.clean_savepoints()
def clean_savepoints(self):
self.savepoint_state = 0
def is_managed(self):
"""
Checks whether the transaction manager is in manual or in auto state.
"""
if self.transaction_state:
return self.transaction_state[-1]
return settings.TRANSACTIONS_MANAGED
def managed(self, flag=True):
"""
Puts the transaction manager into a manual state: managed transactions have
to be committed explicitly by the user. If you switch off transaction
management and there is a pending commit/rollback, the data will be
        committed.
"""
top = self.transaction_state
if top:
top[-1] = flag
if not flag and self.is_dirty():
self._commit()
self.set_clean()
else:
raise TransactionManagementError("This code isn't under transaction "
"management")
def commit_unless_managed(self):
"""
Commits changes if the system is not in managed transaction mode.
"""
self.validate_thread_sharing()
if not self.is_managed():
self._commit()
self.clean_savepoints()
else:
self.set_dirty()
def rollback_unless_managed(self):
"""
Rolls back changes if the system is not in managed transaction mode.
"""
self.validate_thread_sharing()
if not self.is_managed():
self._rollback()
else:
self.set_dirty()
def commit(self):
"""
Does the commit itself and resets the dirty flag.
"""
self.validate_thread_sharing()
self._commit()
self.set_clean()
def rollback(self):
"""
This function does the rollback itself and resets the dirty flag.
"""
self.validate_thread_sharing()
self._rollback()
self.set_clean()
def savepoint(self):
"""
Creates a savepoint (if supported and required by the backend) inside the
current transaction. Returns an identifier for the savepoint that will be
used for the subsequent rollback or commit.
"""
thread_ident = thread.get_ident()
self.savepoint_state += 1
tid = str(thread_ident).replace('-', '')
sid = "s%s_x%d" % (tid, self.savepoint_state)
self._savepoint(sid)
return sid
def savepoint_rollback(self, sid):
"""
Rolls back the most recent savepoint (if one exists). Does nothing if
savepoints are not supported.
"""
self.validate_thread_sharing()
if self.savepoint_state:
self._savepoint_rollback(sid)
def savepoint_commit(self, sid):
"""
Commits the most recent savepoint (if one exists). Does nothing if
savepoints are not supported.
"""
self.validate_thread_sharing()
if self.savepoint_state:
self._savepoint_commit(sid)
@contextmanager
def constraint_checks_disabled(self):
disabled = self.disable_constraint_checking()
try:
yield
finally:
if disabled:
self.enable_constraint_checking()
def disable_constraint_checking(self):
"""
Backends can implement as needed to temporarily disable foreign key constraint
checking.
"""
pass
def enable_constraint_checking(self):
"""
Backends can implement as needed to re-enable foreign key constraint checking.
"""
pass
def check_constraints(self, table_names=None):
"""
Backends can override this method if they can apply constraint checking (e.g. via "SET CONSTRAINTS
ALL IMMEDIATE"). Should raise an IntegrityError if any invalid foreign key references are encountered.
"""
pass
def close(self):
self.validate_thread_sharing()
if self.connection is not None:
self.connection.close()
self.connection = None
def cursor(self):
self.validate_thread_sharing()
if (self.use_debug_cursor or
(self.use_debug_cursor is None and settings.DEBUG)):
cursor = self.make_debug_cursor(self._cursor())
else:
cursor = util.CursorWrapper(self._cursor(), self)
return cursor
def make_debug_cursor(self, cursor):
return util.CursorDebugWrapper(cursor, self)
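# Rough usage pattern for the manual transaction API above (a simplified
# sketch of what django.db.transaction's helpers do, not an exact copy):
#
#     connection.enter_transaction_management(managed=True)
#     connection.managed(True)
#     try:
#         cursor = connection.cursor()
#         ...  # run queries; call connection.set_dirty() after writes
#         if connection.is_dirty():
#             connection.commit()
#     except Exception:
#         connection.rollback()
#         raise
#     finally:
#         connection.leave_transaction_management()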
class BaseDatabaseFeatures(object):
allows_group_by_pk = False
# True if django.db.backend.utils.typecast_timestamp is used on values
# returned from dates() calls.
needs_datetime_string_cast = True
empty_fetchmany_value = []
update_can_self_select = True
# Does the backend distinguish between '' and None?
interprets_empty_strings_as_nulls = False
# Does the backend allow inserting duplicate rows when a unique_together
# constraint exists, but one of the unique_together columns is NULL?
ignores_nulls_in_unique_constraints = True
can_use_chunked_reads = True
can_return_id_from_insert = False
has_bulk_insert = False
uses_autocommit = False
uses_savepoints = False
can_combine_inserts_with_and_without_auto_increment_pk = False
# If True, don't use integer foreign keys referring to, e.g., positive
# integer primary keys.
related_fields_match_type = False
allow_sliced_subqueries = True
has_select_for_update = False
has_select_for_update_nowait = False
supports_select_related = True
# Does the default test database allow multiple connections?
# Usually an indication that the test database is in-memory
test_db_allows_multiple_connections = True
# Can an object be saved without an explicit primary key?
supports_unspecified_pk = False
# Can a fixture contain forward references? i.e., are
# FK constraints checked at the end of transaction, or
# at the end of each save operation?
supports_forward_references = True
# Does a dirty transaction need to be rolled back
# before the cursor can be used again?
requires_rollback_on_dirty_transaction = False
# Does the backend allow very long model names without error?
supports_long_model_names = True
# Is there a REAL datatype in addition to floats/doubles?
has_real_datatype = False
supports_subqueries_in_group_by = True
supports_bitwise_or = True
# Do time/datetime fields have microsecond precision?
supports_microsecond_precision = True
# Does the __regex lookup support backreferencing and grouping?
supports_regex_backreferencing = True
# Can date/datetime lookups be performed using a string?
supports_date_lookup_using_string = True
# Can datetimes with timezones be used?
supports_timezones = True
# When performing a GROUP BY, is an ORDER BY NULL required
# to remove any ordering?
requires_explicit_null_ordering_when_grouping = False
# Is there a 1000 item limit on query parameters?
supports_1000_query_parameters = True
# Can an object have a primary key of 0? MySQL says No.
allows_primary_key_0 = True
# Do we need to NULL a ForeignKey out, or can the constraint check be
# deferred
can_defer_constraint_checks = False
# date_interval_sql can properly handle mixed Date/DateTime fields and timedeltas
supports_mixed_date_datetime_comparisons = True
# Does the backend support tablespaces? Default to False because it isn't
# in the SQL standard.
supports_tablespaces = False
# Does the backend reset sequences between tests?
supports_sequence_reset = True
# Confirm support for introspected foreign keys
# Every database can do this reliably, except MySQL,
# which can't do it for MyISAM tables
can_introspect_foreign_keys = True
# Support for the DISTINCT ON clause
can_distinct_on_fields = False
def __init__(self, connection):
self.connection = connection
@cached_property
def supports_transactions(self):
"Confirm support for transactions"
try:
# Make sure to run inside a managed transaction block,
            # otherwise autocommit will cause the confirmation to
# fail.
self.connection.enter_transaction_management()
self.connection.managed(True)
cursor = self.connection.cursor()
cursor.execute('CREATE TABLE ROLLBACK_TEST (X INT)')
self.connection._commit()
cursor.execute('INSERT INTO ROLLBACK_TEST (X) VALUES (8)')
self.connection._rollback()
cursor.execute('SELECT COUNT(X) FROM ROLLBACK_TEST')
count, = cursor.fetchone()
cursor.execute('DROP TABLE ROLLBACK_TEST')
self.connection._commit()
self.connection._dirty = False
finally:
self.connection.leave_transaction_management()
return count == 0
@cached_property
def supports_stddev(self):
"Confirm support for STDDEV and related stats functions"
class StdDevPop(object):
sql_function = 'STDDEV_POP'
try:
self.connection.ops.check_aggregate_support(StdDevPop())
return True
except NotImplementedError:
return False
class BaseDatabaseOperations(object):
"""
This class encapsulates all backend-specific differences, such as the way
a backend performs ordering or calculates the ID of a recently-inserted
row.
"""
compiler_module = "django.db.models.sql.compiler"
def __init__(self, connection):
self.connection = connection
self._cache = None
def autoinc_sql(self, table, column):
"""
Returns any SQL needed to support auto-incrementing primary keys, or
None if no SQL is necessary.
This SQL is executed when a table is created.
"""
return None
def bulk_batch_size(self, fields, objs):
"""
Returns the maximum allowed batch size for the backend. The fields
are the fields going to be inserted in the batch, the objs contains
all the objects to be inserted.
"""
return len(objs)
def cache_key_culling_sql(self):
"""
Returns a SQL query that retrieves the first cache key greater than the
n smallest.
This is used by the 'db' cache backend to determine where to start
culling.
"""
return "SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s"
def date_extract_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'year', 'month' or 'day', returns the SQL that
extracts a value from the given date field field_name.
"""
raise NotImplementedError()
def date_interval_sql(self, sql, connector, timedelta):
"""
Implements the date interval functionality for expressions
"""
raise NotImplementedError()
def date_trunc_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'year', 'month' or 'day', returns the SQL that
truncates the given date field field_name to a DATE object with only
the given specificity.
"""
raise NotImplementedError()
def datetime_cast_sql(self):
"""
Returns the SQL necessary to cast a datetime value so that it will be
retrieved as a Python datetime object instead of a string.
This SQL should include a '%s' in place of the field's name.
"""
return "%s"
def deferrable_sql(self):
"""
Returns the SQL necessary to make a constraint "initially deferred"
during a CREATE TABLE statement.
"""
return ''
def distinct_sql(self, fields):
"""
Returns an SQL DISTINCT clause which removes duplicate rows from the
result set. If any fields are given, only the given fields are being
checked for duplicates.
"""
if fields:
raise NotImplementedError('DISTINCT ON fields is not supported by this database backend')
else:
return 'DISTINCT'
def drop_foreignkey_sql(self):
"""
Returns the SQL command that drops a foreign key.
"""
return "DROP CONSTRAINT"
def drop_sequence_sql(self, table):
"""
Returns any SQL necessary to drop the sequence for the given table.
Returns None if no SQL is necessary.
"""
return None
def fetch_returned_insert_id(self, cursor):
"""
Given a cursor object that has just performed an INSERT...RETURNING
statement into a table that has an auto-incrementing ID, returns the
newly created ID.
"""
return cursor.fetchone()[0]
def field_cast_sql(self, db_type):
"""
Given a column type (e.g. 'BLOB', 'VARCHAR'), returns the SQL necessary
to cast it before using it in a WHERE statement. Note that the
resulting string should contain a '%s' placeholder for the column being
searched against.
"""
return '%s'
def force_no_ordering(self):
"""
Returns a list used in the "ORDER BY" clause to force no ordering at
all. Returning an empty list means that nothing will be included in the
ordering.
"""
return []
def for_update_sql(self, nowait=False):
"""
Returns the FOR UPDATE SQL clause to lock rows for an update operation.
"""
if nowait:
return 'FOR UPDATE NOWAIT'
else:
return 'FOR UPDATE'
def fulltext_search_sql(self, field_name):
"""
Returns the SQL WHERE clause to use in order to perform a full-text
search of the given field_name. Note that the resulting string should
contain a '%s' placeholder for the value being searched against.
"""
raise NotImplementedError('Full-text search is not implemented for this database backend')
def last_executed_query(self, cursor, sql, params):
"""
Returns a string of the query last executed by the given cursor, with
placeholders replaced with actual values.
`sql` is the raw query containing placeholders, and `params` is the
sequence of parameters. These are used by default, but this method
exists for database backends to provide a better implementation
according to their own quoting schemes.
"""
from django.utils.encoding import force_text
# Convert params to contain Unicode values.
to_unicode = lambda s: force_text(s, strings_only=True, errors='replace')
if isinstance(params, (list, tuple)):
u_params = tuple([to_unicode(val) for val in params])
else:
u_params = dict([(to_unicode(k), to_unicode(v)) for k, v in params.items()])
return force_text(sql) % u_params
def last_insert_id(self, cursor, table_name, pk_name):
"""
Given a cursor object that has just performed an INSERT statement into
a table that has an auto-incrementing ID, returns the newly created ID.
This method also receives the table name and the name of the primary-key
column.
"""
return cursor.lastrowid
def lookup_cast(self, lookup_type):
"""
Returns the string to use in a query when performing lookups
("contains", "like", etc). The resulting string should contain a '%s'
placeholder for the column being searched against.
"""
return "%s"
def max_in_list_size(self):
"""
Returns the maximum number of items that can be passed in a single 'IN'
list condition, or None if the backend does not impose a limit.
"""
return None
def max_name_length(self):
"""
Returns the maximum length of table and column names, or None if there
is no limit.
"""
return None
def no_limit_value(self):
"""
Returns the value to use for the LIMIT when we are wanting "LIMIT
infinity". Returns None if the limit clause can be omitted in this case.
"""
raise NotImplementedError
def pk_default_value(self):
"""
Returns the value to use during an INSERT statement to specify that
the field should use its default value.
"""
return 'DEFAULT'
def process_clob(self, value):
"""
Returns the value of a CLOB column, for backends that return a locator
object that requires additional processing.
"""
return value
def return_insert_id(self):
"""
For backends that support returning the last insert ID as part
of an insert query, this method returns the SQL and params to
append to the INSERT query. The returned fragment should
contain a format string to hold the appropriate column.
"""
pass
def compiler(self, compiler_name):
"""
Returns the SQLCompiler class corresponding to the given name,
in the namespace corresponding to the `compiler_module` attribute
on this backend.
"""
if self._cache is None:
self._cache = import_module(self.compiler_module)
return getattr(self._cache, compiler_name)
def quote_name(self, name):
"""
Returns a quoted version of the given table, index or column name. Does
not quote the given name if it's already been quoted.
"""
raise NotImplementedError()
def random_function_sql(self):
"""
Returns a SQL expression that returns a random value.
"""
return 'RANDOM()'
def regex_lookup(self, lookup_type):
"""
Returns the string to use in a query when performing regular expression
lookups (using "regex" or "iregex"). The resulting string should
contain a '%s' placeholder for the column being searched against.
If the feature is not supported (or part of it is not supported), a
NotImplementedError exception can be raised.
"""
raise NotImplementedError
def savepoint_create_sql(self, sid):
"""
Returns the SQL for starting a new savepoint. Only required if the
"uses_savepoints" feature is True. The "sid" parameter is a string
for the savepoint id.
"""
raise NotImplementedError
def savepoint_commit_sql(self, sid):
"""
Returns the SQL for committing the given savepoint.
"""
raise NotImplementedError
def savepoint_rollback_sql(self, sid):
"""
Returns the SQL for rolling back the given savepoint.
"""
raise NotImplementedError
def set_time_zone_sql(self):
"""
Returns the SQL that will set the connection's time zone.
Returns '' if the backend doesn't support time zones.
"""
return ''
def sql_flush(self, style, tables, sequences):
"""
Returns a list of SQL statements required to remove all data from
the given database tables (without actually removing the tables
themselves).
The returned value also includes SQL statements required to reset DB
sequences passed in :param sequences:.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
"""
raise NotImplementedError()
def sequence_reset_by_name_sql(self, style, sequences):
"""
Returns a list of the SQL statements required to reset sequences
passed in :param sequences:.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
"""
return []
def sequence_reset_sql(self, style, model_list):
"""
Returns a list of the SQL statements required to reset sequences for
the given models.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
"""
return [] # No sequence reset required by default.
def start_transaction_sql(self):
"""
Returns the SQL statement required to start a transaction.
"""
return "BEGIN;"
def end_transaction_sql(self, success=True):
if not success:
return "ROLLBACK;"
return "COMMIT;"
def tablespace_sql(self, tablespace, inline=False):
"""
Returns the SQL that will be used in a query to define the tablespace.
Returns '' if the backend doesn't support tablespaces.
If inline is True, the SQL is appended to a row; otherwise it's appended
to the entire CREATE TABLE or CREATE INDEX statement.
"""
return ''
def prep_for_like_query(self, x):
"""Prepares a value for use in a LIKE query."""
from django.utils.encoding import force_text
return force_text(x).replace("\\", "\\\\").replace("%", "\%").replace("_", "\_")
# Same as prep_for_like_query(), but called for "iexact" matches, which
# need not necessarily be implemented using "LIKE" in the backend.
prep_for_iexact_query = prep_for_like_query
def validate_autopk_value(self, value):
"""
Certain backends do not accept some values for "serial" fields
(for example zero in MySQL). This method will raise a ValueError
if the value is invalid, otherwise returns validated value.
"""
return value
def value_to_db_date(self, value):
"""
Transform a date value to an object compatible with what is expected
by the backend driver for date columns.
"""
if value is None:
return None
return six.text_type(value)
def value_to_db_datetime(self, value):
"""
Transform a datetime value to an object compatible with what is expected
by the backend driver for datetime columns.
"""
if value is None:
return None
return six.text_type(value)
def value_to_db_time(self, value):
"""
Transform a time value to an object compatible with what is expected
by the backend driver for time columns.
"""
if value is None:
return None
if is_aware(value):
raise ValueError("Django does not support timezone-aware times.")
return six.text_type(value)
def value_to_db_decimal(self, value, max_digits, decimal_places):
"""
Transform a decimal.Decimal value to an object compatible with what is
expected by the backend driver for decimal (numeric) columns.
"""
if value is None:
return None
return util.format_number(value, max_digits, decimal_places)
def year_lookup_bounds(self, value):
"""
Returns a two-elements list with the lower and upper bound to be used
with a BETWEEN operator to query a field value using a year lookup
`value` is an int, containing the looked-up year.
"""
first = '%s-01-01 00:00:00'
second = '%s-12-31 23:59:59.999999'
return [first % value, second % value]
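    # For example, year_lookup_bounds(2012) yields
    # ['2012-01-01 00:00:00', '2012-12-31 23:59:59.999999'].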
def year_lookup_bounds_for_date_field(self, value):
"""
Returns a two-elements list with the lower and upper bound to be used
with a BETWEEN operator to query a DateField value using a year lookup
`value` is an int, containing the looked-up year.
By default, it just calls `self.year_lookup_bounds`. Some backends need
this hook because on their DB date fields can't be compared to values
which include a time part.
"""
return self.year_lookup_bounds(value)
def convert_values(self, value, field):
"""
Coerce the value returned by the database backend into a consistent type
that is compatible with the field type.
"""
if value is None:
return value
internal_type = field.get_internal_type()
if internal_type == 'FloatField':
return float(value)
elif (internal_type and (internal_type.endswith('IntegerField')
or internal_type == 'AutoField')):
return int(value)
return value
def check_aggregate_support(self, aggregate_func):
"""Check that the backend supports the provided aggregate
This is used on specific backends to rule out known aggregates
that are known to have faulty implementations. If the named
aggregate function has a known problem, the backend should
raise NotImplementedError.
"""
pass
def combine_expression(self, connector, sub_expressions):
"""Combine a list of subexpressions into a single expression, using
the provided connecting operator. This is required because operators
can vary between backends (e.g., Oracle with %% and &) and between
subexpression types (e.g., date expressions)
"""
conn = ' %s ' % connector
return conn.join(sub_expressions)
def modify_insert_params(self, placeholders, params):
"""Allow modification of insert parameters. Needed for Oracle Spatial
backend due to #10888.
"""
return params
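# A rough sketch (not shipped with Django) of how a backend might fill in the
# required BaseDatabaseOperations hooks; the SQL shown is illustrative only:
#
#     class MyDatabaseOperations(BaseDatabaseOperations):
#         def quote_name(self, name):
#             if name.startswith('"') and name.endswith('"'):
#                 return name
#             return '"%s"' % name
#
#         def date_extract_sql(self, lookup_type, field_name):
#             return "EXTRACT('%s' FROM %s)" % (lookup_type, field_name)
#
#         def no_limit_value(self):
#             return None
#
#         def sql_flush(self, style, tables, sequences):
#             return ['%s %s;' % (style.SQL_KEYWORD('TRUNCATE'),
#                                 style.SQL_FIELD(self.quote_name(t)))
#                     for t in tables]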
class BaseDatabaseIntrospection(object):
"""
This class encapsulates all backend-specific introspection utilities
"""
data_types_reverse = {}
def __init__(self, connection):
self.connection = connection
def get_field_type(self, data_type, description):
"""Hook for a database backend to use the cursor description to
match a Django field type to a database column.
For Oracle, the column data_type on its own is insufficient to
distinguish between a FloatField and IntegerField, for example."""
return self.data_types_reverse[data_type]
def table_name_converter(self, name):
"""Apply a conversion to the name for the purposes of comparison.
The default table name converter is for case sensitive comparison.
"""
return name
def table_names(self, cursor=None):
"""
Returns a list of names of all tables that exist in the database.
The returned table list is sorted by Python's default sorting. We
do NOT use database's ORDER BY here to avoid subtle differences
in sorting order between databases.
"""
if cursor is None:
cursor = self.connection.cursor()
return sorted(self.get_table_list(cursor))
def get_table_list(self, cursor):
"""
Returns an unsorted list of names of all tables that exist in the
database.
"""
raise NotImplementedError
def django_table_names(self, only_existing=False):
"""
Returns a list of all table names that have associated Django models and
are in INSTALLED_APPS.
If only_existing is True, the resulting list will only include the tables
that actually exist in the database.
"""
from django.db import models, router
tables = set()
for app in models.get_apps():
for model in models.get_models(app):
if not model._meta.managed:
continue
if not router.allow_syncdb(self.connection.alias, model):
continue
tables.add(model._meta.db_table)
tables.update([f.m2m_db_table() for f in model._meta.local_many_to_many])
tables = list(tables)
if only_existing:
existing_tables = self.table_names()
tables = [
t
for t in tables
if self.table_name_converter(t) in existing_tables
]
return tables
def installed_models(self, tables):
"Returns a set of all models represented by the provided list of table names."
from django.db import models, router
all_models = []
for app in models.get_apps():
for model in models.get_models(app):
if router.allow_syncdb(self.connection.alias, model):
all_models.append(model)
tables = list(map(self.table_name_converter, tables))
return set([
m for m in all_models
if self.table_name_converter(m._meta.db_table) in tables
])
def sequence_list(self):
"Returns a list of information about all DB sequences for all models in all apps."
from django.db import models, router
apps = models.get_apps()
sequence_list = []
for app in apps:
for model in models.get_models(app):
if not model._meta.managed:
continue
if model._meta.swapped:
continue
if not router.allow_syncdb(self.connection.alias, model):
continue
for f in model._meta.local_fields:
if isinstance(f, models.AutoField):
sequence_list.append({'table': model._meta.db_table, 'column': f.column})
break # Only one AutoField is allowed per model, so don't bother continuing.
for f in model._meta.local_many_to_many:
# If this is an m2m using an intermediate table,
# we don't need to reset the sequence.
if f.rel.through is None:
sequence_list.append({'table': f.m2m_db_table(), 'column': None})
return sequence_list
def get_key_columns(self, cursor, table_name):
"""
Backends can override this to return a list of (column_name, referenced_table_name,
referenced_column_name) for all key columns in given table.
"""
raise NotImplementedError
def get_primary_key_column(self, cursor, table_name):
"""
Returns the name of the primary key column for the given table.
"""
for column in six.iteritems(self.get_indexes(cursor, table_name)):
if column[1]['primary_key']:
return column[0]
return None
def get_indexes(self, cursor, table_name):
"""
Returns a dictionary of indexed fieldname -> infodict for the given
table, where each infodict is in the format:
{'primary_key': boolean representing whether it's the primary key,
'unique': boolean representing whether it's a unique index}
Only single-column indexes are introspected.
"""
raise NotImplementedError
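# A minimal sketch of a concrete introspection backend. The catalog query and
# the type mapping below are illustrative assumptions (loosely SQLite-like),
# not any real Django backend.
class ExampleDatabaseIntrospection(BaseDatabaseIntrospection):
    data_types_reverse = {
        'integer': 'IntegerField',
        'real': 'FloatField',
        'text': 'TextField',
    }

    def get_table_list(self, cursor):
        # Each backend queries its own system catalog here.
        cursor.execute("SELECT name FROM sqlite_master WHERE type = 'table'")
        return [row[0] for row in cursor.fetchall()]

    def get_indexes(self, cursor, table_name):
        # Returning an empty dict is a valid minimal implementation; only
        # single-column indexes need to be reported.
        return {}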
class BaseDatabaseClient(object):
"""
This class encapsulates all backend-specific methods for opening a
client shell.
"""
# This should be a string representing the name of the executable
# (e.g., "psql"). Subclasses must override this.
executable_name = None
def __init__(self, connection):
# connection is an instance of BaseDatabaseWrapper.
self.connection = connection
def runshell(self):
raise NotImplementedError()
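# A hedged sketch of a concrete client; "exampledb" is a hypothetical shell
# executable, not a real Django backend.
class ExampleDatabaseClient(BaseDatabaseClient):
    executable_name = 'exampledb'

    def runshell(self):
        import subprocess
        settings_dict = self.connection.settings_dict
        args = [self.executable_name, settings_dict['NAME']]
        subprocess.call(args)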
class BaseDatabaseValidation(object):
"""
    This class encapsulates all backend-specific model validation.
"""
def __init__(self, connection):
self.connection = connection
def validate_field(self, errors, opts, f):
"By default, there is no backend-specific validation"
pass
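# A hedged sketch of a backend-specific override; the 30-character limit is an
# illustrative assumption, and errors.add() follows the old model-validation
# API that calls validate_field().
class ExampleDatabaseValidation(BaseDatabaseValidation):
    def validate_field(self, errors, opts, f):
        if f.db_index and len(f.column) > 30:
            errors.add(opts, '"%s": indexed column names longer than 30 '
                             'characters are not supported on this backend.'
                             % f.name)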
| bsd-3-clause | 710,411,669,456,865,800 | 34.133758 | 110 | 0.62263 | false |
westinedu/newertrends | django/views/generic/date_based.py | 246 | 14025 | import datetime
import time
from django.template import loader, RequestContext
from django.core.exceptions import ObjectDoesNotExist
from django.core.xheaders import populate_xheaders
from django.db.models.fields import DateTimeField
from django.http import Http404, HttpResponse
import warnings
warnings.warn(
'Function-based generic views have been deprecated; use class-based views instead.',
PendingDeprecationWarning
)
def archive_index(request, queryset, date_field, num_latest=15,
template_name=None, template_loader=loader,
extra_context=None, allow_empty=True, context_processors=None,
mimetype=None, allow_future=False, template_object_name='latest'):
"""
Generic top-level archive of date-based objects.
Templates: ``<app_label>/<model_name>_archive.html``
Context:
date_list
List of years
latest
Latest N (defaults to 15) objects by date
"""
if extra_context is None: extra_context = {}
model = queryset.model
if not allow_future:
queryset = queryset.filter(**{'%s__lte' % date_field: datetime.datetime.now()})
date_list = queryset.dates(date_field, 'year')[::-1]
if not date_list and not allow_empty:
raise Http404("No %s available" % model._meta.verbose_name)
if date_list and num_latest:
latest = queryset.order_by('-'+date_field)[:num_latest]
else:
latest = None
if not template_name:
template_name = "%s/%s_archive.html" % (model._meta.app_label, model._meta.object_name.lower())
t = template_loader.get_template(template_name)
c = RequestContext(request, {
'date_list' : date_list,
template_object_name : latest,
}, context_processors)
for key, value in extra_context.items():
if callable(value):
c[key] = value()
else:
c[key] = value
return HttpResponse(t.render(c), mimetype=mimetype)
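# A hedged URLconf sketch for these archive views; "myapp" and its Article
# model (with a "pub_date" date field) are hypothetical, and this would
# normally live in a project's urls.py rather than here.
def _example_archive_urls():
    from django.conf.urls.defaults import patterns, url
    from myapp.models import Article
    info = {'queryset': Article.objects.all(), 'date_field': 'pub_date'}
    return patterns('django.views.generic.date_based',
        url(r'^archive/$', 'archive_index', info),
        url(r'^(?P<year>\d{4})/$', 'archive_year', info),
    )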
def archive_year(request, year, queryset, date_field, template_name=None,
template_loader=loader, extra_context=None, allow_empty=False,
context_processors=None, template_object_name='object', mimetype=None,
make_object_list=False, allow_future=False):
"""
Generic yearly archive view.
Templates: ``<app_label>/<model_name>_archive_year.html``
Context:
date_list
List of months in this year with objects
year
This year
object_list
List of objects published in the given month
(Only available if make_object_list argument is True)
"""
if extra_context is None: extra_context = {}
model = queryset.model
now = datetime.datetime.now()
lookup_kwargs = {'%s__year' % date_field: year}
# Only bother to check current date if the year isn't in the past and future objects aren't requested.
if int(year) >= now.year and not allow_future:
lookup_kwargs['%s__lte' % date_field] = now
date_list = queryset.filter(**lookup_kwargs).dates(date_field, 'month')
if not date_list and not allow_empty:
raise Http404
if make_object_list:
object_list = queryset.filter(**lookup_kwargs)
else:
object_list = []
if not template_name:
template_name = "%s/%s_archive_year.html" % (model._meta.app_label, model._meta.object_name.lower())
t = template_loader.get_template(template_name)
c = RequestContext(request, {
'date_list': date_list,
'year': year,
'%s_list' % template_object_name: object_list,
}, context_processors)
for key, value in extra_context.items():
if callable(value):
c[key] = value()
else:
c[key] = value
return HttpResponse(t.render(c), mimetype=mimetype)
def archive_month(request, year, month, queryset, date_field,
month_format='%b', template_name=None, template_loader=loader,
extra_context=None, allow_empty=False, context_processors=None,
template_object_name='object', mimetype=None, allow_future=False):
"""
Generic monthly archive view.
Templates: ``<app_label>/<model_name>_archive_month.html``
Context:
date_list:
List of days in this month with objects
month:
(date) this month
next_month:
(date) the first day of the next month, or None if the next month is in the future
previous_month:
(date) the first day of the previous month
object_list:
list of objects published in the given month
"""
if extra_context is None: extra_context = {}
try:
tt = time.strptime("%s-%s" % (year, month), '%s-%s' % ('%Y', month_format))
date = datetime.date(*tt[:3])
except ValueError:
raise Http404
model = queryset.model
now = datetime.datetime.now()
# Calculate first and last day of month, for use in a date-range lookup.
first_day = date.replace(day=1)
if first_day.month == 12:
last_day = first_day.replace(year=first_day.year + 1, month=1)
else:
last_day = first_day.replace(month=first_day.month + 1)
lookup_kwargs = {
'%s__gte' % date_field: first_day,
'%s__lt' % date_field: last_day,
}
    # Only bother to check current date if the month isn't in the past and future objects aren't requested.
if last_day >= now.date() and not allow_future:
lookup_kwargs['%s__lte' % date_field] = now
object_list = queryset.filter(**lookup_kwargs)
date_list = object_list.dates(date_field, 'day')
if not object_list and not allow_empty:
raise Http404
# Calculate the next month, if applicable.
if allow_future:
next_month = last_day
elif last_day <= datetime.date.today():
next_month = last_day
else:
next_month = None
# Calculate the previous month
if first_day.month == 1:
previous_month = first_day.replace(year=first_day.year-1,month=12)
else:
previous_month = first_day.replace(month=first_day.month-1)
if not template_name:
template_name = "%s/%s_archive_month.html" % (model._meta.app_label, model._meta.object_name.lower())
t = template_loader.get_template(template_name)
c = RequestContext(request, {
'date_list': date_list,
'%s_list' % template_object_name: object_list,
'month': date,
'next_month': next_month,
'previous_month': previous_month,
}, context_processors)
for key, value in extra_context.items():
if callable(value):
c[key] = value()
else:
c[key] = value
return HttpResponse(t.render(c), mimetype=mimetype)
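# A hedged URL sketch for monthly archives; month_format='%m' makes the view
# parse numeric months (e.g. /2012/03/) instead of the default '%b' ("mar").
# The Article model is the same hypothetical one as above.
def _example_month_url():
    from django.conf.urls.defaults import patterns, url
    from myapp.models import Article
    info = {'queryset': Article.objects.all(), 'date_field': 'pub_date',
            'month_format': '%m'}
    return patterns('django.views.generic.date_based',
        url(r'^(?P<year>\d{4})/(?P<month>\d{2})/$', 'archive_month', info),
    )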
def archive_week(request, year, week, queryset, date_field,
template_name=None, template_loader=loader,
extra_context=None, allow_empty=True, context_processors=None,
template_object_name='object', mimetype=None, allow_future=False):
"""
Generic weekly archive view.
Templates: ``<app_label>/<model_name>_archive_week.html``
Context:
week:
(date) this week
object_list:
list of objects published in the given week
"""
if extra_context is None: extra_context = {}
try:
tt = time.strptime(year+'-0-'+week, '%Y-%w-%U')
date = datetime.date(*tt[:3])
except ValueError:
raise Http404
model = queryset.model
now = datetime.datetime.now()
# Calculate first and last day of week, for use in a date-range lookup.
first_day = date
last_day = date + datetime.timedelta(days=7)
lookup_kwargs = {
'%s__gte' % date_field: first_day,
'%s__lt' % date_field: last_day,
}
# Only bother to check current date if the week isn't in the past and future objects aren't requested.
if last_day >= now.date() and not allow_future:
lookup_kwargs['%s__lte' % date_field] = now
object_list = queryset.filter(**lookup_kwargs)
if not object_list and not allow_empty:
raise Http404
if not template_name:
template_name = "%s/%s_archive_week.html" % (model._meta.app_label, model._meta.object_name.lower())
t = template_loader.get_template(template_name)
c = RequestContext(request, {
'%s_list' % template_object_name: object_list,
'week': date,
    }, context_processors)
for key, value in extra_context.items():
if callable(value):
c[key] = value()
else:
c[key] = value
return HttpResponse(t.render(c), mimetype=mimetype)
def archive_day(request, year, month, day, queryset, date_field,
month_format='%b', day_format='%d', template_name=None,
template_loader=loader, extra_context=None, allow_empty=False,
context_processors=None, template_object_name='object',
mimetype=None, allow_future=False):
"""
Generic daily archive view.
Templates: ``<app_label>/<model_name>_archive_day.html``
Context:
object_list:
list of objects published that day
day:
(datetime) the day
previous_day
(datetime) the previous day
next_day
(datetime) the next day, or None if the current day is today
"""
if extra_context is None: extra_context = {}
try:
tt = time.strptime('%s-%s-%s' % (year, month, day),
'%s-%s-%s' % ('%Y', month_format, day_format))
date = datetime.date(*tt[:3])
except ValueError:
raise Http404
model = queryset.model
now = datetime.datetime.now()
if isinstance(model._meta.get_field(date_field), DateTimeField):
lookup_kwargs = {'%s__range' % date_field: (datetime.datetime.combine(date, datetime.time.min), datetime.datetime.combine(date, datetime.time.max))}
else:
lookup_kwargs = {date_field: date}
# Only bother to check current date if the date isn't in the past and future objects aren't requested.
if date >= now.date() and not allow_future:
lookup_kwargs['%s__lte' % date_field] = now
object_list = queryset.filter(**lookup_kwargs)
if not allow_empty and not object_list:
raise Http404
# Calculate the next day, if applicable.
if allow_future:
next_day = date + datetime.timedelta(days=1)
elif date < datetime.date.today():
next_day = date + datetime.timedelta(days=1)
else:
next_day = None
if not template_name:
template_name = "%s/%s_archive_day.html" % (model._meta.app_label, model._meta.object_name.lower())
t = template_loader.get_template(template_name)
c = RequestContext(request, {
'%s_list' % template_object_name: object_list,
'day': date,
'previous_day': date - datetime.timedelta(days=1),
'next_day': next_day,
}, context_processors)
for key, value in extra_context.items():
if callable(value):
c[key] = value()
else:
c[key] = value
return HttpResponse(t.render(c), mimetype=mimetype)
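# A hedged URL sketch for daily archives; the default month_format '%b' and
# day_format '%d' match URLs such as /2012/mar/05/. Article is hypothetical.
def _example_day_url():
    from django.conf.urls.defaults import patterns, url
    from myapp.models import Article
    info = {'queryset': Article.objects.all(), 'date_field': 'pub_date'}
    return patterns('django.views.generic.date_based',
        url(r'^(?P<year>\d{4})/(?P<month>[a-z]{3})/(?P<day>\d{1,2})/$',
            'archive_day', info),
    )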
def archive_today(request, **kwargs):
"""
Generic daily archive view for today. Same as archive_day view.
"""
today = datetime.date.today()
kwargs.update({
'year': str(today.year),
'month': today.strftime('%b').lower(),
'day': str(today.day),
})
return archive_day(request, **kwargs)
def object_detail(request, year, month, day, queryset, date_field,
month_format='%b', day_format='%d', object_id=None, slug=None,
slug_field='slug', template_name=None, template_name_field=None,
template_loader=loader, extra_context=None, context_processors=None,
template_object_name='object', mimetype=None, allow_future=False):
"""
Generic detail view from year/month/day/slug or year/month/day/id structure.
Templates: ``<app_label>/<model_name>_detail.html``
Context:
object:
the object to be detailed
"""
if extra_context is None: extra_context = {}
try:
tt = time.strptime('%s-%s-%s' % (year, month, day),
'%s-%s-%s' % ('%Y', month_format, day_format))
date = datetime.date(*tt[:3])
except ValueError:
raise Http404
model = queryset.model
now = datetime.datetime.now()
if isinstance(model._meta.get_field(date_field), DateTimeField):
lookup_kwargs = {'%s__range' % date_field: (datetime.datetime.combine(date, datetime.time.min), datetime.datetime.combine(date, datetime.time.max))}
else:
lookup_kwargs = {date_field: date}
# Only bother to check current date if the date isn't in the past and future objects aren't requested.
if date >= now.date() and not allow_future:
lookup_kwargs['%s__lte' % date_field] = now
if object_id:
lookup_kwargs['%s__exact' % model._meta.pk.name] = object_id
elif slug and slug_field:
lookup_kwargs['%s__exact' % slug_field] = slug
else:
raise AttributeError("Generic detail view must be called with either an object_id or a slug/slugfield")
try:
obj = queryset.get(**lookup_kwargs)
except ObjectDoesNotExist:
raise Http404("No %s found for" % model._meta.verbose_name)
if not template_name:
template_name = "%s/%s_detail.html" % (model._meta.app_label, model._meta.object_name.lower())
if template_name_field:
template_name_list = [getattr(obj, template_name_field), template_name]
t = template_loader.select_template(template_name_list)
else:
t = template_loader.get_template(template_name)
c = RequestContext(request, {
template_object_name: obj,
}, context_processors)
for key, value in extra_context.items():
if callable(value):
c[key] = value()
else:
c[key] = value
response = HttpResponse(t.render(c), mimetype=mimetype)
populate_xheaders(request, response, model, getattr(obj, obj._meta.pk.name))
return response
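# A hedged URL sketch for the detail view; assumes the hypothetical Article
# model has a SlugField named "slug".
def _example_detail_url():
    from django.conf.urls.defaults import patterns, url
    from myapp.models import Article
    info = {'queryset': Article.objects.all(), 'date_field': 'pub_date',
            'slug_field': 'slug'}
    return patterns('django.views.generic.date_based',
        url(r'^(?P<year>\d{4})/(?P<month>[a-z]{3})/(?P<day>\d{1,2})/'
            r'(?P<slug>[-\w]+)/$', 'object_detail', info),
    )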
| bsd-3-clause | 2,064,579,753,707,283,700 | 36.4 | 156 | 0.624171 | false |
keen99/SickRage | lib/github/CommitStatus.py | 49 | 4721 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <[email protected]> #
# Copyright 2012 Zearin <[email protected]> #
# Copyright 2013 AKFish <[email protected]> #
# Copyright 2013 Vincent Jacques <[email protected]> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
import github.NamedUser
class CommitStatus(github.GithubObject.NonCompletableGithubObject):
"""
    This class represents CommitStatus objects as returned for example by http://developer.github.com/v3/todo
"""
@property
def created_at(self):
"""
:type: datetime.datetime
"""
return self._created_at.value
@property
def creator(self):
"""
:type: :class:`github.NamedUser.NamedUser`
"""
return self._creator.value
@property
def description(self):
"""
:type: string
"""
return self._description.value
@property
def id(self):
"""
:type: integer
"""
return self._id.value
@property
def state(self):
"""
:type: string
"""
return self._state.value
@property
def target_url(self):
"""
:type: string
"""
return self._target_url.value
@property
def updated_at(self):
"""
:type: datetime.datetime
"""
return self._updated_at.value
@property
def url(self):
"""
:type: string
"""
return self._url.value
def _initAttributes(self):
self._created_at = github.GithubObject.NotSet
self._creator = github.GithubObject.NotSet
self._description = github.GithubObject.NotSet
self._id = github.GithubObject.NotSet
self._state = github.GithubObject.NotSet
self._target_url = github.GithubObject.NotSet
self._updated_at = github.GithubObject.NotSet
self._url = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "created_at" in attributes: # pragma no branch
self._created_at = self._makeDatetimeAttribute(attributes["created_at"])
if "creator" in attributes: # pragma no branch
self._creator = self._makeClassAttribute(github.NamedUser.NamedUser, attributes["creator"])
if "description" in attributes: # pragma no branch
self._description = self._makeStringAttribute(attributes["description"])
if "id" in attributes: # pragma no branch
self._id = self._makeIntAttribute(attributes["id"])
if "state" in attributes: # pragma no branch
self._state = self._makeStringAttribute(attributes["state"])
if "target_url" in attributes: # pragma no branch
self._target_url = self._makeStringAttribute(attributes["target_url"])
if "updated_at" in attributes: # pragma no branch
self._updated_at = self._makeDatetimeAttribute(attributes["updated_at"])
if "url" in attributes: # pragma no branch
self._url = self._makeStringAttribute(attributes["url"])
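# A hedged usage sketch; the access token, repository and commit SHA below are
# placeholders, and Commit.get_statuses() is assumed to return these objects.
def _example_usage():
    from github import Github
    g = Github("access_token")
    commit = g.get_repo("owner/repo").get_commit("commit_sha")
    for status in commit.get_statuses():
        print("%s %s %s" % (status.state, status.description, status.target_url))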
| gpl-3.0 | -3,011,692,638,393,350,000 | 38.672269 | 103 | 0.518111 | false |
xuweiliang/Codelibrary | novaclient/exceptions.py | 1 | 8967 | # Copyright 2010 Jacob Kaplan-Moss
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exception definitions.
"""
class UnsupportedVersion(Exception):
"""Indicates that the user is trying to use an unsupported
version of the API.
"""
pass
class UnsupportedAttribute(AttributeError):
"""Indicates that the user is trying to transmit the argument to a method,
which is not supported by selected version.
"""
def __init__(self, argument_name, start_version, end_version=None):
if end_version:
self.message = (
"'%(name)s' argument is only allowed for microversions "
"%(start)s - %(end)s." % {"name": argument_name,
"start": start_version,
"end": end_version})
else:
self.message = (
"'%(name)s' argument is only allowed since microversion "
"%(start)s." % {"name": argument_name, "start": start_version})
class CommandError(Exception):
pass
class AuthorizationFailure(Exception):
pass
class NoUniqueMatch(Exception):
pass
class AuthSystemNotFound(Exception):
"""When the user specify a AuthSystem but not installed."""
def __init__(self, auth_system):
self.auth_system = auth_system
def __str__(self):
return "AuthSystemNotFound: %s" % repr(self.auth_system)
class NoTokenLookupException(Exception):
"""This form of authentication does not support looking up
endpoints from an existing token.
"""
pass
class EndpointNotFound(Exception):
"""Could not find Service or Region in Service Catalog."""
pass
class AmbiguousEndpoints(Exception):
"""Found more than one matching endpoint in Service Catalog."""
def __init__(self, endpoints=None):
self.endpoints = endpoints
def __str__(self):
return "AmbiguousEndpoints: %s" % repr(self.endpoints)
class ConnectionRefused(Exception):
"""
Connection refused: the server refused the connection.
"""
def __init__(self, response=None):
self.response = response
def __str__(self):
return "ConnectionRefused: %s" % repr(self.response)
class ResourceInErrorState(Exception):
"""Resource is in the error state."""
def __init__(self, obj):
msg = "`%s` resource is in the error state" % obj.__class__.__name__
fault_msg = getattr(obj, "fault", {}).get("message")
if fault_msg:
msg += "due to '%s'" % fault_msg
self.message = "%s." % msg
class VersionNotFoundForAPIMethod(Exception):
msg_fmt = "API version '%(vers)s' is not supported on '%(method)s' method."
def __init__(self, version, method):
self.version = version
self.method = method
def __str__(self):
return self.msg_fmt % {"vers": self.version, "method": self.method}
class InstanceInDeletedState(Exception):
"""Instance is in the deleted state."""
pass
class ClientException(Exception):
"""
The base exception class for all exceptions this library raises.
"""
message = 'Unknown Error'
def __init__(self, code, message=None, details=None, request_id=None,
url=None, method=None):
self.code = code
self.message = message or self.__class__.message
self.details = details
self.request_id = request_id
self.url = url
self.method = method
def __str__(self):
formatted_string = "%s (HTTP %s)" % (self.message, self.code)
if self.request_id:
formatted_string += " (Request-ID: %s)" % self.request_id
return formatted_string
class RetryAfterException(ClientException):
"""
The base exception class for ClientExceptions that use Retry-After header.
"""
def __init__(self, *args, **kwargs):
try:
self.retry_after = int(kwargs.pop('retry_after'))
except (KeyError, ValueError):
self.retry_after = 0
super(RetryAfterException, self).__init__(*args, **kwargs)
class BadRequest(ClientException):
"""
HTTP 400 - Bad request: you sent some malformed data.
"""
http_status = 400
message = "Bad request"
class Unauthorized(ClientException):
"""
HTTP 401 - Unauthorized: bad credentials.
"""
http_status = 401
message = "Unauthorized"
class Forbidden(ClientException):
"""
HTTP 403 - Forbidden: your credentials don't give you access to this
resource.
"""
http_status = 403
message = "Forbidden"
class NotFound(ClientException):
"""
HTTP 404 - Not found
"""
http_status = 404
message = "Not found"
class MethodNotAllowed(ClientException):
"""
HTTP 405 - Method Not Allowed
"""
http_status = 405
message = "Method Not Allowed"
class NotAcceptable(ClientException):
"""
HTTP 406 - Not Acceptable
"""
http_status = 406
message = "Not Acceptable"
class Conflict(ClientException):
"""
HTTP 409 - Conflict
"""
http_status = 409
message = "Conflict"
class OverLimit(RetryAfterException):
"""
HTTP 413 - Over limit: you're over the API limits for this time period.
"""
http_status = 413
message = "Over limit"
class RateLimit(RetryAfterException):
"""
HTTP 429 - Rate limit: you've sent too many requests for this time period.
"""
http_status = 429
message = "Rate limit"
# NotImplemented is a python keyword.
class HTTPNotImplemented(ClientException):
"""
HTTP 501 - Not Implemented: the server does not support this operation.
"""
http_status = 501
message = "Not Implemented"
# In Python 2.4 Exception is old-style and thus doesn't have a __subclasses__()
# so we can do this:
# _code_map = dict((c.http_status, c)
# for c in ClientException.__subclasses__())
#
# Instead, we have to hardcode it:
_error_classes = [BadRequest, Unauthorized, Forbidden, NotFound,
MethodNotAllowed, NotAcceptable, Conflict, OverLimit,
RateLimit, HTTPNotImplemented]
_code_map = dict((c.http_status, c) for c in _error_classes)
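# A hedged illustration of the resulting mapping (unmapped status codes fall
# back to the plain ClientException in from_response() below):
#
#     _code_map[404] is NotFound
#     _code_map[429] is RateLimit
#     _code_map.get(418, ClientException) is ClientException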
class InvalidUsage(RuntimeError):
"""This function call is invalid in the way you are using this client.
Due to the transition to using keystoneauth some function calls are no
longer available. You should make a similar call to the session object
instead.
"""
pass
def from_response(response, body, url, method=None):
"""
Return an instance of an ClientException or subclass
based on a requests response.
Usage::
        resp = requests.request(...)
        if resp.status_code != 200:
            raise from_response(resp, resp.text, resp.url, resp.request.method)
"""
cls = _code_map.get(response.status_code, ClientException)
kwargs = {
'code': response.status_code,
'method': method,
'url': url,
'request_id': None,
}
if response.headers:
kwargs['request_id'] = response.headers.get('x-compute-request-id')
if (issubclass(cls, RetryAfterException) and
'retry-after' in response.headers):
kwargs['retry_after'] = response.headers.get('retry-after')
if body:
message = "n/a"
details = "n/a"
if hasattr(body, 'keys'):
# NOTE(mriedem): WebOb<1.6.0 will return a nested dict structure
# where the error keys to the message/details/code. WebOb>=1.6.0
# returns just a response body as a single dict, not nested,
# so we have to handle both cases (since we can't trust what we're
# given with content_type: application/json either way.
if 'message' in body:
# WebOb 1.6.0 case
message = body.get('message')
details = body.get('details')
else:
# WebOb<1.6.0 where we assume there is a single error message
# key to the body that has the message and details.
error = body[list(body)[0]]
message = error.get('message')
details = error.get('details')
kwargs['message'] = message
kwargs['details'] = details
return cls(**kwargs)
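# A hedged usage sketch of from_response(); the endpoint URL is a placeholder
# and the JSON-decoding fallback mirrors what callers typically do.
def _example_raise_from_response():
    import requests
    resp = requests.get("http://compute.example.com/v2/servers/unknown")
    if resp.status_code != 200:
        try:
            body = resp.json()
        except ValueError:
            body = None
        raise from_response(resp, body, resp.url, method='GET')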
class ResourceNotFound(Exception):
"""Error in getting the resource."""
pass
| apache-2.0 | -1,046,662,721,080,125,000 | 26.934579 | 79 | 0.616929 | false |