| repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses, 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses, 15 values) |
---|---|---|---|---|---|
henzk/ape | ape/container_mode/tasks.py | 1 | 12347 | from __future__ import unicode_literals, print_function
import os
import sys
import subprocess
import json
from ape import feaquencer
from ape import tasks
from .exceptions import ContainerError, ContainerNotFound, ProductNotFound
class Config(object):
APE_ROOT = os.environ['APE_ROOT_DIR']
SOURCE_HEADER = '#please execute the following in your shell:\n'
introduce_conf = Config()
@tasks.register_helper
def get_container_dir(container_name):
return tasks.conf.APE_ROOT + '/' + container_name
@tasks.register_helper
def get_product_dir(container_name, product_name):
return tasks.get_container_dir(container_name) + '/products/' + product_name
@tasks.register_helper
def get_containers():
entries = os.listdir(tasks.conf.APE_ROOT)
containers = []
for entry in entries:
if os.path.isdir(tasks.get_container_dir(entry) + '/products'):
containers.append(entry)
return containers
@tasks.register_helper
def get_products(container_name):
products_dir = tasks.get_container_dir(container_name) + '/products'
if not os.path.isdir(products_dir):
return []
products = os.listdir(products_dir)
def is_product(p):
return not p.startswith('.') and not p.startswith('_')
return [p for p in products if is_product(p)]
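# --- Illustrative sketch (not part of the original file): the helpers above
# encode a directory layout of APE_ROOT_DIR/<container>/products/<product>.
# The values '/apes', 'sdox' and 'dev' below are made-up examples.
def _example_layout(ape_root='/apes', container='sdox', product='dev'):
    # mirrors get_container_dir and get_product_dir using os.path.join
    container_dir = os.path.join(ape_root, container)
    product_dir = os.path.join(container_dir, 'products', product)
    return container_dir, product_dir
# _example_layout() -> ('/apes/sdox', '/apes/sdox/products/dev')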
@tasks.register
def info():
"""
List information about this productive environment
:return:
"""
print()
print('root directory :', tasks.conf.APE_ROOT)
print()
print('active container :', os.environ.get('CONTAINER_NAME', ''))
print()
print('active product :', os.environ.get('PRODUCT_NAME', ''))
print()
print('ape feature selection :', tasks.FEATURE_SELECTION)
print()
print('containers and products:')
print('-' * 30)
print()
for container_name in tasks.get_containers():
print(container_name)
for product_name in tasks.get_products(container_name):
print(' ' + product_name)
print()
@tasks.register
def cd(doi):
"""
cd to the directory of interest (doi)
a doi can be:
herbert - the container named "herbert"
sdox:dev - the product "dev" located in container "sdox"
:param doi:
:return:
"""
parts = doi.split(':')
if len(parts) == 2:
container_name, product_name = parts[0], parts[1]
elif len(parts) == 1 and os.environ.get('CONTAINER_NAME'):
# interpret doi as a product name if already zapped into a product in order
# to enable simply switching products by doing ape zap prod.
product_name = parts[0]
container_name = os.environ.get('CONTAINER_NAME')
else:
print('unable to parse context - format: <container_name>:<product_name>')
sys.exit(1)
if container_name not in tasks.get_containers():
print('No such container')
else:
if product_name:
if product_name not in tasks.get_products(container_name):
print('No such product')
else:
print(tasks.conf.SOURCE_HEADER)
print('cd ' + tasks.get_product_dir(container_name, product_name))
else:
print(tasks.conf.SOURCE_HEADER)
print('cd ' + tasks.get_container_dir(container_name))
SWITCH_TEMPLATE = '''{source_header}
export CONTAINER_NAME={container_name}
export PRODUCT_NAME={product_name}
update_ape_env
'''
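# --- Illustrative sketch (not part of the original file): rendering the
# template for a made-up 'sdox:dev' context yields the shell snippet that the
# surrounding shell integration is expected to source:
#
#   #please execute the following in your shell:
#
#   export CONTAINER_NAME=sdox
#   export PRODUCT_NAME=dev
#   update_ape_env
_example_switch_snippet = SWITCH_TEMPLATE.format(
    source_header=Config.SOURCE_HEADER,  # same value as tasks.conf.SOURCE_HEADER
    container_name='sdox',
    product_name='dev',
)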
@tasks.register
def switch(poi):
"""
Zaps into a specific product: switches the context to the product of interest (poi).
A poi is:
sdox:dev - for product "dev" located in container "sdox"
If poi does not contain a ":" it is interpreted as a product name, implying that a product within this
container is already active. So if this task is called with ape zap prod (and the corresponding container is
already zapped in), then only the product is switched.
After the context has been switched to sdox:dev additional commands may be available
that are relevant to sdox:dev
:param poi: product of interest, string: <container_name>:<product_name> or <product_name>.
"""
parts = poi.split(':')
if len(parts) == 2:
container_name, product_name = parts
elif len(parts) == 1 and os.environ.get('CONTAINER_NAME'):
# interpret poi as product name if already zapped into a product in order
# to enable simply switching products by doing ape zap prod.
container_name = os.environ.get('CONTAINER_NAME')
product_name = parts[0]
else:
print('unable to find poi: ', poi)
sys.exit(1)
if container_name not in tasks.get_containers():
raise ContainerNotFound('No such container %s' % container_name)
elif product_name not in tasks.get_products(container_name):
raise ProductNotFound('No such product %s' % product_name)
else:
print(SWITCH_TEMPLATE.format(
source_header=tasks.conf.SOURCE_HEADER,
container_name=container_name,
product_name=product_name
))
@tasks.register
def teleport(poi):
"""
switch and cd in one operation
:param poi:
:return:
"""
tasks.switch(poi)
tasks.cd(poi)
@tasks.register
def zap(poi):
'''alias for "teleport"'''
tasks.teleport(poi)
@tasks.register
def install_container(container_name):
"""
Installs the container specified by container_name
:param container_name: string, name of the container
"""
container_dir = os.path.join(os.environ['APE_ROOT_DIR'], container_name)
if os.path.exists(container_dir):
os.environ['CONTAINER_DIR'] = container_dir
else:
raise ContainerNotFound('ERROR: container directory not found: %s' % container_dir)
install_script = os.path.join(container_dir, 'install.py')
if os.path.exists(install_script):
print('... running install.py for %s' % container_name)
subprocess.check_call(['python', install_script])
else:
raise ContainerError('ERROR: this container does not provide an install.py!')
@tasks.register_helper
def get_extra_pypath(container_name=None):
from ape.installtools import pypath
return pypath.get_extra_pypath()
@tasks.register_helper
def get_poi_tuple(poi=None):
"""
Takes the poi or None and returns the container_dir and the product name, either from the passed poi
(<container_name>:<product_name>) or from os.environ.
:param poi: optional; <container_name>:<product_name>
:return: tuple of the container directory and the product name
"""
if poi:
parts = poi.split(':')
if len(parts) == 2:
container_name, product_name = parts
if container_name not in tasks.get_containers():
print('No such container')
sys.exit(1)
elif product_name not in tasks.get_products(container_name):
print('No such product')
sys.exit(1)
else:
container_dir = tasks.get_container_dir(container_name)
else:
print('Please check your arguments: --poi <container>:<product>')
sys.exit(1)
else:
container_dir = os.environ.get('CONTAINER_DIR')
product_name = os.environ.get('PRODUCT_NAME')
return container_dir, product_name
@tasks.register
def validate_product_equation(poi=None):
"""
Validates the product equation.
* Validates the feature order
* Validates the product spec (mandatory functional features)
:param poi: optional product of interest
"""
from . import utils
from . import validators
container_dir, product_name = tasks.get_poi_tuple(poi=poi)
feature_list = utils.get_features_from_equation(container_dir, product_name)
ordering_constraints = utils.get_feature_order_constraints(container_dir)
spec_path = utils.get_feature_ide_paths(container_dir, product_name).product_spec_path
print('*** Starting product.equation validation')
# --------------------------------------------------------
# Validate the feature order
print('\tChecking feature order')
feature_order_validator = validators.FeatureOrderValidator(feature_list, ordering_constraints)
feature_order_validator.check_order()
if feature_order_validator.has_errors():
print('\t\txxx ERROR in your product.equation feature order xxx')
for error in feature_order_validator.get_violations():
print('\t\t\t', error[1])
else:
print('\t\tOK')
# --------------------------------------------------------
# Validate the functional product specification
print('\tChecking functional product spec')
if not os.path.exists(spec_path):
print(
'\t\tSkipped - No product spec exists.\n'
'\t\tYou may create a product spec if you want to ensure that\n'
'\t\trequired functional features are represented in the product equation\n'
'\t\t=> Create spec file featuremodel/productline/<container>/product_spec.json'
)
return
spec_validator = validators.ProductSpecValidator(spec_path, product_name, feature_list)
if not spec_validator.is_valid():
if spec_validator.get_errors_mandatory():
print('\t\tERROR: The following features are missing', spec_validator.get_errors_mandatory())
if spec_validator.get_errors_never():
print('\t\tERROR: The following features are not allowed', spec_validator.get_errors_never())
else:
print('\t\tOK')
if feature_order_validator.has_errors() or spec_validator.has_errors():
sys.exit(1)
@tasks.register_helper
def get_ordered_feature_list(info_object, feature_list):
"""
Orders the passed feature list by the given JSON-formatted feature
dependency file using feaquencer's topsort algorithm.
:param feature_list:
:param info_object:
:return:
"""
feature_dependencies = json.load(open(info_object.feature_order_json))
feature_selection = [feature for feature in [feature.strip().replace('\n', '') for feature in feature_list]
if len(feature) > 0 and not feature.startswith('_') and not feature.startswith('#')]
return [feature + '\n' for feature in feaquencer.get_total_order(feature_selection, feature_dependencies)]
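# --- Illustrative sketch (not part of the original file): the filtering step
# above, extracted as a standalone function. Blank lines, '#' comments and
# '_'-prefixed entries are dropped before feaquencer computes the total order.
def _example_feature_filter(raw_lines):
    cleaned = [line.strip().replace('\n', '') for line in raw_lines]
    return [f for f in cleaned
            if len(f) > 0 and not f.startswith('_') and not f.startswith('#')]
# _example_feature_filter(['django_productline\n', '\n', '# note', '_abstract'])
# -> ['django_productline']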
@tasks.register
def config_to_equation(poi=None):
"""
Generates a product.equation file for the given product name.
It generates it from the <product_name>.config file in the products folder.
For that you need to have your project imported to featureIDE and set the correct settings.
"""
from . import utils
container_dir, product_name = tasks.get_poi_tuple(poi=poi)
info_object = utils.get_feature_ide_paths(container_dir, product_name)
feature_list = list()
try:
print('*** Processing ', info_object.config_file_path)
with open(info_object.config_file_path, 'r') as config_file:
config_file = config_file.readlines()
for line in config_file:
# in FeatureIDE we can't use '.' in paths to sub-features, so '__' is used instead
# e.g. django_productline__features__development
if len(line.split('__')) > 2:
line = line.replace('__', '.')
if line.startswith('abstract_'):
# skip abstract features; this is a special case as FeatureIDE does not work with abstract
# sub trees / leaves.
line = ''
feature_list.append(line)
except IOError:
print('{} does not exist. Make sure your config file exists.'.format(info_object.config_file_path))
feature_list = tasks.get_ordered_feature_list(info_object, feature_list)
try:
with open(info_object.equation_file_path, 'w') as eq_file:
eq_file.writelines(feature_list)
print('*** Successfully generated product.equation')
except IOError:
print('product.equation file not found. Please make sure you have a valid product.equation in your chosen product')
# finally, validate the generated product equation
tasks.validate_product_equation()
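# --- Illustrative sketch (not part of the original file): the per-line
# transformation applied above, extracted as a pure function; the feature
# names are made-up examples.
def _example_config_line(line):
    if len(line.split('__')) > 2:
        line = line.replace('__', '.')
    if line.startswith('abstract_'):
        line = ''  # abstract features are dropped
    return line
# _example_config_line('django_productline__features__development\n')
# -> 'django_productline.features.development\n'
# _example_config_line('abstract_base\n') -> ''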
| mit |
bclau/nova | nova/tests/api/openstack/compute/test_limits.py | 7 | 36000 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests dealing with HTTP rate-limiting.
"""
import httplib
import StringIO
from xml.dom import minidom
from lxml import etree
import webob
from nova.api.openstack.compute import limits
from nova.api.openstack.compute import views
from nova.api.openstack import xmlutil
import nova.context
from nova.openstack.common import jsonutils
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import matchers
from nova import utils
TEST_LIMITS = [
limits.Limit("GET", "/delayed", "^/delayed", 1,
utils.TIME_UNITS['MINUTE']),
limits.Limit("POST", "*", ".*", 7, utils.TIME_UNITS['MINUTE']),
limits.Limit("POST", "/servers", "^/servers", 3,
utils.TIME_UNITS['MINUTE']),
limits.Limit("PUT", "*", "", 10, utils.TIME_UNITS['MINUTE']),
limits.Limit("PUT", "/servers", "^/servers", 5,
utils.TIME_UNITS['MINUTE']),
]
NS = {
'atom': 'http://www.w3.org/2005/Atom',
'ns': 'http://docs.openstack.org/common/api/v1.0'
}
class BaseLimitTestSuite(test.NoDBTestCase):
"""Base test suite which provides relevant stubs and time abstraction."""
def setUp(self):
super(BaseLimitTestSuite, self).setUp()
self.time = 0.0
self.stubs.Set(limits.Limit, "_get_time", self._get_time)
self.absolute_limits = {}
def stub_get_project_quotas(context, project_id, usages=True):
return dict((k, dict(limit=v))
for k, v in self.absolute_limits.items())
self.stubs.Set(nova.quota.QUOTAS, "get_project_quotas",
stub_get_project_quotas)
def _get_time(self):
"""Return the "time" according to this test suite."""
return self.time
class LimitsControllerTest(BaseLimitTestSuite):
"""
Tests for `limits.LimitsController` class.
"""
def setUp(self):
"""Run before each test."""
super(LimitsControllerTest, self).setUp()
self.controller = limits.create_resource()
self.ctrler = limits.LimitsController()
def _get_index_request(self, accept_header="application/json"):
"""Helper to set routing arguments."""
request = webob.Request.blank("/")
request.accept = accept_header
request.environ["wsgiorg.routing_args"] = (None, {
"action": "index",
"controller": "",
})
context = nova.context.RequestContext('testuser', 'testproject')
request.environ["nova.context"] = context
return request
def _populate_limits(self, request):
"""Put limit info into a request."""
_limits = [
limits.Limit("GET", "*", ".*", 10, 60).display(),
limits.Limit("POST", "*", ".*", 5, 60 * 60).display(),
limits.Limit("GET", "changes-since*", "changes-since",
5, 60).display(),
]
request.environ["nova.limits"] = _limits
return request
def test_empty_index_json(self):
# Test getting empty limit details in JSON.
request = self._get_index_request()
response = request.get_response(self.controller)
expected = {
"limits": {
"rate": [],
"absolute": {},
},
}
body = jsonutils.loads(response.body)
self.assertEqual(expected, body)
def test_index_json(self):
# Test getting limit details in JSON.
request = self._get_index_request()
request = self._populate_limits(request)
self.absolute_limits = {
'ram': 512,
'instances': 5,
'cores': 21,
'key_pairs': 10,
'floating_ips': 10,
'security_groups': 10,
'security_group_rules': 20,
}
response = request.get_response(self.controller)
expected = {
"limits": {
"rate": [
{
"regex": ".*",
"uri": "*",
"limit": [
{
"verb": "GET",
"next-available": "1970-01-01T00:00:00Z",
"unit": "MINUTE",
"value": 10,
"remaining": 10,
},
{
"verb": "POST",
"next-available": "1970-01-01T00:00:00Z",
"unit": "HOUR",
"value": 5,
"remaining": 5,
},
],
},
{
"regex": "changes-since",
"uri": "changes-since*",
"limit": [
{
"verb": "GET",
"next-available": "1970-01-01T00:00:00Z",
"unit": "MINUTE",
"value": 5,
"remaining": 5,
},
],
},
],
"absolute": {
"maxTotalRAMSize": 512,
"maxTotalInstances": 5,
"maxTotalCores": 21,
"maxTotalKeypairs": 10,
"maxTotalFloatingIps": 10,
"maxSecurityGroups": 10,
"maxSecurityGroupRules": 20,
},
},
}
body = jsonutils.loads(response.body)
self.assertEqual(expected, body)
def _populate_limits_diff_regex(self, request):
"""Put limit info into a request."""
_limits = [
limits.Limit("GET", "*", ".*", 10, 60).display(),
limits.Limit("GET", "*", "*.*", 10, 60).display(),
]
request.environ["nova.limits"] = _limits
return request
def test_index_diff_regex(self):
# Test getting limit details in JSON.
request = self._get_index_request()
request = self._populate_limits_diff_regex(request)
response = request.get_response(self.controller)
expected = {
"limits": {
"rate": [
{
"regex": ".*",
"uri": "*",
"limit": [
{
"verb": "GET",
"next-available": "1970-01-01T00:00:00Z",
"unit": "MINUTE",
"value": 10,
"remaining": 10,
},
],
},
{
"regex": "*.*",
"uri": "*",
"limit": [
{
"verb": "GET",
"next-available": "1970-01-01T00:00:00Z",
"unit": "MINUTE",
"value": 10,
"remaining": 10,
},
],
},
],
"absolute": {},
},
}
body = jsonutils.loads(response.body)
self.assertEqual(expected, body)
def _test_index_absolute_limits_json(self, expected):
request = self._get_index_request()
response = request.get_response(self.controller)
body = jsonutils.loads(response.body)
self.assertEqual(expected, body['limits']['absolute'])
def test_index_ignores_extra_absolute_limits_json(self):
self.absolute_limits = {'unknown_limit': 9001}
self._test_index_absolute_limits_json({})
def test_index_absolute_ram_json(self):
self.absolute_limits = {'ram': 1024}
self._test_index_absolute_limits_json({'maxTotalRAMSize': 1024})
def test_index_absolute_cores_json(self):
self.absolute_limits = {'cores': 17}
self._test_index_absolute_limits_json({'maxTotalCores': 17})
def test_index_absolute_instances_json(self):
self.absolute_limits = {'instances': 19}
self._test_index_absolute_limits_json({'maxTotalInstances': 19})
def test_index_absolute_metadata_json(self):
# NOTE: both server metadata and image metadata are overloaded
# into metadata_items
self.absolute_limits = {'metadata_items': 23}
expected = {
'maxServerMeta': 23,
'maxImageMeta': 23,
}
self._test_index_absolute_limits_json(expected)
def test_index_absolute_injected_files(self):
self.absolute_limits = {
'injected_files': 17,
'injected_file_content_bytes': 86753,
}
expected = {
'maxPersonality': 17,
'maxPersonalitySize': 86753,
}
self._test_index_absolute_limits_json(expected)
def test_index_absolute_security_groups(self):
self.absolute_limits = {
'security_groups': 8,
'security_group_rules': 16,
}
expected = {
'maxSecurityGroups': 8,
'maxSecurityGroupRules': 16,
}
self._test_index_absolute_limits_json(expected)
def test_limit_create(self):
req = fakes.HTTPRequest.blank('/v2/fake/limits')
self.assertRaises(webob.exc.HTTPNotImplemented, self.ctrler.create,
req, {})
def test_limit_delete(self):
req = fakes.HTTPRequest.blank('/v2/fake/limits')
self.assertRaises(webob.exc.HTTPNotImplemented, self.ctrler.delete,
req, 1)
def test_limit_detail(self):
req = fakes.HTTPRequest.blank('/v2/fake/limits')
self.assertRaises(webob.exc.HTTPNotImplemented, self.ctrler.detail,
req)
def test_limit_show(self):
req = fakes.HTTPRequest.blank('/v2/fake/limits')
self.assertRaises(webob.exc.HTTPNotImplemented, self.ctrler.show,
req, 1)
def test_limit_update(self):
req = fakes.HTTPRequest.blank('/v2/fake/limits')
self.assertRaises(webob.exc.HTTPNotImplemented, self.ctrler.update,
req, 1, {})
class MockLimiter(limits.Limiter):
pass
class LimitMiddlewareTest(BaseLimitTestSuite):
"""
Tests for the `limits.RateLimitingMiddleware` class.
"""
@webob.dec.wsgify
def _empty_app(self, request):
"""Do-nothing WSGI app."""
pass
def setUp(self):
"""Prepare middleware for use through fake WSGI app."""
super(LimitMiddlewareTest, self).setUp()
_limits = '(GET, *, .*, 1, MINUTE)'
self.app = limits.RateLimitingMiddleware(self._empty_app, _limits,
"%s.MockLimiter" %
self.__class__.__module__)
def test_limit_class(self):
# Test that middleware selected correct limiter class.
assert isinstance(self.app._limiter, MockLimiter)
def test_good_request(self):
# Test successful GET request through middleware.
request = webob.Request.blank("/")
response = request.get_response(self.app)
self.assertEqual(200, response.status_int)
def test_limited_request_json(self):
# Test a rate-limited (429) GET request through middleware.
request = webob.Request.blank("/")
response = request.get_response(self.app)
self.assertEqual(200, response.status_int)
request = webob.Request.blank("/")
response = request.get_response(self.app)
self.assertEqual(response.status_int, 429)
self.assertTrue('Retry-After' in response.headers)
retry_after = int(response.headers['Retry-After'])
self.assertAlmostEqual(retry_after, 60, 1)
body = jsonutils.loads(response.body)
expected = "Only 1 GET request(s) can be made to * every minute."
value = body["overLimit"]["details"].strip()
self.assertEqual(value, expected)
self.assertTrue("retryAfter" in body["overLimit"])
retryAfter = body["overLimit"]["retryAfter"]
self.assertEqual(retryAfter, "60")
def test_limited_request_xml(self):
# Test a rate-limited (429) response as XML.
request = webob.Request.blank("/")
response = request.get_response(self.app)
self.assertEqual(200, response.status_int)
request = webob.Request.blank("/")
request.accept = "application/xml"
response = request.get_response(self.app)
self.assertEqual(response.status_int, 429)
root = minidom.parseString(response.body).childNodes[0]
expected = "Only 1 GET request(s) can be made to * every minute."
self.assertNotEqual(root.attributes.getNamedItem("retryAfter"), None)
retryAfter = root.attributes.getNamedItem("retryAfter").value
self.assertEqual(retryAfter, "60")
details = root.getElementsByTagName("details")
self.assertEqual(details.length, 1)
value = details.item(0).firstChild.data.strip()
self.assertEqual(value, expected)
class LimitTest(BaseLimitTestSuite):
"""
Tests for the `limits.Limit` class.
"""
def test_GET_no_delay(self):
# Test a limit handles 1 GET per second.
limit = limits.Limit("GET", "*", ".*", 1, 1)
delay = limit("GET", "/anything")
self.assertEqual(None, delay)
self.assertEqual(0, limit.next_request)
self.assertEqual(0, limit.last_request)
def test_GET_delay(self):
# Test two calls to 1 GET per second limit.
limit = limits.Limit("GET", "*", ".*", 1, 1)
delay = limit("GET", "/anything")
self.assertEqual(None, delay)
delay = limit("GET", "/anything")
self.assertEqual(1, delay)
self.assertEqual(1, limit.next_request)
self.assertEqual(0, limit.last_request)
self.time += 4
delay = limit("GET", "/anything")
self.assertEqual(None, delay)
self.assertEqual(4, limit.next_request)
self.assertEqual(4, limit.last_request)
class ParseLimitsTest(BaseLimitTestSuite):
"""
Tests for the default limits parser in the in-memory
`limits.Limiter` class.
"""
def test_invalid(self):
# Test that parse_limits() handles invalid input correctly.
self.assertRaises(ValueError, limits.Limiter.parse_limits,
';;;;;')
def test_bad_rule(self):
# Test that parse_limits() handles bad rules correctly.
self.assertRaises(ValueError, limits.Limiter.parse_limits,
'GET, *, .*, 20, minute')
def test_missing_arg(self):
# Test that parse_limits() handles missing args correctly.
self.assertRaises(ValueError, limits.Limiter.parse_limits,
'(GET, *, .*, 20)')
def test_bad_value(self):
# Test that parse_limits() handles bad values correctly.
self.assertRaises(ValueError, limits.Limiter.parse_limits,
'(GET, *, .*, foo, minute)')
def test_bad_unit(self):
# Test that parse_limits() handles bad units correctly.
self.assertRaises(ValueError, limits.Limiter.parse_limits,
'(GET, *, .*, 20, lightyears)')
def test_multiple_rules(self):
# Test that parse_limits() handles multiple rules correctly.
try:
l = limits.Limiter.parse_limits('(get, *, .*, 20, minute);'
'(PUT, /foo*, /foo.*, 10, hour);'
'(POST, /bar*, /bar.*, 5, second);'
'(Say, /derp*, /derp.*, 1, day)')
except ValueError as e:
assert False, str(e)
# Make sure the number of returned limits are correct
self.assertEqual(len(l), 4)
# Check all the verbs...
expected = ['GET', 'PUT', 'POST', 'SAY']
self.assertEqual([t.verb for t in l], expected)
# ...the URIs...
expected = ['*', '/foo*', '/bar*', '/derp*']
self.assertEqual([t.uri for t in l], expected)
# ...the regexes...
expected = ['.*', '/foo.*', '/bar.*', '/derp.*']
self.assertEqual([t.regex for t in l], expected)
# ...the values...
expected = [20, 10, 5, 1]
self.assertEqual([t.value for t in l], expected)
# ...and the units...
expected = [utils.TIME_UNITS['MINUTE'], utils.TIME_UNITS['HOUR'],
utils.TIME_UNITS['SECOND'], utils.TIME_UNITS['DAY']]
self.assertEqual([t.unit for t in l], expected)
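# --- Illustrative sketch (not part of this test file): the rule format
# exercised above is a semicolon-separated list of
# '(verb, uri, regex, value, unit)' tuples; verbs are normalized to upper
# case on parse. The rule values here are made up.
def _example_parse_limits():
    parsed = limits.Limiter.parse_limits(
        '(get, *, .*, 20, minute);(POST, /servers*, ^/servers, 3, hour)')
    return [(l.verb, l.uri, l.value) for l in parsed]
# _example_parse_limits() -> [('GET', '*', 20), ('POST', '/servers*', 3)]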
class LimiterTest(BaseLimitTestSuite):
"""
Tests for the in-memory `limits.Limiter` class.
"""
def setUp(self):
"""Run before each test."""
super(LimiterTest, self).setUp()
userlimits = {'limits.user3': '',
'limits.user0': '(get, *, .*, 4, minute);'
'(put, *, .*, 2, minute)'}
self.limiter = limits.Limiter(TEST_LIMITS, **userlimits)
def _check(self, num, verb, url, username=None):
"""Check and yield results from checks."""
for x in xrange(num):
yield self.limiter.check_for_delay(verb, url, username)[0]
def _check_sum(self, num, verb, url, username=None):
"""Check and sum results from checks."""
results = self._check(num, verb, url, username)
return sum(item for item in results if item)
def test_no_delay_GET(self):
"""
Simple test to ensure no delay on a single call for a limit verb we
didn't set.
"""
delay = self.limiter.check_for_delay("GET", "/anything")
self.assertEqual(delay, (None, None))
def test_no_delay_PUT(self):
# Simple test to ensure no delay on a single call for a known limit.
delay = self.limiter.check_for_delay("PUT", "/anything")
self.assertEqual(delay, (None, None))
def test_delay_PUT(self):
"""
Ensure the 11th PUT will result in a delay of 6.0 seconds until
the next request will be granted.
"""
expected = [None] * 10 + [6.0]
results = list(self._check(11, "PUT", "/anything"))
self.assertEqual(expected, results)
def test_delay_POST(self):
"""
Ensure the 8th POST will result in a delay of 6.0 seconds until
the next request will be granted.
"""
expected = [None] * 7
results = list(self._check(7, "POST", "/anything"))
self.assertEqual(expected, results)
expected = 60.0 / 7.0
results = self._check_sum(1, "POST", "/anything")
self.failUnlessAlmostEqual(expected, results, 8)
def test_delay_GET(self):
# Ensure the 11th GET will result in NO delay.
expected = [None] * 11
results = list(self._check(11, "GET", "/anything"))
self.assertEqual(expected, results)
expected = [None] * 4 + [15.0]
results = list(self._check(5, "GET", "/foo", "user0"))
self.assertEqual(expected, results)
def test_delay_PUT_servers(self):
"""
Ensure PUT on /servers limits at 5 requests, and PUT elsewhere is still
OK after 5 requests...but then after 11 total requests, PUT limiting
kicks in.
"""
# First 6 requests on PUT /servers
expected = [None] * 5 + [12.0]
results = list(self._check(6, "PUT", "/servers"))
self.assertEqual(expected, results)
# Next 5 request on PUT /anything
expected = [None] * 4 + [6.0]
results = list(self._check(5, "PUT", "/anything"))
self.assertEqual(expected, results)
def test_delay_PUT_wait(self):
"""
Ensure after hitting the limit and then waiting for the correct
amount of time, the limit will be lifted.
"""
expected = [None] * 10 + [6.0]
results = list(self._check(11, "PUT", "/anything"))
self.assertEqual(expected, results)
# Advance time
self.time += 6.0
expected = [None, 6.0]
results = list(self._check(2, "PUT", "/anything"))
self.assertEqual(expected, results)
def test_multiple_delays(self):
# Ensure multiple requests still get a delay.
expected = [None] * 10 + [6.0] * 10
results = list(self._check(20, "PUT", "/anything"))
self.assertEqual(expected, results)
self.time += 1.0
expected = [5.0] * 10
results = list(self._check(10, "PUT", "/anything"))
self.assertEqual(expected, results)
expected = [None] * 2 + [30.0] * 8
results = list(self._check(10, "PUT", "/anything", "user0"))
self.assertEqual(expected, results)
def test_user_limit(self):
# Test user-specific limits.
self.assertEqual(self.limiter.levels['user3'], [])
self.assertEqual(len(self.limiter.levels['user0']), 2)
def test_multiple_users(self):
# Tests involving multiple users.
# User0
expected = [None] * 2 + [30.0] * 8
results = list(self._check(10, "PUT", "/anything", "user0"))
self.assertEqual(expected, results)
# User1
expected = [None] * 10 + [6.0] * 10
results = list(self._check(20, "PUT", "/anything", "user1"))
self.assertEqual(expected, results)
# User2
expected = [None] * 10 + [6.0] * 5
results = list(self._check(15, "PUT", "/anything", "user2"))
self.assertEqual(expected, results)
# User3
expected = [None] * 20
results = list(self._check(20, "PUT", "/anything", "user3"))
self.assertEqual(expected, results)
self.time += 1.0
# User1 again
expected = [5.0] * 10
results = list(self._check(10, "PUT", "/anything", "user1"))
self.assertEqual(expected, results)
self.time += 1.0
# User1 again
expected = [4.0] * 5
results = list(self._check(5, "PUT", "/anything", "user2"))
self.assertEqual(expected, results)
# User0 again
expected = [28.0]
results = list(self._check(1, "PUT", "/anything", "user0"))
self.assertEqual(expected, results)
self.time += 28.0
expected = [None, 30.0]
results = list(self._check(2, "PUT", "/anything", "user0"))
self.assertEqual(expected, results)
class WsgiLimiterTest(BaseLimitTestSuite):
"""
Tests for `limits.WsgiLimiter` class.
"""
def setUp(self):
"""Run before each test."""
super(WsgiLimiterTest, self).setUp()
self.app = limits.WsgiLimiter(TEST_LIMITS)
def _request_data(self, verb, path):
"""Get data describing a limit request verb/path."""
return jsonutils.dumps({"verb": verb, "path": path})
def _request(self, verb, url, username=None):
"""Make sure that POSTing to the given url causes the given username
to perform the given action. Make the internal rate limiter return a
delay and make sure that the WSGI app returns the correct response.
"""
if username:
request = webob.Request.blank("/%s" % username)
else:
request = webob.Request.blank("/")
request.method = "POST"
request.body = self._request_data(verb, url)
response = request.get_response(self.app)
if "X-Wait-Seconds" in response.headers:
self.assertEqual(response.status_int, 403)
return response.headers["X-Wait-Seconds"]
self.assertEqual(response.status_int, 204)
def test_invalid_methods(self):
# Only POSTs should work.
requests = []
for method in ["GET", "PUT", "DELETE", "HEAD", "OPTIONS"]:
request = webob.Request.blank("/", method=method)
response = request.get_response(self.app)
self.assertEqual(response.status_int, 405)
def test_good_url(self):
delay = self._request("GET", "/something")
self.assertEqual(delay, None)
def test_escaping(self):
delay = self._request("GET", "/something/jump%20up")
self.assertEqual(delay, None)
def test_response_to_delays(self):
delay = self._request("GET", "/delayed")
self.assertEqual(delay, None)
delay = self._request("GET", "/delayed")
self.assertEqual(delay, '60.00')
def test_response_to_delays_usernames(self):
delay = self._request("GET", "/delayed", "user1")
self.assertEqual(delay, None)
delay = self._request("GET", "/delayed", "user2")
self.assertEqual(delay, None)
delay = self._request("GET", "/delayed", "user1")
self.assertEqual(delay, '60.00')
delay = self._request("GET", "/delayed", "user2")
self.assertEqual(delay, '60.00')
class FakeHttplibSocket(object):
"""
Fake `httplib.HTTPResponse` replacement.
"""
def __init__(self, response_string):
"""Initialize new `FakeHttplibSocket`."""
self._buffer = StringIO.StringIO(response_string)
def makefile(self, _mode, _other):
"""Returns the socket's internal buffer."""
return self._buffer
class FakeHttplibConnection(object):
"""
Fake `httplib.HTTPConnection`.
"""
def __init__(self, app, host):
"""
Initialize `FakeHttplibConnection`.
"""
self.app = app
self.host = host
def request(self, method, path, body="", headers=None):
"""
Requests made via this connection actually get translated and routed
into our WSGI app; we then wait for the response and turn it back into
an `httplib.HTTPResponse`.
"""
if not headers:
headers = {}
req = webob.Request.blank(path)
req.method = method
req.headers = headers
req.host = self.host
req.body = body
resp = str(req.get_response(self.app))
resp = "HTTP/1.0 %s" % resp
sock = FakeHttplibSocket(resp)
self.http_response = httplib.HTTPResponse(sock)
self.http_response.begin()
def getresponse(self):
"""Return our generated response from the request."""
return self.http_response
def wire_HTTPConnection_to_WSGI(host, app):
"""Monkeypatches HTTPConnection so that if you try to connect to host, you
are instead routed straight to the given WSGI app.
After calling this method, when any code calls
httplib.HTTPConnection(host)
the connection object will be a fake. Its requests will be sent directly
to the given WSGI app rather than through a socket.
Code connecting to hosts other than host will not be affected.
This method may be called multiple times to map different hosts to
different apps.
This method returns the original HTTPConnection object, so that the caller
can restore the default HTTPConnection interface (for all hosts).
"""
class HTTPConnectionDecorator(object):
"""Wraps the real HTTPConnection class so that when you instantiate
the class you might instead get a fake instance.
"""
def __init__(self, wrapped):
self.wrapped = wrapped
def __call__(self, connection_host, *args, **kwargs):
if connection_host == host:
return FakeHttplibConnection(app, host)
else:
return self.wrapped(connection_host, *args, **kwargs)
oldHTTPConnection = httplib.HTTPConnection
httplib.HTTPConnection = HTTPConnectionDecorator(httplib.HTTPConnection)
return oldHTTPConnection
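# --- Illustrative sketch (not part of this test file): typical use of the
# monkeypatch helper above, mirroring WsgiLimiterProxyTest.setUp/tearDown
# below; the host and request body are made-up examples.
def _example_wiring(app):
    old = wire_HTTPConnection_to_WSGI("169.254.0.1:80", app)
    conn = httplib.HTTPConnection("169.254.0.1:80")  # actually the fake
    try:
        conn.request("POST", "/", body='{"verb": "GET", "path": "/x"}')
        return conn.getresponse()
    finally:
        httplib.HTTPConnection = old  # restore the real class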
class WsgiLimiterProxyTest(BaseLimitTestSuite):
"""
Tests for the `limits.WsgiLimiterProxy` class.
"""
def setUp(self):
"""
Do some nifty HTTP/WSGI magic which allows for WSGI to be called
directly by something like the `httplib` library.
"""
super(WsgiLimiterProxyTest, self).setUp()
self.app = limits.WsgiLimiter(TEST_LIMITS)
self.oldHTTPConnection = (
wire_HTTPConnection_to_WSGI("169.254.0.1:80", self.app))
self.proxy = limits.WsgiLimiterProxy("169.254.0.1:80")
def test_200(self):
# Successful request test.
delay = self.proxy.check_for_delay("GET", "/anything")
self.assertEqual(delay, (None, None))
def test_403(self):
# Forbidden request test.
delay = self.proxy.check_for_delay("GET", "/delayed")
self.assertEqual(delay, (None, None))
delay, error = self.proxy.check_for_delay("GET", "/delayed")
error = error.strip()
expected = ("60.00", "403 Forbidden\n\nOnly 1 GET request(s) can be "
"made to /delayed every minute.")
self.assertEqual((delay, error), expected)
def tearDown(self):
# restore original HTTPConnection object
httplib.HTTPConnection = self.oldHTTPConnection
super(WsgiLimiterProxyTest, self).tearDown()
class LimitsViewBuilderTest(test.NoDBTestCase):
def setUp(self):
super(LimitsViewBuilderTest, self).setUp()
self.view_builder = views.limits.ViewBuilder()
self.rate_limits = [{"URI": "*",
"regex": ".*",
"value": 10,
"verb": "POST",
"remaining": 2,
"unit": "MINUTE",
"resetTime": 1311272226},
{"URI": "*/servers",
"regex": "^/servers",
"value": 50,
"verb": "POST",
"remaining": 10,
"unit": "DAY",
"resetTime": 1311272226}]
self.absolute_limits = {"metadata_items": 1,
"injected_files": 5,
"injected_file_content_bytes": 5}
def test_build_limits(self):
expected_limits = {"limits": {
"rate": [{
"uri": "*",
"regex": ".*",
"limit": [{"value": 10,
"verb": "POST",
"remaining": 2,
"unit": "MINUTE",
"next-available": "2011-07-21T18:17:06Z"}]},
{"uri": "*/servers",
"regex": "^/servers",
"limit": [{"value": 50,
"verb": "POST",
"remaining": 10,
"unit": "DAY",
"next-available": "2011-07-21T18:17:06Z"}]}],
"absolute": {"maxServerMeta": 1,
"maxImageMeta": 1,
"maxPersonality": 5,
"maxPersonalitySize": 5}}}
output = self.view_builder.build(self.rate_limits,
self.absolute_limits)
self.assertThat(output, matchers.DictMatches(expected_limits))
def test_build_limits_empty_limits(self):
expected_limits = {"limits": {"rate": [],
"absolute": {}}}
abs_limits = {}
rate_limits = []
output = self.view_builder.build(rate_limits, abs_limits)
self.assertThat(output, matchers.DictMatches(expected_limits))
class LimitsXMLSerializationTest(test.NoDBTestCase):
def test_xml_declaration(self):
serializer = limits.LimitsTemplate()
fixture = {"limits": {
"rate": [],
"absolute": {}}}
output = serializer.serialize(fixture)
has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
self.assertTrue(has_dec)
def test_index(self):
serializer = limits.LimitsTemplate()
fixture = {
"limits": {
"rate": [{
"uri": "*",
"regex": ".*",
"limit": [{
"value": 10,
"verb": "POST",
"remaining": 2,
"unit": "MINUTE",
"next-available": "2011-12-15T22:42:45Z"}]},
{"uri": "*/servers",
"regex": "^/servers",
"limit": [{
"value": 50,
"verb": "POST",
"remaining": 10,
"unit": "DAY",
"next-available": "2011-12-15T22:42:45Z"}]}],
"absolute": {"maxServerMeta": 1,
"maxImageMeta": 1,
"maxPersonality": 5,
"maxPersonalitySize": 10240}}}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'limits')
#verify absolute limits
absolutes = root.xpath('ns:absolute/ns:limit', namespaces=NS)
self.assertEqual(len(absolutes), 4)
for limit in absolutes:
name = limit.get('name')
value = limit.get('value')
self.assertEqual(value, str(fixture['limits']['absolute'][name]))
#verify rate limits
rates = root.xpath('ns:rates/ns:rate', namespaces=NS)
self.assertEqual(len(rates), 2)
for i, rate in enumerate(rates):
for key in ['uri', 'regex']:
self.assertEqual(rate.get(key),
str(fixture['limits']['rate'][i][key]))
rate_limits = rate.xpath('ns:limit', namespaces=NS)
self.assertEqual(len(rate_limits), 1)
for j, limit in enumerate(rate_limits):
for key in ['verb', 'value', 'remaining', 'unit',
'next-available']:
self.assertEqual(limit.get(key),
str(fixture['limits']['rate'][i]['limit'][j][key]))
def test_index_no_limits(self):
serializer = limits.LimitsTemplate()
fixture = {"limits": {
"rate": [],
"absolute": {}}}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'limits')
#verify absolute limits
absolutes = root.xpath('ns:absolute/ns:limit', namespaces=NS)
self.assertEqual(len(absolutes), 0)
#verify rate limits
rates = root.xpath('ns:rates/ns:rate', namespaces=NS)
self.assertEqual(len(rates), 0)
| apache-2.0 |
cmichal/python-social-auth | social/backends/strava.py | 70 | 1850 | """
Strava OAuth2 backend, docs at:
http://psa.matiasaguirre.net/docs/backends/strava.html
"""
from social.backends.oauth import BaseOAuth2
class StravaOAuth(BaseOAuth2):
name = 'strava'
AUTHORIZATION_URL = 'https://www.strava.com/oauth/authorize'
ACCESS_TOKEN_URL = 'https://www.strava.com/oauth/token'
ACCESS_TOKEN_METHOD = 'POST'
# Strava doesn't check for parameters in redirect_uri and directly appends
# the auth parameters to it, ending with a URL like:
# http://example.com/complete/strava?redirect_state=xxx?code=xxx&state=xxx
# Check issue #259 for details.
REDIRECT_STATE = False
REVOKE_TOKEN_URL = 'https://www.strava.com/oauth/deauthorize'
def get_user_id(self, details, response):
return response['athlete']['id']
def get_user_details(self, response):
"""Return user details from Strava account"""
# because there are no usernames on strava
username = response['athlete']['id']
email = response['athlete'].get('email', '')
fullname, first_name, last_name = self.get_user_names(
first_name=response['athlete'].get('firstname', ''),
last_name=response['athlete'].get('lastname', ''),
)
return {'username': str(username),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name,
'email': email}
def user_data(self, access_token, *args, **kwargs):
"""Loads user data from service"""
return self.get_json('https://www.strava.com/api/v3/athlete',
params={'access_token': access_token})
def revoke_token_params(self, token, uid):
params = super(StravaOAuth, self).revoke_token_params(token, uid)
params['access_token'] = token
return params
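# --- Illustrative sketch (not part of the original file): the response shape
# get_user_details() expects from Strava's token exchange, with made-up
# values. Since Strava has no usernames, the numeric athlete id doubles as
# the (stringified) username.
_EXAMPLE_RESPONSE = {
    'athlete': {
        'id': 12345,
        'email': 'jane@example.com',
        'firstname': 'Jane',
        'lastname': 'Doe',
    },
}
# Feeding this to get_user_details() yields
# {'username': '12345', 'first_name': 'Jane', 'last_name': 'Doe',
#  'email': 'jane@example.com', 'fullname': ...}, where the exact fullname
# depends on get_user_names(), inherited from the base backend.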
| bsd-3-clause |
Voskrese/archlive.archldr | src/pypack/altgraph/Graph.py | 9 | 19562 | """
Base Graph class
#--Version 2.1
#--Bob Ippolito October, 2004
#--Version 2.0
#--Istvan Albert June, 2004
#--Version 1.0
#--Nathan Denny, May 27, 1999
"""
from altgraph import GraphError
from compat import *
class Graph(object):
"""
The Graph class represents a directed graph with C{N} nodes and C{E} edges.
Naming conventions:
- the prefixes such as C{out}, C{inc} and C{all} will refer to methods
that operate on the outgoing, incoming or all edges of that node.
For example: L{inc_degree} will refer to the degree of the node
computed over the incoming edges (the number of neighbours linking to
the node).
- the prefixes such as C{forw} and C{back} will refer to the
orientation of the edges used in the method with respect to the node.
For example: L{forw_bfs} will start at the node then use the outgoing
edges to traverse the graph (goes forward).
"""
def __init__(self, edges=None):
"""
Initialization
"""
self.next_edge = 0
self.nodes, self.edges = {}, {}
self.hidden_edges, self.hidden_nodes = {}, {}
try:
# instantiate graph from iterable data
if edges:
cols = len(edges[0])
if cols == 2:
for head, tail in edges:
self.add_edge(head, tail)
elif cols == 3:
for head, tail, data in edges:
self.add_edge(head, tail, data)
except Exception, exc:
raise GraphError('%s -> Cannot create graph from edges=%s' %
(exc, edges))
def __repr__(self):
return '<Graph: %d nodes, %d edges>' % (
self.number_of_nodes(), self.number_of_edges())
def add_node(self, node, node_data=None):
"""
Creates a new node with a node. Arbitrary data can be attached to the
node via the node_data parameter. Adding the same node twice will be
silently ignored.
"""
#
# the nodes will contain tuples that will store incoming edges,
# outgoing edges and data
#
# index 0 -> incoming edges
# index 1 -> outgoing edges
if node not in self.nodes:
self.nodes[node] = ([], [], node_data)
def add_edge(self, head_id, tail_id, edge_data=1, create_nodes=True):
"""
Adds a directed edge going from head_id to tail_id.
Arbitrary data can be attached to the edge via edge_data.
It may create the nodes if adding edges between nonexistent ones.
@param head_id: head node
@param tail_id: tail node
@param edge_data: (optional) data attached to the edge
@param create_nodes: (optional) creates the head_id or tail_id node in case they did not exist
"""
# shortcut
edge = self.next_edge
# add nodes if on automatic node creation
if create_nodes:
self.add_node(head_id)
self.add_node(tail_id)
# store edge information
self.edges[edge] = (head_id, tail_id, edge_data)
# update the corresponding incoming and outgoing lists in the nodes
# index 0 -> incoming edges
# index 1 -> outgoing edges
try:
self.nodes[tail_id][0].append(edge)
self.nodes[head_id][1].append(edge)
except KeyError:
raise GraphError('Invalid nodes %s -> %s' % (head_id, tail_id))
self.next_edge += 1
def hide_edge(self, edge):
"""
Hides an edge from the graph. The edge may be unhidden at some later
time.
"""
try:
head_id, tail_id, edge_data = self.hidden_edges[edge] = self.edges[edge]
self.nodes[tail_id][0].remove(edge)
self.nodes[head_id][1].remove(edge)
del self.edges[edge]
except KeyError:
raise GraphError('Invalid edge %s' % edge)
def hide_node(self, node):
"""
Hides a node from the graph. The incoming and outgoing edges of the
node will also be hidden. The node may be unhidden at some later time.
"""
try:
all_edges = self.all_edges(node)
self.hidden_nodes[node] = (self.nodes[node], all_edges)
for edge in all_edges:
self.hide_edge(edge)
del self.nodes[node]
except KeyError:
raise GraphError('Invalid node %s' % node)
def restore_node(self, node):
"""
Restores a previously hidden node back into the graph and restores
all of its incoming and outgoing edges.
"""
try:
self.nodes[node], all_edges = self.hidden_nodes[node]
for edge in all_edges:
self.restore_edge(edge)
del self.hidden_nodes[node]
except KeyError:
raise GraphError('Invalid node %s' % node)
def restore_edge(self, edge):
"""
Restores a previously hidden edge back into the graph.
"""
try:
self.edges[edge] = head_id, tail_id, data = self.hidden_edges[edge]
self.nodes[tail_id][0].append(edge)
self.nodes[head_id][1].append(edge)
del self.hidden_edges[edge]
except KeyError:
raise GraphError('Invalid edge %s' % edge)
def restore_all_edges(self):
"""
Restores all hidden edges.
"""
for edge in self.hidden_edges.keys():
self.restore_edge(edge)
def restore_all_nodes(self):
"""
Restores all hidden nodes.
"""
for node in self.hidden_nodes.keys():
self.restore_node(node)
def __contains__(self, node):
"""
Test whether a node is in the graph
"""
return node in self.nodes
def edge_by_id(self, edge):
"""
Returns the edge that connects the head_id and tail_id nodes
"""
try:
head, tail, data = self.edges[edge]
except KeyError:
head, tail = None, None
raise GraphError('Invalid edge %s' % edge)
return (head, tail)
def edge_by_node(self, head, tail):
"""
Returns the edge that connects the head_id and tail_id nodes
"""
for edge in self.out_edges(head):
if self.tail(edge) == tail:
return edge
return None
def number_of_nodes(self):
"""
Returns the number of nodes
"""
return len(self.nodes)
def number_of_edges(self):
"""
Returns the number of edges
"""
return len(self.edges)
def __iter__(self):
"""
Iterates over all nodes in the graph
"""
return iter(self.nodes)
def node_list(self):
"""
Return a list of the node ids for all visible nodes in the graph.
"""
return self.nodes.keys()
def edge_list(self):
"""
Returns an iterator for all visible nodes in the graph.
"""
return self.edges.keys()
def number_of_hidden_edges(self):
"""
Returns the number of hidden edges
"""
return len(self.hidden_edges)
def number_of_hidden_nodes(self):
"""
Returns the number of hidden nodes
"""
return len(self.hidden_nodes)
def hidden_node_list(self):
"""
Returns the list with the hidden nodes
"""
return self.hidden_nodes.keys()
def hidden_edge_list(self):
"""
Returns a list with the hidden edges
"""
return self.hidden_edges.keys()
def describe_node(self, node):
"""
return node, node data, outgoing edges, incoming edges for node
"""
incoming, outgoing, data = self.nodes[node]
return node, data, outgoing, incoming
def describe_edge(self, edge):
"""
return edge, edge data, head, tail for edge
"""
head, tail, data = self.edges[edge]
return edge, data, head, tail
def node_data(self, node):
"""
Returns the data associated with a node
"""
return self.nodes[node][2]
def edge_data(self, edge):
"""
Returns the data associated with an edge
"""
return self.edges[edge][2]
def head(self, edge):
"""
Returns the node of the head of the edge.
"""
return self.edges[edge][0]
def tail(self, edge):
"""
Returns node of the tail of the edge.
"""
return self.edges[edge][1]
def out_nbrs(self, node):
"""
List of nodes connected by outgoing edges
"""
return map(self.tail, self.out_edges(node))
def inc_nbrs(self, node):
"""
List of nodes connected by incoming edges
"""
return map(self.head, self.inc_edges(node))
def all_nbrs(self, node):
"""
List of nodes connected by incoming and outgoing edges
"""
return self.inc_nbrs(node) + self.out_nbrs(node)
def out_edges(self, node):
"""
Returns a list of the outgoing edges
"""
try:
return list(self.nodes[node][1])
except KeyError:
raise GraphError('Invalid node %s' % node)
return None
def inc_edges(self, node):
"""
Returns a list of the incoming edges
"""
try:
return list(self.nodes[node][0])
except KeyError:
raise GraphError('Invalid node %s' % node)
return None
def all_edges(self, node):
"""
Returns a set of incoming and outgoing edges.
"""
return set(self.inc_edges(node) + self.out_edges(node))
def out_degree(self, node):
"""
Returns the number of outgoing edges
"""
return len(self.out_edges(node))
def inc_degree(self, node):
"""
Returns the number of incoming edges
"""
return len(self.inc_edges(node))
def all_degree(self, node):
"""
The total degree of a node
"""
return self.inc_degree(node) + self.out_degree(node)
def _topo_sort(self, forward=True):
"""
Topological sort.
Returns a list of nodes where the successors (based on outgoing and
incoming edges selected by the forward parameter) of any given node
appear in the sequence after that node.
"""
topo_list = []
queue = deque()
indeg = {}
# select the operation that will be performed
if forward:
get_edges = self.out_edges
get_degree = self.inc_degree
else:
get_edges = self.inc_edges
get_degree = self.out_degree
for node in self.node_list():
degree = get_degree(node)
if degree:
indeg[node] = degree
else:
queue.append(node)
while queue:
curr_node = queue.popleft()
topo_list.append(curr_node)
for edge in get_edges(curr_node):
tail_id = self.tail(edge)
indeg[tail_id] -= 1
if indeg[tail_id] == 0:
queue.append(tail_id)
if len(topo_list) == len(self.node_list()):
valid = True
else:
# the graph has cycles, invalid topological sort
valid = False
return (valid, topo_list)
def forw_topo_sort(self):
"""
Topological sort.
Returns a list of nodes where the successors (based on outgoing edges)
of any given node appear in the sequence after that node.
"""
return self._topo_sort(forward=True)
def back_topo_sort(self):
"""
Reverse topological sort.
Returns a list of nodes where the successors (based on incoming edges)
of any given node appear in the sequence after that node.
"""
return self._topo_sort(forward=False)
def _bfs_subgraph(self, start_id, forward=True):
"""
Private method that creates a subgraph in BFS order.
The forward parameter specifies whether it is a forward or backward
traversal.
"""
if forward:
get_bfs = self.forw_bfs
get_nbrs = self.out_nbrs
else:
get_bfs = self.back_bfs
get_nbrs = self.inc_nbrs
g = Graph()
bfs_list = get_bfs(start_id)
for node in bfs_list:
g.add_node(node)
for node in bfs_list:
for nbr_id in get_nbrs(node):
g.add_edge(node, nbr_id)
return g
def forw_bfs_subgraph(self, start_id):
"""
Creates and returns a subgraph consisting of the breadth first
reachable nodes based on their outgoing edges.
"""
return self._bfs_subgraph(start_id, forward=True)
def back_bfs_subgraph(self, start_id):
"""
Creates and returns a subgraph consisting of the breadth first
reachable nodes based on the incoming edges.
"""
return self._bfs_subgraph(start_id, forward=False)
def iterdfs(self, start, end=None, forward=True):
"""
Collecting nodes in some depth first traversal.
The forward parameter specifies whether it is a forward or backward
traversal.
"""
visited, stack = set([start]), deque([start])
if forward:
get_edges = self.out_edges
else:
get_edges = self.inc_edges
while stack:
curr_node = stack.pop()
yield curr_node
if curr_node == end:
break
for edge in get_edges(curr_node):
tail = self.tail(edge)
if tail not in visited:
visited.add(tail)
stack.append(tail)
def iterdata(self, start, end=None, forward=True, condition=None):
visited, stack = set([start]), deque([start])
if forward:
get_edges = self.out_edges
else:
get_edges = self.inc_edges
get_data = self.node_data
while stack:
curr_node = stack.pop()
curr_data = get_data(curr_node)
if curr_data is not None:
if condition is not None and not condition(curr_data):
continue
yield curr_data
if curr_node == end:
break
for edge in get_edges(curr_node):
tail = self.tail(edge)
if tail not in visited:
visited.add(tail)
stack.append(tail)
def _dfs(self, start, end=None, forward=True):
return list(self.iterdfs(start, end=end, forward=forward))
def _iterbfs(self, start, end=None, forward=True):
"""
Private method, collecting nodes in some breadth first traversal.
The forward parameter specifies whether it is a forward or backward
traversal. Yields tuples where the first value is the node id and the
second value is the hop value.
"""
queue, visited = deque([(start, 0)]), set([start])
# the direction of the bfs depends on the edges that are sampled
if forward:
get_edges = self.out_edges
else:
get_edges = self.inc_edges
while queue:
curr_node, curr_step = queue.popleft()
yield (curr_node, curr_step)
if curr_node == end:
break
for edge in get_edges(curr_node):
tail = self.tail(edge)
if tail not in visited:
visited.add(tail)
queue.append((tail, curr_step + 1))
def _bfs(self, start, end=None, forward=True):
return list(self._iterbfs(start, end=end, forward=forward))
def forw_bfs(self, start, end=None):
"""
Returns a list of nodes in some forward BFS order.
Starting from the start node the breadth first search proceeds along
outgoing edges.
"""
return [node for node, step in self._bfs(start, end, forward=True)]
def back_bfs(self, start, end=None):
"""
Returns a list of nodes in some backward BFS order.
Starting from the start node the breadth first search proceeds along
incoming edges.
"""
return [node for node, step in self._bfs(start, end, forward=False)]
def forw_dfs(self, start, end=None):
"""
Returns a list of nodes in some forward DFS order.
Starting with the start node the depth first search proceeds along
outgoing edges.
"""
return self._dfs(start, end, forward=True)
def back_dfs(self, start, end=None):
"""
Returns a list of nodes in some backward DFS order.
Starting from the start node the depth first search proceeds along
incoming edges.
"""
return self._dfs(start, end, forward=False)
def connected(self):
"""
Returns C{True} if the graph's every node can be reached from every
other node.
"""
node_list = self.node_list()
for node in node_list:
bfs_list = self.forw_bfs(node)
if len(bfs_list) != len(node_list):
return False
return True
def clust_coef(self, node):
"""
Computes and returns the clustering coefficient of node. The clustering
coefficient is defined as ...
"""
num = 0
nbr_set = set(self.out_nbrs(node))
nbr_set.remove(node) # loop defense
for nbr in nbr_set:
sec_set = set(self.out_nbrs(nbr))
sec_set.remove(nbr) # loop defense
num += len(nbr_set & sec_set)
nbr_num = len(nbr_set)
if nbr_num:
clust_coef = float(num) / (nbr_num * (nbr_num - 1))
else:
clust_coef = 0.0
return clust_coef
def get_hops(self, start, end=None, forward=True):
"""
Computes the hop distance to all nodes centered around a specified node.
First order neighbours are at hop 1, their neighbours are at hop 2, etc.
Uses L{forw_bfs} or L{back_bfs} depending on the value of the forward
parameter. If the distance between all neighbouring nodes is 1 the hop
number corresponds to the shortest distance between the nodes.
@param start: the starting node
@param end: ending node (optional). When not specified will search the whole graph.
@param forward: directionality parameter (optional). If C{True} (default) it uses L{forw_bfs} otherwise L{back_bfs}.
@return: returns a list of tuples where each tuple contains the node and the hop.
Typical usage::
>>> print graph.get_hops(1, 8)
>>> [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]
# node 1 is at 0 hops
# node 2 is at 1 hop
# ...
# node 8 is at 5 hops
"""
if forward:
return self._bfs(start=start, end=end, forward=True)
else:
return self._bfs(start=start, end=end, forward=False)
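# --- Illustrative usage sketch (not part of the original file); it exercises
# only methods defined above. Note this module targets Python 2.
def _example_graph_usage():
    g = Graph([(1, 2), (2, 3), (2, 4), (4, 5)])
    nodes = g.forw_bfs(1)             # [1, 2, 3, 4, 5] (a valid BFS order)
    valid, topo = g.forw_topo_sort()  # (True, [1, 2, 3, 4, 5])
    hops = g.get_hops(1)              # [(1, 0), (2, 1), (3, 2), (4, 2), (5, 3)]
    return nodes, valid, topo, hops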
| gpl-2.0 |
smcantab/pele | pele/potentials/gminpotential.py | 5 | 1703 | from pele.potentials import BasePotential
import numpy as np
__all__ = ["GMINPotential"]
class GMINPotential(BasePotential): # pragma: no cover
"""
Interface to fortran GMIN potential
Potentials implemented in GMIN can be called from python if GMIN is compiled with the flag WITH_PYTHON enabled. This creates
python modules (dynamic libraries). However, the interface is still very rough and GMINPotential provides a wrapper for
easy access to GMIN.
The imported GMIN module requires a data file to be present in the current directory. All parameters except for the ones
responsible for setting up the potential will be ignored and can be skipped. The first call after importing the module should be
initialize.
Attributes
----------
GMIN :
reference to the gmin module
Examples
--------
The following example imports the GMIN python interface and evaluates the energy
>>> import gmin_
>>>
>>> gmin_.initialize() # finish gmin initialization
>>> pot = GMINPotential(gmin_)
>>>
>>> coords = pot.getCoords()
>>> pot.getEnergy(coords)
"""
def __init__(self, GMIN):
"""
Constructor
"""
self.GMIN = GMIN
self.ncalls = 0
def getEnergy(self, coords):
self.ncalls += 1
return self.GMIN.getEnergy(coords)
def getEnergyGradient(self, coords):
self.ncalls += 1
grad = np.zeros(3 * self.GMIN.getNAtoms())
E = self.GMIN.getEnergyGradient(coords, grad)
return E, grad[0:coords.size]
def getCoords(self):
coords = np.zeros(self.GMIN.getDOF())
self.GMIN.getCoords(coords)
return coords
| gpl-3.0 |
Lilywei123/tempest | tempest/api/compute/admin/test_flavors_extra_specs.py | 3 | 5162 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest import test
class FlavorsExtraSpecsTestJSON(base.BaseV2ComputeAdminTest):
"""
Tests Flavor Extra Spec API extension.
SET, UNSET, UPDATE Flavor Extra specs require admin privileges.
GET Flavor Extra specs can be performed even without admin privileges.
"""
@classmethod
def resource_setup(cls):
super(FlavorsExtraSpecsTestJSON, cls).resource_setup()
if not test.is_extension_enabled('OS-FLV-EXT-DATA', 'compute'):
msg = "OS-FLV-EXT-DATA extension not enabled."
raise cls.skipException(msg)
cls.client = cls.os_adm.flavors_client
flavor_name = data_utils.rand_name('test_flavor')
ram = 512
vcpus = 1
disk = 10
ephemeral = 10
cls.new_flavor_id = data_utils.rand_int_id(start=1000)
swap = 1024
rxtx = 1
# Create a flavor so as to set/get/unset extra specs
resp, cls.flavor = cls.client.create_flavor(flavor_name,
ram, vcpus,
disk,
cls.new_flavor_id,
ephemeral=ephemeral,
swap=swap, rxtx=rxtx)
@classmethod
def resource_cleanup(cls):
resp, body = cls.client.delete_flavor(cls.flavor['id'])
cls.client.wait_for_resource_deletion(cls.flavor['id'])
super(FlavorsExtraSpecsTestJSON, cls).resource_cleanup()
@test.attr(type='gate')
def test_flavor_set_get_update_show_unset_keys(self):
# Test to SET, GET, UPDATE, SHOW, UNSET flavor extra
# spec as a user with admin privileges.
# Assigning extra specs values that are to be set
specs = {"key1": "value1", "key2": "value2"}
# SET extra specs to the flavor created in setUp
set_resp, set_body = \
self.client.set_flavor_extra_spec(self.flavor['id'], specs)
self.assertEqual(set_resp.status, 200)
self.assertEqual(set_body, specs)
# GET extra specs and verify
get_resp, get_body = \
self.client.get_flavor_extra_spec(self.flavor['id'])
self.assertEqual(get_resp.status, 200)
self.assertEqual(get_body, specs)
# UPDATE the value of the extra specs key1
update_resp, update_body = \
self.client.update_flavor_extra_spec(self.flavor['id'],
"key1",
key1="value")
self.assertEqual(update_resp.status, 200)
self.assertEqual({"key1": "value"}, update_body)
# GET extra specs and verify the value of the key2
# is the same as before
get_resp, get_body = \
self.client.get_flavor_extra_spec(self.flavor['id'])
self.assertEqual(get_resp.status, 200)
self.assertEqual(get_body, {"key1": "value", "key2": "value2"})
# UNSET extra specs that were set in this test
unset_resp, _ = \
self.client.unset_flavor_extra_spec(self.flavor['id'], "key1")
self.assertEqual(unset_resp.status, 200)
unset_resp, _ = \
self.client.unset_flavor_extra_spec(self.flavor['id'], "key2")
self.assertEqual(unset_resp.status, 200)
@test.attr(type='gate')
def test_flavor_non_admin_get_all_keys(self):
specs = {"key1": "value1", "key2": "value2"}
set_resp, set_body = self.client.set_flavor_extra_spec(
self.flavor['id'], specs)
resp, body = self.flavors_client.get_flavor_extra_spec(
self.flavor['id'])
self.assertEqual(resp.status, 200)
for key in specs:
self.assertEqual(body[key], specs[key])
@test.attr(type='gate')
def test_flavor_non_admin_get_specific_key(self):
specs = {"key1": "value1", "key2": "value2"}
resp, body = self.client.set_flavor_extra_spec(
self.flavor['id'], specs)
self.assertEqual(resp.status, 200)
self.assertEqual(body['key1'], 'value1')
self.assertIn('key2', body)
resp, body = self.flavors_client.get_flavor_extra_spec_with_key(
self.flavor['id'], 'key1')
self.assertEqual(resp.status, 200)
self.assertEqual(body['key1'], 'value1')
self.assertNotIn('key2', body)
| apache-2.0 |
openstack/python-muranoclient | doc/source/conf.py | 1 | 2827 | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',
'openstackdocstheme',]
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
copyright = u'OpenStack Foundation'
exclude_trees = ['api']
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'openstackdocs'
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = ['_theme']
#html_theme_path = [openstackdocstheme.get_html_theme_path()]
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/python-muranoclient'
openstackdocs_bug_project = 'python-muranoclient'
openstackdocs_bug_tag = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'python-muranoclientdoc'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
(
'index',
'python-muranoclient.tex',
u'python-muranoclient Documentation',
u'OpenStack Foundation',
'manual'
),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| apache-2.0 |
betoesquivel/fil2014 | build/django/build/lib.linux-x86_64-2.7/django/templatetags/future.py | 130 | 1640 | from django.template import Library
from django.template import defaulttags
register = Library()
@register.tag
def ssi(parser, token):
# Used for deprecation path during 1.3/1.4, will be removed in 2.0
return defaulttags.ssi(parser, token)
@register.tag
def url(parser, token):
# Used for deprecation path during 1.3/1.4, will be removed in 2.0
return defaulttags.url(parser, token)
@register.tag
def cycle(parser, token):
"""
This is the future version of `cycle` with auto-escaping.
By default all strings are escaped.
If you want to disable auto-escaping of variables you can use::
{% autoescape off %}
{% cycle var1 var2 var3 as somecycle %}
{% autoescape %}
Or if only some variables should be escaped, you can use::
{% cycle var1 var2|safe var3|safe as somecycle %}
"""
return defaulttags.cycle(parser, token, escape=True)
@register.tag
def firstof(parser, token):
"""
This is the future version of `firstof` with auto-escaping.
This is equivalent to::
{% if var1 %}
{{ var1 }}
{% elif var2 %}
{{ var2 }}
{% elif var3 %}
{{ var3 }}
{% endif %}
If you want to disable auto-escaping of variables you can use::
{% autoescape off %}
{% firstof var1 var2 var3 "<strong>fallback value</strong>" %}
{% autoescape %}
Or if only some variables should be escaped, you can use::
{% firstof var1 var2|safe var3 "<strong>fallback value</strong>"|safe %}
"""
return defaulttags.firstof(parser, token, escape=True)
| mit |
joopert/home-assistant | homeassistant/components/brunt/cover.py | 4 | 5263 | """Support for Brunt Blind Engine covers."""
import logging
from brunt import BruntAPI
import voluptuous as vol
from homeassistant.components.cover import (
ATTR_POSITION,
PLATFORM_SCHEMA,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
CoverDevice,
)
from homeassistant.const import ATTR_ATTRIBUTION, CONF_PASSWORD, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
COVER_FEATURES = SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION
DEVICE_CLASS = "window"
ATTR_REQUEST_POSITION = "request_position"
NOTIFICATION_ID = "brunt_notification"
NOTIFICATION_TITLE = "Brunt Cover Setup"
ATTRIBUTION = "Based on an unofficial Brunt SDK."
CLOSED_POSITION = 0
OPEN_POSITION = 100
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the brunt platform."""
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
bapi = BruntAPI(username=username, password=password)
try:
things = bapi.getThings()["things"]
if not things:
_LOGGER.error("No things present in account.")
else:
add_entities(
[
BruntDevice(bapi, thing["NAME"], thing["thingUri"])
for thing in things
],
True,
)
except (TypeError, KeyError, NameError, ValueError) as ex:
_LOGGER.error("%s", ex)
hass.components.persistent_notification.create(
"Error: {}<br />"
"You will need to restart hass after fixing."
"".format(ex),
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
class BruntDevice(CoverDevice):
"""
Representation of a Brunt cover device.
Contains the common logic for all Brunt devices.
"""
def __init__(self, bapi, name, thing_uri):
"""Init the Brunt device."""
self._bapi = bapi
self._name = name
self._thing_uri = thing_uri
self._state = {}
self._available = None
@property
def name(self):
"""Return the name of the device as reported by tellcore."""
return self._name
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self._available
@property
def current_cover_position(self):
"""
Return current position of cover.
None is unknown, 0 is closed, 100 is fully open.
"""
pos = self._state.get("currentPosition")
return int(pos) if pos else None
@property
def request_cover_position(self):
"""
Return request position of cover.
The request position is the position of the last request
to Brunt, at times there is a diff of 1 to current
None is unknown, 0 is closed, 100 is fully open.
"""
pos = self._state.get("requestPosition")
return int(pos) if pos else None
@property
def move_state(self):
"""
Return current moving state of cover.
None is unknown, 0 when stopped, 1 when opening, 2 when closing
"""
mov = self._state.get("moveState")
return int(mov) if mov else None
@property
def is_opening(self):
"""Return if the cover is opening or not."""
return self.move_state == 1
@property
def is_closing(self):
"""Return if the cover is closing or not."""
return self.move_state == 2
@property
def device_state_attributes(self):
"""Return the detailed device state attributes."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_REQUEST_POSITION: self.request_cover_position,
}
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS
@property
def supported_features(self):
"""Flag supported features."""
return COVER_FEATURES
@property
def is_closed(self):
"""Return true if cover is closed, else False."""
return self.current_cover_position == CLOSED_POSITION
def update(self):
"""Poll the current state of the device."""
try:
self._state = self._bapi.getState(thingUri=self._thing_uri).get("thing")
self._available = True
except (TypeError, KeyError, NameError, ValueError) as ex:
_LOGGER.error("%s", ex)
self._available = False
def open_cover(self, **kwargs):
"""Set the cover to the open position."""
self._bapi.changeRequestPosition(OPEN_POSITION, thingUri=self._thing_uri)
def close_cover(self, **kwargs):
"""Set the cover to the closed position."""
self._bapi.changeRequestPosition(CLOSED_POSITION, thingUri=self._thing_uri)
def set_cover_position(self, **kwargs):
"""Set the cover to a specific position."""
self._bapi.changeRequestPosition(
kwargs[ATTR_POSITION], thingUri=self._thing_uri
)
| apache-2.0 |
StamusNetworks/scirius | setup.py | 1 | 1133 | import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='scirius',
version='2.0',
    packages=['scirius', 'rules', 'suricata', 'accounts', 'viz'],
scripts=['manage.py'],
include_package_data=True,
description='A web interface to manage Suricata rulesets',
long_description=README,
url='https://www.stamus-networks.com/open-source/#scirius',
author='Eric Leblond',
author_email='[email protected]',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| gpl-3.0 |
cfriedt/gnuradio | gr-digital/python/digital/ofdm_txrx.py | 27 | 20975 | #
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
"""
OFDM Transmitter / Receiver hier blocks.
For simple configurations, there is no need to connect all the relevant OFDM
blocks yourself to form an OFDM Tx/Rx--simply use these.
"""
# Reminder: All frequency-domain stuff is in shifted form, i.e. DC carrier
# in the middle!
import numpy
from gnuradio import gr
import digital_swig as digital
from utils import tagged_streams
try:
# This will work when feature #505 is added.
from gnuradio import fft
from gnuradio import blocks
from gnuradio import analog
except ImportError:
# Until then this will work.
import fft_swig as fft
import blocks_swig as blocks
import analog_swig as analog
_def_fft_len = 64
_def_cp_len = 16
_def_frame_length_tag_key = "frame_length"
_def_packet_length_tag_key = "packet_length"
_def_packet_num_tag_key = "packet_num"
# Data and pilot carriers are same as in 802.11a
_def_occupied_carriers = (range(-26, -21) + range(-20, -7) + range(-6, 0) + range(1, 7) + range(8, 21) + range(22, 27),)
_def_pilot_carriers = ((-21, -7, 7, 21,),)
_pilot_sym_scramble_seq = (
1,1,1,1, -1,-1,-1,1, -1,-1,-1,-1, 1,1,-1,1, -1,-1,1,1, -1,1,1,-1, 1,1,1,1, 1,1,-1,1,
1,1,-1,1, 1,-1,-1,1, 1,1,-1,1, -1,-1,-1,1, -1,1,-1,-1, 1,-1,-1,1, 1,1,1,1, -1,-1,1,1,
-1,-1,1,-1, 1,-1,1,1, -1,-1,-1,1, 1,-1,-1,-1, -1,1,-1,-1, 1,-1,1,1, 1,1,-1,1, -1,1,-1,1,
-1,-1,-1,-1, -1,1,-1,1, 1,-1,1,-1, 1,1,1,-1, -1,1,-1,-1, -1,1,1,1, -1,-1,-1,-1, -1,-1,-1
)
_def_pilot_symbols= tuple([(x, x, x, -x) for x in _pilot_sym_scramble_seq])
_seq_seed = 42
def _get_active_carriers(fft_len, occupied_carriers, pilot_carriers):
""" Returns a list of all carriers that at some point carry data or pilots. """
active_carriers = list()
for carrier in list(occupied_carriers[0]) + list(pilot_carriers[0]):
if carrier < 0:
carrier += fft_len
active_carriers.append(carrier)
return active_carriers
def _make_sync_word1(fft_len, occupied_carriers, pilot_carriers):
""" Creates a random sync sequence for fine frequency offset and timing
estimation. This is the first of typically two sync preamble symbols
for the Schmidl & Cox sync algorithm.
The relevant feature of this symbols is that every second sub-carrier
is zero. In the time domain, this results in two identical halves of
the OFDM symbols.
Symbols are always BPSK symbols. Carriers are scaled by sqrt(2) to keep
total energy constant.
Carrier 0 (DC carrier) is always zero. If used, carrier 1 is non-zero.
This means the sync algorithm has to check on odd carriers!
"""
active_carriers = _get_active_carriers(fft_len, occupied_carriers, pilot_carriers)
numpy.random.seed(_seq_seed)
bpsk = {0: numpy.sqrt(2), 1: -numpy.sqrt(2)}
sw1 = [bpsk[numpy.random.randint(2)] if x in active_carriers and x % 2 else 0 for x in range(fft_len)]
return numpy.fft.fftshift(sw1)
def _make_sync_word2(fft_len, occupied_carriers, pilot_carriers):
""" Creates a random sync sequence for coarse frequency offset and channel
estimation. This is the second of typically two sync preamble symbols
for the Schmidl & Cox sync algorithm.
Symbols are always BPSK symbols.
"""
active_carriers = _get_active_carriers(fft_len, occupied_carriers, pilot_carriers)
numpy.random.seed(_seq_seed)
bpsk = {0: 1, 1: -1}
sw2 = [bpsk[numpy.random.randint(2)] if x in active_carriers else 0 for x in range(fft_len)]
sw2[0] = 0j
return numpy.fft.fftshift(sw2)
def _get_constellation(bps):
""" Returns a modulator block for a given number of bits per symbol """
constellation = {
1: digital.constellation_bpsk(),
2: digital.constellation_qpsk(),
3: digital.constellation_8psk()
}
try:
return constellation[bps]
except KeyError:
print 'Modulation not supported.'
exit(1)
class ofdm_tx(gr.hier_block2):
"""Hierarchical block for OFDM modulation.
The input is a byte stream (unsigned char) and the
output is the complex modulated signal at baseband.
Args:
fft_len: The length of FFT (integer).
cp_len: The length of cyclic prefix in total samples (integer).
packet_length_tag_key: The name of the tag giving packet length at the input.
occupied_carriers: A vector of vectors describing which OFDM carriers are occupied.
pilot_carriers: A vector of vectors describing which OFDM carriers are occupied with pilot symbols.
pilot_symbols: The pilot symbols.
bps_header: Bits per symbol (header).
bps_payload: Bits per symbol (payload).
sync_word1: The first sync preamble symbol. This has to be with
| zeros on alternating carriers. Used for fine and
| coarse frequency offset and timing estimation.
sync_word2: The second sync preamble symbol. This has to be filled
| entirely. Also used for coarse frequency offset and
| channel estimation.
rolloff: The rolloff length in samples. Must be smaller than the CP.
debug_log: Write output into log files (Warning: creates lots of data!)
scramble_bits: Activates the scramblers (set this to True unless debugging)
"""
def __init__(self, fft_len=_def_fft_len, cp_len=_def_cp_len,
packet_length_tag_key=_def_packet_length_tag_key,
occupied_carriers=_def_occupied_carriers,
pilot_carriers=_def_pilot_carriers,
pilot_symbols=_def_pilot_symbols,
bps_header=1,
bps_payload=1,
sync_word1=None,
sync_word2=None,
rolloff=0,
debug_log=False,
scramble_bits=False
):
gr.hier_block2.__init__(self, "ofdm_tx",
gr.io_signature(1, 1, gr.sizeof_char),
gr.io_signature(1, 1, gr.sizeof_gr_complex))
### Param init / sanity check ########################################
self.fft_len = fft_len
self.cp_len = cp_len
self.packet_length_tag_key = packet_length_tag_key
self.occupied_carriers = occupied_carriers
self.pilot_carriers = pilot_carriers
self.pilot_symbols = pilot_symbols
self.bps_header = bps_header
self.bps_payload = bps_payload
self.sync_word1 = sync_word1
if sync_word1 is None:
self.sync_word1 = _make_sync_word1(fft_len, occupied_carriers, pilot_carriers)
else:
if len(sync_word1) != self.fft_len:
raise ValueError("Length of sync sequence(s) must be FFT length.")
self.sync_words = [self.sync_word1,]
if sync_word2 is None:
self.sync_word2 = _make_sync_word2(fft_len, occupied_carriers, pilot_carriers)
else:
self.sync_word2 = sync_word2
if len(self.sync_word2):
if len(self.sync_word2) != fft_len:
raise ValueError("Length of sync sequence(s) must be FFT length.")
self.sync_word2 = list(self.sync_word2)
self.sync_words.append(self.sync_word2)
if scramble_bits:
self.scramble_seed = 0x7f
else:
self.scramble_seed = 0x00 # We deactivate the scrambler by init'ing it with zeros
### Header modulation ################################################
crc = digital.crc32_bb(False, self.packet_length_tag_key)
header_constellation = _get_constellation(bps_header)
header_mod = digital.chunks_to_symbols_bc(header_constellation.points())
formatter_object = digital.packet_header_ofdm(
occupied_carriers=occupied_carriers, n_syms=1,
bits_per_header_sym=self.bps_header,
bits_per_payload_sym=self.bps_payload,
scramble_header=scramble_bits
)
header_gen = digital.packet_headergenerator_bb(formatter_object.base(), self.packet_length_tag_key)
header_payload_mux = blocks.tagged_stream_mux(
itemsize=gr.sizeof_gr_complex*1,
lengthtagname=self.packet_length_tag_key,
tag_preserve_head_pos=1 # Head tags on the payload stream stay on the head
)
self.connect(
self,
crc,
header_gen,
header_mod,
(header_payload_mux, 0)
)
if debug_log:
self.connect(header_gen, blocks.file_sink(1, 'tx-hdr.dat'))
### Payload modulation ###############################################
payload_constellation = _get_constellation(bps_payload)
payload_mod = digital.chunks_to_symbols_bc(payload_constellation.points())
payload_scrambler = digital.additive_scrambler_bb(
0x8a,
self.scramble_seed,
7,
0, # Don't reset after fixed length (let the reset tag do that)
bits_per_byte=8, # This is before unpacking
reset_tag_key=self.packet_length_tag_key
)
payload_unpack = blocks.repack_bits_bb(
8, # Unpack 8 bits per byte
bps_payload,
self.packet_length_tag_key
)
self.connect(
crc,
payload_scrambler,
payload_unpack,
payload_mod,
(header_payload_mux, 1)
)
### Create OFDM frame ################################################
allocator = digital.ofdm_carrier_allocator_cvc(
self.fft_len,
occupied_carriers=self.occupied_carriers,
pilot_carriers=self.pilot_carriers,
pilot_symbols=self.pilot_symbols,
sync_words=self.sync_words,
len_tag_key=self.packet_length_tag_key
)
ffter = fft.fft_vcc(
self.fft_len,
False, # Inverse FFT
(), # No window
True # Shift
)
cyclic_prefixer = digital.ofdm_cyclic_prefixer(
self.fft_len,
self.fft_len+self.cp_len,
rolloff,
self.packet_length_tag_key
)
self.connect(header_payload_mux, allocator, ffter, cyclic_prefixer, self)
if debug_log:
self.connect(allocator, blocks.file_sink(gr.sizeof_gr_complex * fft_len, 'tx-post-allocator.dat'))
self.connect(cyclic_prefixer, blocks.file_sink(gr.sizeof_gr_complex, 'tx-signal.dat'))
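# A minimal loopback sketch (illustrative assumption, not shipped with this
# file): connect ofdm_tx straight into ofdm_rx (defined below) and push a
# tagged byte stream through the pair.
#
#   from gnuradio import gr, blocks
#   tb = gr.top_block()
#   src = blocks.vector_source_b(range(50), False)
#   tagger = blocks.stream_to_tagged_stream(gr.sizeof_char, 1, 10,
#                                           _def_packet_length_tag_key)
#   sink = blocks.vector_sink_b()
#   tb.connect(src, tagger, ofdm_tx(), ofdm_rx(), sink)
#   tb.run()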
class ofdm_rx(gr.hier_block2):
"""Hierarchical block for OFDM demodulation.
The input is a complex baseband signal (e.g. from a UHD source).
The detected packets are output as a stream of packed bits on the output.
Args:
fft_len: The length of FFT (integer).
cp_len: The length of cyclic prefix in total samples (integer).
frame_length_tag_key: Used internally to tag the length of the OFDM frame.
packet_length_tag_key: The name of the tag giving packet length at the input.
occupied_carriers: A vector of vectors describing which OFDM carriers are occupied.
pilot_carriers: A vector of vectors describing which OFDM carriers are occupied with pilot symbols.
pilot_symbols: The pilot symbols.
bps_header: Bits per symbol (header).
bps_payload: Bits per symbol (payload).
sync_word1: The first sync preamble symbol. This has to be with
| zeros on alternating carriers. Used for fine and
| coarse frequency offset and timing estimation.
sync_word2: The second sync preamble symbol. This has to be filled
| entirely. Also used for coarse frequency offset and
| channel estimation.
"""
def __init__(self, fft_len=_def_fft_len, cp_len=_def_cp_len,
frame_length_tag_key=_def_frame_length_tag_key,
packet_length_tag_key=_def_packet_length_tag_key,
packet_num_tag_key=_def_packet_num_tag_key,
occupied_carriers=_def_occupied_carriers,
pilot_carriers=_def_pilot_carriers,
pilot_symbols=_def_pilot_symbols,
bps_header=1,
bps_payload=1,
sync_word1=None,
sync_word2=None,
debug_log=False,
scramble_bits=False
):
gr.hier_block2.__init__(self, "ofdm_rx",
gr.io_signature(1, 1, gr.sizeof_gr_complex),
gr.io_signature(1, 1, gr.sizeof_char))
### Param init / sanity check ########################################
self.fft_len = fft_len
self.cp_len = cp_len
self.frame_length_tag_key = frame_length_tag_key
self.packet_length_tag_key = packet_length_tag_key
self.occupied_carriers = occupied_carriers
self.bps_header = bps_header
self.bps_payload = bps_payload
n_sync_words = 1
if sync_word1 is None:
self.sync_word1 = _make_sync_word1(fft_len, occupied_carriers, pilot_carriers)
else:
if len(sync_word1) != self.fft_len:
raise ValueError("Length of sync sequence(s) must be FFT length.")
self.sync_word1 = sync_word1
self.sync_word2 = ()
if sync_word2 is None:
self.sync_word2 = _make_sync_word2(fft_len, occupied_carriers, pilot_carriers)
n_sync_words = 2
elif len(sync_word2):
if len(sync_word2) != fft_len:
raise ValueError("Length of sync sequence(s) must be FFT length.")
self.sync_word2 = sync_word2
n_sync_words = 2
if scramble_bits:
self.scramble_seed = 0x7f
else:
self.scramble_seed = 0x00 # We deactivate the scrambler by init'ing it with zeros
### Sync ############################################################
sync_detect = digital.ofdm_sync_sc_cfb(fft_len, cp_len)
delay = blocks.delay(gr.sizeof_gr_complex, fft_len+cp_len)
oscillator = analog.frequency_modulator_fc(-2.0 / fft_len)
mixer = blocks.multiply_cc()
hpd = digital.header_payload_demux(
n_sync_words+1, # Number of OFDM symbols before payload (sync + 1 sym header)
fft_len, cp_len, # FFT length, guard interval
frame_length_tag_key, # Frame length tag key
"", # We're not using trigger tags
True # One output item is one OFDM symbol (False would output complex scalars)
)
self.connect(self, sync_detect)
self.connect(self, delay, (mixer, 0), (hpd, 0))
self.connect((sync_detect, 0), oscillator, (mixer, 1))
self.connect((sync_detect, 1), (hpd, 1))
if debug_log:
self.connect((sync_detect, 0), blocks.file_sink(gr.sizeof_float, 'freq-offset.dat'))
self.connect((sync_detect, 1), blocks.file_sink(gr.sizeof_char, 'sync-detect.dat'))
### Header demodulation ##############################################
header_fft = fft.fft_vcc(self.fft_len, True, (), True)
chanest = digital.ofdm_chanest_vcvc(self.sync_word1, self.sync_word2, 1)
header_constellation = _get_constellation(bps_header)
header_equalizer = digital.ofdm_equalizer_simpledfe(
fft_len,
header_constellation.base(),
occupied_carriers,
pilot_carriers,
pilot_symbols,
symbols_skipped=0,
)
header_eq = digital.ofdm_frame_equalizer_vcvc(
header_equalizer.base(),
cp_len,
self.frame_length_tag_key,
True,
1 # Header is 1 symbol long
)
header_serializer = digital.ofdm_serializer_vcc(
fft_len, occupied_carriers,
self.frame_length_tag_key
)
header_demod = digital.constellation_decoder_cb(header_constellation.base())
header_formatter = digital.packet_header_ofdm(
occupied_carriers, 1,
packet_length_tag_key,
frame_length_tag_key,
packet_num_tag_key,
bps_header,
bps_payload,
scramble_header=scramble_bits
)
header_parser = digital.packet_headerparser_b(header_formatter.formatter())
self.connect(
(hpd, 0),
header_fft,
chanest,
header_eq,
header_serializer,
header_demod,
header_parser
)
self.msg_connect(header_parser, "header_data", hpd, "header_data")
if debug_log:
self.connect((chanest, 1), blocks.file_sink(gr.sizeof_gr_complex * fft_len, 'channel-estimate.dat'))
self.connect((chanest, 0), blocks.file_sink(gr.sizeof_gr_complex * fft_len, 'post-hdr-chanest.dat'))
self.connect((chanest, 0), blocks.tag_debug(gr.sizeof_gr_complex * fft_len, 'post-hdr-chanest'))
self.connect(header_eq, blocks.file_sink(gr.sizeof_gr_complex * fft_len, 'post-hdr-eq.dat'))
self.connect(header_serializer, blocks.file_sink(gr.sizeof_gr_complex, 'post-hdr-serializer.dat'))
            self.connect(header_demod, blocks.file_sink(1, 'post-hdr-demod.dat'))
### Payload demod ####################################################
payload_fft = fft.fft_vcc(self.fft_len, True, (), True)
payload_constellation = _get_constellation(bps_payload)
payload_equalizer = digital.ofdm_equalizer_simpledfe(
fft_len,
payload_constellation.base(),
occupied_carriers,
pilot_carriers,
pilot_symbols,
symbols_skipped=1, # (that was already in the header)
alpha=0.1
)
payload_eq = digital.ofdm_frame_equalizer_vcvc(
payload_equalizer.base(),
cp_len,
self.frame_length_tag_key
)
payload_serializer = digital.ofdm_serializer_vcc(
fft_len, occupied_carriers,
self.frame_length_tag_key,
self.packet_length_tag_key,
1 # Skip 1 symbol (that was already in the header)
)
payload_demod = digital.constellation_decoder_cb(payload_constellation.base())
self.payload_descrambler = digital.additive_scrambler_bb(
0x8a,
self.scramble_seed,
7,
0, # Don't reset after fixed length
bits_per_byte=8, # This is after packing
reset_tag_key=self.packet_length_tag_key
)
payload_pack = blocks.repack_bits_bb(bps_payload, 8, self.packet_length_tag_key, True)
self.crc = digital.crc32_bb(True, self.packet_length_tag_key)
self.connect(
(hpd, 1),
payload_fft,
payload_eq,
payload_serializer,
payload_demod,
payload_pack,
self.payload_descrambler,
self.crc,
self
)
if debug_log:
self.connect((hpd, 1), blocks.tag_debug(gr.sizeof_gr_complex*fft_len, 'post-hpd'))
self.connect(payload_fft, blocks.file_sink(gr.sizeof_gr_complex*fft_len, 'post-payload-fft.dat'))
self.connect(payload_eq, blocks.file_sink(gr.sizeof_gr_complex*fft_len, 'post-payload-eq.dat'))
self.connect(payload_serializer, blocks.file_sink(gr.sizeof_gr_complex, 'post-payload-serializer.dat'))
self.connect(payload_demod, blocks.file_sink(1, 'post-payload-demod.dat'))
self.connect(payload_pack, blocks.file_sink(1, 'post-payload-pack.dat'))
            self.connect(self.crc, blocks.file_sink(1, 'post-payload-crc.dat'))
| gpl-3.0 |
nspierbundel/amlm3-3.x | tools/perf/python/twatch.py | 7370 | 1334 | #! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <[email protected]>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
cpus = perf.cpu_map()
threads = perf.thread_map()
evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
wakeup_events = 1, watermark = 1,
sample_id_all = 1,
                       sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU)
evsel.open(cpus = cpus, threads = threads);
evlist = perf.evlist(cpus, threads)
evlist.add(evsel)
evlist.mmap()
while True:
evlist.poll(timeout = -1)
for cpu in cpus:
event = evlist.read_on_cpu(cpu)
if not event:
continue
print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
event.sample_pid,
event.sample_tid),
print event
if __name__ == '__main__':
main()
| gpl-2.0 |
wooyek/nuntio | web/plebe/template.py | 1 | 3070 | # -*- coding: utf-8 -*-
# Copyright 2008 Janusz Skonieczny
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This is a set of utilities for faster development with Django templates.
It provides template loaders that look in an app's 'templates' folder (and
in model-named subdirectories of it) when you specify an app prefix
('app/template.html').
It's possible to register global template libraries by adding this to your
settings:
GLOBALTAGS = (
'myapp.templatetags.cooltags',
)
"""
from django.conf import settings
from django.http import HttpResponse
from django.template import RequestContext, add_to_builtins, loader, TemplateDoesNotExist
from django.utils.functional import Promise
from django.utils.encoding import force_unicode
from django.utils import simplejson
from ragendja.apputils import get_app_dirs
import os, logging
def get_template_sources(template_name, template_dirs=None):
""" Returs a collection of paths used to load templates in this module """
packed = template_name.split('/', 1)
if len(packed) == 2 and packed[0] in app_template_dirs:
model_prefixed = packed[1].split('_',1)
generic_path = os.path.join(app_template_dirs[packed[0]], model_prefixed[1])
model_prefixed = os.path.join(*model_prefixed)
model_path = os.path.join(app_template_dirs[packed[0]], model_prefixed)
return [model_path, generic_path]
return []
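# Illustration (hypothetical app name): for "blog/post_detail.html" this
# returns [<blog templates dir>/post/detail.html,
#          <blog templates dir>/detail.html].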
def app_model_templates_loader(template_name, template_dirs=None):
"""
    Loader for model-dependent templates stored in model-named directories
    (app/templates/<model_name>/form.html), falling back to generic
    templates (app/templates/form.html).
The following defines a template loader that loads templates from a specific
app based on the prefix of the template path:
get_template("app/<model_name>_template.html") => app/templates/<model_name>/template.html
if not found, will try generic template
get_template("app/<model_name>_template.html") => app/templates/template.html
This keeps the code DRY and prevents name clashes.
"""
for path in get_template_sources(template_name, template_dirs):
logging.debug("Looking for tempalte: %s" % path)
try:
return (open(path).read().decode(settings.FILE_CHARSET), path)
except IOError:
pass
raise TemplateDoesNotExist, template_name
app_model_templates_loader.is_usable = True
# This is needed by app_prefixed_loader.
app_template_dirs = get_app_dirs('templates')
 | mit |
zmarvel/playground | sound/testplay.py | 1 | 3152 | import alsaaudio
from math import pi, sin, pow
import getch
SAMPLE_RATE = 44100
FORMAT = alsaaudio.PCM_FORMAT_U8
PERIOD_SIZE = 512
N_SAMPLES = 1024
notes = "abcdefg"
frequencies = {}
for i, note in enumerate(notes):
frequencies[note] = 440 * pow(pow(2, 1/2), i)
# Generate one period of a sine wave with 1024 samples and amplitude 127,
# centered at 0 (the +128 offset for unsigned 8-bit output is added later)
sine_wave = [int(sin(x * 2*pi/N_SAMPLES) * 127) for x in range(0, N_SAMPLES)]
square_wave = []
sawtooth_wave = []
triangle_wave = []
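# Build one period of each waveform from the instantaneous phase: the square
# flips sign at pi, the sawtooth ramps down linearly, and the triangle ramps
# up over the first half period and back down over the second.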
for i in range(0, N_SAMPLES):
phase = (i * 2*pi / N_SAMPLES) % 2*pi
if phase < pi:
square_wave.append(127)
else:
square_wave.append(-128)
sawtooth_wave.append(int(127 - (127 // pi * phase)))
if phase < pi:
triangle_wave.append(int(-127 + (2 * 127 * phase // pi)))
else:
triangle_wave.append(int(3 * 127 - (2 * 127 * phase // pi)))
def main():
buf = bytearray(PERIOD_SIZE)
# alsaaudio setup
dev = alsaaudio.PCM(type=alsaaudio.PCM_PLAYBACK)
dev.setchannels(1)
dev.setrate(SAMPLE_RATE)
dev.setformat(FORMAT)
dev.setperiodsize(PERIOD_SIZE)
#load_buf(buf, 440)
f = 440
w_half = [x//2 + 128 for x in make_wave(sine_wave, f)]
#w_o1 = [x//4 for x in make_wave(f*2)]
#w_o2 = [x//6 for x in make_wave(f*3)]
#w_o3 = [x//8 for x in make_wave(f*4)]
#w_o4 = [x//10 for x in make_wave(f*5)]
#w_o4 = [x//12 for x in make_wave(f*6)]
#w_o5 = [x//14 for x in make_wave(f*7)]
#w_o6 = [x//16 for x in make_wave(f*8)]
#for i, samp in enumerate(w_o1):
# w[i] += samp + w_o2[i] + w_o3[i] + w_o4[i] + w_o5[i] + w_o6[i] + 128
# print(w[i])
#buf = bytearray(w)
#for i, samp in enumerate(w):
# if samp > 0:
# samp = 127
# else:
# samp = -128
w = [x + 128 for x in make_wave(square_wave, 440)]
buf = bytearray(w)
char = getch.getch()
last = 'q'
while char != 'q':
if char != last:
if char == '1':
w = [x//2 + 128 for x in make_wave(sine_wave, 440)]
buf = bytearray(w)
elif char == '2':
w = [x//2 + 128 for x in make_wave(square_wave, 440)]
buf = bytearray(w)
elif char == '3':
w = [x//2 + 128 for x in make_wave(sawtooth_wave, 440)]
buf = bytearray(w)
elif char == '4':
w = [x//2 + 128 for x in make_wave(triangle_wave, 440)]
buf = bytearray(w)
elif char == '5':
buf = bytearray(w_half)
dev.write(buf)
dev.write(buf)
dev.write(buf)
last = char
char = getch.getch()
return 0
#def load_buf(buf, frequency):
# step = N_SAMPLES * frequency // SAMPLE_RATE
# for i in range(0, PERIOD_SIZE):
# buf[i] = wave[(step * i * N_SAMPLES // PERIOD_SIZE) % N_SAMPLES]
# return buf
def make_wave(wave, frequency):
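    # `step` is how many wavetable entries to advance per output sample, so
    # the table is traversed `frequency` times per second of audio.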
step = N_SAMPLES * frequency // SAMPLE_RATE
w = []
for i in range(0, PERIOD_SIZE):
w.append(wave[(step * i * N_SAMPLES // PERIOD_SIZE) % N_SAMPLES])
return w
if __name__ == '__main__':
main()
| mit |
imageboards/Orphereus | Orphereus/controllers/Orphie_Public.py | 1 | 12982 | # -*- coding: utf-8 -*-
################################################################################
# Copyright (C) 2009 Johan Liebert, Mantycore, Hedger, Rusanon #
# < [email protected] ; http://orphereus.anoma.ch > #
# #
# This file is part of Orphereus, an imageboard engine. #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. #
################################################################################
import logging
from Orphereus.lib.base import *
from Orphereus.model import *
from sqlalchemy.orm import eagerload
import os
import datetime
from Orphereus.lib.miscUtils import *
from Orphereus.lib.constantValues import *
from OrphieBaseController import OrphieBaseController
log = logging.getLogger(__name__)
class OrphiePublicController(OrphieBaseController):
def __before__(self):
OrphieBaseController.__before__(self)
c.title = g.OPT.title
if g.OPT.refControlEnabled:
ref = request.headers.get('REFERER', False)
if ref:
ref = filterText(ref)
if ref:
rickroll = True
for rc in g.OPT.refControlList:
if rc in ref:
rickroll = False
if (rickroll):
redir = g.OPT.fakeLinks[random.randint(0, len(g.OPT.fakeLinks) - 1)]
toLog(LOG_EVENT_RICKROLLD, "Request rickrolld. Referer: %s, Redir: %s, IP: %s, User-Agent: %s" % (ref, redir, getUserIp(), filterText(request.headers.get('User-Agent', '?'))))
redirect_to(str(redir))
if (self.userInst and self.userInst.isValid()) or g.OPT.allowAnonymous:
self.initEnvironment()
else:
self.setCookie()
def ipBanned(self):
if c.ban:
return self.error(_('You are banned on %s for %s days for the following reason:<br/>%s') % (c.ban.date, c.ban.period, c.ban.reason))
else:
return self.error(_("ORLY?"))
def login(self, user):
if g.OPT.allowLogin:
session['uidNumber'] = user.uidNumber
session.save()
else:
self.logout()
def logout(self):
session.clear()
session.save()
session.delete()
redirect_to('boardBase')
def captchaPic(self, cid):
# TODO: fix shitty code
#log.debug('user cap lang: %s' %c.userInst.cLang)
self.setLang(True)
"""
sessionCid = None
if session.has_key('anonCaptId'):
sessionCid = session['anonCaptId']
if session.has_key('cid'):
sessionCid = session['cid']
"""
pic = Captcha.picture(cid, g.OPT.captchaFont)
"""
if sessionCid:
log.debug("%s:%s" % (str(cid), str(sessionCid)))
if (str(cid) != str(sessionCid)):
redirect_to('captcha', cid = sessionCid)
"""
if ("Wrong ID" == pic):
newCaptcha = Captcha.create()
session['anonCaptId'] = newCaptcha.id
session.save()
redirect_to('captcha', cid = newCaptcha.id)
response.headers['Content-Length'] = len(pic)
response.headers['Content-Type'] = 'image/png'
return str(pic)
def authorize(self, url):
if url:
c.currentURL = u'/%s' % url #.encode('utf-8')
else:
c.currentURL = u''
if not g.OPT.allowLogin:
return self.error(_("Authorization disabled"))
ip = getUserIp()
tracker = LoginTracker.getTracker(ip)
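        # Once this IP has two or more failed attempts, a captcha is required;
        # the tracker persists both the attempt counter and the captcha id.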
captchaOk = True
captcha = False
if tracker.attempts >= 2:
if session and session.has_key('anonCaptId'):
anonCapt = Captcha.getCaptcha(session['anonCaptId'])
if tracker.cid and (str(tracker.cid) != str(anonCapt.id)):
trackerCapt = Captcha.getCaptcha(tracker.cid)
if trackerCapt:
trackerCapt.delete()
tracker.cid = anonCapt.id
meta.Session.commit()
c.showCaptcha = True
captchaOk = False
if tracker.cid:
captcha = Captcha.getCaptcha(tracker.cid)
if not captcha:
if c.userInst.isValid():
oldLang = h.setLang(self.userInst.cLang)
captcha = Captcha.create()
if c.userInst.isValid():
h.setLang(oldLang)
tracker.cid = captcha.id
meta.Session.commit()
c.captcha = Captcha.getCaptcha(tracker.cid)
if request.POST.get('code', False):
code = User.genUid(request.POST['code'].encode('utf-8'))
user = User.getByUid(code)
#log.debug("code: %s user: %s",code,str(user))
captid = request.POST.get('captid', False)
captval = request.POST.get('captcha', False)
#log.debug("got: %s:%s" %(captid, captval))
if (not captchaOk) and captid and captval and isNumber(captid):
if captcha and int(captid) == captcha.id:
captchaOk = captcha.test(captval)
captcha = False
if not captchaOk:
if c.userInst.isValid():
oldLang = h.setLang(self.userInst.cLang)
captcha = Captcha.create()
if c.userInst.isValid():
h.setLang(oldLang)
tracker.cid = captcha.id
if user and captchaOk:
if tracker:
tracker.delete()
if captcha:
captcha.delete()
self.login(user)
c.loginSuccessful = True
else:
tracker.attempts += 1
tracker.lastAttempt = datetime.datetime.now()
meta.Session.commit()
#log.debug("redir: %s" % c.currentURL)
if (not g.OPT.framedMain or (user and not(user.useFrame))): # (1) frame turned off
if (g.OPT.allowAnonymous): # (1.1) remove navigation frame if exists
c.proceedRedirect = True
c.frameEnabled = False
return self.render('loginRedirect')
else: # (1.2) frame is impossible
return redirect_to('boardBase', board = c.currentURL)
else: # (2) frame turned on
if (g.OPT.allowAnonymous and not g.OPT.obligatoryFrameCreation):
# (2.1) change navigation frame location if exists. DON'T create frame!
c.proceedRedirect = True
c.frameEnabled = True
return self.render('loginRedirect')
else: # (2.2) create new frame with correct target.
if c.currentURL:
return redirect_to('boardBase', frameTarget = c.currentURL)
else:
return redirect_to('boardBase')
c.boardName = _('Login')
return self.render('login')
def register(self, invite):
if 'invite' not in session:
iid = Invite.getId(invite)
if iid:
session['invite'] = invite
session['iid'] = iid
session['openReg'] = False
session.save()
elif g.OPT.allowRegistration:
session['invite'] = invite
session['iid'] = False
session['openReg'] = True
session.save()
else:
c.currentURL = u''
return self.render('login')
c.openReg = session['openReg']
c.captcha = None
captchaOk = True
if session['openReg']:
captchaOk = False
if session.get('cid', False):
captcha = Captcha.getCaptcha(session['cid'])
if captcha:
captchaOk = captcha.test(request.POST.get('captcha', False))
session['cid'] = None
session.save()
if not captchaOk:
captcha = Captcha.create()
session['cid'] = captcha.id
session.save()
c.captcha = captcha
key = request.POST.get('key', '').encode('utf-8')
key2 = request.POST.get('key2', '').encode('utf-8')
if key and captchaOk:
if len(key) >= g.OPT.minPassLength and key == key2:
uid = User.genUid(key)
user = User.getByUid(uid)
if user:
user.ban(7777, _("Your Security Code was used during registration by another user. Contact administrator immediately please."), -1)
del session['invite']
del session['iid']
return self.error(_("You entered already existing password. Previous account was banned. Contact administrator please."))
user = User.create(uid)
regId = user.secid() * user.secid() - user.secid()
toLog(LOG_EVENT_INVITE_USED, _("Utilized invite #%d [RID:%d]") % (session['iid'], regId))
del session['invite']
del session['iid']
session.save()
self.login(user)
redirect_to('boardBase', board = '!')
c.boardName = _('Register')
return self.render('register')
def banned(self):
c.userInst = self.userInst
if self.userInst.isValid() and self.userInst.isBanned():
c.boardName = _('Banned')
return self.render('banned')
else:
return self.error(_("ORLY?"))
def UnknownAction(self):
c.userInst = self.userInst
return self.error(_("Excuse me, WTF are you?"))
def saveUploaded(self, expandedName, content):
localFilePath = os.path.join(g.OPT.uploadPath, expandedName)
targetDir = os.path.dirname(localFilePath)
if not os.path.exists(targetDir):
os.makedirs(targetDir)
localFile = open(localFilePath, 'wb')
localFile.write(content)
localFile.close()
def oekakiSave(self, environ, start_response, url, tempid):
start_response('200 OK', [('Content-Type', 'text/plain'), ('Content-Length', '2')])
oekaki = Oekaki.get(tempid)
cl = int(request.environ['CONTENT_LENGTH'])
if oekaki and cl:
id = request.environ['wsgi.input'].read(1)
if id == 'S':
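                # The oekaki applet sends 'S', then length-prefixed header and
                # body blocks (8-digit ASCII lengths), optionally followed by a
                # length-prefixed animation blob that is saved as a .pch file.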
headerLength = int(request.environ['wsgi.input'].read(8))
header = request.environ['wsgi.input'].read(headerLength)
bodyLength = int(request.environ['wsgi.input'].read(8))
request.environ['wsgi.input'].read(2)
body = request.environ['wsgi.input'].read(bodyLength)
headers = header.split('&')
type = filterText(headers[0].split('=')[1])
time = headers[1].split('=')[1]
savedOekakiPath = h.expandName('%s.%s' % (tempid, type))
self.saveUploaded(savedOekakiPath, body)
animPath = None
animLength = request.environ['wsgi.input'].read(8)
if animLength:
animLength = int(animLength)
anim = request.environ['wsgi.input'].read(animLength)
animPath = h.expandName('%s.%s' % (tempid, 'pch'))
self.saveUploaded(animPath, anim)
oekaki.setPathsAndTime(savedOekakiPath, animPath, time)
return ['ok']
| gpl-2.0 |
Teamxrtc/webrtc-streaming-node | third_party/webrtc/src/chromium/src/build/android/pylib/instrumentation/test_package.py | 7 | 1335 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Class representing instrumentation test apk and jar."""
import os
from devil.android import apk_helper
from pylib.instrumentation import test_jar
class TestPackage(test_jar.TestJar):
def __init__(self, apk_path, jar_path, test_support_apk_path):
test_jar.TestJar.__init__(self, jar_path)
if not os.path.exists(apk_path):
raise Exception('%s not found, please build it' % apk_path)
self._apk_path = apk_path
self._apk_name = os.path.splitext(os.path.basename(apk_path))[0]
self._package_name = apk_helper.GetPackageName(self._apk_path)
self._test_support_apk_path = test_support_apk_path
def GetApkPath(self):
"""Returns the absolute path to the APK."""
return self._apk_path
def GetApkName(self):
"""Returns the name of the apk without the suffix."""
return self._apk_name
def GetPackageName(self):
"""Returns the package name of this APK."""
return self._package_name
# Override.
def Install(self, device):
device.Install(self.GetApkPath())
if (self._test_support_apk_path and
os.path.exists(self._test_support_apk_path)):
device.Install(self._test_support_apk_path)
| mit |
jamestwebber/scipy | scipy/linalg/tests/test_solvers.py | 2 | 31084 | from __future__ import division, print_function, absolute_import
import os
import numpy as np
from numpy.testing import assert_array_almost_equal
import pytest
from pytest import raises as assert_raises
from scipy.linalg import solve_sylvester
from scipy.linalg import solve_continuous_lyapunov, solve_discrete_lyapunov
from scipy.linalg import solve_continuous_are, solve_discrete_are
from scipy.linalg import block_diag, solve, LinAlgError
from scipy.sparse.sputils import matrix
def _load_data(name):
"""
Load npz data file under data/
Returns a copy of the data, rather than keeping the npz file open.
"""
filename = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'data', name)
with np.load(filename) as f:
return dict(f.items())
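# e.g. _load_data('carex_6_data.npz') yields a dict with the 'A', 'B', 'Q'
# and 'R' arrays consumed by the benchmark cases below.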
class TestSolveLyapunov(object):
cases = [
(np.array([[1, 2], [3, 4]]),
np.array([[9, 10], [11, 12]])),
# a, q all complex.
(np.array([[1.0+1j, 2.0], [3.0-4.0j, 5.0]]),
np.array([[2.0-2j, 2.0+2j], [-1.0-1j, 2.0]])),
# a real; q complex.
(np.array([[1.0, 2.0], [3.0, 5.0]]),
np.array([[2.0-2j, 2.0+2j], [-1.0-1j, 2.0]])),
# a complex; q real.
(np.array([[1.0+1j, 2.0], [3.0-4.0j, 5.0]]),
np.array([[2.0, 2.0], [-1.0, 2.0]])),
# An example from Kitagawa, 1977
(np.array([[3, 9, 5, 1, 4], [1, 2, 3, 8, 4], [4, 6, 6, 6, 3],
[1, 5, 2, 0, 7], [5, 3, 3, 1, 5]]),
np.array([[2, 4, 1, 0, 1], [4, 1, 0, 2, 0], [1, 0, 3, 0, 3],
[0, 2, 0, 1, 0], [1, 0, 3, 0, 4]])),
# Companion matrix example. a complex; q real; a.shape[0] = 11
(np.array([[0.100+0.j, 0.091+0.j, 0.082+0.j, 0.073+0.j, 0.064+0.j,
0.055+0.j, 0.046+0.j, 0.037+0.j, 0.028+0.j, 0.019+0.j,
0.010+0.j],
[1.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 1.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 1.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 0.000+0.j, 1.000+0.j, 0.000+0.j,
0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 1.000+0.j,
0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
1.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j, 1.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j, 0.000+0.j, 1.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j, 0.000+0.j, 0.000+0.j, 1.000+0.j, 0.000+0.j,
0.000+0.j],
[0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 1.000+0.j,
0.000+0.j]]),
np.eye(11)),
# https://github.com/scipy/scipy/issues/4176
(matrix([[0, 1], [-1/2, -1]]),
(matrix([0, 3]).T * matrix([0, 3]).T.T)),
# https://github.com/scipy/scipy/issues/4176
(matrix([[0, 1], [-1/2, -1]]),
(np.array(matrix([0, 3]).T * matrix([0, 3]).T.T))),
]
def test_continuous_squareness_and_shape(self):
nsq = np.ones((3, 2))
sq = np.eye(3)
assert_raises(ValueError, solve_continuous_lyapunov, nsq, sq)
assert_raises(ValueError, solve_continuous_lyapunov, sq, nsq)
assert_raises(ValueError, solve_continuous_lyapunov, sq, np.eye(2))
def check_continuous_case(self, a, q):
x = solve_continuous_lyapunov(a, q)
assert_array_almost_equal(
np.dot(a, x) + np.dot(x, a.conj().transpose()), q)
def check_discrete_case(self, a, q, method=None):
x = solve_discrete_lyapunov(a, q, method=method)
assert_array_almost_equal(
np.dot(np.dot(a, x), a.conj().transpose()) - x, -1.0*q)
def test_cases(self):
for case in self.cases:
self.check_continuous_case(case[0], case[1])
self.check_discrete_case(case[0], case[1])
self.check_discrete_case(case[0], case[1], method='direct')
self.check_discrete_case(case[0], case[1], method='bilinear')
def test_solve_continuous_are():
mat6 = _load_data('carex_6_data.npz')
mat15 = _load_data('carex_15_data.npz')
mat18 = _load_data('carex_18_data.npz')
mat19 = _load_data('carex_19_data.npz')
mat20 = _load_data('carex_20_data.npz')
cases = [
# Carex examples taken from (with default parameters):
# [1] P.BENNER, A.J. LAUB, V. MEHRMANN: 'A Collection of Benchmark
# Examples for the Numerical Solution of Algebraic Riccati
# Equations II: Continuous-Time Case', Tech. Report SPC 95_23,
# Fak. f. Mathematik, TU Chemnitz-Zwickau (Germany), 1995.
#
# The format of the data is (a, b, q, r, knownfailure), where
# knownfailure is None if the test passes or a string
# indicating the reason for failure.
#
# Test Case 0: carex #1
(np.diag([1.], 1),
np.array([[0], [1]]),
block_diag(1., 2.),
1,
None),
# Test Case 1: carex #2
(np.array([[4, 3], [-4.5, -3.5]]),
np.array([[1], [-1]]),
np.array([[9, 6], [6, 4.]]),
1,
None),
# Test Case 2: carex #3
(np.array([[0, 1, 0, 0],
[0, -1.89, 0.39, -5.53],
[0, -0.034, -2.98, 2.43],
[0.034, -0.0011, -0.99, -0.21]]),
np.array([[0, 0], [0.36, -1.6], [-0.95, -0.032], [0.03, 0]]),
np.array([[2.313, 2.727, 0.688, 0.023],
[2.727, 4.271, 1.148, 0.323],
[0.688, 1.148, 0.313, 0.102],
[0.023, 0.323, 0.102, 0.083]]),
np.eye(2),
None),
# Test Case 3: carex #4
(np.array([[-0.991, 0.529, 0, 0, 0, 0, 0, 0],
[0.522, -1.051, 0.596, 0, 0, 0, 0, 0],
[0, 0.522, -1.118, 0.596, 0, 0, 0, 0],
[0, 0, 0.522, -1.548, 0.718, 0, 0, 0],
[0, 0, 0, 0.922, -1.64, 0.799, 0, 0],
[0, 0, 0, 0, 0.922, -1.721, 0.901, 0],
[0, 0, 0, 0, 0, 0.922, -1.823, 1.021],
[0, 0, 0, 0, 0, 0, 0.922, -1.943]]),
np.array([[3.84, 4.00, 37.60, 3.08, 2.36, 2.88, 3.08, 3.00],
[-2.88, -3.04, -2.80, -2.32, -3.32, -3.82, -4.12, -3.96]]
).T * 0.001,
np.array([[1.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.1],
[0.0, 1.0, 0.0, 0.0, 0.1, 0.0, 0.0, 0.0],
[0.0, 0.0, 1.0, 0.0, 0.0, 0.5, 0.0, 0.0],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
[0.5, 0.1, 0.0, 0.0, 0.1, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.5, 0.0, 0.0, 0.1, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1, 0.0],
[0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1]]),
np.eye(2),
None),
# Test Case 4: carex #5
(np.array(
[[-4.019, 5.120, 0., 0., -2.082, 0., 0., 0., 0.870],
[-0.346, 0.986, 0., 0., -2.340, 0., 0., 0., 0.970],
[-7.909, 15.407, -4.069, 0., -6.450, 0., 0., 0., 2.680],
[-21.816, 35.606, -0.339, -3.870, -17.800, 0., 0., 0., 7.390],
[-60.196, 98.188, -7.907, 0.340, -53.008, 0., 0., 0., 20.400],
[0, 0, 0, 0, 94.000, -147.200, 0., 53.200, 0.],
[0, 0, 0, 0, 0, 94.000, -147.200, 0, 0],
[0, 0, 0, 0, 0, 12.800, 0.000, -31.600, 0],
[0, 0, 0, 0, 12.800, 0.000, 0.000, 18.800, -31.600]]),
np.array([[0.010, -0.011, -0.151],
[0.003, -0.021, 0.000],
[0.009, -0.059, 0.000],
[0.024, -0.162, 0.000],
[0.068, -0.445, 0.000],
[0.000, 0.000, 0.000],
[0.000, 0.000, 0.000],
[0.000, 0.000, 0.000],
[0.000, 0.000, 0.000]]),
np.eye(9),
np.eye(3),
None),
# Test Case 5: carex #6
(mat6['A'], mat6['B'], mat6['Q'], mat6['R'], None),
# Test Case 6: carex #7
(np.array([[1, 0], [0, -2.]]),
np.array([[1e-6], [0]]),
np.ones((2, 2)),
1.,
'Bad residual accuracy'),
# Test Case 7: carex #8
(block_diag(-0.1, -0.02),
np.array([[0.100, 0.000], [0.001, 0.010]]),
np.array([[100, 1000], [1000, 10000]]),
np.ones((2, 2)) + block_diag(1e-6, 0),
None),
# Test Case 8: carex #9
(np.array([[0, 1e6], [0, 0]]),
np.array([[0], [1.]]),
np.eye(2),
1.,
None),
# Test Case 9: carex #10
(np.array([[1.0000001, 1], [1., 1.0000001]]),
np.eye(2),
np.eye(2),
np.eye(2),
None),
# Test Case 10: carex #11
(np.array([[3, 1.], [4, 2]]),
np.array([[1], [1]]),
np.array([[-11, -5], [-5, -2.]]),
1.,
None),
# Test Case 11: carex #12
(np.array([[7000000., 2000000., -0.],
[2000000., 6000000., -2000000.],
[0., -2000000., 5000000.]]) / 3,
np.eye(3),
np.array([[1., -2., -2.], [-2., 1., -2.], [-2., -2., 1.]]).dot(
np.diag([1e-6, 1, 1e6])).dot(
np.array([[1., -2., -2.], [-2., 1., -2.], [-2., -2., 1.]])) / 9,
np.eye(3) * 1e6,
'Bad Residual Accuracy'),
# Test Case 12: carex #13
(np.array([[0, 0.4, 0, 0],
[0, 0, 0.345, 0],
[0, -0.524e6, -0.465e6, 0.262e6],
[0, 0, 0, -1e6]]),
np.array([[0, 0, 0, 1e6]]).T,
np.diag([1, 0, 1, 0]),
1.,
None),
# Test Case 13: carex #14
(np.array([[-1e-6, 1, 0, 0],
[-1, -1e-6, 0, 0],
[0, 0, 1e-6, 1],
[0, 0, -1, 1e-6]]),
np.ones((4, 1)),
np.ones((4, 4)),
1.,
None),
# Test Case 14: carex #15
(mat15['A'], mat15['B'], mat15['Q'], mat15['R'], None),
# Test Case 15: carex #16
(np.eye(64, 64, k=-1) + np.eye(64, 64)*(-2.) + np.rot90(
block_diag(1, np.zeros((62, 62)), 1)) + np.eye(64, 64, k=1),
np.eye(64),
np.eye(64),
np.eye(64),
None),
# Test Case 16: carex #17
(np.diag(np.ones((20, )), 1),
np.flipud(np.eye(21, 1)),
np.eye(21, 1) * np.eye(21, 1).T,
1,
'Bad Residual Accuracy'),
# Test Case 17: carex #18
(mat18['A'], mat18['B'], mat18['Q'], mat18['R'], None),
# Test Case 18: carex #19
(mat19['A'], mat19['B'], mat19['Q'], mat19['R'],
'Bad Residual Accuracy'),
# Test Case 19: carex #20
(mat20['A'], mat20['B'], mat20['Q'], mat20['R'],
'Bad Residual Accuracy')
]
    # Sets the minimum precision requirement for each test case.
    # Each number is the count of decimals to which the residual agrees with
    # the zero matrix when the solution x is plugged into the equation.
#
# res = array([[8e-3,1e-16],[1e-16,1e-20]]) --> min_decimal[k] = 2
#
# If the test is failing use "None" for that entry.
#
min_decimal = (14, 12, 13, 14, 11, 6, None, 5, 7, 14, 14,
None, 9, 14, 13, 14, None, 12, None, None)
def _test_factory(case, dec):
"""Checks if 0 = XA + A'X - XB(R)^{-1} B'X + Q is true"""
a, b, q, r, knownfailure = case
if knownfailure:
pytest.xfail(reason=knownfailure)
x = solve_continuous_are(a, b, q, r)
res = x.dot(a) + a.conj().T.dot(x) + q
out_fact = x.dot(b)
res -= out_fact.dot(solve(np.atleast_2d(r), out_fact.conj().T))
assert_array_almost_equal(res, np.zeros_like(res), decimal=dec)
for ind, case in enumerate(cases):
_test_factory(case, min_decimal[ind])
def test_solve_discrete_are():
cases = [
# Darex examples taken from (with default parameters):
# [1] P.BENNER, A.J. LAUB, V. MEHRMANN: 'A Collection of Benchmark
# Examples for the Numerical Solution of Algebraic Riccati
# Equations II: Discrete-Time Case', Tech. Report SPC 95_23,
# Fak. f. Mathematik, TU Chemnitz-Zwickau (Germany), 1995.
# [2] T. GUDMUNDSSON, C. KENNEY, A.J. LAUB: 'Scaling of the
# Discrete-Time Algebraic Riccati Equation to Enhance Stability
# of the Schur Solution Method', IEEE Trans.Aut.Cont., vol.37(4)
#
# The format of the data is (a, b, q, r, knownfailure), where
# knownfailure is None if the test passes or a string
# indicating the reason for failure.
#
# TEST CASE 0 : Complex a; real b, q, r
(np.array([[2, 1-2j], [0, -3j]]),
np.array([[0], [1]]),
np.array([[1, 0], [0, 2]]),
np.array([[1]]),
None),
# TEST CASE 1 :Real a, q, r; complex b
(np.array([[2, 1], [0, -1]]),
np.array([[-2j], [1j]]),
np.array([[1, 0], [0, 2]]),
np.array([[1]]),
None),
# TEST CASE 2 : Real a, b; complex q, r
(np.array([[3, 1], [0, -1]]),
np.array([[1, 2], [1, 3]]),
np.array([[1, 1+1j], [1-1j, 2]]),
np.array([[2, -2j], [2j, 3]]),
None),
# TEST CASE 3 : User-reported gh-2251 (Trac #1732)
(np.array([[0.63399379, 0.54906824, 0.76253406],
[0.5404729, 0.53745766, 0.08731853],
[0.27524045, 0.84922129, 0.4681622]]),
np.array([[0.96861695], [0.05532739], [0.78934047]]),
np.eye(3),
np.eye(1),
None),
# TEST CASE 4 : darex #1
(np.array([[4, 3], [-4.5, -3.5]]),
np.array([[1], [-1]]),
np.array([[9, 6], [6, 4]]),
np.array([[1]]),
None),
# TEST CASE 5 : darex #2
(np.array([[0.9512, 0], [0, 0.9048]]),
np.array([[4.877, 4.877], [-1.1895, 3.569]]),
np.array([[0.005, 0], [0, 0.02]]),
np.array([[1/3, 0], [0, 3]]),
None),
# TEST CASE 6 : darex #3
(np.array([[2, -1], [1, 0]]),
np.array([[1], [0]]),
np.array([[0, 0], [0, 1]]),
np.array([[0]]),
None),
# TEST CASE 7 : darex #4 (skipped the gen. Ric. term S)
(np.array([[0, 1], [0, -1]]),
np.array([[1, 0], [2, 1]]),
np.array([[-4, -4], [-4, 7]]) * (1/11),
np.array([[9, 3], [3, 1]]),
None),
# TEST CASE 8 : darex #5
(np.array([[0, 1], [0, 0]]),
np.array([[0], [1]]),
np.array([[1, 2], [2, 4]]),
np.array([[1]]),
None),
# TEST CASE 9 : darex #6
(np.array([[0.998, 0.067, 0, 0],
[-.067, 0.998, 0, 0],
[0, 0, 0.998, 0.153],
[0, 0, -.153, 0.998]]),
np.array([[0.0033, 0.0200],
[0.1000, -.0007],
[0.0400, 0.0073],
[-.0028, 0.1000]]),
np.array([[1.87, 0, 0, -0.244],
[0, 0.744, 0.205, 0],
[0, 0.205, 0.589, 0],
[-0.244, 0, 0, 1.048]]),
np.eye(2),
None),
# TEST CASE 10 : darex #7
(np.array([[0.984750, -.079903, 0.0009054, -.0010765],
[0.041588, 0.998990, -.0358550, 0.0126840],
[-.546620, 0.044916, -.3299100, 0.1931800],
[2.662400, -.100450, -.9245500, -.2632500]]),
np.array([[0.0037112, 0.0007361],
[-.0870510, 9.3411e-6],
[-1.198440, -4.1378e-4],
[-3.192700, 9.2535e-4]]),
np.eye(4)*1e-2,
np.eye(2),
None),
# TEST CASE 11 : darex #8
(np.array([[-0.6000000, -2.2000000, -3.6000000, -5.4000180],
[1.0000000, 0.6000000, 0.8000000, 3.3999820],
[0.0000000, 1.0000000, 1.8000000, 3.7999820],
[0.0000000, 0.0000000, 0.0000000, -0.9999820]]),
np.array([[1.0, -1.0, -1.0, -1.0],
[0.0, 1.0, -1.0, -1.0],
[0.0, 0.0, 1.0, -1.0],
[0.0, 0.0, 0.0, 1.0]]),
np.array([[2, 1, 3, 6],
[1, 2, 2, 5],
[3, 2, 6, 11],
[6, 5, 11, 22]]),
np.eye(4),
None),
# TEST CASE 12 : darex #9
(np.array([[95.4070, 1.9643, 0.3597, 0.0673, 0.0190],
[40.8490, 41.3170, 16.0840, 4.4679, 1.1971],
[12.2170, 26.3260, 36.1490, 15.9300, 12.3830],
[4.1118, 12.8580, 27.2090, 21.4420, 40.9760],
[0.1305, 0.5808, 1.8750, 3.6162, 94.2800]]) * 0.01,
np.array([[0.0434, -0.0122],
[2.6606, -1.0453],
[3.7530, -5.5100],
[3.6076, -6.6000],
[0.4617, -0.9148]]) * 0.01,
np.eye(5),
np.eye(2),
None),
# TEST CASE 13 : darex #10
(np.kron(np.eye(2), np.diag([1, 1], k=1)),
np.kron(np.eye(2), np.array([[0], [0], [1]])),
np.array([[1, 1, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, -1, 0],
[0, 0, 0, -1, 1, 0],
[0, 0, 0, 0, 0, 0]]),
np.array([[3, 0], [0, 1]]),
None),
# TEST CASE 14 : darex #11
(0.001 * np.array(
[[870.1, 135.0, 11.59, .5014, -37.22, .3484, 0, 4.242, 7.249],
[76.55, 897.4, 12.72, 0.5504, -40.16, .3743, 0, 4.53, 7.499],
[-127.2, 357.5, 817, 1.455, -102.8, .987, 0, 11.85, 18.72],
[-363.5, 633.9, 74.91, 796.6, -273.5, 2.653, 0, 31.72, 48.82],
[-960, 1645.9, -128.9, -5.597, 71.42, 7.108, 0, 84.52, 125.9],
[-664.4, 112.96, -88.89, -3.854, 84.47, 13.6, 0, 144.3, 101.6],
[-410.2, 693, -54.71, -2.371, 66.49, 12.49, .1063, 99.97, 69.67],
[-179.9, 301.7, -23.93, -1.035, 60.59, 22.16, 0, 213.9, 35.54],
[-345.1, 580.4, -45.96, -1.989, 105.6, 19.86, 0, 219.1, 215.2]]),
np.array([[4.7600, -0.5701, -83.6800],
[0.8790, -4.7730, -2.7300],
[1.4820, -13.1200, 8.8760],
[3.8920, -35.1300, 24.8000],
[10.3400, -92.7500, 66.8000],
[7.2030, -61.5900, 38.3400],
[4.4540, -36.8300, 20.2900],
[1.9710, -15.5400, 6.9370],
[3.7730, -30.2800, 14.6900]]) * 0.001,
np.diag([50, 0, 0, 0, 50, 0, 0, 0, 0]),
np.eye(3),
None),
# TEST CASE 15 : darex #12 - numerically least accurate example
(np.array([[0, 1e6], [0, 0]]),
np.array([[0], [1]]),
np.eye(2),
np.array([[1]]),
None),
# TEST CASE 16 : darex #13
(np.array([[16, 10, -2],
[10, 13, -8],
[-2, -8, 7]]) * (1/9),
np.eye(3),
1e6 * np.eye(3),
1e6 * np.eye(3),
None),
# TEST CASE 17 : darex #14
(np.array([[1 - 1/1e8, 0, 0, 0],
[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 1, 0]]),
np.array([[1e-08], [0], [0], [0]]),
np.diag([0, 0, 0, 1]),
np.array([[0.25]]),
None),
# TEST CASE 18 : darex #15
(np.eye(100, k=1),
np.flipud(np.eye(100, 1)),
np.eye(100),
np.array([[1]]),
None)
]
    # Set the minimum precision requirement for each test case.
    # Here each number is the number of decimals that agree with the zero
    # matrix when the solution x is plugged into the equation.
#
# res = array([[8e-3,1e-16],[1e-16,1e-20]]) --> min_decimal[k] = 2
#
# If the test is failing use "None" for that entry.
#
min_decimal = (12, 14, 13, 14, 13, 16, 18, 14, 14, 13,
14, 13, 13, 14, 12, 2, 5, 6, 10)
def _test_factory(case, dec):
"""Checks if X = A'XA-(A'XB)(R+B'XB)^-1(B'XA)+Q) is true"""
a, b, q, r, knownfailure = case
if knownfailure:
pytest.xfail(reason=knownfailure)
x = solve_discrete_are(a, b, q, r)
res = a.conj().T.dot(x.dot(a)) - x + q
res -= a.conj().T.dot(x.dot(b)).dot(
solve(r+b.conj().T.dot(x.dot(b)), b.conj().T).dot(x.dot(a))
)
assert_array_almost_equal(res, np.zeros_like(res), decimal=dec)
for ind, case in enumerate(cases):
_test_factory(case, min_decimal[ind])
# An infeasible example taken from https://arxiv.org/abs/1505.04861v1
A = np.triu(np.ones((3, 3)))
A[0, 1] = -1
B = np.array([[1, 1, 0], [0, 0, 1]]).T
Q = np.full_like(A, -2) + np.diag([8, -1, -1.9])
R = np.diag([-10, 0.1])
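    # R is indefinite here (it has a negative eigenvalue), so no stabilizing
    # solution exists and the solver is expected to raise LinAlgError.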
assert_raises(LinAlgError, solve_continuous_are, A, B, Q, R)
def test_solve_generalized_continuous_are():
cases = [
# Two random examples differ by s term
# in the absence of any literature for demanding examples.
(np.array([[2.769230e-01, 8.234578e-01, 9.502220e-01],
[4.617139e-02, 6.948286e-01, 3.444608e-02],
[9.713178e-02, 3.170995e-01, 4.387444e-01]]),
np.array([[3.815585e-01, 1.868726e-01],
[7.655168e-01, 4.897644e-01],
[7.951999e-01, 4.455862e-01]]),
np.eye(3),
np.eye(2),
np.array([[6.463130e-01, 2.760251e-01, 1.626117e-01],
[7.093648e-01, 6.797027e-01, 1.189977e-01],
[7.546867e-01, 6.550980e-01, 4.983641e-01]]),
np.zeros((3, 2)),
None),
(np.array([[2.769230e-01, 8.234578e-01, 9.502220e-01],
[4.617139e-02, 6.948286e-01, 3.444608e-02],
[9.713178e-02, 3.170995e-01, 4.387444e-01]]),
np.array([[3.815585e-01, 1.868726e-01],
[7.655168e-01, 4.897644e-01],
[7.951999e-01, 4.455862e-01]]),
np.eye(3),
np.eye(2),
np.array([[6.463130e-01, 2.760251e-01, 1.626117e-01],
[7.093648e-01, 6.797027e-01, 1.189977e-01],
[7.546867e-01, 6.550980e-01, 4.983641e-01]]),
np.ones((3, 2)),
None)
]
min_decimal = (10, 10)
def _test_factory(case, dec):
"""Checks if X = A'XA-(A'XB)(R+B'XB)^-1(B'XA)+Q) is true"""
a, b, q, r, e, s, knownfailure = case
if knownfailure:
pytest.xfail(reason=knownfailure)
x = solve_continuous_are(a, b, q, r, e, s)
res = a.conj().T.dot(x.dot(e)) + e.conj().T.dot(x.dot(a)) + q
out_fact = e.conj().T.dot(x).dot(b) + s
res -= out_fact.dot(solve(np.atleast_2d(r), out_fact.conj().T))
assert_array_almost_equal(res, np.zeros_like(res), decimal=dec)
for ind, case in enumerate(cases):
_test_factory(case, min_decimal[ind])
def test_solve_generalized_discrete_are():
mat20170120 = _load_data('gendare_20170120_data.npz')
cases = [
# Two random examples differ by s term
# in the absence of any literature for demanding examples.
(np.array([[2.769230e-01, 8.234578e-01, 9.502220e-01],
[4.617139e-02, 6.948286e-01, 3.444608e-02],
[9.713178e-02, 3.170995e-01, 4.387444e-01]]),
np.array([[3.815585e-01, 1.868726e-01],
[7.655168e-01, 4.897644e-01],
[7.951999e-01, 4.455862e-01]]),
np.eye(3),
np.eye(2),
np.array([[6.463130e-01, 2.760251e-01, 1.626117e-01],
[7.093648e-01, 6.797027e-01, 1.189977e-01],
[7.546867e-01, 6.550980e-01, 4.983641e-01]]),
np.zeros((3, 2)),
None),
(np.array([[2.769230e-01, 8.234578e-01, 9.502220e-01],
[4.617139e-02, 6.948286e-01, 3.444608e-02],
[9.713178e-02, 3.170995e-01, 4.387444e-01]]),
np.array([[3.815585e-01, 1.868726e-01],
[7.655168e-01, 4.897644e-01],
[7.951999e-01, 4.455862e-01]]),
np.eye(3),
np.eye(2),
np.array([[6.463130e-01, 2.760251e-01, 1.626117e-01],
[7.093648e-01, 6.797027e-01, 1.189977e-01],
[7.546867e-01, 6.550980e-01, 4.983641e-01]]),
np.ones((3, 2)),
None),
# user-reported (under PR-6616) 20-Jan-2017
# tests against the case where E is None but S is provided
(mat20170120['A'],
mat20170120['B'],
mat20170120['Q'],
mat20170120['R'],
None,
mat20170120['S'],
None),
]
min_decimal = (11, 11, 16)
def _test_factory(case, dec):
"""Checks if X = A'XA-(A'XB)(R+B'XB)^-1(B'XA)+Q) is true"""
a, b, q, r, e, s, knownfailure = case
if knownfailure:
pytest.xfail(reason=knownfailure)
x = solve_discrete_are(a, b, q, r, e, s)
if e is None:
e = np.eye(a.shape[0])
if s is None:
s = np.zeros_like(b)
res = a.conj().T.dot(x.dot(a)) - e.conj().T.dot(x.dot(e)) + q
res -= (a.conj().T.dot(x.dot(b)) + s).dot(
solve(r+b.conj().T.dot(x.dot(b)),
(b.conj().T.dot(x.dot(a)) + s.conj().T)
)
)
assert_array_almost_equal(res, np.zeros_like(res), decimal=dec)
for ind, case in enumerate(cases):
_test_factory(case, min_decimal[ind])
def test_are_validate_args():
def test_square_shape():
nsq = np.ones((3, 2))
sq = np.eye(3)
for x in (solve_continuous_are, solve_discrete_are):
assert_raises(ValueError, x, nsq, 1, 1, 1)
assert_raises(ValueError, x, sq, sq, nsq, 1)
assert_raises(ValueError, x, sq, sq, sq, nsq)
assert_raises(ValueError, x, sq, sq, sq, sq, nsq)
def test_compatible_sizes():
nsq = np.ones((3, 2))
sq = np.eye(4)
for x in (solve_continuous_are, solve_discrete_are):
assert_raises(ValueError, x, sq, nsq, 1, 1)
assert_raises(ValueError, x, sq, sq, sq, sq, sq, nsq)
assert_raises(ValueError, x, sq, sq, np.eye(3), sq)
assert_raises(ValueError, x, sq, sq, sq, np.eye(3))
assert_raises(ValueError, x, sq, sq, sq, sq, np.eye(3))
def test_symmetry():
nsym = np.arange(9).reshape(3, 3)
sym = np.eye(3)
for x in (solve_continuous_are, solve_discrete_are):
assert_raises(ValueError, x, sym, sym, nsym, sym)
assert_raises(ValueError, x, sym, sym, sym, nsym)
def test_singularity():
sing = np.full((3, 3), 1e12)
sing[2, 2] -= 1
sq = np.eye(3)
for x in (solve_continuous_are, solve_discrete_are):
assert_raises(ValueError, x, sq, sq, sq, sq, sing)
assert_raises(ValueError, solve_continuous_are, sq, sq, sq, sing)
def test_finiteness():
nm = np.full((2, 2), np.nan)
sq = np.eye(2)
for x in (solve_continuous_are, solve_discrete_are):
assert_raises(ValueError, x, nm, sq, sq, sq)
assert_raises(ValueError, x, sq, nm, sq, sq)
assert_raises(ValueError, x, sq, sq, nm, sq)
assert_raises(ValueError, x, sq, sq, sq, nm)
assert_raises(ValueError, x, sq, sq, sq, sq, nm)
assert_raises(ValueError, x, sq, sq, sq, sq, sq, nm)
class TestSolveSylvester(object):
cases = [
# a, b, c all real.
(np.array([[1, 2], [0, 4]]),
np.array([[5, 6], [0, 8]]),
np.array([[9, 10], [11, 12]])),
        # a, b, c all real, 4x4. a and b have non-trivial 2x2 blocks in their
# quasi-triangular form.
(np.array([[1.0, 0, 0, 0],
[0, 1.0, 2.0, 0.0],
[0, 0, 3.0, -4],
[0, 0, 2, 5]]),
np.array([[2.0, 0, 0, 1.0],
[0, 1.0, 0.0, 0.0],
[0, 0, 1.0, -1],
[0, 0, 1, 1]]),
np.array([[1.0, 0, 0, 0],
[0, 1.0, 0, 0],
[0, 0, 1.0, 0],
[0, 0, 0, 1.0]])),
# a, b, c all complex.
(np.array([[1.0+1j, 2.0], [3.0-4.0j, 5.0]]),
np.array([[-1.0, 2j], [3.0, 4.0]]),
np.array([[2.0-2j, 2.0+2j], [-1.0-1j, 2.0]])),
# a and b real; c complex.
(np.array([[1.0, 2.0], [3.0, 5.0]]),
np.array([[-1.0, 0], [3.0, 4.0]]),
np.array([[2.0-2j, 2.0+2j], [-1.0-1j, 2.0]])),
# a and c complex; b real.
(np.array([[1.0+1j, 2.0], [3.0-4.0j, 5.0]]),
np.array([[-1.0, 0], [3.0, 4.0]]),
np.array([[2.0-2j, 2.0+2j], [-1.0-1j, 2.0]])),
# a complex; b and c real.
(np.array([[1.0+1j, 2.0], [3.0-4.0j, 5.0]]),
np.array([[-1.0, 0], [3.0, 4.0]]),
np.array([[2.0, 2.0], [-1.0, 2.0]])),
# not square matrices, real
(np.array([[8, 1, 6], [3, 5, 7], [4, 9, 2]]),
np.array([[2, 3], [4, 5]]),
np.array([[1, 2], [3, 4], [5, 6]])),
# not square matrices, complex
(np.array([[8, 1j, 6+2j], [3, 5, 7], [4, 9, 2]]),
np.array([[2, 3], [4, 5-1j]]),
np.array([[1, 2j], [3, 4j], [5j, 6+7j]])),
]
def check_case(self, a, b, c):
x = solve_sylvester(a, b, c)
assert_array_almost_equal(np.dot(a, x) + np.dot(x, b), c)
def test_cases(self):
for case in self.cases:
self.check_case(case[0], case[1], case[2])
def test_trivial(self):
a = np.array([[1.0, 0.0], [0.0, 1.0]])
b = np.array([[1.0]])
c = np.array([2.0, 2.0]).reshape(-1, 1)
x = solve_sylvester(a, b, c)
assert_array_almost_equal(x, np.array([1.0, 1.0]).reshape(-1, 1))
| bsd-3-clause |
shikhardb/scikit-learn | examples/covariance/plot_mahalanobis_distances.py | 348 | 6232 | r"""
================================================================
Robust covariance estimation and Mahalanobis distances relevance
================================================================
An example to show covariance estimation with the Mahalanobis
distances on Gaussian distributed data.
For Gaussian distributed data, the distance of an observation
:math:`x_i` to the mode of the distribution can be computed using its
Mahalanobis distance: :math:`d_{(\mu,\Sigma)}(x_i)^2 = (x_i -
\mu)'\Sigma^{-1}(x_i - \mu)` where :math:`\mu` and :math:`\Sigma` are
the location and the covariance of the underlying Gaussian
distribution.
In practice, :math:`\mu` and :math:`\Sigma` are replaced by some
estimates. The usual maximum likelihood covariance estimate is very
sensitive to the presence of outliers in the data set, and so are the
corresponding Mahalanobis distances. It is better to use a robust
estimator of covariance to guarantee that the estimation is resistant
to "erroneous" observations in the data set and that the associated
Mahalanobis distances accurately reflect the true organisation of the
observations.
The Minimum Covariance Determinant estimator is a robust,
high-breakdown point (i.e. it can be used to estimate the covariance
matrix of highly contaminated datasets, up to
:math:`\frac{n_\text{samples}-n_\text{features}-1}{2}` outliers)
estimator of covariance. The idea is to find
:math:`\frac{n_\text{samples}+n_\text{features}+1}{2}`
observations whose empirical covariance has the smallest determinant,
yielding a "pure" subset of observations from which to compute
standard estimates of location and covariance.
The Minimum Covariance Determinant estimator (MCD) was introduced by
P. J. Rousseeuw in [1].
This example illustrates how the Mahalanobis distances are affected by
outlying data: observations drawn from a contaminating distribution
are not distinguishable from the observations coming from the real,
Gaussian distribution that one may want to work with. Using MCD-based
Mahalanobis distances, the two populations become
distinguishable. Associated applications include outlier detection,
observation ranking, clustering, ...
For visualization purposes, the cube root of the Mahalanobis distances
is shown in the boxplots, as Wilson and Hilferty suggest [2].
[1] P. J. Rousseeuw. Least median of squares regression. J. Am. Stat.
Assoc., 79:871, 1984.
[2] Wilson, E. B., & Hilferty, M. M. (1931). The distribution of chi-square.
Proceedings of the National Academy of Sciences of the United States
of America, 17, 684-688.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.covariance import EmpiricalCovariance, MinCovDet
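# Illustrative only (not used below): a direct NumPy translation of the
# squared Mahalanobis distance d^2 = (x - mu)' Sigma^{-1} (x - mu) from the
# docstring above. The fitted estimators expose the same quantity through
# their mahalanobis() method.
def _mahalanobis_squared(x, mu, sigma):
    diff = x - mu
    return diff.dot(np.linalg.solve(sigma, diff))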
n_samples = 125
n_outliers = 25
n_features = 2
# generate data
gen_cov = np.eye(n_features)
gen_cov[0, 0] = 2.
X = np.dot(np.random.randn(n_samples, n_features), gen_cov)
# add some outliers
outliers_cov = np.eye(n_features)
outliers_cov[np.arange(1, n_features), np.arange(1, n_features)] = 7.
X[-n_outliers:] = np.dot(np.random.randn(n_outliers, n_features), outliers_cov)
# fit a Minimum Covariance Determinant (MCD) robust estimator to data
robust_cov = MinCovDet().fit(X)
# compare estimators learnt from the full data set with true parameters
emp_cov = EmpiricalCovariance().fit(X)
###############################################################################
# Display results
fig = plt.figure()
plt.subplots_adjust(hspace=-.1, wspace=.4, top=.95, bottom=.05)
# Show data set
subfig1 = plt.subplot(3, 1, 1)
inlier_plot = subfig1.scatter(X[:, 0], X[:, 1],
color='black', label='inliers')
outlier_plot = subfig1.scatter(X[:, 0][-n_outliers:], X[:, 1][-n_outliers:],
color='red', label='outliers')
subfig1.set_xlim(subfig1.get_xlim()[0], 11.)
subfig1.set_title("Mahalanobis distances of a contaminated data set:")
# Show contours of the distance functions
xx, yy = np.meshgrid(np.linspace(plt.xlim()[0], plt.xlim()[1], 100),
np.linspace(plt.ylim()[0], plt.ylim()[1], 100))
zz = np.c_[xx.ravel(), yy.ravel()]
mahal_emp_cov = emp_cov.mahalanobis(zz)
mahal_emp_cov = mahal_emp_cov.reshape(xx.shape)
emp_cov_contour = subfig1.contour(xx, yy, np.sqrt(mahal_emp_cov),
cmap=plt.cm.PuBu_r,
linestyles='dashed')
mahal_robust_cov = robust_cov.mahalanobis(zz)
mahal_robust_cov = mahal_robust_cov.reshape(xx.shape)
robust_contour = subfig1.contour(xx, yy, np.sqrt(mahal_robust_cov),
cmap=plt.cm.YlOrBr_r, linestyles='dotted')
subfig1.legend([emp_cov_contour.collections[1], robust_contour.collections[1],
inlier_plot, outlier_plot],
['MLE dist', 'robust dist', 'inliers', 'outliers'],
loc="upper right", borderaxespad=0)
plt.xticks(())
plt.yticks(())
# Plot the scores for each point
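# The exponent 0.33 approximates the cube root suggested by Wilson and
# Hilferty [2], which makes the chi-squared distributed squared distances
# roughly Gaussian and the boxplots easier to read.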
emp_mahal = emp_cov.mahalanobis(X - np.mean(X, 0)) ** (0.33)
subfig2 = plt.subplot(2, 2, 3)
subfig2.boxplot([emp_mahal[:-n_outliers], emp_mahal[-n_outliers:]], widths=.25)
subfig2.plot(1.26 * np.ones(n_samples - n_outliers),
emp_mahal[:-n_outliers], '+k', markeredgewidth=1)
subfig2.plot(2.26 * np.ones(n_outliers),
emp_mahal[-n_outliers:], '+k', markeredgewidth=1)
subfig2.axes.set_xticklabels(('inliers', 'outliers'), size=15)
subfig2.set_ylabel(r"$\sqrt[3]{\rm{(Mahal. dist.)}}$", size=16)
subfig2.set_title("1. from non-robust estimates\n(Maximum Likelihood)")
plt.yticks(())
robust_mahal = robust_cov.mahalanobis(X - robust_cov.location_) ** (0.33)
subfig3 = plt.subplot(2, 2, 4)
subfig3.boxplot([robust_mahal[:-n_outliers], robust_mahal[-n_outliers:]],
widths=.25)
subfig3.plot(1.26 * np.ones(n_samples - n_outliers),
robust_mahal[:-n_outliers], '+k', markeredgewidth=1)
subfig3.plot(2.26 * np.ones(n_outliers),
robust_mahal[-n_outliers:], '+k', markeredgewidth=1)
subfig3.axes.set_xticklabels(('inliers', 'outliers'), size=15)
subfig3.set_ylabel(r"$\sqrt[3]{\rm{(Mahal. dist.)}}$", size=16)
subfig3.set_title("2. from robust estimates\n(Minimum Covariance Determinant)")
plt.yticks(())
plt.show()
| bsd-3-clause |
5y/kivy | kivy/tools/report.py | 17 | 3660 | '''
Report tool
===========
This tool is a helper for users. It can be used to dump information
that helps during the debugging process.
'''
import os
import sys
import time
from time import ctime
from configparser import ConfigParser
from io import StringIO
from xmlrpc.client import ServerProxy
import kivy
report = []
def title(t):
report.append('')
report.append('=' * 80)
report.append(t)
report.append('=' * 80)
report.append('')
# ----------------------------------------------------------
# Start output debugging
# ----------------------------------------------------------
title('Global')
report.append('OS platform : %s' % sys.platform)
report.append('Python EXE : %s' % sys.executable)
report.append('Python Version : %s' % sys.version)
report.append('Python API : %s' % sys.api_version)
report.append('Kivy Version : %s' % kivy.__version__)
report.append('Install path : %s' % os.path.dirname(kivy.__file__))
report.append('Install date : %s' % ctime(os.path.getctime(kivy.__file__)))
title('OpenGL')
from kivy.core import gl
from kivy.core.window import Window
report.append('GL Vendor: %s' % gl.glGetString(gl.GL_VENDOR))
report.append('GL Renderer: %s' % gl.glGetString(gl.GL_RENDERER))
report.append('GL Version: %s' % gl.glGetString(gl.GL_VERSION))
ext = gl.glGetString(gl.GL_EXTENSIONS)
if ext is None:
report.append('GL Extensions: %s' % ext)
else:
report.append('GL Extensions:')
for x in ext.split():
report.append('\t%s' % x)
Window.close()
title('Core selection')
from kivy.core.audio import SoundLoader
report.append('Audio = %s' % SoundLoader._classes)
from kivy.core.camera import Camera
report.append('Camera = %s' % Camera)
from kivy.core.image import ImageLoader
report.append('Image = %s' % ImageLoader.loaders)
from kivy.core.text import Label
report.append('Text = %s' % Label)
from kivy.core.video import Video
report.append('Video = %s' % Video)
report.append('Window = %s' % Window)
title('Libraries')
def testimport(libname):
try:
l = __import__(libname)
report.append('%-20s exist at %s' % (libname, l.__file__))
except ImportError:
report.append('%-20s is missing' % libname)
for x in (
'gst',
'pygame',
'pygame.midi',
'pyglet',
'videocapture',
'squirtle',
'PIL',
'opencv',
'opencv.cv',
'opencv.highgui',
'cython'):
testimport(x)
title('Configuration')
s = StringIO()
from kivy.config import Config
ConfigParser.write(Config, s)
report.extend(s.getvalue().split('\n'))
title('Input availability')
from kivy.input.factory import MotionEventFactory
for x in MotionEventFactory.list():
report.append(x)
'''
title('Log')
for x in pymt_logger_history.history:
report.append(x.message)
'''
title('Environ')
for k, v in os.environ.items():
report.append('%s = %s' % (k, v))
title('Options')
for k, v in kivy.kivy_options.items():
report.append('%s = %s' % (k, v))
report = '\n'.join(report)
print(report)
print()
print()
try:
reply = input(
        'Do you want to send the report to paste.pocoo.org? (Y/n): ')
except EOFError:
sys.exit(0)
if reply.lower().strip() in ('', 'y'):
print('Please wait while sending the report...')
s = ServerProxy('http://paste.pocoo.org/xmlrpc/')
r = s.pastes.newPaste('text', report)
print()
print()
print('REPORT posted at http://paste.pocoo.org/show/%s/' % r)
print()
print()
else:
print('No report posted.')
# On Windows systems, the console closes right after the dump finishes.
# That is inconvenient if we want to read the report URL.
input('Press Enter to leave.')
| mit |
SchoolIdolTomodachi/CinderellaProducers | cpro/filters.py | 1 | 10802 | from django.db.models import Q
from django.core.exceptions import PermissionDenied
from cpro import models
############################################################
# Cards
def filterCards(queryset, parameters, request):
if request.user.is_authenticated():
request.user.all_accounts = request.user.accounts.all()
accounts_pks = ','.join([str(account.pk) for account in request.user.all_accounts])
if accounts_pks:
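            # Annotate each card with per-user ownership/favourite counts via
            # raw correlated subqueries. Interpolating here is safe because
            # accounts_pks and the user id are integer primary keys, not
            # user-supplied strings.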
queryset = queryset.extra(select={
'total_owned': 'SELECT COUNT(*) FROM cpro_ownedcard WHERE card_id = cpro_card.id AND account_id IN ({})'.format(accounts_pks),
'favorited': 'SELECT COUNT(*) FROM cpro_favoritecard WHERE card_id = cpro_card.id AND owner_id IN ({})'.format(request.user.id),
})
if 'favorite_of' in parameters and parameters['favorite_of']:
queryset = queryset.filter(fans__owner_id=parameters['favorite_of'])
if 'ids' in parameters and parameters['ids']:
queryset = queryset.filter(id__in=parameters['ids'].split(','))
if 'search' in parameters and parameters['search']:
terms = parameters['search'].split(' ')
for term in terms:
queryset = queryset.filter(Q(idol__name__icontains=term)
| Q(idol__japanese_name__icontains=term)
| Q(title__icontains=term)
| Q(translated_title__icontains=term)
| Q(skill_name__icontains=term)
| Q(translated_skill_name__icontains=term)
)
if 'i_rarity' in parameters and parameters['i_rarity']:
queryset = queryset.filter(i_rarity=parameters['i_rarity'])
if 'type' in parameters and parameters['type']:
queryset = queryset.filter(idol__i_type=parameters['type'])
if 'is_event' in parameters and parameters['is_event']:
if parameters['is_event'] == '2':
queryset = queryset.filter(event__isnull=False)
elif parameters['is_event'] == '3':
queryset = queryset.filter(event__isnull=True)
if 'is_limited' in parameters and parameters['is_limited']:
if parameters['is_limited'] == '2':
queryset = queryset.filter(is_limited=True)
elif parameters['is_limited'] == '3':
queryset = queryset.filter(is_limited=False)
if 'has_art' in parameters and parameters['has_art']:
if parameters['has_art'] == '2':
queryset = queryset.filter(art__isnull=False).exclude(art='')
elif parameters['has_art'] == '3':
queryset = queryset.filter(Q(art__isnull=True) | Q(art=''))
if 'has_art_hd' in parameters and parameters['has_art_hd']:
if parameters['has_art_hd'] == '2':
queryset = queryset.filter(art_hd__isnull=False).exclude(art_hd='')
elif parameters['has_art_hd'] == '3':
queryset = queryset.filter(Q(art_hd__isnull=True) | Q(art_hd=''))
if 'i_skill' in parameters and parameters['i_skill']:
queryset = queryset.filter(i_skill=parameters['i_skill'])
if 'leader_skill' in parameters and parameters['leader_skill']:
value = parameters['leader_skill']
if value.startswith('type-'):
queryset = queryset.filter(leader_skill_type=int(value[5:]))
elif value.startswith('apply-'):
queryset = queryset.filter(leader_skill_apply=int(value[6:]))
if 'idol' in parameters and parameters['idol']:
queryset = queryset.filter(idol=parameters['idol'])
if 'event' in parameters and parameters['event']:
queryset = queryset.filter(event=parameters['event'])
return queryset
def filterCard(queryset, parameters, request):
queryset = filterCards(queryset, parameters, request)
return queryset
############################################################
# Idols
def filterIdols(queryset, parameters, request):
if 'search' in parameters and parameters['search']:
terms = parameters['search'].split(' ')
for term in terms:
queryset = queryset.filter(Q(name__icontains=term)
| Q(japanese_name__icontains=term)
| Q(romaji_hometown__icontains=term)
| Q(hometown__icontains=term)
| Q(hobbies__icontains=term)
| Q(CV__icontains=term)
| Q(romaji_CV__icontains=term)
)
if 'type' in parameters and parameters['type']:
queryset = queryset.filter(i_type=parameters['type'])
if 'i_blood_type' in parameters and parameters['i_blood_type']:
queryset = queryset.filter(i_blood_type=parameters['i_blood_type'])
if 'i_writing_hand' in parameters and parameters['i_writing_hand']:
queryset = queryset.filter(i_writing_hand=parameters['i_writing_hand'])
if 'i_astrological_sign' in parameters and parameters['i_astrological_sign']:
queryset = queryset.filter(i_astrological_sign=parameters['i_astrological_sign'])
if 'has_signature' in parameters and parameters['has_signature']:
if parameters['has_signature'] == '2':
queryset = queryset.filter(signature__isnull=False).exclude(signature='')
elif parameters['has_signature'] == '3':
queryset = queryset.filter(Q(signature__isnull=True) | Q(signature=''))
return queryset
############################################################
# Accounts
def filterAccounts(queryset, parameters, request):
if 'search' in parameters and parameters['search']:
terms = parameters['search'].split(' ')
for term in terms:
queryset = queryset.filter(Q(owner__username__icontains=term)
| Q(owner__email__iexact=term)
| Q(nickname__icontains=term)
| Q(device__icontains=term)
| Q(owner__preferences__description__icontains=term)
| Q(owner__preferences__location__icontains=term)
)
if 'own_card' in parameters and parameters['own_card']:
queryset = queryset.filter(ownedcards__card__id=parameters['own_card'])
if 'favorite_card' in parameters and parameters['favorite_card']:
queryset = queryset.filter(owner__favoritecards__card__id=parameters['favorite_card'])
if 'user_type' in parameters and parameters['user_type']:
queryset = queryset.filter(owner__preferences__color=unicode(parameters['user_type']))
if 'game_id' in parameters and parameters['game_id']:
queryset = queryset.filter(game_id=parameters['game_id'])
if 'favorite_character' in parameters and parameters['favorite_character']:
queryset = queryset.filter(Q(owner__preferences__favorite_character1=parameters['favorite_character'])
| Q(owner__preferences__favorite_character2=parameters['favorite_character'])
| Q(owner__preferences__favorite_character3=parameters['favorite_character'])
)
if 'starter_id' in parameters and parameters['starter_id']:
queryset = queryset.filter(starter_id=parameters['starter_id'])
if 'center_type' in parameters and parameters['center_type']:
queryset = queryset.filter(center__card__idol__i_type=parameters['center_type'])
if 'center_rarity' in parameters and parameters['center_rarity']:
queryset = queryset.filter(center__card__i_rarity=parameters['center_rarity'])
if 'accept_friend_requests' in parameters and parameters['accept_friend_requests']:
if parameters['accept_friend_requests'] == '2':
queryset = queryset.filter(accept_friend_requests=True)
elif parameters['accept_friend_requests'] == '3':
queryset = queryset.filter(accept_friend_requests=False)
if 'ordering' in parameters:
if parameters['ordering'] == 'level':
queryset = queryset.exclude(level=0).exclude(level=None)
if parameters['ordering'] == 'start_date':
queryset = queryset.exclude(start_date=None)
return queryset
############################################################
# Events
def filterEvents(queryset, parameters, request):
if 'search' in parameters and parameters['search']:
terms = parameters['search'].split(' ')
for term in terms:
queryset = queryset.filter(Q(name__icontains=term)
| Q(translated_name__icontains=term)
)
if 'i_kind' in parameters and parameters['i_kind']:
queryset = queryset.filter(i_kind=parameters['i_kind'])
if 'idol' in parameters and parameters['idol']:
queryset = queryset.filter(cards__idol=parameters['idol'])
return queryset
############################################################
# Owned Cards
def filterFavoriteCards(queryset, parameters, request):
if 'owner' in parameters:
queryset = queryset.filter(owner_id=parameters['owner'])
else:
raise PermissionDenied()
return queryset
############################################################
# Owned Cards
def filterOwnedCards(queryset, parameters, request):
if 'account' in parameters:
queryset = queryset.filter(account_id=parameters['account'])
elif 'ids' in parameters and parameters['ids']:
queryset = queryset.filter(id__in=parameters['ids'].split(','))
else:
raise PermissionDenied()
if 'search' in parameters and parameters['search']:
terms = parameters['search'].split(' ')
for term in terms:
queryset = queryset.filter(Q(card__title__icontains=term)
| Q(card__idol__name__icontains=term)
)
if 'i_rarity' in parameters and parameters['i_rarity']:
queryset = queryset.filter(card__i_rarity=parameters['i_rarity'])
if 'is_event' in parameters and parameters['is_event']:
if parameters['is_event'] == '2':
queryset = queryset.filter(card__event__isnull=False)
elif parameters['is_event'] == '3':
queryset = queryset.filter(card__event__isnull=True)
if 'type' in parameters and parameters['type']:
queryset = queryset.filter(card__idol__i_type=parameters['type'])
if 'i_skill' in parameters and parameters['i_skill']:
queryset = queryset.filter(card__i_skill=parameters['i_skill'])
return queryset
| apache-2.0 |
virajs/selenium-1 | py/test/selenium/webdriver/common/cookie_tests.py | 28 | 3282 | import calendar
import time
import unittest
import random
import pytest
from selenium.test.selenium.webdriver.common import utils
class CookieTest(unittest.TestCase):
def setUp(self):
self._loadPage("simpleTest")
        # Timestamp 30 minutes in the future, for tests that need an expiry
timestamp = calendar.timegm(time.gmtime()) + (30 * 60)
self.COOKIE_A = {"name": "foo",
"value": "bar",
"path": "/",
"secure": False}
def tearDown(self):
self.driver.delete_all_cookies()
def testAddCookie(self):
self.driver.execute_script("return document.cookie")
self.driver.add_cookie(self.COOKIE_A)
cookie_returned = str(self.driver.execute_script("return document.cookie"))
self.assertTrue(self.COOKIE_A["name"] in cookie_returned)
def testAddingACookieThatExpiredInThePast(self):
if self.driver.name == 'internet explorer':
pytest.skip("Issue needs investigating")
cookie = self.COOKIE_A.copy()
cookie["expiry"] = calendar.timegm(time.gmtime()) - 1
self.driver.add_cookie(cookie)
cookies = self.driver.get_cookies()
self.assertEquals(0, len(cookies))
def testDeleteAllCookie(self):
self.driver.add_cookie(utils.convert_cookie_to_json(self.COOKIE_A))
self.driver.delete_all_cookies()
self.assertFalse(self.driver.get_cookies())
def testDeleteCookie(self):
self.driver.add_cookie(utils.convert_cookie_to_json(self.COOKIE_A))
self.driver.delete_cookie("foo")
self.assertFalse(self.driver.get_cookies())
def testShouldGetCookieByName(self):
key = "key_%d" % int(random.random()*10000000)
self.driver.execute_script("document.cookie = arguments[0] + '=set';", key)
cookie = self.driver.get_cookie(key)
self.assertEquals("set", cookie["value"])
def testGetAllCookies(self):
key1 = "key_%d" % int(random.random()*10000000)
key2 = "key_%d" % int(random.random()*10000000)
cookies = self.driver.get_cookies()
count = len(cookies)
one = {"name" :key1,
"value": "value"}
two = {"name":key2,
"value": "value"}
self.driver.add_cookie(one)
self.driver.add_cookie(two)
self._loadPage("simpleTest")
cookies = self.driver.get_cookies()
self.assertEquals(count + 2, len(cookies))
def testShouldNotDeleteCookiesWithASimilarName(self):
cookieOneName = "fish"
cookie1 = {"name" :cookieOneName,
"value":"cod"}
cookie2 = {"name" :cookieOneName + "x",
"value": "earth"}
self.driver.add_cookie(cookie1)
self.driver.add_cookie(cookie2)
self.driver.delete_cookie(cookieOneName)
cookies = self.driver.get_cookies()
self.assertFalse(cookie1["name"] == cookies[0]["name"], msg=str(cookies))
self.assertEquals(cookie2["name"] , cookies[0]["name"], msg=str(cookies))
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
def _pageURL(self, name):
return "http://localhost:%d/%s.html" % (self.webserver.port, name)
| apache-2.0 |
theju/safebrowsing-python | safebrowsing/backend.py | 2 | 5534 | import conf
from base import BaseDbObj
class SqliteDbObj(BaseDbObj):
def __init__(self):
try:
import sqlite3 as sqlite
except ImportError:
from pysqlite2 import dbapi2 as sqlite
self.connection = sqlite.connect(self.db_name)
self.cursor = self.connection.cursor()
def get_version(self, badware_type):
self.cursor.execute("select * from %s_version;" %(badware_type))
row = self.cursor.fetchall()
if not row:
return None
return row[0][0]
def insert_version_row(self, badware_type, version_number):
self.cursor.execute("INSERT INTO %s_version (version_number) VALUES "
"('%s');" %(badware_type, version_number))
def update_version_row(self, badware_type, new_version_number, version_number):
self.cursor.execute("UPDATE %s_version SET version_number='%s' WHERE "
"version_number='%s';" %(badware_type, new_version_number,
version_number))
def insert_rows(self, url_hash_dict):
for (url_hash, badware_code) in url_hash_dict.items():
self.cursor.execute("INSERT INTO url_hashes_table (badware_type,url_hash) "
"VALUES ('%s','%s');" %(badware_code, url_hash))
self.connection.commit()
self.connection.close()
def delete_rows(self, url_hash_dict):
for (url_hash, badware_code) in url_hash_dict.items():
self.cursor.execute("DELETE FROM url_hashes_table WHERE badware_type='%s' "
"AND url_hash='%s';" %(badware_code, url_hash))
def lookup_by_md5(self, md5_hash_list):
for md5_hash in md5_hash_list:
self.cursor.execute("SELECT * FROM url_hashes_table WHERE url_hash='%s';" %(md5_hash))
row = self.cursor.fetchall()
if not row:
continue
            # If a row was found, the URL hash is in the database;
            # return its badware type to stop the lookup.
return row[0][0]
class MySqlDbObj(SqliteDbObj):
def __init__(self):
try:
            import MySQLdb
except ImportError:
raise Exception("Python Db library (MySQLDb) not found.")
kwargs = {}
if self.db_user:
kwargs['user'] = self.db_user
if self.db_name:
            kwargs['db'] = self.db_name
if self.db_password:
kwargs['passwd'] = self.db_password
if self.db_host.startswith('/'):
kwargs['unix_socket'] = self.db_host
elif self.db_host:
kwargs['host'] = self.db_host
if self.db_port:
kwargs['port'] = int(self.db_port)
        self.connection = MySQLdb.connect(**kwargs)
self.cursor = self.connection.cursor()
class PostgresqlDbObj(SqliteDbObj):
def __init__(self):
try:
import psycopg2 as Database
except ImportError:
try:
import psycopg as Database
except ImportError:
raise Exception("Libraries psycopg2/psycopg not found.")
conn_string = ""
if not self.db_name:
raise Exception("Database name not specified.")
conn_string += "dbname=%s" %self.db_name
if self.db_user:
conn_string += " user=%s %s" %(self.db_user, conn_string)
if self.db_password:
conn_string += " password='%s'" %self.db_password
if self.db_host:
conn_string += " host=%s" %self.db_host
if self.db_port:
conn_string += " port=%s" % self.db_port
self.connection = Database.connect(conn_string)
self.cursor = self.connection.cursor()
class MemcachedDbObj(BaseDbObj):
def __init__(self):
try:
import memcache
except ImportError:
raise Exception("Could not find the memcached module.")
if isinstance(self.db_host, (str, unicode)):
self.db_host = [self.db_host,]
if isinstance(self.db_port, (int, str, unicode)):
self.db_port = [self.db_port, ]
servers = ["%s:%s" %(ii[0], ii[1]) for ii in zip(self.db_host, self.db_port)]
self.client = memcache.Client(servers)
def get_version(self, badware_type):
return self.client.get("%s_version" %(badware_type))
def insert_version_row(self, badware_type, version_number):
self.client.set("%s_version" %badware_type, version_number)
def update_version_row(self, badware_type, new_version_number, version_number):
self.client.set("%s_version" %badware_type, version_number)
def insert_rows(self, url_hash_dict):
self.client.set_multi(url_hash_dict)
def delete_rows(self, url_hash_dict):
self.client.delete_multi(url_hash_dict.keys())
def lookup_by_md5(self, md5_hash_list):
hash_row = self.client.get_multi(md5_hash_list)
if not hash_row:
return None
return hash_row.values()[0]
DB_BACKENDS = {'sqlite3' : SqliteDbObj,
'mysql' : MySqlDbObj,
'postgresql' : PostgresqlDbObj,
'memcached' : MemcachedDbObj,}
class DbObj(object):
def __init__(self):
backend = getattr(conf, 'DATABASE_ENGINE')
if not backend in DB_BACKENDS:
raise Exception("The DATABASE_ENGINE is not among the supported backends.")
self.backend = DB_BACKENDS[backend]()
| mit |
Changaco/oh-mainline | vendor/packages/mechanize/test/test_pickle.py | 22 | 1042 | import cPickle
import cStringIO as StringIO
import pickle
import mechanize
import mechanize._response
import mechanize._testcase
def pickle_and_unpickle(obj, implementation):
return implementation.loads(implementation.dumps(obj))
def test_pickling(obj, check=lambda unpickled: None):
check(pickle_and_unpickle(obj, cPickle))
check(pickle_and_unpickle(obj, pickle))
class PickleTest(mechanize._testcase.TestCase):
def test_pickle_cookie(self):
cookiejar = mechanize.CookieJar()
url = "http://example.com/"
request = mechanize.Request(url)
response = mechanize._response.test_response(
headers=[("Set-Cookie", "spam=eggs")],
url=url)
[cookie] = cookiejar.make_cookies(response, request)
check_equality = lambda unpickled: self.assertEqual(unpickled, cookie)
test_pickling(cookie, check_equality)
def test_pickle_cookiejar(self):
test_pickling(mechanize.CookieJar())
if __name__ == "__main__":
mechanize._testcase.main()
| agpl-3.0 |
lmorchard/django | tests/null_queries/tests.py | 290 | 2928 | from __future__ import unicode_literals
from django.core.exceptions import FieldError
from django.test import TestCase
from .models import Choice, Inner, OuterA, OuterB, Poll
class NullQueriesTests(TestCase):
def test_none_as_null(self):
"""
Regression test for the use of None as a query value.
None is interpreted as an SQL NULL, but only in __exact and __iexact
queries.
Set up some initial polls and choices
"""
p1 = Poll(question='Why?')
p1.save()
c1 = Choice(poll=p1, choice='Because.')
c1.save()
c2 = Choice(poll=p1, choice='Why Not?')
c2.save()
# Exact query with value None returns nothing ("is NULL" in sql,
# but every 'id' field has a value).
self.assertQuerysetEqual(Choice.objects.filter(choice__exact=None), [])
# The same behavior for iexact query.
self.assertQuerysetEqual(Choice.objects.filter(choice__iexact=None), [])
# Excluding the previous result returns everything.
self.assertQuerysetEqual(
Choice.objects.exclude(choice=None).order_by('id'),
[
'<Choice: Choice: Because. in poll Q: Why? >',
'<Choice: Choice: Why Not? in poll Q: Why? >'
]
)
# Valid query, but fails because foo isn't a keyword
self.assertRaises(FieldError, Choice.objects.filter, foo__exact=None)
# Can't use None on anything other than __exact and __iexact
self.assertRaises(ValueError, Choice.objects.filter, id__gt=None)
# Related managers use __exact=None implicitly if the object hasn't been saved.
p2 = Poll(question="How?")
self.assertEqual(repr(p2.choice_set.all()), '[]')
def test_reverse_relations(self):
"""
Querying across reverse relations and then another relation should
insert outer joins correctly so as not to exclude results.
"""
obj = OuterA.objects.create()
self.assertQuerysetEqual(
OuterA.objects.filter(inner__third=None),
['<OuterA: OuterA object>']
)
self.assertQuerysetEqual(
OuterA.objects.filter(inner__third__data=None),
['<OuterA: OuterA object>']
)
Inner.objects.create(first=obj)
self.assertQuerysetEqual(
Inner.objects.filter(first__inner__third=None),
['<Inner: Inner object>']
)
# Ticket #13815: check if <reverse>_isnull=False does not produce
# faulty empty lists
OuterB.objects.create(data="reverse")
self.assertQuerysetEqual(
OuterB.objects.filter(inner__isnull=False),
[]
)
Inner.objects.create(first=obj)
self.assertQuerysetEqual(
OuterB.objects.exclude(inner__isnull=False),
['<OuterB: OuterB object>']
)
| bsd-3-clause |
michael-yin/scrapy | scrapy/core/scraper.py | 6 | 8972 | """This module implements the Scraper component which parses responses and
extracts information from them"""
from collections import deque
from twisted.python.failure import Failure
from twisted.internet import defer
from scrapy.utils.defer import defer_result, defer_succeed, parallel, iter_errback
from scrapy.utils.spider import iterate_spider_output
from scrapy.utils.misc import load_object
from scrapy.exceptions import CloseSpider, DropItem, IgnoreRequest
from scrapy import signals
from scrapy.http import Request, Response
from scrapy.item import BaseItem
from scrapy.core.spidermw import SpiderMiddlewareManager
from scrapy import log
class Slot(object):
"""Scraper slot (one per running spider)"""
MIN_RESPONSE_SIZE = 1024
def __init__(self, max_active_size=5000000):
self.max_active_size = max_active_size
self.queue = deque()
self.active = set()
self.active_size = 0
self.itemproc_size = 0
self.closing = None
def add_response_request(self, response, request):
deferred = defer.Deferred()
self.queue.append((response, request, deferred))
if isinstance(response, Response):
self.active_size += max(len(response.body), self.MIN_RESPONSE_SIZE)
else:
self.active_size += self.MIN_RESPONSE_SIZE
return deferred
def next_response_request_deferred(self):
response, request, deferred = self.queue.popleft()
self.active.add(request)
return response, request, deferred
def finish_response(self, response, request):
self.active.remove(request)
if isinstance(response, Response):
self.active_size -= max(len(response.body), self.MIN_RESPONSE_SIZE)
else:
self.active_size -= self.MIN_RESPONSE_SIZE
def is_idle(self):
return not (self.queue or self.active)
def needs_backout(self):
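        # True once the buffered response bytes exceed the configured budget;
        # callers (the engine) use this as backpressure and stop feeding new
        # responses to the scraper until the backlog drains.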
return self.active_size > self.max_active_size
class Scraper(object):
def __init__(self, crawler):
self.slot = None
self.spidermw = SpiderMiddlewareManager.from_crawler(crawler)
itemproc_cls = load_object(crawler.settings['ITEM_PROCESSOR'])
self.itemproc = itemproc_cls.from_crawler(crawler)
self.concurrent_items = crawler.settings.getint('CONCURRENT_ITEMS')
self.crawler = crawler
self.signals = crawler.signals
self.logformatter = crawler.logformatter
@defer.inlineCallbacks
def open_spider(self, spider):
"""Open the given spider for scraping and allocate resources for it"""
self.slot = Slot()
yield self.itemproc.open_spider(spider)
def close_spider(self, spider):
"""Close a spider being scraped and release its resources"""
slot = self.slot
slot.closing = defer.Deferred()
slot.closing.addCallback(self.itemproc.close_spider)
self._check_if_closing(spider, slot)
return slot.closing
def is_idle(self):
"""Return True if there isn't any more spiders to process"""
return not self.slot
def _check_if_closing(self, spider, slot):
if slot.closing and slot.is_idle():
slot.closing.callback(spider)
def enqueue_scrape(self, response, request, spider):
slot = self.slot
dfd = slot.add_response_request(response, request)
def finish_scraping(_):
slot.finish_response(response, request)
self._check_if_closing(spider, slot)
self._scrape_next(spider, slot)
return _
dfd.addBoth(finish_scraping)
dfd.addErrback(log.err, 'Scraper bug processing %s' % request, \
spider=spider)
self._scrape_next(spider, slot)
return dfd
def _scrape_next(self, spider, slot):
while slot.queue:
response, request, deferred = slot.next_response_request_deferred()
self._scrape(response, request, spider).chainDeferred(deferred)
def _scrape(self, response, request, spider):
"""Handle the downloaded response or failure trough the spider
callback/errback"""
assert isinstance(response, (Response, Failure))
dfd = self._scrape2(response, request, spider) # returns spiders processed output
dfd.addErrback(self.handle_spider_error, request, response, spider)
dfd.addCallback(self.handle_spider_output, request, response, spider)
return dfd
def _scrape2(self, request_result, request, spider):
"""Handle the diferent cases of request's result been a Response or a
Failure"""
if not isinstance(request_result, Failure):
return self.spidermw.scrape_response(self.call_spider, \
request_result, request, spider)
else:
# FIXME: don't ignore errors in spider middleware
dfd = self.call_spider(request_result, request, spider)
return dfd.addErrback(self._log_download_errors, \
request_result, request, spider)
def call_spider(self, result, request, spider):
result.request = request
dfd = defer_result(result)
dfd.addCallbacks(request.callback or spider.parse, request.errback)
return dfd.addCallback(iterate_spider_output)
def handle_spider_error(self, _failure, request, response, spider):
exc = _failure.value
if isinstance(exc, CloseSpider):
self.crawler.engine.close_spider(spider, exc.reason or 'cancelled')
return
log.err(_failure, "Spider error processing %s" % request, spider=spider)
self.signals.send_catch_log(signal=signals.spider_error, failure=_failure, response=response, \
spider=spider)
self.crawler.stats.inc_value("spider_exceptions/%s" % _failure.value.__class__.__name__, \
spider=spider)
def handle_spider_output(self, result, request, response, spider):
if not result:
return defer_succeed(None)
it = iter_errback(result, self.handle_spider_error, request, response, spider)
dfd = parallel(it, self.concurrent_items,
self._process_spidermw_output, request, response, spider)
return dfd
def _process_spidermw_output(self, output, request, response, spider):
"""Process each Request/Item (given in the output parameter) returned
from the given spider
"""
if isinstance(output, Request):
self.crawler.engine.crawl(request=output, spider=spider)
elif isinstance(output, BaseItem):
self.slot.itemproc_size += 1
dfd = self.itemproc.process_item(output, spider)
dfd.addBoth(self._itemproc_finished, output, response, spider)
return dfd
elif output is None:
pass
else:
typename = type(output).__name__
log.msg(format='Spider must return Request, BaseItem or None, '
'got %(typename)r in %(request)s',
level=log.ERROR, spider=spider, request=request, typename=typename)
def _log_download_errors(self, spider_failure, download_failure, request, spider):
"""Log and silence errors that come from the engine (typically download
        errors that got propagated through here)
"""
if isinstance(download_failure, Failure) \
and not download_failure.check(IgnoreRequest):
if download_failure.frames:
log.err(download_failure, 'Error downloading %s' % request,
spider=spider)
else:
errmsg = download_failure.getErrorMessage()
if errmsg:
log.msg(format='Error downloading %(request)s: %(errmsg)s',
level=log.ERROR, spider=spider, request=request,
errmsg=errmsg)
if spider_failure is not download_failure:
return spider_failure
def _itemproc_finished(self, output, item, response, spider):
"""ItemProcessor finished for the given ``item`` and returned ``output``
"""
self.slot.itemproc_size -= 1
if isinstance(output, Failure):
ex = output.value
if isinstance(ex, DropItem):
logkws = self.logformatter.dropped(item, ex, response, spider)
log.msg(spider=spider, **logkws)
return self.signals.send_catch_log_deferred(signal=signals.item_dropped, \
item=item, spider=spider, exception=output.value)
else:
log.err(output, 'Error processing %s' % item, spider=spider)
else:
logkws = self.logformatter.scraped(output, response, spider)
log.msg(spider=spider, **logkws)
return self.signals.send_catch_log_deferred(signal=signals.item_scraped, \
item=output, response=response, spider=spider)
| bsd-3-clause |
jeremyh/agdc | contrib/agdc_workshop_exercises/ndvi_analysis_stacker-finished.py | 5 | 7285 | '''
Created on 21/02/2013
@author: u76345
'''
import os
import sys
import logging
import re
import numpy
from datetime import datetime, time
from osgeo import gdal
from agdc.stacker import Stacker
from EOtools.utils import log_multiline
from EOtools.stats import temporal_stats
SCALE_FACTOR = 10000
# Set top level standard output
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setLevel(logging.INFO)
console_formatter = logging.Formatter('%(message)s')
console_handler.setFormatter(console_formatter)
logger = logging.getLogger(__name__)
if not logger.level:
logger.setLevel(logging.DEBUG) # Default logging level for all modules
logger.addHandler(console_handler)
class NDVIStacker(Stacker):
""" Subclass of Stacker
Used to implement specific functionality to create stacks of derived datasets.
"""
def derive_datasets(self, input_dataset_dict, stack_output_info, tile_type_info):
assert type(input_dataset_dict) == dict, 'input_dataset_dict must be a dict'
log_multiline(logger.debug, input_dataset_dict, 'input_dataset_dict', '\t')
output_dataset_dict = {}
nbar_dataset_info = input_dataset_dict['NBAR'] # Only need NBAR data for NDVI
nbar_dataset_path = nbar_dataset_info['tile_pathname']
# Get a boolean mask from the PQA dataset (use default parameters for mask and dilation)
pqa_mask = self.get_pqa_mask(input_dataset_dict['PQA']['tile_pathname'])
nbar_dataset = gdal.Open(nbar_dataset_path)
assert nbar_dataset, 'Unable to open dataset %s' % nbar_dataset
logger.debug('Opened NBAR dataset %s', nbar_dataset_path)
#no_data_value = nbar_dataset_info['nodata_value']
no_data_value = -32767 # Need a value outside the scaled range -10000 - +10000
output_stack_path = os.path.join(self.output_dir, 'NDVI_pqa_masked.vrt')
output_tile_path = os.path.join(self.output_dir, re.sub('\.\w+$',
'_NDVI%s' % (tile_type_info['file_extension']),
os.path.basename(nbar_dataset_path)
)
)
# Copy metadata for eventual inclusion in stack file output
# This could also be written to the output tile if required
output_dataset_info = dict(nbar_dataset_info)
output_dataset_info['tile_pathname'] = output_tile_path # This is the most important modification - used to find
output_dataset_info['band_name'] = 'NDVI with PQA mask applied'
output_dataset_info['band_tag'] = 'NDVI-PQA'
output_dataset_info['tile_layer'] = 1
# NBAR bands into 2D NumPy arrays.
near_ir_band_data = nbar_dataset.GetRasterBand(4).ReadAsArray() # Near Infrared light
visible_band_data = nbar_dataset.GetRasterBand(3).ReadAsArray() # Red Visible Light
logger.debug('near_ir_band_data = %s', near_ir_band_data)
logger.debug('visible_band_data = %s', visible_band_data)
logger.debug('SCALE_FACTOR = %s', SCALE_FACTOR)
# Calculate NDVI for every element in the array using
# ((NIR - VIS) / (NIR + VIS)) * SCALE_FACTOR
# HINT - Use numpy.true_divide(numerator, denominator) to avoid divide by 0 errors
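        # numpy.true_divide promotes the integer bands to float, so the ratio
        # is computed elementwise without integer truncation; a 0/0 pixel
        # yields NaN (with a runtime warning) rather than raising an error.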
data_array = numpy.true_divide(near_ir_band_data - visible_band_data, (near_ir_band_data + visible_band_data)) * SCALE_FACTOR
self.apply_pqa_mask(data_array, pqa_mask, no_data_value)
# Create our output file
gdal_driver = gdal.GetDriverByName(tile_type_info['file_format'])
output_dataset = gdal_driver.Create(output_tile_path,
nbar_dataset.RasterXSize, nbar_dataset.RasterYSize,
1, nbar_dataset.GetRasterBand(1).DataType,
tile_type_info['format_options'].split(','))
assert output_dataset, 'Unable to open output dataset %s'% output_dataset
output_dataset.SetGeoTransform(nbar_dataset.GetGeoTransform())
output_dataset.SetProjection(nbar_dataset.GetProjection())
output_band = output_dataset.GetRasterBand(1)
output_band.WriteArray(data_array)
output_band.SetNoDataValue(no_data_value)
output_band.FlushCache()
# This is not strictly necessary - copy metadata to output dataset
output_dataset_metadata = nbar_dataset.GetMetadata()
if output_dataset_metadata:
output_dataset.SetMetadata(output_dataset_metadata)
log_multiline(logger.debug, output_dataset_metadata, 'output_dataset_metadata', '\t')
output_dataset.FlushCache()
logger.info('Finished writing %s', output_tile_path)
output_dataset_dict[output_stack_path] = output_dataset_info
# NDVI dataset processed - return info
return output_dataset_dict
if __name__ == '__main__':
def date2datetime(input_date, time_offset=time.min):
if not input_date:
return None
return datetime.combine(input_date, time_offset)
# Stacker class takes care of command line parameters
ndvi_stacker = NDVIStacker()
if ndvi_stacker.debug:
console_handler.setLevel(logging.DEBUG)
# Check for required command line parameters
assert (ndvi_stacker.x_index and ndvi_stacker.y_index), 'You must specify Tile X/Y-index (-x/-y or --x_index/--y_index)'
assert ndvi_stacker.output_dir, 'Output directory not specified (-o or --output)'
# Create derived datasets
stack_info_dict = ndvi_stacker.stack_derived(x_index=ndvi_stacker.x_index,
y_index=ndvi_stacker.y_index,
stack_output_dir=ndvi_stacker.output_dir,
start_datetime=date2datetime(ndvi_stacker.start_date, time.min),
end_datetime=date2datetime(ndvi_stacker.end_date, time.max),
satellite=ndvi_stacker.satellite,
sensor=ndvi_stacker.sensor)
log_multiline(logger.debug, stack_info_dict, 'stack_info_dict', '\t')
logger.info('Finished creating %d temporal stack files in %s.', len(stack_info_dict), ndvi_stacker.output_dir)
# Create statistics on derived datasets
logger.info('Beginning creation of statistics')
for vrt_stack_path in stack_info_dict:
# Find a place to write the stats
stats_dataset_path = vrt_stack_path.replace('.vrt', '_stats_envi')
# Calculate and write the stats
        temporal_stats.main(vrt_stack_path, stats_dataset_path,
                            noData=stack_info_dict[vrt_stack_path][0]['nodata_value'],
                            provenance=True)
logger.info('Finished creating stats file %s', stats_dataset_path)
| bsd-3-clause |
ehazlett/sensu-py | examples/mail.py | 1 | 2439 | #!/usr/bin/env python
import sys
import smtplib
from optparse import OptionParser
from email.mime.text import MIMEText
import json
from datetime import datetime
try:
from sensu import Handler
except ImportError:
    print('You must have the sensu Python module installed, e.g.: pip install sensu')
sys.exit(1)
class MailHandler(Handler):
def handle(self):
subj = self.settings.get('mail', {}).get('subject', 'Sensu Alert')
to = self.settings.get('mail', {}).get('to', 'root@localhost')
from_addr = self.settings.get('mail', {}).get('from', 'sensu@localhost')
host = self.settings.get('mail', {}).get('host', 'localhost')
port = self.settings.get('mail', {}).get('port', 25)
user = self.settings.get('mail', {}).get('user', None)
password = self.settings.get('mail', {}).get('password', None)
self.send(subj, to, from_addr, host, port, user, password)
def send(self, subj=None, to_addr=None, from_addr=None, host='localhost',
port=25, user=None, password=None):
# attempt to parse sensu message
try:
data = self.event
client_host = data.get('client', {}).get('name')
check_name = data.get('check', {}).get('name')
check_action = data.get('action')
timestamp = data.get('check', {}).get('issued')
check_date = datetime.fromtimestamp(int(timestamp)).strftime('%Y-%m-%d %H:%M:%S')
parts = (
'Date: {0}'.format(check_date),
'Host: {0}'.format(client_host),
'Address: {0}'.format(data.get('client', {}).get('address')),
'Action: {0}'.format(check_action),
'Name: {0}'.format(check_name),
'Command: {0}'.format(data.get('check', {}).get('command')),
'Output: {0}'.format(data.get('check', {}).get('output')),
)
text = '\n'.join(parts)
subj = '{0} [{1}: {2} ({3})]'.format(subj, client_host, check_name, check_action)
except Exception as e:
text = str(e)
msg = MIMEText(text)
msg['Subject'] = subj
msg['To'] = to_addr
msg['From'] = from_addr
s = smtplib.SMTP(host, int(port))
if user:
s.login(user, password)
s.sendmail(from_addr, [to_addr], msg.as_string())
s.quit()
if __name__ == '__main__':
m = MailHandler()
sys.exit(0)
| mit |
stingaci/heat-tutorial | partVI/lib/elements/heat-config-salt/install.d/hook-salt.py | 4 | 3759 | #!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import logging
import os
import sys
import salt.cli.caller
import salt.config
from salt import exceptions
import yaml
WORKING_DIR = os.environ.get('HEAT_SALT_WORKING',
'/var/lib/heat-config/heat-config-salt')
SALT_MINION_CONFIG = os.environ.get('SALT_MINION_CONFIG',
'/etc/salt/minion')
def prepare_dir(path):
if not os.path.isdir(path):
os.makedirs(path, 0o700)
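# A sketch of the heat-config document main() reads from stdin (field names
# follow the c[...] accesses below; values are illustrative):
# {
#   "id": "state_1",
#   "config": {"install_pkgs": {"pkg.installed": [{"pkgs": ["vim"]}]}},
#   "inputs": [{"name": "deploy_server_id", "value": "..."}],
#   "outputs": [{"name": "result"}]
# }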
def main(argv=sys.argv):
log = logging.getLogger('heat-config')
handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(
logging.Formatter(
'[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
log.addHandler(handler)
log.setLevel('DEBUG')
prepare_dir(WORKING_DIR)
os.chdir(WORKING_DIR)
c = json.load(sys.stdin)
opts = salt.config.minion_config(SALT_MINION_CONFIG)
opts['file_roots'] = {'base': [WORKING_DIR]}
opts['file_client'] = 'local'
opts['local'] = 'local'
opts['fun'] = 'state.sls'
opts['arg'] = [c['id']]
for input in c['inputs']:
key = input['name']
opts[key] = input.get('value', '')
state_file = '%s.sls' % c['id']
config = c.get('config', '')
if isinstance(config, dict):
yaml_config = yaml.safe_dump(config, default_flow_style=False)
else:
yaml_config = config
fn = os.path.join(WORKING_DIR, state_file)
with os.fdopen(os.open(fn, os.O_CREAT | os.O_WRONLY, 0o700), 'w') as f:
f.write(yaml_config.encode('utf-8'))
caller = salt.cli.caller.Caller.factory(opts)
log.debug('Applying Salt state %s' % state_file)
stdout, stderr = None, None
ret = {}
try:
ret = caller.call()
except exceptions.SaltInvocationError as err:
log.error(
'Salt invocation error while applying Salt state %s' % state_file)
stderr = err
if ret:
log.info('Results: %s' % ret)
output = yaml.safe_dump(ret['return'])
# returncode of 0 means there were successful changes
if ret['retcode'] == 0:
log.info('Completed applying salt state %s' % state_file)
stdout = output
else:
# Salt doesn't always return sane return codes so we have to check
# individual results
runfailed = False
for state, data in ret['return'].items():
if not data['result']:
runfailed = True
break
if runfailed:
log.error('Error applying Salt state %s. [%s]\n'
% (state_file, ret['retcode']))
stderr = output
else:
ret['retcode'] = 0
stdout = output
response = {}
for output in c.get('outputs', []):
output_name = output['name']
response[output_name] = ret.get(output_name)
response.update({
'deploy_stdout': stdout,
'deploy_stderr': stderr,
'deploy_status_code': ret['retcode'],
})
json.dump(response, sys.stdout)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| apache-2.0 |
veger/ansible | lib/ansible/modules/network/f5/bigip_wait.py | 21 | 11508 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_wait
short_description: Wait for a BIG-IP condition before continuing
description:
- You can wait for BIG-IP to be "ready". By "ready", we mean that BIG-IP is ready
to accept configuration.
- This module can take into account situations where the device is in the middle
of rebooting due to a configuration change.
version_added: 2.5
options:
timeout:
description:
- Maximum number of seconds to wait for.
- When used without other conditions it is equivalent to just sleeping.
- The default timeout is deliberately set to 2 hours because no individual
REST API call should take longer than that.
default: 7200
delay:
description:
- Number of seconds to wait before starting to poll.
default: 0
sleep:
default: 1
description:
- Number of seconds to sleep between checks; before 2.3 this was hardcoded to 1 second.
msg:
description:
- This overrides the normal error message from a failure to meet the required conditions.
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Wait for BIG-IP to be ready to take configuration
bigip_wait:
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Wait a maximum of 300 seconds for BIG-IP to be ready to take configuration
bigip_wait:
timeout: 300
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Wait for BIG-IP to be ready, don't start checking for 10 seconds
bigip_wait:
delay: 10
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
# only common fields returned
'''
import datetime
import signal
import time
from ansible.module_utils.basic import AnsibleModule
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
def hard_timeout(module, want, start):
elapsed = datetime.datetime.utcnow() - start
module.fail_json(
msg=want.msg or "Timeout when waiting for BIG-IP", elapsed=elapsed.seconds
)
class Parameters(AnsibleF5Parameters):
returnables = [
'elapsed'
]
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
@property
def delay(self):
if self._values['delay'] is None:
return None
return int(self._values['delay'])
@property
def timeout(self):
if self._values['timeout'] is None:
return None
return int(self._values['timeout'])
@property
def sleep(self):
if self._values['sleep'] is None:
return None
return int(self._values['sleep'])
class Changes(Parameters):
pass
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.have = None
self.want = Parameters(params=self.module.params)
self.changes = Parameters()
def exec_module(self):
result = dict()
changed = self.execute()
changes = self.changes.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def _get_client_connection(self):
return F5RestClient(**self.module.params)
def execute(self):
signal.signal(
signal.SIGALRM,
lambda sig, frame: hard_timeout(self.module, self.want, start)
)
# setup handler before scheduling signal, to eliminate a race
signal.alarm(int(self.want.timeout))
start = datetime.datetime.utcnow()
if self.want.delay:
time.sleep(float(self.want.delay))
end = start + datetime.timedelta(seconds=int(self.want.timeout))
while datetime.datetime.utcnow() < end:
time.sleep(int(self.want.sleep))
try:
# The first test verifies that the REST API is available; this is done
# by repeatedly trying to log in to it.
self.client = self._get_client_connection()
if not self.client:
continue
if self._device_is_rebooting():
# Wait for the reboot to happen and then start from the beginning
# of the waiting.
continue
if self._is_mprov_running_on_device():
self._wait_for_module_provisioning()
break
except Exception as ex:
if 'Failed to validate the SSL' in str(ex):
raise F5ModuleError(str(ex))
# The types of exceptions we're handling here are "REST API is not
# ready" exceptions.
#
# For example,
#
# Typically caused by device starting up:
#
# icontrol.exceptions.iControlUnexpectedHTTPError: 404 Unexpected Error:
# Not Found for uri: https://localhost:10443/mgmt/tm/sys/
# icontrol.exceptions.iControlUnexpectedHTTPError: 503 Unexpected Error:
# Service Temporarily Unavailable for uri: https://localhost:10443/mgmt/tm/sys/
#
#
# Typically caused by a device being down
#
# requests.exceptions.SSLError: HTTPSConnectionPool(host='localhost', port=10443):
# Max retries exceeded with url: /mgmt/tm/sys/ (Caused by SSLError(
# SSLError("bad handshake: SysCallError(-1, 'Unexpected EOF')",),))
#
#
# Typically caused by device still booting
#
# raise SSLError(e, request=request)\nrequests.exceptions.SSLError:
# HTTPSConnectionPool(host='localhost', port=10443): Max retries
# exceeded with url: /mgmt/shared/authn/login (Caused by
# SSLError(SSLError(\"bad handshake: SysCallError(-1, 'Unexpected EOF')\",),)),
continue
else:
elapsed = datetime.datetime.utcnow() - start
self.module.fail_json(
msg=self.want.msg or "Timeout when waiting for BIG-IP", elapsed=elapsed.seconds
)
elapsed = datetime.datetime.utcnow() - start
self.changes.update({'elapsed': elapsed.seconds})
return False
def _device_is_rebooting(self):
params = {
"command": "run",
"utilCmdArgs": '-c "runlevel"'
}
uri = "https://{0}:{1}/mgmt/tm/util/bash".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
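# 'runlevel' normally prints something like "N 3"; runlevel 6 means the
# system is rebooting, hence the substring check on the command output below.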
if 'commandResult' in response and '6' in response['commandResult']:
return True
return False
def _wait_for_module_provisioning(self):
# To prevent things from running forever, the hack is to check
# for mprov's status twice. If mprov is finished, then in most
# cases (not ASM) the provisioning is probably ready.
nops = 0
# Sleep a little to let provisioning settle and begin properly
time.sleep(5)
while nops < 4:
try:
if not self._is_mprov_running_on_device():
nops += 1
else:
nops = 0
except Exception as ex:
# This can be caused by restjavad restarting.
pass
time.sleep(10)
def _is_mprov_running_on_device(self):
params = {
"command": "run",
"utilCmdArgs": '-c "ps aux | grep \'[m]prov\'"'
}
uri = "https://{0}:{1}/mgmt/tm/util/bash".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
if 'commandResult' in response:
return True
return False
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
timeout=dict(default=7200, type='int'),
delay=dict(default=0, type='int'),
sleep=dict(default=1, type='int'),
msg=dict()
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
client = F5RestClient(**module.params)
try:
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
exit_json(module, results, client)
except F5ModuleError as ex:
fail_json(module, ex, client)
if __name__ == '__main__':
main()
| gpl-3.0 |
creative-workflow/pi-setup | services/webiopi/src/python/webiopi/__init__.py | 5 | 1082 | # Copyright 2012-2013 Eric Ptak - trouch.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from time import sleep
from webiopi.utils.version import BOARD_REVISION, VERSION
from webiopi.utils.logger import setInfo, setDebug, info, debug, warn, error, exception
from webiopi.utils.thread import runLoop
from webiopi.server import Server
from webiopi.devices.instance import deviceInstance
from webiopi.decorators.rest import macro
from webiopi.devices import bus as _bus
try:
import _webiopi.GPIO as GPIO
except:
pass
setInfo()
_bus.checkAllBus()
| mit |
candrews/portage | pym/portage/tests/dep/test_get_required_use_flags.py | 18 | 1431 | # Copyright 2010-2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from portage.tests import TestCase
from portage.dep import get_required_use_flags
from portage.exception import InvalidDependString
class TestCheckRequiredUse(TestCase):
def testCheckRequiredUse(self):
test_cases = (
("a b c", ["a", "b", "c"]),
("|| ( a b c )", ["a", "b", "c"]),
("^^ ( a b c )", ["a", "b", "c"]),
("?? ( a b c )", ["a", "b", "c"]),
("?? ( )", []),
("|| ( a b ^^ ( d e f ) )", ["a", "b", "d", "e", "f"]),
("^^ ( a b || ( d e f ) )", ["a", "b", "d", "e", "f"]),
("( ^^ ( a ( b ) ( || ( ( d e ) ( f ) ) ) ) )", ["a", "b", "d", "e", "f"]),
("a? ( ^^ ( b c ) )", ["a", "b", "c"]),
("a? ( ^^ ( !b !d? ( c ) ) )", ["a", "b", "c", "d"]),
)
test_cases_xfail = (
("^^ ( || ( a b ) ^^ ( b c )"),
("^^( || ( a b ) ^^ ( b c ) )"),
("^^ || ( a b ) ^^ ( b c )"),
("^^ ( ( || ) ( a b ) ^^ ( b c ) )"),
("^^ ( || ( a b ) ) ^^ ( b c ) )"),
)
for required_use, expected in test_cases:
result = get_required_use_flags(required_use)
expected = set(expected)
self.assertEqual(result, expected, \
"REQUIRED_USE: '%s', expected: '%s', got: '%s'" % (required_use, expected, result))
for required_use in test_cases_xfail:
self.assertRaisesMsg("REQUIRED_USE: '%s'" % (required_use,), \
InvalidDependString, get_required_use_flags, required_use)
| gpl-2.0 |
jtakayama/makahiki-draft | makahiki/apps/widgets/bonus_points/admin.py | 7 | 3615 | """Admin definition for Bonus Points widget."""
from django.shortcuts import render_to_response
from django.template import RequestContext
from apps.admin.admin import challenge_designer_site, challenge_manager_site, developer_site
'''
Created on Aug 5, 2012
@author: Cam Moore
'''
from django.contrib import admin
from django import forms
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from apps.widgets.bonus_points.models import BonusPoint
from apps.managers.challenge_mgr import challenge_mgr
class BonusPointAdminForm(forms.ModelForm):
"""Bonus Points Admin Form."""
num_codes = forms.IntegerField(initial=0,
label="Number of bonus point codes to generate.",
help_text="The number of new bonus point codes to generate.")
point_value = forms.IntegerField(initial=5,
label="Number of bonus points to award.",
help_text="The number of bonus points the player earns.")
class Meta:
"""Meta"""
model = BonusPoint
def save(self, *args, **kwargs):
"""Generates the number of bonus point codes."""
_ = args
_ = kwargs
num = self.cleaned_data.get("num_codes")
p = self.cleaned_data.get("point_value")
# Generate
if num > 0:
BonusPoint.generate_bonus_points(p, num)
class BonusPointAdmin(admin.ModelAdmin):
"""admin for Bonus Points."""
actions = ["delete_selected", "deactivate_selected", "view_selected",
"print_selected"]
list_display = ["pk", "code", "point_value", "create_date", "is_active",
"printed_or_distributed", "user"]
ordering = ["-create_date", "is_active"]
list_filter = ["point_value", "is_active", "printed_or_distributed"]
date_hierarchy = "create_date"
form = BonusPointAdminForm
def delete_selected(self, request, queryset):
"""override the delete selected method."""
_ = request
for obj in queryset:
obj.delete()
delete_selected.short_description = "Delete the selected Bonus Points."
def deactivate_selected(self, request, queryset):
"""Changes the is_active flag to false for the selected Bonus Points."""
_ = request
queryset.update(is_active=False)
deactivate_selected.short_description = "Deactivate the selected Bonus Points."
def print_selected(self, request, queryset):
"""Changes the printed_or_distributed flag to True for the selected
Bonus Points."""
_ = request
queryset.update(printed_or_distributed=True)
print_selected.short_description = "Set the printed or distributed flag."
def view_selected(self, request, queryset):
"""Views the Bonus Points Codes for printing."""
_ = request
_ = queryset
return render_to_response("view_bonus_points.html", {
"codes": queryset,
"per_page": 10,
}, context_instance=RequestContext(request))
view_selected.short_description = "View the selected Bonus Points."
def view_codes(self, request, queryset):
"""Views the Bonus Points Codes for printing."""
_ = request
_ = queryset
response = HttpResponseRedirect(reverse("bonus_view_codes", args=()))
return response
admin.site.register(BonusPoint, BonusPointAdmin)
challenge_designer_site.register(BonusPoint, BonusPointAdmin)
challenge_manager_site.register(BonusPoint, BonusPointAdmin)
developer_site.register(BonusPoint, BonusPointAdmin)
challenge_mgr.register_designer_game_info_model("Smart Grid Game", BonusPoint)
challenge_mgr.register_admin_game_info_model("Smart Grid Game", BonusPoint)
challenge_mgr.register_developer_game_info_model("Smart Grid Game", BonusPoint)
| mit |
matthijsvk/multimodalSR | code/Experiments/neon-master/neon/backends/cuda_batchnorm.py | 3 | 14722 | from pycuda.tools import context_dependent_memoize
# from neon.backends.cuda_templates import (_ew_template,
# _stage_template,
# _fin_template,
# _init_rand_func,
# _init_rand_round_func,
# _finish_rand_func,
# _common_urand_gen,
# _common_frand,
# _common_round,
# _common_fp16_to_fp32,
# _ew_types,
# _ew_strings,
# _is_finite,
# _float_ops,
# _reduction_ops)
from neon.backends.cuda_templates import (_common_round,
_common_kepler,
_ew_types,
_common_fp16_to_fp32,
_ew_strings)
from neon.backends.kernels.cuda.binary import shift_element
from neon.backends.util.source_module import SourceModule
@context_dependent_memoize
def _get_bn_fprop_kernel(dtype, threads, compute_capability):
if threads > 32:
shr_code = "__shared__ float sPartials[THREADS];"
red_code = r"""
sPartials[tid] = xvar;
__syncthreads();
#pragma unroll
for (int a = THREADS >> 1; a > 32; a >>= 1)
{
if ( tid < a )
sPartials[tid] += sPartials[tid + a];
__syncthreads();
}
if ( tid < 32 )
{
xvar = sPartials[tid] + sPartials[tid + 32];
#pragma unroll
for (int i = 16; i > 0; i >>= 1)
xvar += __shfl_xor(xvar, i);
sPartials[tid] = xvar * rcpN;
}
__syncthreads();
xvar = sPartials[0];
"""
else:
shr_code = ""
red_code = r"""
#pragma unroll
for (int i = 16; i > 0; i >>= 1)
xvar += __shfl_xor(xvar, i);
xvar *= rcpN;
"""
code = r"""
#define THREADS %(threads)s
%(common)s
%(binary)s
__global__ void batchnorm_fprop (
%(type)s* y_out, float* xvar_out, float* gmean_out, float* gvar_out,
const %(type)s* x_in, const float* xsum_in, const float* gmean_in,
const float* gvar_in, const float* gamma_in, const float* beta_in,
const float eps, const float rho, const float accumbeta, const int N,
const int relu, bool binary)
{
%(share)s
const int tid = threadIdx.x;
const int bid = blockIdx.x;
int offset = bid * N;
const %(type)s* x_in0 = x_in + offset + tid;
const float rcpN = 1.0f/(float)N;
float xmean = __ldg(xsum_in + bid) * rcpN;
float xvar = 0.0f;
for (int i = tid; i < N; i += THREADS)
{
float x = %(cvt)s(__ldg(x_in0));
x_in0 += THREADS;
x -= xmean;
if (binary) {
xvar += shift_element(x, x, true);
} else {
xvar += x * x;
}
}
%(red)s
float gamma = __ldg(gamma_in + bid);
float beta = __ldg(beta_in + bid);
if ( tid == 0 )
{
float gmean = __ldg(gmean_in + bid);
float gvar = __ldg(gvar_in + bid);
*(xvar_out + bid) = xvar;
*(gmean_out + bid) = gmean * rho + (1.0f - rho) * xmean;
*(gvar_out + bid) = gvar * rho + (1.0f - rho) * xvar;
}
float xvar_rcp_sqrt = 1.0f / sqrtf(xvar + eps);
int start = N - (THREADS*4 - tid);
offset += start;
x_in += offset;
y_out += offset;
for (int i = start; i >= -THREADS*3; i -= THREADS*4)
{
float x0 = i >= -THREADS*0 ? %(cvt)s(__ldg(x_in + THREADS*0)) : 0.0f;
float x1 = i >= -THREADS*1 ? %(cvt)s(__ldg(x_in + THREADS*1)) : 0.0f;
float x2 = i >= -THREADS*2 ? %(cvt)s(__ldg(x_in + THREADS*2)) : 0.0f;
float x3 = %(cvt)s(__ldg(x_in + THREADS*3));
x_in -= THREADS*4;
float xhat0 = 0.0f;
float xhat1 = 0.0f;
float xhat2 = 0.0f;
float xhat3 = 0.0f;
float y0 = 0.0f;
float y1 = 0.0f;
float y2 = 0.0f;
float y3 = 0.0f;
if (binary) {
xhat0 = shift_element(x0 - xmean, xvar_rcp_sqrt, true);
xhat1 = shift_element(x1 - xmean, xvar_rcp_sqrt, true);
xhat2 = shift_element(x2 - xmean, xvar_rcp_sqrt, true);
xhat3 = shift_element(x3 - xmean, xvar_rcp_sqrt, true);
y0 = shift_element(xhat0, gamma, true) + beta;
y1 = shift_element(xhat1, gamma, true) + beta;
y2 = shift_element(xhat2, gamma, true) + beta;
y3 = shift_element(xhat3, gamma, true) + beta;
} else {
xhat0 = (x0 - xmean) * xvar_rcp_sqrt;
xhat1 = (x1 - xmean) * xvar_rcp_sqrt;
xhat2 = (x2 - xmean) * xvar_rcp_sqrt;
xhat3 = (x3 - xmean) * xvar_rcp_sqrt;
y0 = xhat0 * gamma + beta;
y1 = xhat1 * gamma + beta;
y2 = xhat2 * gamma + beta;
y3 = xhat3 * gamma + beta;
}
if (relu)
{
y0 = fmaxf(y0, 0.0f);
y1 = fmaxf(y1, 0.0f);
y2 = fmaxf(y2, 0.0f);
y3 = fmaxf(y3, 0.0f);
}
%(y0_out)s
%(y1_out)s
%(y2_out)s
%(y3_out)s
if (accumbeta == 0.0)
{
if (i >= -THREADS*0) *(y_out + THREADS*0) = y0_val;
if (i >= -THREADS*1) *(y_out + THREADS*1) = y1_val;
if (i >= -THREADS*2) *(y_out + THREADS*2) = y2_val;
*(y_out + THREADS*3) = y3_val;
}
else
{
if (i >= -THREADS*0) *(y_out + THREADS*0) = y_out[THREADS*0] * accumbeta + y0_val;
if (i >= -THREADS*1) *(y_out + THREADS*1) = y_out[THREADS*1] * accumbeta + y1_val;
if (i >= -THREADS*2) *(y_out + THREADS*2) = y_out[THREADS*2] * accumbeta + y2_val;
*(y_out + THREADS*3) = y_out[THREADS*3] * accumbeta + y3_val;
}
y_out -= THREADS*4;
}
}
"""
out_code = _ew_strings["round"]["nearest"].get(dtype, "float {0} = {1};")
common_code = _common_round["nearest"].get(dtype, "")
if dtype == "f2":
common_code += _common_fp16_to_fp32
if (compute_capability[0] == 3 and compute_capability[1] < 5) or compute_capability[0] < 3:
common_code += _common_kepler
code = code % {
"common" : common_code,
"binary" : shift_element(),
"share" : shr_code,
"red" : red_code,
"threads" : threads,
"type" : _ew_types[dtype]["type"],
"cvt" : _ew_types[dtype]["cvt"],
"y0_out" : out_code.format("y0_val", "y0"),
"y1_out" : out_code.format("y1_val", "y1"),
"y2_out" : out_code.format("y2_val", "y2"),
"y3_out" : out_code.format("y3_val", "y3"),
}
module = SourceModule(code, options=["--use_fast_math"])
kernel = module.get_function("batchnorm_fprop")
kernel.prepare("PPPPPPPPPPfffIII")
kernel.name = "batchnorm_fprop"
return kernel
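# A minimal launch sketch (assumed names; y, xvar, gmean_out, gvar_out, x, xsum,
# gmean, gvar, gamma, beta are pre-allocated pycuda gpuarrays, stream is a CUDA
# stream, K is the number of feature rows - one block per row - and N the
# minibatch size):
# kernel = _get_bn_fprop_kernel('f4', 128, (5, 2))
# kernel.prepared_async_call((K, 1, 1), (128, 1, 1), stream,
#                            y.gpudata, xvar.gpudata, gmean_out.gpudata,
#                            gvar_out.gpudata, x.gpudata, xsum.gpudata,
#                            gmean.gpudata, gvar.gpudata, gamma.gpudata,
#                            beta.gpudata, eps, rho, accumbeta, N, relu, binary)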
@context_dependent_memoize
def _get_bn_bprop_kernel(dtype, threads, compute_capability):
if threads > 32:
shr_code = "__shared__ float sPartials[THREADS * 2];"
red_code = r"""
sPartials[tid + THREADS*0] = grad_gamma;
sPartials[tid + THREADS*1] = grad_beta;
__syncthreads();
#pragma unroll
for (int a = THREADS >> 1; a > 32; a >>= 1)
{
if ( tid < a )
{
sPartials[tid + THREADS*0] += sPartials[tid + a + THREADS*0];
sPartials[tid + THREADS*1] += sPartials[tid + a + THREADS*1];
}
__syncthreads();
}
if ( tid < 32 )
{
grad_gamma = sPartials[tid + THREADS*0] + sPartials[tid + 32 + THREADS*0];
grad_beta = sPartials[tid + THREADS*1] + sPartials[tid + 32 + THREADS*1];
#pragma unroll
for (int i = 16; i > 0; i >>= 1)
{
grad_gamma += __shfl_xor(grad_gamma, i);
grad_beta += __shfl_xor(grad_beta, i);
}
sPartials[tid + THREADS*0] = grad_gamma;
sPartials[tid + THREADS*1] = grad_beta;
}
__syncthreads();
grad_gamma = sPartials[THREADS*0];
grad_beta = sPartials[THREADS*1];
"""
else:
shr_code = ""
red_code = r"""
#pragma unroll
for (int i = 16; i > 0; i >>= 1)
{
grad_gamma += __shfl_xor(grad_gamma, i);
grad_beta += __shfl_xor(grad_beta, i);
}
"""
code = r"""
#define THREADS %(threads)s
%(common)s
%(binary)s
__global__ void batchnorm_bprop (
%(type)s* delta_out, float* grad_gamma_out, float* grad_beta_out,
const %(type)s* delta_in, const %(type)s* x_in, const float* xsum_in,
const float* xvar_in, const float* gamma_in,
const float eps, const int N, bool binary)
{
%(share)s
const int tid = threadIdx.x;
const int bid = blockIdx.x;
const float rcpN = 1.0f/(float)N;
int offset = bid * N;
const %(type)s* x_in0 = x_in + offset + tid;
const %(type)s* d_in0 = delta_in + offset + tid;
float xmean = __ldg(xsum_in + bid) * rcpN;
float xvar = __ldg(xvar_in + bid);
float gamma = __ldg(gamma_in + bid);
float xvar_rcp_sqrt = 1.0f / sqrtf(xvar + eps);
float grad_gamma = 0.0f;
float grad_beta = 0.0f;
for (int i = tid; i < N; i += THREADS)
{
float x = %(cvt)s(__ldg(x_in0));
x_in0 += THREADS;
float d = %(cvt)s(__ldg(d_in0));
d_in0 += THREADS;
float xhat = 0.0f;
if (binary) {
xhat = shift_element(x - xmean, xvar_rcp_sqrt, true);
} else {
xhat = (x - xmean) * xvar_rcp_sqrt;
}
grad_gamma += xhat * d;
grad_beta += d;
}
%(red)s
if ( tid == 0 )
{
*(grad_gamma_out + bid) = grad_gamma;
*(grad_beta_out + bid) = grad_beta;
}
int start = N - (THREADS*4 - tid);
offset += start;
const %(type)s* x_in1 = x_in + offset;
const %(type)s* d_in1 = delta_in + offset;
delta_out += offset;
for (int i = start; i >= -THREADS*3; i -= THREADS*4)
{
float x0 = i >= -THREADS*0 ? %(cvt)s(__ldg(x_in1 + THREADS*0)) : 0.0f;
float x1 = i >= -THREADS*1 ? %(cvt)s(__ldg(x_in1 + THREADS*1)) : 0.0f;
float x2 = i >= -THREADS*2 ? %(cvt)s(__ldg(x_in1 + THREADS*2)) : 0.0f;
float x3 = %(cvt)s(__ldg(x_in1 + THREADS*3));
float d0 = i >= -THREADS*0 ? %(cvt)s(__ldg(d_in1 + THREADS*0)) : 0.0f;
float d1 = i >= -THREADS*1 ? %(cvt)s(__ldg(d_in1 + THREADS*1)) : 0.0f;
float d2 = i >= -THREADS*2 ? %(cvt)s(__ldg(d_in1 + THREADS*2)) : 0.0f;
float d3 = %(cvt)s(__ldg(d_in1 + THREADS*3));
x_in1 -= THREADS*4;
d_in1 -= THREADS*4;
float xhat0 = 0.0f;
float xhat1 = 0.0f;
float xhat2 = 0.0f;
float xhat3 = 0.0f;
float xtmp0 = 0.0f;
float xtmp1 = 0.0f;
float xtmp2 = 0.0f;
float xtmp3 = 0.0f;
float delta0 = 0.0f;
float delta1 = 0.0f;
float delta2 = 0.0f;
float delta3 = 0.0f;
if (binary) {
xhat0 = shift_element(x0 - xmean, xvar_rcp_sqrt, true);
xhat1 = shift_element(x1 - xmean, xvar_rcp_sqrt, true);
xhat2 = shift_element(x2 - xmean, xvar_rcp_sqrt, true);
xhat3 = shift_element(x3 - xmean, xvar_rcp_sqrt, true);
xtmp0 = (shift_element(xhat0, grad_gamma, true) + grad_beta) * rcpN;
xtmp1 = (shift_element(xhat1, grad_gamma, true) + grad_beta) * rcpN;
xtmp2 = (shift_element(xhat2, grad_gamma, true) + grad_beta) * rcpN;
xtmp3 = (shift_element(xhat3, grad_gamma, true) + grad_beta) * rcpN;
delta0 = shift_element(shift_element(d0 - xtmp0, gamma, true), xvar_rcp_sqrt, true);
delta1 = shift_element(shift_element(d1 - xtmp1, gamma, true), xvar_rcp_sqrt, true);
delta2 = shift_element(shift_element(d2 - xtmp2, gamma, true), xvar_rcp_sqrt, true);
delta3 = shift_element(shift_element(d3 - xtmp3, gamma, true), xvar_rcp_sqrt, true);
} else {
xhat0 = (x0 - xmean) * xvar_rcp_sqrt;
xhat1 = (x1 - xmean) * xvar_rcp_sqrt;
xhat2 = (x2 - xmean) * xvar_rcp_sqrt;
xhat3 = (x3 - xmean) * xvar_rcp_sqrt;
xtmp0 = (xhat0 * grad_gamma + grad_beta) * rcpN;
xtmp1 = (xhat1 * grad_gamma + grad_beta) * rcpN;
xtmp2 = (xhat2 * grad_gamma + grad_beta) * rcpN;
xtmp3 = (xhat3 * grad_gamma + grad_beta) * rcpN;
delta0 = gamma * (d0 - xtmp0) * xvar_rcp_sqrt;
delta1 = gamma * (d1 - xtmp1) * xvar_rcp_sqrt;
delta2 = gamma * (d2 - xtmp2) * xvar_rcp_sqrt;
delta3 = gamma * (d3 - xtmp3) * xvar_rcp_sqrt;
}
%(delta0_out)s
%(delta1_out)s
%(delta2_out)s
%(delta3_out)s
if (i >= -THREADS*0) *(delta_out + THREADS*0) = delta0_val;
if (i >= -THREADS*1) *(delta_out + THREADS*1) = delta1_val;
if (i >= -THREADS*2) *(delta_out + THREADS*2) = delta2_val;
*(delta_out + THREADS*3) = delta3_val;
delta_out -= THREADS*4;
}
}
"""
out_code = _ew_strings["round"]["nearest"].get(dtype, "float {0} = {1};")
common_code = _common_round["nearest"].get(dtype, "")
if dtype == "f2":
common_code += _common_fp16_to_fp32
if (compute_capability[0] == 3 and compute_capability[1] < 5) or compute_capability[0] < 3:
common_code += _common_kepler
code = code % {
"common" : common_code,
"binary" : shift_element(),
"share" : shr_code,
"red" : red_code,
"threads" : threads,
"type" : _ew_types[dtype]["type"],
"cvt" : _ew_types[dtype]["cvt"],
"delta0_out" : out_code.format("delta0_val", "delta0"),
"delta1_out" : out_code.format("delta1_val", "delta1"),
"delta2_out" : out_code.format("delta2_val", "delta2"),
"delta3_out" : out_code.format("delta3_val", "delta3"),
}
module = SourceModule(code, options=["--use_fast_math"])
kernel = module.get_function("batchnorm_bprop")
kernel.prepare("PPPPPPPPfII")
kernel.name = "batchnorm_bprop"
return kernel
| mit |
Dklotz-Circle/security_monkey | security_monkey/views/user_settings.py | 7 | 6398 | # Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from security_monkey.views import AuthenticatedService
from security_monkey.views import __check_auth__
from security_monkey.views import USER_SETTINGS_FIELDS
from security_monkey.datastore import Account
from security_monkey.datastore import User
from security_monkey import db
from security_monkey import api
from flask.ext.restful import marshal, reqparse
from flask.ext.login import current_user
class UserSettings(AuthenticatedService):
def __init__(self):
super(UserSettings, self).__init__()
def get(self):
"""
.. http:get:: /api/1/settings
Get the settings for the given user.
**Example Request**:
.. sourcecode:: http
GET /api/1/settings HTTP/1.1
Host: example.com
Accept: application/json
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
"auth": {
"authenticated": true,
"user": "[email protected]"
},
"settings": [
{
"accounts": [
1,
2,
3,
6,
17,
21,
22
],
"change_reports": "ISSUES",
"daily_audit_email": true
}
]
}
:statuscode 200: no error
:statuscode 401: Authentication Error. Please Authenticate.
"""
auth, retval = __check_auth__(self.auth_dict)
if auth:
return retval
return_dict = {"auth": self.auth_dict}
if not current_user.is_authenticated():
return_val = return_dict, 401
return return_val
return_dict["settings"] = []
user = User.query.filter(User.id == current_user.get_id()).first()
if user:
sub_marshaled = marshal(user.__dict__, USER_SETTINGS_FIELDS)
account_ids = []
for account in user.accounts:
account_ids.append(account.id)
sub_marshaled = dict(sub_marshaled.items() +
{"accounts": account_ids}.items()
)
return_dict["settings"].append(sub_marshaled)
return return_dict, 200
def post(self):
"""
.. http:post:: /api/1/settings
Change the settings for the current user.
**Example Request**:
.. sourcecode:: http
POST /api/1/settings HTTP/1.1
Host: example.com
Accept: application/json
{
"accounts": [
1,
2,
3,
6,
17,
21,
22
],
"daily_audit_email": true,
"change_report_setting": "ALL"
}
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
"auth": {
"authenticated": true,
"user": "[email protected]"
},
"settings": {
"accounts": [
1,
2,
3,
6,
17,
21,
22
],
"daily_audit_email": true,
"change_report_setting": "ALL"
}
}
:statuscode 200: no error
:statuscode 401: Authentication Error. Please Login.
"""
auth, retval = __check_auth__(self.auth_dict)
if auth:
return retval
self.reqparse.add_argument('accounts', required=True, type=list, help='Must provide accounts', location='json')
self.reqparse.add_argument('change_report_setting', required=True, type=str, help='Must provide change_report_setting', location='json')
self.reqparse.add_argument('daily_audit_email', required=True, type=bool, help='Must provide daily_audit_email', location='json')
args = self.reqparse.parse_args()
current_user.daily_audit_email = args['daily_audit_email']
current_user.change_reports = args['change_report_setting']
account_list = []
for account_id in args['accounts']:
account = Account.query.filter(Account.id == account_id).first()
if account:
account_list.append(account)
#current_user.accounts.append(account)
current_user.accounts = account_list
db.session.add(current_user)
db.session.commit()
retdict = {'auth': self.auth_dict}
account_ids = []
for account in current_user.accounts:
account_ids.append(account.id)
retdict['settings'] = {
"accounts": account_ids,
"change_report_setting": current_user.change_reports,
"daily_audit_email": current_user.daily_audit_email
}
return retdict, 200
| apache-2.0 |
NoahFlowa/glowing-spoon | venv/lib/python2.7/site-packages/psycopg2/tests/test_transaction.py | 7 | 9235 | #!/usr/bin/env python
# test_transaction - unit test on transaction behaviour
#
# Copyright (C) 2007-2011 Federico Di Gregorio <[email protected]>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import threading
from testutils import unittest, ConnectingTestCase, skip_before_postgres, slow
import psycopg2
from psycopg2.extensions import (
ISOLATION_LEVEL_SERIALIZABLE, STATUS_BEGIN, STATUS_READY)
class TransactionTests(ConnectingTestCase):
def setUp(self):
ConnectingTestCase.setUp(self)
self.conn.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE)
curs = self.conn.cursor()
curs.execute('''
CREATE TEMPORARY TABLE table1 (
id int PRIMARY KEY
)''')
# The constraint is set to deferrable for the commit_failed test
curs.execute('''
CREATE TEMPORARY TABLE table2 (
id int PRIMARY KEY,
table1_id int,
CONSTRAINT table2__table1_id__fk
FOREIGN KEY (table1_id) REFERENCES table1(id) DEFERRABLE)''')
curs.execute('INSERT INTO table1 VALUES (1)')
curs.execute('INSERT INTO table2 VALUES (1, 1)')
self.conn.commit()
def test_rollback(self):
# Test that rollback undoes changes
curs = self.conn.cursor()
curs.execute('INSERT INTO table2 VALUES (2, 1)')
# Rollback takes us from BEGIN state to READY state
self.assertEqual(self.conn.status, STATUS_BEGIN)
self.conn.rollback()
self.assertEqual(self.conn.status, STATUS_READY)
curs.execute('SELECT id, table1_id FROM table2 WHERE id = 2')
self.assertEqual(curs.fetchall(), [])
def test_commit(self):
# Test that commit stores changes
curs = self.conn.cursor()
curs.execute('INSERT INTO table2 VALUES (2, 1)')
# Rollback takes us from BEGIN state to READY state
self.assertEqual(self.conn.status, STATUS_BEGIN)
self.conn.commit()
self.assertEqual(self.conn.status, STATUS_READY)
# Now rollback and show that the new record is still there:
self.conn.rollback()
curs.execute('SELECT id, table1_id FROM table2 WHERE id = 2')
self.assertEqual(curs.fetchall(), [(2, 1)])
def test_failed_commit(self):
# Test that we can recover from a failed commit.
# We use a deferred constraint to cause a failure on commit.
curs = self.conn.cursor()
curs.execute('SET CONSTRAINTS table2__table1_id__fk DEFERRED')
curs.execute('INSERT INTO table2 VALUES (2, 42)')
# The commit should fail, and move the cursor back to READY state
self.assertEqual(self.conn.status, STATUS_BEGIN)
self.assertRaises(psycopg2.IntegrityError, self.conn.commit)
self.assertEqual(self.conn.status, STATUS_READY)
# The connection should be ready to use for the next transaction:
curs.execute('SELECT 1')
self.assertEqual(curs.fetchone()[0], 1)
class DeadlockSerializationTests(ConnectingTestCase):
"""Test deadlock and serialization failure errors."""
def connect(self):
conn = ConnectingTestCase.connect(self)
conn.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE)
return conn
def setUp(self):
ConnectingTestCase.setUp(self)
curs = self.conn.cursor()
# Drop table if it already exists
try:
curs.execute("DROP TABLE table1")
self.conn.commit()
except psycopg2.DatabaseError:
self.conn.rollback()
try:
curs.execute("DROP TABLE table2")
self.conn.commit()
except psycopg2.DatabaseError:
self.conn.rollback()
# Create sample data
curs.execute("""
CREATE TABLE table1 (
id int PRIMARY KEY,
name text)
""")
curs.execute("INSERT INTO table1 VALUES (1, 'hello')")
curs.execute("CREATE TABLE table2 (id int PRIMARY KEY)")
self.conn.commit()
def tearDown(self):
curs = self.conn.cursor()
curs.execute("DROP TABLE table1")
curs.execute("DROP TABLE table2")
self.conn.commit()
ConnectingTestCase.tearDown(self)
@slow
def test_deadlock(self):
self.thread1_error = self.thread2_error = None
step1 = threading.Event()
step2 = threading.Event()
def task1():
try:
conn = self.connect()
curs = conn.cursor()
curs.execute("LOCK table1 IN ACCESS EXCLUSIVE MODE")
step1.set()
step2.wait()
curs.execute("LOCK table2 IN ACCESS EXCLUSIVE MODE")
except psycopg2.DatabaseError as exc:
self.thread1_error = exc
step1.set()
conn.close()
def task2():
try:
conn = self.connect()
curs = conn.cursor()
step1.wait()
curs.execute("LOCK table2 IN ACCESS EXCLUSIVE MODE")
step2.set()
curs.execute("LOCK table1 IN ACCESS EXCLUSIVE MODE")
except psycopg2.DatabaseError as exc:
self.thread2_error = exc
step2.set()
conn.close()
# Run the threads in parallel. The "step1" and "step2" events
# ensure that the two transactions overlap.
thread1 = threading.Thread(target=task1)
thread2 = threading.Thread(target=task2)
thread1.start()
thread2.start()
thread1.join()
thread2.join()
# Exactly one of the threads should have failed with
# TransactionRollbackError:
self.assertFalse(self.thread1_error and self.thread2_error)
error = self.thread1_error or self.thread2_error
self.assertTrue(isinstance(
error, psycopg2.extensions.TransactionRollbackError))
@slow
def test_serialisation_failure(self):
self.thread1_error = self.thread2_error = None
step1 = threading.Event()
step2 = threading.Event()
def task1():
try:
conn = self.connect()
curs = conn.cursor()
curs.execute("SELECT name FROM table1 WHERE id = 1")
curs.fetchall()
step1.set()
step2.wait()
curs.execute("UPDATE table1 SET name='task1' WHERE id = 1")
conn.commit()
except psycopg2.DatabaseError as exc:
self.thread1_error = exc
step1.set()
conn.close()
def task2():
try:
conn = self.connect()
curs = conn.cursor()
step1.wait()
curs.execute("UPDATE table1 SET name='task2' WHERE id = 1")
conn.commit()
except psycopg2.DatabaseError as exc:
self.thread2_error = exc
step2.set()
conn.close()
# Run the threads in parallel. The "step1" and "step2" events
# ensure that the two transactions overlap.
thread1 = threading.Thread(target=task1)
thread2 = threading.Thread(target=task2)
thread1.start()
thread2.start()
thread1.join()
thread2.join()
# Exactly one of the threads should have failed with
# TransactionRollbackError:
self.assertFalse(self.thread1_error and self.thread2_error)
error = self.thread1_error or self.thread2_error
self.assertTrue(isinstance(
error, psycopg2.extensions.TransactionRollbackError))
class QueryCancellationTests(ConnectingTestCase):
"""Tests for query cancellation."""
def setUp(self):
ConnectingTestCase.setUp(self)
self.conn.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE)
@skip_before_postgres(8, 2)
def test_statement_timeout(self):
curs = self.conn.cursor()
# Set a low statement timeout, then sleep for a longer period.
curs.execute('SET statement_timeout TO 10')
self.assertRaises(psycopg2.extensions.QueryCanceledError,
curs.execute, 'SELECT pg_sleep(50)')
def test_suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == "__main__":
unittest.main()
| apache-2.0 |
alhashash/odoo | addons/mrp/__init__.py | 8 | 1098 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import mrp
import stock
import product
import wizard
import report
import company
import procurement
import res_config
| agpl-3.0 |
digimarc/django | tests/m2m_through_regress/tests.py | 25 | 9146 | from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core import management
from django.test import TestCase
from django.utils.six import StringIO
from .models import (
Car, CarDriver, Driver, Group, Membership, Person, UserMembership,
)
class M2MThroughTestCase(TestCase):
def test_everything(self):
bob = Person.objects.create(name="Bob")
jim = Person.objects.create(name="Jim")
rock = Group.objects.create(name="Rock")
roll = Group.objects.create(name="Roll")
frank = User.objects.create_user("frank", "[email protected]", "password")
jane = User.objects.create_user("jane", "[email protected]", "password")
Membership.objects.create(person=bob, group=rock)
Membership.objects.create(person=bob, group=roll)
Membership.objects.create(person=jim, group=rock)
self.assertQuerysetEqual(
bob.group_set.all(), [
"<Group: Rock>",
"<Group: Roll>",
],
ordered=False
)
self.assertQuerysetEqual(
roll.members.all(), [
"<Person: Bob>",
]
)
self.assertRaises(AttributeError, setattr, bob, "group_set", [])
self.assertRaises(AttributeError, setattr, roll, "members", [])
self.assertRaises(AttributeError, rock.members.create, name="Anne")
self.assertRaises(AttributeError, bob.group_set.create, name="Funk")
UserMembership.objects.create(user=frank, group=rock)
UserMembership.objects.create(user=frank, group=roll)
UserMembership.objects.create(user=jane, group=rock)
self.assertQuerysetEqual(
frank.group_set.all(), [
"<Group: Rock>",
"<Group: Roll>",
],
ordered=False
)
self.assertQuerysetEqual(
roll.user_members.all(), [
"<User: frank>",
]
)
def test_serialization(self):
"m2m-through models aren't serialized as m2m fields. Refs #8134"
p = Person.objects.create(name="Bob")
g = Group.objects.create(name="Roll")
m = Membership.objects.create(person=p, group=g)
pks = {"p_pk": p.pk, "g_pk": g.pk, "m_pk": m.pk}
out = StringIO()
management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
self.assertJSONEqual(out.getvalue().strip(), """[{"pk": %(m_pk)s, "model": "m2m_through_regress.membership", "fields": {"person": %(p_pk)s, "price": 100, "group": %(g_pk)s}}, {"pk": %(p_pk)s, "model": "m2m_through_regress.person", "fields": {"name": "Bob"}}, {"pk": %(g_pk)s, "model": "m2m_through_regress.group", "fields": {"name": "Roll"}}]""" % pks)
out = StringIO()
management.call_command("dumpdata", "m2m_through_regress", format="xml",
indent=2, stdout=out)
self.assertXMLEqual(out.getvalue().strip(), """
<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object pk="%(m_pk)s" model="m2m_through_regress.membership">
<field to="m2m_through_regress.person" name="person" rel="ManyToOneRel">%(p_pk)s</field>
<field to="m2m_through_regress.group" name="group" rel="ManyToOneRel">%(g_pk)s</field>
<field type="IntegerField" name="price">100</field>
</object>
<object pk="%(p_pk)s" model="m2m_through_regress.person">
<field type="CharField" name="name">Bob</field>
</object>
<object pk="%(g_pk)s" model="m2m_through_regress.group">
<field type="CharField" name="name">Roll</field>
</object>
</django-objects>
""".strip() % pks)
def test_join_trimming(self):
"Check that we don't involve too many copies of the intermediate table when doing a join. Refs #8046, #8254"
bob = Person.objects.create(name="Bob")
jim = Person.objects.create(name="Jim")
rock = Group.objects.create(name="Rock")
roll = Group.objects.create(name="Roll")
Membership.objects.create(person=bob, group=rock)
Membership.objects.create(person=jim, group=rock, price=50)
Membership.objects.create(person=bob, group=roll, price=50)
self.assertQuerysetEqual(
rock.members.filter(membership__price=50), [
"<Person: Jim>",
]
)
self.assertQuerysetEqual(
bob.group_set.filter(membership__price=50), [
"<Group: Roll>",
]
)
class ToFieldThroughTests(TestCase):
def setUp(self):
self.car = Car.objects.create(make="Toyota")
self.driver = Driver.objects.create(name="Ryan Briscoe")
CarDriver.objects.create(car=self.car, driver=self.driver)
# We are testing if wrong objects get deleted due to using wrong
# field value in m2m queries. So, it is essential that the pk
# numberings do not match.
# Create one intentionally unused driver to mix up the autonumbering
self.unused_driver = Driver.objects.create(name="Barney Gumble")
# And two intentionally unused cars.
self.unused_car1 = Car.objects.create(make="Trabant")
self.unused_car2 = Car.objects.create(make="Wartburg")
def test_to_field(self):
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Ryan Briscoe>"]
)
def test_to_field_reverse(self):
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>"]
)
def test_to_field_clear_reverse(self):
self.driver.car_set.clear()
self.assertQuerysetEqual(
self.driver.car_set.all(), [])
def test_to_field_clear(self):
self.car.drivers.clear()
self.assertQuerysetEqual(
self.car.drivers.all(), [])
# Low level tests for _add_items and _remove_items. We test these methods
# because .add/.remove aren't available for m2m fields with through, but
# through is the only way to set to_field currently. We do want to make
# sure these methods are ready if the ability to use .add or .remove with
# to_field relations is added some day.
def test_add(self):
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Ryan Briscoe>"]
)
# Yikes - barney is going to drive...
self.car.drivers._add_items('car', 'driver', self.unused_driver)
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Barney Gumble>", "<Driver: Ryan Briscoe>"]
)
def test_add_null(self):
nullcar = Car.objects.create(make=None)
with self.assertRaises(ValueError):
nullcar.drivers._add_items('car', 'driver', self.unused_driver)
def test_add_related_null(self):
nulldriver = Driver.objects.create(name=None)
with self.assertRaises(ValueError):
self.car.drivers._add_items('car', 'driver', nulldriver)
def test_add_reverse(self):
car2 = Car.objects.create(make="Honda")
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>"]
)
self.driver.car_set._add_items('driver', 'car', car2)
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>", "<Car: Honda>"],
ordered=False
)
def test_add_null_reverse(self):
nullcar = Car.objects.create(make=None)
with self.assertRaises(ValueError):
self.driver.car_set._add_items('driver', 'car', nullcar)
def test_add_null_reverse_related(self):
nulldriver = Driver.objects.create(name=None)
with self.assertRaises(ValueError):
nulldriver.car_set._add_items('driver', 'car', self.car)
def test_remove(self):
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Ryan Briscoe>"]
)
self.car.drivers._remove_items('car', 'driver', self.driver)
self.assertQuerysetEqual(
self.car.drivers.all(), [])
def test_remove_reverse(self):
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>"]
)
self.driver.car_set._remove_items('driver', 'car', self.car)
self.assertQuerysetEqual(
self.driver.car_set.all(), [])
class ThroughLoadDataTestCase(TestCase):
fixtures = ["m2m_through"]
def test_sequence_creation(self):
"Check that sequences on an m2m_through are created for the through model, not a phantom auto-generated m2m table. Refs #11107"
out = StringIO()
management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
self.assertJSONEqual(out.getvalue().strip(), """[{"pk": 1, "model": "m2m_through_regress.usermembership", "fields": {"price": 100, "group": 1, "user": 1}}, {"pk": 1, "model": "m2m_through_regress.person", "fields": {"name": "Guido"}}, {"pk": 1, "model": "m2m_through_regress.group", "fields": {"name": "Python Core Group"}}]""")
| bsd-3-clause |
spyridonf/ardupilot | Tools/scripts/frame_sizes.py | 351 | 1117 | #!/usr/bin/env python
import re, sys, operator, os
code_line = re.compile(r"^\s*\d+:/")
frame_line = re.compile(r"^\s*\d+\s+/\* frame size = (\d+) \*/")
class frame(object):
def __init__(self, code, frame_size):
self.code = code
self.frame_size = int(frame_size)
frames = []
def process_lst(filename):
'''process one lst file'''
last_code = ''
h = open(filename, mode='r')
for line in h:
if code_line.match(line):
last_code = line.strip()
elif frame_line.match(line):
frames.append(frame(last_code, frame_line.match(line).group(1)))
h.close()
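# The two regexes above match compiler listing (.lst) lines of roughly this
# shape (illustrative):
#   410:/APMrover2/APMrover2.pde ****   void loop(void)
#   415                /* frame size = 24 */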
if len(sys.argv) > 1:
dname = sys.argv[1]
else:
dname = '.'
for root, dirs, files in os.walk(dname):
for f in files:
if f.endswith(".lst"):
process_lst(os.path.join(root, f))
sorted_frames = sorted(frames,
key=operator.attrgetter('frame_size'),
reverse=True)
print("FrameSize Code")
for frame in sorted_frames:
if frame.frame_size > 0:
print("%9u %s" % (frame.frame_size, frame.code))
| gpl-3.0 |
cstipkovic/spidermonkey-research | testing/web-platform/tests/tools/pywebsocket/src/example/abort_handshake_wsh.py | 465 | 1781 | # Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from mod_pywebsocket import handshake
def web_socket_do_extra_handshake(request):
raise handshake.AbortedByUserException(
"Aborted in web_socket_do_extra_handshake")
def web_socket_transfer_data(request):
pass
# vi:sts=4 sw=4 et
| mpl-2.0 |
christophlsa/odoo | addons/website_mail_group/controllers/main.py | 306 | 7883 | # -*- coding: utf-8 -*-
import datetime
from dateutil import relativedelta
from openerp import tools, SUPERUSER_ID
from openerp.addons.web import http
from openerp.addons.website.models.website import slug
from openerp.addons.web.http import request
class MailGroup(http.Controller):
_thread_per_page = 20
_replies_per_page = 10
def _get_archives(self, group_id):
MailMessage = request.registry['mail.message']
groups = MailMessage.read_group(
request.cr, request.uid, [('model', '=', 'mail.group'), ('res_id', '=', group_id)], ['subject', 'date'],
groupby="date", orderby="date desc", context=request.context)
for group in groups:
begin_date = datetime.datetime.strptime(group['__domain'][0][2], tools.DEFAULT_SERVER_DATETIME_FORMAT).date()
end_date = datetime.datetime.strptime(group['__domain'][1][2], tools.DEFAULT_SERVER_DATETIME_FORMAT).date()
group['date_begin'] = '%s' % datetime.date.strftime(begin_date, tools.DEFAULT_SERVER_DATE_FORMAT)
group['date_end'] = '%s' % datetime.date.strftime(end_date, tools.DEFAULT_SERVER_DATE_FORMAT)
return groups
@http.route("/groups", type='http', auth="public", website=True)
def view(self, **post):
cr, uid, context = request.cr, request.uid, request.context
group_obj = request.registry.get('mail.group')
mail_message_obj = request.registry.get('mail.message')
group_ids = group_obj.search(cr, uid, [('alias_id', '!=', False), ('alias_id.alias_name', '!=', False)], context=context)
groups = group_obj.browse(cr, uid, group_ids, context)
# compute statistics
month_date = datetime.datetime.today() - relativedelta.relativedelta(months=1)
group_data = dict()
for group in groups:
group_data[group.id] = {
'monthly_message_nbr': mail_message_obj.search(
cr, SUPERUSER_ID,
[('model', '=', 'mail.group'), ('res_id', '=', group.id), ('date', '>=', month_date.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT))],
count=True, context=context)}
values = {'groups': groups, 'group_data': group_data}
return request.website.render('website_mail_group.mail_groups', values)
@http.route(["/groups/subscription/"], type='json', auth="user")
def subscription(self, group_id=0, action=False, **post):
""" TDE FIXME: seems dead code """
cr, uid, context = request.cr, request.uid, request.context
group_obj = request.registry.get('mail.group')
if action:
group_obj.message_subscribe_users(cr, uid, [group_id], context=context)
else:
group_obj.message_unsubscribe_users(cr, uid, [group_id], context=context)
return []
@http.route([
"/groups/<model('mail.group'):group>",
"/groups/<model('mail.group'):group>/page/<int:page>"
], type='http', auth="public", website=True)
def thread_headers(self, group, page=1, mode='thread', date_begin=None, date_end=None, **post):
cr, uid, context = request.cr, request.uid, request.context
thread_obj = request.registry.get('mail.message')
domain = [('model', '=', 'mail.group'), ('res_id', '=', group.id)]
if mode == 'thread':
domain += [('parent_id', '=', False)]
if date_begin and date_end:
domain += [('date', '>=', date_begin), ('date', '<=', date_end)]
thread_count = thread_obj.search_count(cr, uid, domain, context=context)
pager = request.website.pager(
url='/groups/%s' % slug(group),
total=thread_count,
page=page,
step=self._thread_per_page,
url_args={'mode': mode, 'date_begin': date_begin or '', 'date_end': date_end or ''},
)
thread_ids = thread_obj.search(cr, uid, domain, limit=self._thread_per_page, offset=pager['offset'])
messages = thread_obj.browse(cr, uid, thread_ids, context)
values = {
'messages': messages,
'group': group,
'pager': pager,
'mode': mode,
'archives': self._get_archives(group.id),
'date_begin': date_begin,
'date_end': date_end,
'replies_per_page': self._replies_per_page,
}
return request.website.render('website_mail_group.group_messages', values)
@http.route([
'''/groups/<model('mail.group'):group>/<model('mail.message', "[('model','=','mail.group'), ('res_id','=',group[0])]"):message>''',
], type='http', auth="public", website=True)
def thread_discussion(self, group, message, mode='thread', date_begin=None, date_end=None, **post):
cr, uid, context = request.cr, request.uid, request.context
Message = request.registry['mail.message']
if mode == 'thread':
base_domain = [('model', '=', 'mail.group'), ('res_id', '=', group.id), ('parent_id', '=', message.parent_id and message.parent_id.id or False)]
else:
base_domain = [('model', '=', 'mail.group'), ('res_id', '=', group.id)]
next_message = None
next_message_ids = Message.search(cr, uid, base_domain + [('date', '<', message.date)], order="date DESC", limit=1, context=context)
if next_message_ids:
next_message = Message.browse(cr, uid, next_message_ids[0], context=context)
prev_message = None
prev_message_ids = Message.search(cr, uid, base_domain + [('date', '>', message.date)], order="date ASC", limit=1, context=context)
if prev_message_ids:
prev_message = Message.browse(cr, uid, prev_message_ids[0], context=context)
values = {
'message': message,
'group': group,
'mode': mode,
'archives': self._get_archives(group.id),
'date_begin': date_begin,
'date_end': date_end,
'replies_per_page': self._replies_per_page,
'next_message': next_message,
'prev_message': prev_message,
}
return request.website.render('website_mail_group.group_message', values)
@http.route(
'''/groups/<model('mail.group'):group>/<model('mail.message', "[('model','=','mail.group'), ('res_id','=',group[0])]"):message>/get_replies''',
type='json', auth="public", methods=['POST'], website=True)
def render_messages(self, group, message, **post):
last_displayed_id = post.get('last_displayed_id')
if not last_displayed_id:
return False
Message = request.registry['mail.message']
replies_domain = [('id', '<', int(last_displayed_id)), ('parent_id', '=', message.id)]
msg_ids = Message.search(request.cr, request.uid, replies_domain, limit=self._replies_per_page, context=request.context)
msg_count = Message.search(request.cr, request.uid, replies_domain, count=True, context=request.context)
messages = Message.browse(request.cr, request.uid, msg_ids, context=request.context)
values = {
'group': group,
'thread_header': message,
'messages': messages,
'msg_more_count': msg_count - self._replies_per_page,
'replies_per_page': self._replies_per_page,
}
return request.registry['ir.ui.view'].render(request.cr, request.uid, 'website_mail_group.messages_short', values, engine='ir.qweb', context=request.context)
@http.route("/groups/<model('mail.group'):group>/get_alias_info", type='json', auth='public', website=True)
def get_alias_info(self, group, **post):
return {
'alias_name': group.alias_id and group.alias_id.alias_name and group.alias_id.alias_domain and '%s@%s' % (group.alias_id.alias_name, group.alias_id.alias_domain) or False
}
| agpl-3.0 |
smartdevice475/sdl_core | tools/InterfaceGenerator/test/generator/parsers/test_SDLRPCV2.py | 14 | 17475 | """SDLRPCV2 XML parser unit test."""
import os
import unittest
import generator.Model
import generator.parsers.SDLRPCV2
class TestSDLRPCV2Parser(unittest.TestCase):
"""Test for SDLRPCV2 xml parser."""
class _Issue:
def __init__(self, creator, value):
self.creator = creator
self.value = value
def __eq__(self, other):
return self.creator == other.creator and self.value == other.value
def setUp(self):
"""Test initialization."""
self.valid_xml_name = os.path.dirname(os.path.realpath(__file__)) + \
"/valid_SDLRPCV2.xml"
self.parser = generator.parsers.SDLRPCV2.Parser()
def test_valid_xml(self):
"""Test parsing of valid xml."""
interface = self.parser.parse(self.valid_xml_name)
self.assertEqual(2, len(interface.params))
self.assertDictEqual({"attribute1": "value1", "attribute2": "value2"},
interface.params)
# Enumerations
self.assertEqual(3, len(interface.enums))
# Enumeration "FunctionID"
self.assertIn("FunctionID", interface.enums)
enum = interface.enums["FunctionID"]
self.verify_base_item(item=enum,
name="FunctionID",
description=["Description string 1",
"Description string 2"],
todos=['Function id todo'])
self.assertIsNone(enum.internal_scope)
self.assertEqual(2, len(enum.elements))
self.assertIn("Function1_id", enum.elements)
element = enum.elements["Function1_id"]
self.verify_base_item(
item=element,
name="Function1_id",
design_description=["Function1 element design description"])
self.assertIsNone(element.internal_name)
self.assertEqual(10, element.value)
self.assertIn("Function2_id", enum.elements)
element = enum.elements["Function2_id"]
self.verify_base_item(
item=element,
name="Function2_id")
self.assertEqual("Function2_internal", element.internal_name)
self.assertIsNone(element.value)
# Enumeration "messageType"
self.assertIn("messageType", interface.enums)
enum = interface.enums["messageType"]
self.verify_base_item(
item=enum,
name="messageType",
design_description=["messageType design description",
"messageType design description 2"],
issues=[TestSDLRPCV2Parser._Issue(
creator="messageType issue creator",
value="Issue text")])
self.assertIsNone(enum.internal_scope)
self.assertEqual(3, len(enum.elements))
self.assertIn("request", enum.elements)
element = enum.elements["request"]
self.verify_base_item(item=element,
name="request",
todos=["request todo 1", "request todo 2"],
issues=[TestSDLRPCV2Parser._Issue(
creator="issue creator",
value="request issue")])
self.assertIsNone(element.internal_name)
self.assertEqual(0, element.value)
self.assertIn("response", enum.elements)
element = enum.elements["response"]
self.verify_base_item(item=element, name="response")
self.assertIsNone(element.internal_name)
self.assertEqual(1, element.value)
self.assertIn("notification", enum.elements)
element = enum.elements["notification"]
self.verify_base_item(item=element, name="notification")
self.assertIsNone(element.internal_name)
self.assertEqual(2, element.value)
# Enumeration "enum1"
self.assertIn("enum1", interface.enums)
enum = interface.enums["enum1"]
self.verify_base_item(item=enum, name="enum1",
platform="enum1 platform")
self.assertEqual("scope", enum.internal_scope)
self.assertEqual(3, len(enum.elements))
self.assertIn("element1", enum.elements)
element = enum.elements["element1"]
self.verify_base_item(item=element, name="element1")
self.assertIsNone(element.internal_name)
self.assertEqual(10, element.value)
self.assertIn("element2", enum.elements)
element = enum.elements["element2"]
self.verify_base_item(item=element, name="element2")
self.assertEqual("element2_internal", element.internal_name)
self.assertEqual(11, element.value)
self.assertIn("element3", enum.elements)
element = enum.elements["element3"]
self.verify_base_item(
item=element,
name="element3",
design_description=["Element design description"],
platform="element3 platform")
self.assertIsNone(element.internal_name)
self.assertIsNone(element.value)
# Structures
self.assertEqual(2, len(interface.structs))
# Structure "struct1"
self.assertIn("struct1", interface.structs)
struct = interface.structs["struct1"]
self.verify_base_item(
item=struct,
name="struct1",
description=["Struct description"],
issues=[TestSDLRPCV2Parser._Issue(creator="creator1",
value="Issue1"),
TestSDLRPCV2Parser._Issue(creator="creator2",
value="Issue2")])
self.assertEqual(4, len(struct.members))
self.assertIn("member1", struct.members)
member = struct.members["member1"]
self.verify_base_item(
item=member,
name="member1",
description=["Param1 description"])
self.assertTrue(member.is_mandatory)
self.assertIsInstance(member.param_type, generator.Model.Integer)
self.assertIsNone(member.param_type.min_value)
self.assertIsNone(member.param_type.max_value)
self.assertIn("member2", struct.members)
member = struct.members["member2"]
self.verify_base_item(item=member, name="member2",
platform="member2 platform")
self.assertTrue(member.is_mandatory)
self.assertIsInstance(member.param_type, generator.Model.Boolean)
self.assertIn("member3", struct.members)
member = struct.members["member3"]
self.verify_base_item(item=member, name="member3")
self.assertEqual(False, member.is_mandatory)
self.assertIsInstance(member.param_type, generator.Model.Double)
self.assertIsNone(member.param_type.min_value)
self.assertAlmostEqual(20.5, member.param_type.max_value)
self.assertIn("member4", struct.members)
member = struct.members["member4"]
self.verify_base_item(item=member, name="member4")
self.assertTrue(member.is_mandatory)
self.assertIsInstance(member.param_type, generator.Model.Array)
self.assertIsNone(member.param_type.min_size)
self.assertIsNone(member.param_type.max_size)
self.assertIsInstance(member.param_type.element_type,
generator.Model.Integer)
self.assertEqual(11, member.param_type.element_type.min_value)
self.assertEqual(100, member.param_type.element_type.max_value)
# Structure "struct2"
self.assertIn("struct2", interface.structs)
struct = interface.structs["struct2"]
self.verify_base_item(item=struct,
name="struct2",
description=["Description of struct2"],
platform="struct2 platform")
self.assertEqual(4, len(struct.members))
self.assertIn("m1", struct.members)
member = struct.members["m1"]
self.verify_base_item(item=member, name="m1")
self.assertTrue(member.is_mandatory)
self.assertIsInstance(member.param_type, generator.Model.String)
self.assertIsNone(member.param_type.max_length)
self.assertIn("m2", struct.members)
member = struct.members["m2"]
self.verify_base_item(item=member, name="m2")
self.assertTrue(member.is_mandatory)
self.assertIsInstance(member.param_type, generator.Model.Array)
self.assertEqual(1, member.param_type.min_size)
self.assertEqual(50, member.param_type.max_size)
self.assertIsInstance(member.param_type.element_type,
generator.Model.String)
self.assertEqual(100, member.param_type.element_type.max_length)
self.assertIn("m3", struct.members)
member = struct.members["m3"]
self.verify_base_item(item=member, name="m3")
self.assertTrue(member.is_mandatory)
self.assertIs(member.param_type, interface.enums["enum1"])
self.assertIn("m4", struct.members)
member = struct.members["m4"]
self.verify_base_item(item=member, name="m4")
self.assertTrue(member.is_mandatory)
self.assertIsInstance(member.param_type, generator.Model.Array)
self.assertIsNone(member.param_type.min_size)
self.assertEqual(10, member.param_type.max_size)
self.assertIs(member.param_type.element_type,
interface.structs["struct1"])
# Functions
self.assertEqual(3, len(interface.functions))
# Function request "Function1"
self.assertIn(
(interface.enums["FunctionID"].elements["Function1_id"],
interface.enums["messageType"].elements["request"]),
interface.functions)
function = interface.functions[
(interface.enums["FunctionID"].elements["Function1_id"],
interface.enums["messageType"].elements["request"])]
self.verify_base_item(
item=function,
name="Function1",
description=["Description of request Function1"],
todos=["Function1 request todo"])
self.assertIs(function.function_id,
interface.enums["FunctionID"].elements["Function1_id"])
self.assertIs(function.message_type,
interface.enums["messageType"].elements["request"])
self.assertEqual(3, len(function.params))
self.assertIn("param1", function.params)
param = function.params["param1"]
self.verify_base_item(
item=param,
name="param1",
issues=[TestSDLRPCV2Parser._Issue(creator="", value="")])
self.assertEqual(False, param.is_mandatory)
self.assertIsInstance(param.param_type, generator.Model.String)
self.assertIsNone(param.param_type.max_length)
self.assertEqual("String default value", param.default_value)
self.assertIn("param2", function.params)
param = function.params["param2"]
self.verify_base_item(
item=param,
name="param2",
description=["Param2 description", ""],
todos=["Param2 todo"],
platform="param2 platform")
self.assertTrue(param.is_mandatory)
self.assertIsInstance(param.param_type, generator.Model.Integer)
self.assertIsNone(param.param_type.min_value)
self.assertIsNone(param.param_type.max_value)
self.assertIsNone(param.default_value)
self.assertIn("param3", function.params)
param = function.params["param3"]
self.verify_base_item(item=param, name="param3")
self.assertEqual(False, param.is_mandatory)
self.assertIs(param.param_type, interface.structs["struct1"])
self.assertIsNone(param.default_value)
# Function response "Function1"
self.assertIn(
(interface.enums["FunctionID"].elements["Function1_id"],
interface.enums["messageType"].elements["response"]),
interface.functions)
function = interface.functions[
(interface.enums["FunctionID"].elements["Function1_id"],
interface.enums["messageType"].elements["response"])]
self.verify_base_item(
item=function,
name="Function1",
issues=[TestSDLRPCV2Parser._Issue(creator="c1", value=""),
TestSDLRPCV2Parser._Issue(creator="c2", value="")],
platform="")
self.assertIs(function.function_id,
interface.enums["FunctionID"].elements["Function1_id"])
self.assertIs(function.message_type,
interface.enums["messageType"].elements["response"])
self.assertEqual(3, len(function.params))
self.assertIn("p1", function.params)
param = function.params["p1"]
self.verify_base_item(item=param, name="p1")
self.assertTrue(param.is_mandatory)
self.assertIs(param.param_type, interface.enums["enum1"])
self.assertIsNone(param.default_value)
self.assertIn("p2", function.params)
param = function.params["p2"]
self.verify_base_item(item=param, name="p2")
self.assertTrue(param.is_mandatory)
self.assertIs(param.param_type, interface.enums["enum1"])
self.assertIs(param.default_value,
interface.enums["enum1"].elements["element2"])
self.assertIn("p3", function.params)
param = function.params["p3"]
self.verify_base_item(item=param, name="p3", design_description=[""])
self.assertTrue(param.is_mandatory)
self.assertIsInstance(param.param_type, generator.Model.Boolean)
self.assertEqual(False, param.default_value)
# Function notification "Function2"
self.assertIn(
(interface.enums["FunctionID"].elements["Function2_id"],
interface.enums["messageType"].elements["notification"]),
interface.functions)
function = interface.functions[
(interface.enums["FunctionID"].elements["Function2_id"],
interface.enums["messageType"].elements["notification"])]
self.verify_base_item(item=function,
name="Function2",
description=["Function2 description"],
platform="function2 platform")
self.assertIs(function.function_id,
interface.enums["FunctionID"].elements["Function2_id"])
self.assertIs(function.message_type,
interface.enums["messageType"].elements["notification"])
self.assertEqual(3, len(function.params))
self.assertIn("n1", function.params)
param = function.params["n1"]
self.verify_base_item(item=param, name="n1", todos=["n1 todo"])
self.assertTrue(param.is_mandatory)
self.assertIsInstance(param.param_type, generator.Model.EnumSubset)
self.assertIs(param.param_type.enum, interface.enums["enum1"])
self.assertDictEqual(
{"element2": interface.enums["enum1"].elements["element2"],
"element3": interface.enums["enum1"].elements["element3"]},
param.param_type.allowed_elements)
self.assertIsNone(param.default_value)
self.assertIn("n2", function.params)
param = function.params["n2"]
self.verify_base_item(item=param, name="n2", todos=["n2 todo"])
self.assertTrue(param.is_mandatory)
self.assertIsInstance(param.param_type, generator.Model.Array)
self.assertEqual(1, param.param_type.min_size)
self.assertEqual(100, param.param_type.max_size)
self.assertIsInstance(param.param_type.element_type,
generator.Model.EnumSubset)
self.assertIs(param.param_type.element_type.enum,
interface.enums["enum1"])
self.assertDictEqual(
{"element1": interface.enums["enum1"].elements["element1"],
"element3": interface.enums["enum1"].elements["element3"]},
param.param_type.element_type.allowed_elements)
self.assertIsNone(param.default_value)
self.assertIn("n3", function.params)
param = function.params["n3"]
self.verify_base_item(item=param, name="n3")
self.assertEqual(False, param.is_mandatory)
self.assertIs(param.param_type, interface.structs["struct2"])
self.assertIsNone(param.default_value)
def verify_base_item(self, item, name, description=None,
design_description=None, issues=None, todos=None,
platform=None):
"""Verify base interface item variables."""
self.assertEqual(name, item.name)
self.assertSequenceEqual(self.get_list(description), item.description)
self.assertSequenceEqual(self.get_list(design_description),
item.design_description)
self.assertSequenceEqual(self.get_list(issues), item.issues)
self.assertSequenceEqual(self.get_list(todos), item.todos)
self.assertEqual(platform, item.platform)
@staticmethod
def get_list(list=None):
"""Return provided list or empty list if None is provided."""
return list if list is not None else []
if __name__ == "__main__":
unittest.main()
| bsd-3-clause |
BenTheElder/test-infra | gubernator/filters.py | 14 | 8242 | # Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import datetime
import hashlib
import os
import re
import time
import urllib
import urlparse
import jinja2
GITHUB_VIEW_TEMPLATE = 'https://github.com/%s/blob/%s/%s#L%s'
GITHUB_COMMIT_TEMPLATE = 'https://github.com/%s/commit/%s'
LINKIFY_RE = re.compile(
r'(^\s*/\S*/)(kubernetes/(\S+):(\d+)(?: \+0x[0-9a-f]+)?)$',
flags=re.MULTILINE)
def do_timestamp(unix_time, css_class='timestamp', tmpl='%F %H:%M'):
"""Convert an int Unix timestamp into a human-readable datetime."""
t = datetime.datetime.utcfromtimestamp(unix_time)
return jinja2.Markup('<span class="%s" data-epoch="%s">%s</span>' %
(css_class, unix_time, t.strftime(tmpl)))
def do_dt_to_epoch(dt):
return time.mktime(dt.timetuple())
def do_shorttimestamp(unix_time):
t = datetime.datetime.utcfromtimestamp(unix_time)
return jinja2.Markup('<span class="shorttimestamp" data-epoch="%s">%s</span>' %
(unix_time, t.strftime('%m-%d %H:%M')))
def do_duration(seconds):
"""Convert a numeric duration in seconds into a human-readable string."""
hours, seconds = divmod(seconds, 3600)
minutes, seconds = divmod(seconds, 60)
if hours:
return '%dh%dm' % (hours, minutes)
if minutes:
return '%dm%ds' % (minutes, seconds)
else:
if seconds < 10:
return '%.2fs' % seconds
return '%ds' % seconds
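# Illustrative outputs, derived from the branches above:
# do_duration(3725) -> '1h2m'; do_duration(65) -> '1m5s'; do_duration(5.5) -> '5.50s'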
def do_slugify(inp):
"""Convert an arbitrary string into a url-safe slug."""
inp = re.sub(r'[^\w\s-]+', '', inp)
return re.sub(r'\s+', '-', inp).lower()
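# e.g. do_slugify('Hello, World!') -> 'hello-world'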
def do_linkify_stacktrace(inp, commit, repo):
"""Add links to a source code viewer for every mentioned source line."""
inp = unicode(jinja2.escape(inp))
if not commit:
return jinja2.Markup(inp) # this was already escaped, mark it safe!
def rep(m):
prefix, full, path, line = m.groups()
return '%s<a href="%s">%s</a>' % (
prefix,
GITHUB_VIEW_TEMPLATE % (repo, commit, path, line),
full)
return jinja2.Markup(LINKIFY_RE.sub(rep, inp))
def do_github_commit_link(commit, repo):
commit_url = jinja2.escape(GITHUB_COMMIT_TEMPLATE % (repo, commit))
return jinja2.Markup('<a href="%s">%s</a>' % (commit_url, commit[:8]))
def do_maybe_linkify(inp):
try:
if urlparse.urlparse(inp).scheme in ('http', 'https'):
inp = unicode(jinja2.escape(inp))
return jinja2.Markup('<a href="%s">%s</a>' % (inp, inp))
except (AttributeError, TypeError):
pass
return inp
def do_testcmd(name):
if name.startswith('k8s.io/'):
try:
pkg, name = name.split(' ')
except ValueError: # don't block the page render
logging.error('Unexpected Go unit test name %r', name)
return name
return 'go test -v %s -run %s$' % (pkg, name)
elif name.startswith('istio.io/'):
return ''
elif name.startswith('//'):
return 'bazel test %s' % name
elif name.startswith('verify '):
return 'make verify WHAT=%s' % name.split(' ')[1]
else:
name = re.sub(r'^\[k8s\.io\] ', '', name)
name_escaped = re.escape(name).replace('\\ ', '\\s')
test_args = ('--ginkgo.focus=%s$' % name_escaped)
return "go run hack/e2e.go -v --test --test_args='%s'" % test_args
def do_parse_pod_name(text):
"""Find the pod name from the failure and return the pod name."""
p = re.search(r' pod (\S+)', text)
if p:
return re.sub(r'[\'"\\:]', '', p.group(1))
else:
return ""
def do_label_attr(labels, name):
"""
>>> do_label_attr(['needs-rebase', 'size/XS'], 'size')
'XS'
"""
name += '/'
for label in labels:
if label.startswith(name):
return label[len(name):]
return ''
def do_classify_size(payload):
"""
Determine the size class for a PR, based on either its labels or
on the magnitude of its changes.
"""
size = do_label_attr(payload['labels'], 'size')
if not size and 'additions' in payload and 'deletions' in payload:
lines = payload['additions'] + payload['deletions']
# based on mungegithub/mungers/size.go
for limit, label in [
(10, 'XS'),
(30, 'S'),
(100, 'M'),
(500, 'L'),
(1000, 'XL')
]:
if lines < limit:
return label
return 'XXL'
return size
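# e.g. an unlabeled PR with 40 additions and 80 deletions (120 lines) passes
# the 10/30/100 limits and lands in the (500, 'L') bucket.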
def has_lgtm_without_missing_approval(payload, user):
labels = payload.get('labels', []) or []
return 'lgtm' in labels and not (
user in payload.get('approvers', [])
and 'approved' not in labels)
def do_render_status(payload, user):
states = set()
text = 'Pending'
if has_lgtm_without_missing_approval(payload, user):
text = 'LGTM'
elif user in payload.get('attn', {}):
text = payload['attn'][user].title()
if '#' in text: # strip start/end attn timestamps
text = text[:text.index('#')]
for ctx, (state, _url, desc) in payload.get('status', {}).items():
if ctx == 'Submit Queue' and state == 'pending':
if 'does not have lgtm' in desc.lower():
# Don't show overall status as pending when Submit
# won't continue without LGTM.
continue
if ctx == 'tide' and state == 'pending':
# Ignore pending tide statuses for now.
continue
if ctx == 'code-review/reviewable' and state == 'pending':
# Reviewable isn't a CI, so we don't care if it's pending.
# Its dashboard might replace all of this eventually.
continue
states.add(state)
icon = ''
title = ''
if 'failure' in states:
icon = 'x'
state = 'failure'
title = 'failing tests'
elif 'pending' in states:
icon = 'primitive-dot'
state = 'pending'
title = 'pending tests'
elif 'success' in states:
icon = 'check'
state = 'success'
title = 'tests passing'
if icon:
icon = '<span class="text-%s octicon octicon-%s" title="%s"></span>' % (
state, icon, title)
return jinja2.Markup('%s%s' % (icon, text))
def do_get_latest(payload, user):
text = payload.get('attn', {}).get(user)
if not text:
return None
if '#' not in text:
return None
_text, _start, latest = text.rsplit('#', 2)
return float(latest)
def do_ltrim(s, needle):
if s.startswith(needle):
return s[len(needle):]
return s
def do_select(seq, pred):
return filter(pred, seq)
def do_tg_url(testgrid_query, test_name=''):
if test_name:
regex = '^Overall$|' + re.escape(test_name)
testgrid_query += '&include-filter-by-regex=%s' % urllib.quote(regex)
return 'https://testgrid.k8s.io/%s' % testgrid_query
def do_gcs_browse_url(gcs_path):
if not gcs_path.endswith('/'):
gcs_path += '/'
return 'https://gcsweb.k8s.io/gcs' + gcs_path
static_hashes = {}
def do_static(filename):
filename = 'static/%s' % filename
if filename not in static_hashes:
data = open(filename).read()
static_hashes[filename] = hashlib.sha1(data).hexdigest()[:10]
return '/%s?%s' % (filename, static_hashes[filename])
do_basename = os.path.basename
do_dirname = os.path.dirname
do_quote_plus = urllib.quote_plus
def register(filters):
"""Register do_* functions in this module in a dictionary."""
for name, func in globals().items():
if name.startswith('do_'):
filters[name[3:]] = func
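# Hedged usage sketch (the Jinja2 environment setup below is hypothetical,
# not part of this module):
# import jinja2
# env = jinja2.Environment(loader=jinja2.FileSystemLoader('gubernator'))
# register(env.filters) # templates can then use e.g. {{ started|timestamp }}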
| apache-2.0 |
jsoref/django | django/db/backends/oracle/base.py | 20 | 24998 | """
Oracle database backend for Django.
Requires cx_Oracle: http://cx-oracle.sourceforge.net/
"""
from __future__ import unicode_literals
import datetime
import decimal
import os
import platform
import sys
import warnings
from django.conf import settings
from django.db import utils
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.base.validation import BaseDatabaseValidation
from django.utils import six, timezone
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.duration import duration_string
from django.utils.encoding import force_bytes, force_text
from django.utils.functional import cached_property
def _setup_environment(environ):
# Cygwin requires some special voodoo to set the environment variables
# properly so that Oracle will see them.
if platform.system().upper().startswith('CYGWIN'):
try:
import ctypes
except ImportError as e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading ctypes: %s; "
"the Oracle backend requires ctypes to "
"operate correctly under Cygwin." % e)
kernel32 = ctypes.CDLL('kernel32')
for name, value in environ:
kernel32.SetEnvironmentVariableA(name, value)
else:
os.environ.update(environ)
_setup_environment([
# Oracle takes client-side character set encoding from the environment.
('NLS_LANG', '.UTF8'),
# This prevents unicode from getting mangled by getting encoded into the
# potentially non-unicode database character set.
('ORA_NCHAR_LITERAL_REPLACE', 'TRUE'),
])
try:
import cx_Oracle as Database
except ImportError as e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading cx_Oracle module: %s" % e)
# Some of these import cx_Oracle, so import them after checking if it's installed.
from .client import DatabaseClient # isort:skip
from .creation import DatabaseCreation # isort:skip
from .features import DatabaseFeatures # isort:skip
from .introspection import DatabaseIntrospection # isort:skip
from .operations import DatabaseOperations # isort:skip
from .schema import DatabaseSchemaEditor # isort:skip
from .utils import Oracle_datetime, convert_unicode # isort:skip
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
class _UninitializedOperatorsDescriptor(object):
def __get__(self, instance, cls=None):
# If connection.operators is looked up before a connection has been
# created, transparently initialize connection.operators to avert an
# AttributeError.
if instance is None:
raise AttributeError("operators not available as class attribute")
# Creating a cursor will initialize the operators.
instance.cursor().close()
return instance.__dict__['operators']
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = 'oracle'
# This dictionary maps Field objects to their associated Oracle column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
#
# Any format strings starting with "qn_" are quoted before being used in the
# output (the "qn_" prefix is stripped before the lookup is performed).
data_types = {
'AutoField': 'NUMBER(11)',
'BinaryField': 'BLOB',
'BooleanField': 'NUMBER(1)',
'CharField': 'NVARCHAR2(%(max_length)s)',
'CommaSeparatedIntegerField': 'VARCHAR2(%(max_length)s)',
'DateField': 'DATE',
'DateTimeField': 'TIMESTAMP',
'DecimalField': 'NUMBER(%(max_digits)s, %(decimal_places)s)',
'DurationField': 'INTERVAL DAY(9) TO SECOND(6)',
'FileField': 'NVARCHAR2(%(max_length)s)',
'FilePathField': 'NVARCHAR2(%(max_length)s)',
'FloatField': 'DOUBLE PRECISION',
'IntegerField': 'NUMBER(11)',
'BigIntegerField': 'NUMBER(19)',
'IPAddressField': 'VARCHAR2(15)',
'GenericIPAddressField': 'VARCHAR2(39)',
'NullBooleanField': 'NUMBER(1)',
'OneToOneField': 'NUMBER(11)',
'PositiveIntegerField': 'NUMBER(11)',
'PositiveSmallIntegerField': 'NUMBER(11)',
'SlugField': 'NVARCHAR2(%(max_length)s)',
'SmallIntegerField': 'NUMBER(11)',
'TextField': 'NCLOB',
'TimeField': 'TIMESTAMP',
'URLField': 'VARCHAR2(%(max_length)s)',
'UUIDField': 'VARCHAR2(32)',
}
data_type_check_constraints = {
'BooleanField': '%(qn_column)s IN (0,1)',
'NullBooleanField': '(%(qn_column)s IN (0,1)) OR (%(qn_column)s IS NULL)',
'PositiveIntegerField': '%(qn_column)s >= 0',
'PositiveSmallIntegerField': '%(qn_column)s >= 0',
}
operators = _UninitializedOperatorsDescriptor()
_standard_operators = {
'exact': '= %s',
'iexact': '= UPPER(%s)',
'contains': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
'icontains': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
'endswith': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
'istartswith': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
'iendswith': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
}
_likec_operators = _standard_operators.copy()
_likec_operators.update({
'contains': "LIKEC %s ESCAPE '\\'",
'icontains': "LIKEC UPPER(%s) ESCAPE '\\'",
'startswith': "LIKEC %s ESCAPE '\\'",
'endswith': "LIKEC %s ESCAPE '\\'",
'istartswith': "LIKEC UPPER(%s) ESCAPE '\\'",
'iendswith': "LIKEC UPPER(%s) ESCAPE '\\'",
})
# The patterns below are used to generate SQL pattern lookup clauses when
# the right-hand side of the lookup isn't a raw string (it might be an expression
# or the result of a bilateral transformation).
# In those cases, special characters for LIKE operators (e.g. \, *, _) should be
# escaped on database side.
#
# Note: we use str.format() here for readability as '%' is used as a wildcard for
# the LIKE operator.
pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')"
_pattern_ops = {
'contains': "'%%' || {} || '%%'",
'icontains': "'%%' || UPPER({}) || '%%'",
'startswith': "{} || '%%'",
'istartswith': "UPPER({}) || '%%'",
'endswith': "'%%' || {}",
'iendswith': "'%%' || UPPER({})",
}
_standard_pattern_ops = {k: "LIKE TRANSLATE( " + v + " USING NCHAR_CS)"
" ESCAPE TRANSLATE('\\' USING NCHAR_CS)"
for k, v in _pattern_ops.items()}
_likec_pattern_ops = {k: "LIKEC " + v + " ESCAPE '\\'"
for k, v in _pattern_ops.items()}
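# e.g. _standard_pattern_ops['startswith'] expands (as source) to
# "LIKE TRANSLATE( {} || '%%' USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)"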
Database = Database
SchemaEditorClass = DatabaseSchemaEditor
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.features = DatabaseFeatures(self)
use_returning_into = self.settings_dict["OPTIONS"].get('use_returning_into', True)
self.features.can_return_id_from_insert = use_returning_into
self.ops = DatabaseOperations(self)
self.client = DatabaseClient(self)
self.creation = DatabaseCreation(self)
self.introspection = DatabaseIntrospection(self)
self.validation = BaseDatabaseValidation(self)
def _connect_string(self):
settings_dict = self.settings_dict
if not settings_dict['HOST'].strip():
settings_dict['HOST'] = 'localhost'
if settings_dict['PORT'].strip():
dsn = Database.makedsn(settings_dict['HOST'],
int(settings_dict['PORT']),
settings_dict['NAME'])
else:
dsn = settings_dict['NAME']
return "%s/%s@%s" % (settings_dict['USER'],
settings_dict['PASSWORD'], dsn)
def get_connection_params(self):
conn_params = self.settings_dict['OPTIONS'].copy()
if 'use_returning_into' in conn_params:
del conn_params['use_returning_into']
return conn_params
def get_new_connection(self, conn_params):
conn_string = convert_unicode(self._connect_string())
return Database.connect(conn_string, **conn_params)
def init_connection_state(self):
cursor = self.create_cursor()
# Set the territory first. The territory overrides NLS_DATE_FORMAT
# and NLS_TIMESTAMP_FORMAT to the territory default. When all of
# these are set in single statement it isn't clear what is supposed
# to happen.
cursor.execute("ALTER SESSION SET NLS_TERRITORY = 'AMERICA'")
# Set Oracle date to ANSI date format. This only needs to execute
# once when we create a new connection. We also set the Territory
# to 'AMERICA' which forces Sunday to evaluate to a '1' in
# TO_CHAR().
cursor.execute(
"ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS'"
" NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'"
+ (" TIME_ZONE = 'UTC'" if settings.USE_TZ else ''))
cursor.close()
if 'operators' not in self.__dict__:
# Ticket #14149: Check whether our LIKE implementation will
# work for this connection or we need to fall back on LIKEC.
# This check is performed only once per DatabaseWrapper
# instance per thread, since subsequent connections will use
# the same settings.
cursor = self.create_cursor()
try:
cursor.execute("SELECT 1 FROM DUAL WHERE DUMMY %s"
% self._standard_operators['contains'],
['X'])
except DatabaseError:
self.operators = self._likec_operators
self.pattern_ops = self._likec_pattern_ops
else:
self.operators = self._standard_operators
self.pattern_ops = self._standard_pattern_ops
cursor.close()
try:
self.connection.stmtcachesize = 20
except AttributeError:
# Django docs specify cx_Oracle version 4.3.1 or higher, but
# stmtcachesize is available only in 4.3.2 and up.
pass
# Ensure all changes are preserved even when AUTOCOMMIT is False.
if not self.get_autocommit():
self.commit()
def create_cursor(self):
return FormatStylePlaceholderCursor(self.connection)
def _commit(self):
if self.connection is not None:
try:
return self.connection.commit()
except Database.DatabaseError as e:
# cx_Oracle 5.0.4 raises a cx_Oracle.DatabaseError exception
# with the following attributes and values:
# code = 2091
# message = 'ORA-02091: transaction rolled back
# 'ORA-02291: integrity constraint (TEST_DJANGOTEST.SYS
# _C00102056) violated - parent key not found'
# We convert that particular case to our IntegrityError exception
x = e.args[0]
if hasattr(x, 'code') and hasattr(x, 'message') \
and x.code == 2091 and 'ORA-02291' in x.message:
six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
raise
# Oracle doesn't support releasing savepoints. But we fake them when query
# logging is enabled to keep query counts consistent with other backends.
def _savepoint_commit(self, sid):
if self.queries_logged:
self.queries_log.append({
'sql': '-- RELEASE SAVEPOINT %s (faked)' % self.ops.quote_name(sid),
'time': '0.000',
})
def _set_autocommit(self, autocommit):
with self.wrap_database_errors:
self.connection.autocommit = autocommit
def check_constraints(self, table_names=None):
"""
To check constraints, we set constraints to immediate. Then, when we're
done, we must ensure they are returned to deferred.
"""
self.cursor().execute('SET CONSTRAINTS ALL IMMEDIATE')
self.cursor().execute('SET CONSTRAINTS ALL DEFERRED')
def is_usable(self):
try:
self.connection.ping()
except Database.Error:
return False
else:
return True
@cached_property
def oracle_full_version(self):
with self.temporary_connection():
return self.connection.version
@cached_property
def oracle_version(self):
try:
return int(self.oracle_full_version.split('.')[0])
except ValueError:
return None
class OracleParam(object):
"""
Wrapper object for formatting parameters for Oracle. If the string
representation of the value is large enough (greater than 4000 characters)
the input size needs to be set as CLOB. Alternatively, if the parameter
has an `input_size` attribute, then the value of the `input_size` attribute
will be used instead. Otherwise, no input size will be set for the
parameter when executing the query.
"""
def __init__(self, param, cursor, strings_only=False):
# With raw SQL queries, datetimes can reach this function
# without being converted by DateTimeField.get_db_prep_value.
if settings.USE_TZ and (isinstance(param, datetime.datetime) and
not isinstance(param, Oracle_datetime)):
if timezone.is_aware(param):
warnings.warn(
"The Oracle database adapter received an aware datetime (%s), "
"probably from cursor.execute(). Update your code to pass a "
"naive datetime in the database connection's time zone (UTC by "
"default).", RemovedInDjango20Warning)
param = param.astimezone(timezone.utc).replace(tzinfo=None)
param = Oracle_datetime.from_datetime(param)
if isinstance(param, datetime.timedelta):
param = duration_string(param)
if ' ' not in param:
param = '0 ' + param
string_size = 0
# Oracle doesn't recognize True and False correctly in Python 3.
# The conversion done below works both in 2 and 3.
if param is True:
param = 1
elif param is False:
param = 0
if hasattr(param, 'bind_parameter'):
self.force_bytes = param.bind_parameter(cursor)
elif isinstance(param, Database.Binary):
self.force_bytes = param
else:
# To transmit to the database, we need Unicode if supported
# To get size right, we must consider bytes.
self.force_bytes = convert_unicode(param, cursor.charset,
strings_only)
if isinstance(self.force_bytes, six.string_types):
# We could optimize by only converting up to 4000 bytes here
string_size = len(force_bytes(param, cursor.charset, strings_only))
if hasattr(param, 'input_size'):
# If parameter has `input_size` attribute, use that.
self.input_size = param.input_size
elif string_size > 4000:
# Mark any string param greater than 4000 characters as a CLOB.
self.input_size = Database.CLOB
else:
self.input_size = None
class VariableWrapper(object):
"""
An adapter class for cursor variables that prevents the wrapped object
from being converted into a string when used to instantiate an OracleParam.
This can be used generally for any other object that should be passed into
Cursor.execute as-is.
"""
def __init__(self, var):
self.var = var
def bind_parameter(self, cursor):
return self.var
def __getattr__(self, key):
return getattr(self.var, key)
def __setattr__(self, key, value):
if key == 'var':
self.__dict__[key] = value
else:
setattr(self.var, key, value)
class FormatStylePlaceholderCursor(object):
"""
Django uses "format" (e.g. '%s') style placeholders, but Oracle uses ":var"
style. This fixes it -- but note that if you want to use a literal "%s" in
a query, you'll need to use "%%s".
We also do automatic conversion between Unicode on the Python side and
UTF-8 -- for talking to Oracle -- in here.
"""
charset = 'utf-8'
def __init__(self, connection):
self.cursor = connection.cursor()
# Necessary to retrieve decimal values without rounding error.
self.cursor.numbersAsStrings = True
# Default arraysize of 1 is highly sub-optimal.
self.cursor.arraysize = 100
def _format_params(self, params):
try:
return {k: OracleParam(v, self, True) for k, v in params.items()}
except AttributeError:
return tuple(OracleParam(p, self, True) for p in params)
def _guess_input_sizes(self, params_list):
# Try dict handling; if that fails, treat as sequence
if hasattr(params_list[0], 'keys'):
sizes = {}
for params in params_list:
for k, value in params.items():
if value.input_size:
sizes[k] = value.input_size
self.setinputsizes(**sizes)
else:
# It's not a list of dicts; it's a list of sequences
sizes = [None] * len(params_list[0])
for params in params_list:
for i, value in enumerate(params):
if value.input_size:
sizes[i] = value.input_size
self.setinputsizes(*sizes)
def _param_generator(self, params):
# Try dict handling; if that fails, treat as sequence
if hasattr(params, 'items'):
return {k: v.force_bytes for k, v in params.items()}
else:
return [p.force_bytes for p in params]
def _fix_for_params(self, query, params):
# cx_Oracle wants no trailing ';' for SQL statements. For PL/SQL, it
# does want a trailing ';' but not a trailing '/'. However, these
# characters must be included in the original query in case the query
# is being passed to SQL*Plus.
if query.endswith(';') or query.endswith('/'):
query = query[:-1]
if params is None:
params = []
query = convert_unicode(query, self.charset)
elif hasattr(params, 'keys'):
# Handle params as dict
args = {k: ":%s" % k for k in params.keys()}
query = convert_unicode(query % args, self.charset)
else:
# Handle params as sequence
args = [(':arg%d' % i) for i in range(len(params))]
query = convert_unicode(query % tuple(args), self.charset)
return query, self._format_params(params)
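# For instance, ("SELECT %s FROM DUAL", ['X']) comes back from this method
# as ("SELECT :arg0 FROM DUAL", (OracleParam('X', ...),)).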
def execute(self, query, params=None):
query, params = self._fix_for_params(query, params)
self._guess_input_sizes([params])
try:
return self.cursor.execute(query, self._param_generator(params))
except Database.DatabaseError as e:
# cx_Oracle <= 4.4.0 wrongly raises a DatabaseError for ORA-01400.
if hasattr(e.args[0], 'code') and e.args[0].code == 1400 and not isinstance(e, IntegrityError):
six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
raise
def executemany(self, query, params=None):
if not params:
# No params given, nothing to do
return None
# uniform treatment for sequences and iterables
params_iter = iter(params)
query, firstparams = self._fix_for_params(query, next(params_iter))
# we build a list of formatted params; as we're going to traverse it
# more than once, we can't make it lazy by using a generator
formatted = [firstparams] + [self._format_params(p) for p in params_iter]
self._guess_input_sizes(formatted)
try:
return self.cursor.executemany(query,
[self._param_generator(p) for p in formatted])
except Database.DatabaseError as e:
# cx_Oracle <= 4.4.0 wrongly raises a DatabaseError for ORA-01400.
if hasattr(e.args[0], 'code') and e.args[0].code == 1400 and not isinstance(e, IntegrityError):
six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
raise
def fetchone(self):
row = self.cursor.fetchone()
if row is None:
return row
return _rowfactory(row, self.cursor)
def fetchmany(self, size=None):
if size is None:
size = self.arraysize
return tuple(_rowfactory(r, self.cursor) for r in self.cursor.fetchmany(size))
def fetchall(self):
return tuple(_rowfactory(r, self.cursor) for r in self.cursor.fetchall())
def close(self):
try:
self.cursor.close()
except Database.InterfaceError:
# already closed
pass
def var(self, *args):
return VariableWrapper(self.cursor.var(*args))
def arrayvar(self, *args):
return VariableWrapper(self.cursor.arrayvar(*args))
def __getattr__(self, attr):
if attr in self.__dict__:
return self.__dict__[attr]
else:
return getattr(self.cursor, attr)
def __iter__(self):
return CursorIterator(self.cursor)
class CursorIterator(six.Iterator):
"""
Cursor iterator wrapper that invokes our custom row factory.
"""
def __init__(self, cursor):
self.cursor = cursor
self.iter = iter(cursor)
def __iter__(self):
return self
def __next__(self):
return _rowfactory(next(self.iter), self.cursor)
def _rowfactory(row, cursor):
# Cast numeric values as the appropriate Python type based upon the
# cursor description, and convert strings to unicode.
casted = []
for value, desc in zip(row, cursor.description):
if value is not None and desc[1] is Database.NUMBER:
precision, scale = desc[4:6]
if scale == -127:
if precision == 0:
# NUMBER column: decimal-precision floating point
# This will normally be an integer from a sequence,
# but it could be a decimal value.
if '.' in value:
value = decimal.Decimal(value)
else:
value = int(value)
else:
# FLOAT column: binary-precision floating point.
# This comes from FloatField columns.
value = float(value)
elif precision > 0:
# NUMBER(p,s) column: decimal-precision fixed point.
# This comes from IntField and DecimalField columns.
if scale == 0:
value = int(value)
else:
value = decimal.Decimal(value)
elif '.' in value:
# No type information. This normally comes from a
# mathematical expression in the SELECT list. Guess int
# or Decimal based on whether it has a decimal point.
value = decimal.Decimal(value)
else:
value = int(value)
elif desc[1] in (Database.STRING, Database.FIXED_CHAR,
Database.LONG_STRING):
value = to_unicode(value)
casted.append(value)
return tuple(casted)
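# e.g. a NUMBER(5,0) value '42' is cast to int(42), a NUMBER(10,2) value
# '3.14' to Decimal('3.14'), and an untyped '2.5' from a SELECT expression
# to Decimal('2.5').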
def to_unicode(s):
"""
Convert strings to Unicode objects (and return all other data types
unchanged).
"""
if isinstance(s, six.string_types):
return force_text(s)
return s
| bsd-3-clause |
xuleiboy1234/autoTitle | tensorflow/tensorflow/contrib/predictor/saved_model_predictor.py | 55 | 6579 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A `Predictor` constructed from a `SavedModel`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
from tensorflow.contrib.predictor import predictor
from tensorflow.contrib.saved_model.python.saved_model import reader
from tensorflow.contrib.saved_model.python.saved_model import signature_def_utils
from tensorflow.python.client import session
from tensorflow.python.framework import ops
from tensorflow.python.saved_model import loader
from tensorflow.python.saved_model import signature_constants
DEFAULT_TAGS = 'serve'
_DEFAULT_INPUT_ALTERNATIVE_FORMAT = 'default_input_alternative:{}'
def get_meta_graph_def(saved_model_dir, tags):
"""Gets `MetaGraphDef` from a directory containing a `SavedModel`.
Returns the `MetaGraphDef` for the given tag-set and SavedModel directory.
Args:
saved_model_dir: Directory containing the SavedModel.
tags: Comma separated list of tags used to identify the correct
`MetaGraphDef`.
Raises:
ValueError: An error when the given tags cannot be found.
Returns:
A `MetaGraphDef` corresponding to the given tags.
"""
saved_model = reader.read_saved_model(saved_model_dir)
set_of_tags = set([tag.strip() for tag in tags.split(',')])
for meta_graph_def in saved_model.meta_graphs:
if set(meta_graph_def.meta_info_def.tags) == set_of_tags:
return meta_graph_def
raise ValueError('Could not find MetaGraphDef with tags {}'.format(tags))
def _get_signature_def(signature_def_key, export_dir, tags):
"""Construct a `SignatureDef` proto."""
signature_def_key = (
signature_def_key or
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY)
metagraph_def = get_meta_graph_def(export_dir, tags)
try:
signature_def = signature_def_utils.get_signature_def_by_key(
metagraph_def,
signature_def_key)
except ValueError as e:
try:
formatted_key = _DEFAULT_INPUT_ALTERNATIVE_FORMAT.format(
signature_def_key)
signature_def = signature_def_utils.get_signature_def_by_key(
metagraph_def, formatted_key)
logging.warning('Could not find signature def "%s". '
'Using "%s" instead', signature_def_key, formatted_key)
except ValueError:
raise ValueError(
'Got signature_def_key "{}". Available signatures are {}. '
'Original error:\n{}'.format(
signature_def_key, list(metagraph_def.signature_def), e))
return signature_def
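# e.g. a missing key 'predict' is retried as 'default_input_alternative:predict'
# before giving up (key name illustrative).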
def _check_signature_arguments(signature_def_key,
signature_def,
input_names,
output_names):
"""Validates signature arguments for `SavedModelPredictor`."""
signature_def_key_specified = signature_def_key is not None
signature_def_specified = signature_def is not None
input_names_specified = input_names is not None
output_names_specified = output_names is not None
if input_names_specified != output_names_specified:
raise ValueError(
'input_names and output_names must both be specified or both be '
'unspecified.'
)
if (signature_def_key_specified + signature_def_specified +
input_names_specified > 1):
raise ValueError(
'You must specify at most one of signature_def_key OR signature_def OR '
'(input_names AND output_names).'
)
class SavedModelPredictor(predictor.Predictor):
"""A `Predictor` constructed from a `SavedModel`."""
def __init__(self,
export_dir,
signature_def_key=None,
signature_def=None,
input_names=None,
output_names=None,
tags=None,
graph=None):
"""Initialize a `CoreEstimatorPredictor`.
Args:
export_dir: a path to a directory containing a `SavedModel`.
signature_def_key: Optional string specifying the signature to use. If
`None`, then `DEFAULT_SERVING_SIGNATURE_DEF_KEY` is used. Only one of
`signature_def_key` and `signature_def` should be specified.
signature_def: A `SignatureDef` proto specifying the inputs and outputs
for prediction. Only one of `signature_def_key` and `signature_def`
should be specified.
input_names: A dictionary mapping strings to `Tensor`s in the `SavedModel`
that represent the input. The keys can be any string of the user's
choosing.
output_names: A dictionary mapping strings to `Tensor`s in the
`SavedModel` that represent the output. The keys can be any string of
the user's choosing.
tags: Optional. Comma separated list of tags that will be used to retrieve
the correct `SignatureDef`. Defaults to `DEFAULT_TAGS`.
graph: Optional. The Tensorflow `graph` in which prediction should be
done.
Raises:
ValueError: If more than one of signature_def_key OR signature_def OR
(input_names AND output_names) is specified.
"""
_check_signature_arguments(
signature_def_key, signature_def, input_names, output_names)
tags = tags or DEFAULT_TAGS
self._graph = graph or ops.Graph()
with self._graph.as_default():
self._session = session.Session()
loader.load(self._session, tags.split(','), export_dir)
if input_names is None:
if signature_def is None:
signature_def = _get_signature_def(signature_def_key, export_dir, tags)
input_names = {k: v.name for k, v in signature_def.inputs.items()}
output_names = {k: v.name for k, v in signature_def.outputs.items()}
self._feed_tensors = {k: self._graph.get_tensor_by_name(v)
for k, v in input_names.items()}
self._fetch_tensors = {k: self._graph.get_tensor_by_name(v)
for k, v in output_names.items()}
| mit |
lucc/alot | alot/widgets/search.py | 1 | 7118 | # Copyright (C) 2011-2012 Patrick Totzke <[email protected]>
# This file is released under the GNU GPL, version 3 or a later revision.
# For further details see the COPYING file
"""
Widgets specific to search mode
"""
import urwid
from ..settings.const import settings
from ..helper import shorten_author_string
from .utils import AttrFlipWidget
from .globals import TagWidget
class ThreadlineWidget(urwid.AttrMap):
"""
selectable line widget that represents a :class:`~alot.db.Thread`
in the :class:`~alot.buffers.SearchBuffer`.
"""
def __init__(self, tid, dbman):
self.dbman = dbman
self.tid = tid
self.thread = None # will be set by refresh()
self.tag_widgets = []
self.structure = None
self.rebuild()
normal = self.structure['normal']
focussed = self.structure['focus']
urwid.AttrMap.__init__(self, self.columns, normal, focussed)
def rebuild(self):
self.thread = self.dbman.get_thread(self.tid)
self.widgets = []
self.structure = settings.get_threadline_theming(self.thread)
columns = []
# combine width info and widget into an urwid.Column entry
def add_column(width, part):
# note: `partname` is taken from the enclosing for-loop below (closure)
width_tuple = self.structure[partname]['width']
if width_tuple[0] == 'weight':
columnentry = width_tuple + (part,)
else:
columnentry = ('fixed', width, part)
columns.append(columnentry)
# create a column for every part of the threadline
for partname in self.structure['parts']:
# build widget(s) around this part's content and remember them so
# that self.render() may change local attributes.
if partname == 'tags':
width, part = build_tags_part(self.thread.get_tags(),
self.structure['tags']['normal'],
self.structure['tags']['focus'])
if part:
add_column(width, part)
for w in part.widget_list:
self.widgets.append(w)
else:
width, part = build_text_part(partname, self.thread,
self.structure[partname])
add_column(width, part)
self.widgets.append(part)
self.columns = urwid.Columns(columns, dividechars=1)
self.original_widget = self.columns
def render(self, size, focus=False):
for w in self.widgets:
w.set_map('focus' if focus else 'normal')
return urwid.AttrMap.render(self, size, focus)
def selectable(self):
return True
def keypress(self, size, key):
return key
def get_thread(self):
return self.thread
def build_tags_part(tags, attr_normal, attr_focus):
"""
create an urwid.Columns widget (wrapped in appropriate Attributes)
to display a list of tag strings, as part of a threadline.
:param tags: list of tag strings to include
:type tags: list of str
:param attr_normal: urwid attribute to use if unfocussed
:param attr_focus: urwid attribute to use if focussed
:return: overall width in characters and a Columns widget.
:rtype: tuple[int, urwid.Columns]
"""
part_w = None
cols = []
width = -1
# create individual TagWidgets and sort them
tag_widgets = [TagWidget(t, attr_normal, attr_focus) for t in tags]
tag_widgets = sorted(tag_widgets)
for tag_widget in tag_widgets:
if not tag_widget.hidden:
wrapped_tagwidget = tag_widget
tag_width = tag_widget.width()
cols.append(('fixed', tag_width, wrapped_tagwidget))
width += tag_width + 1
if cols:
part_w = urwid.Columns(cols, dividechars=1)
return width, part_w
def build_text_part(name, thread, struct):
"""
create an urwid.Text widget (wrapped in appropriate Attributes)
to display a plain text part in a threadline.
:param name: id of part to build
:type name: str
:param thread: the thread to get local info for
:type thread: :class:`alot.db.thread.Thread`
:param struct: theming attributes for this part, as provided by
:class:`alot.settings.theme.Theme.get_threadline_theming`
:type struct: dict
:return: overall width (in characters) and a widget.
:rtype: tuple[int, AttrFlipWidget]
"""
part_w = None
width = None
# extract min and max allowed width from theme
minw = 0
maxw = None
width_tuple = struct['width']
if width_tuple is not None:
if width_tuple[0] == 'fit':
minw, maxw = width_tuple[1:]
content = prepare_string(name, thread, maxw)
# pad content if not long enough
if minw:
alignment = struct['alignment']
if alignment == 'left':
content = content.ljust(minw)
elif alignment == 'center':
content = content.center(minw)
else:
content = content.rjust(minw)
# define width and part_w
text = urwid.Text(content, wrap='clip')
width = text.pack()[0]
part_w = AttrFlipWidget(text, struct)
return width, part_w
def prepare_date_string(thread):
newest = None
newest = thread.get_newest_date()
if newest is not None:
datestring = settings.represent_datetime(newest)
return datestring
def prepare_mailcount_string(thread):
return "(%d)" % thread.get_total_messages()
def prepare_authors_string(thread):
return thread.get_authors_string() or '(None)'
def prepare_subject_string(thread):
return thread.get_subject() or ' '
def prepare_content_string(thread):
msgs = sorted(thread.get_messages().keys(),
key=lambda msg: msg.get_date(), reverse=True)
lastcontent = ' '.join(m.get_text_content() for m in msgs)
return lastcontent
def prepare_string(partname, thread, maxw):
"""
extract a content string for part 'partname' from 'thread' of maximal
length 'maxw'.
"""
# map part names to function extracting content string and custom shortener
prep = {
'mailcount': (prepare_mailcount_string, None),
'date': (prepare_date_string, None),
'authors': (prepare_authors_string, shorten_author_string),
'subject': (prepare_subject_string, None),
'content': (prepare_content_string, None),
}
s = ' ' # fallback value
if thread:
# get extractor and shortener
content, shortener = prep[partname]
# get string
s = content(thread)
# sanitize
s = s.replace('\n', ' ')
s = s.replace('\r', '')
# shorten if max width is requested
if maxw:
if len(s) > maxw and shortener:
s = shortener(s, maxw)
else:
s = s[:maxw]
return s
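# e.g. prepare_string('authors', thread, 20) returns the newline-stripped
# author string, shortened to 20 characters via shorten_author_string.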
| gpl-3.0 |
rahku/coreclr | src/ToolBox/SOS/tests/t_cmd_bpmd_methoddesc.py | 43 | 1308 | import lldb
import re
import testutils as test
# bpmd -md <MethodDesc pointer>
def runScenario(assembly, debugger, target):
process = target.GetProcess()
res = lldb.SBCommandReturnObject()
ci = debugger.GetCommandInterpreter()
# Run debugger, wait until libcoreclr is loaded,
# set breakpoint at Test.Main and stop there
test.stop_in_main(debugger, assembly)
md_addr = test.get_methoddesc(debugger, assembly, "Test.UnlikelyInlined")
ci.HandleCommand("bpmd -md %s" % md_addr, res)
out_msg = res.GetOutput()
err_msg = res.GetError()
print(out_msg)
print(err_msg)
# Interpreter must have this command and able to run it
test.assertTrue(res.Succeeded())
# Output is not empty
# Should be at least 'Adding pending breakpoints...'
test.assertTrue(len(out_msg) > 0)
# Error message is empty
test.assertTrue(len(err_msg) == 0)
process.Continue()
# Process must be stopped at UnlikelyInlined
test.assertEqual(process.GetState(), lldb.eStateStopped)
# The reason for this stop must be a breakpoint
test.assertEqual(process.GetSelectedThread().GetStopReason(),
lldb.eStopReasonBreakpoint)
#
# Continue current process and check its exit code
test.exit_lldb(debugger, assembly)
| mit |
kkozarev/mwacme | synchrotron_fitting/GS_kappa_function.py | 1 | 2634 | import Get_MW
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import numpy as np
N=10 #number of frequencies
#These values are starting positions for coronal CME radio observations
ParmIn=29*[0] # input array
ParmIn[0] =8e19 # Area, cm^2
ParmIn[1] =5e9 # Depth, cm
ParmIn[2] =3e6 # T_0, K
ParmIn[3] =0.05 # \eps (not used in this example)
ParmIn[4] =6.0 # \kappa (not used in this example)
ParmIn[5] =16 # number of integration nodes
ParmIn[6] =0.1 # E_min, MeV
ParmIn[7] =10.0 # E_max, MeV
ParmIn[8] =1.0 # E_break, MeV (not used in this example)
ParmIn[9] =4.0 # \delta_1
ParmIn[10]=6.0 # \delta_2 (not used in this example)
ParmIn[11]=1e8 # n_0 - thermal electron density, cm^{-3}
ParmIn[12]=1e6 # n_b - nonthermal electron density, cm^{-3}
ParmIn[13]=5.0 # B - magnetic field, G
ParmIn[14]=60.0 # theta - the viewing angle, degrees
ParmIn[15]=8.e7 # starting frequency to calculate spectrum, Hz
ParmIn[16]=0.005 # logarithmic step in frequency
ParmIn[17]=6 # Index of distribution over energy (KAP is chosen)
ParmIn[18]=N # Number of frequencies (specified above)
ParmIn[19]=3 # Index of distribution over pitch-angle (GLC is chosen)
ParmIn[20]=90.0 # loss-cone boundary, degrees
ParmIn[21]=0.0 # beam direction (degrees) in GAU and SGA (not used in this example)
ParmIn[22]=0.2 # \Delta\mu
ParmIn[23]=0.0 # a_4 in SGA (not used in this example)
ParmIn[25]=12.0 # f^C_cr
ParmIn[26]=12.0 # f^WH_cr
ParmIn[27]=1 # matching on
ParmIn[28]=1 # Q-optimization on
def init_frequency_grid(startfreq,endfreq,numfreq=N):
Params = ParmIn
Params[16]=np.log10(endfreq/startfreq)/numfreq
Params[15]=startfreq*1.e6
Params[18]=numfreq
s=Get_MW.GET_MW(Params) # calling the main function
f=s[0] # emission frequency (GHz)
fmhz=[i*1000. for i in f]
return fmhz
def gs_kappa_func(freqgrid, temp=ParmIn[2],dens=ParmIn[11],kappa=ParmIn[4],emax=ParmIn[7],numfreq=N):
Params = ParmIn
Params[2]=temp
Params[4]=kappa
Params[7]=emax
Params[11]=dens
Params[15]=freqgrid[0]/1.e6
Params[17]=6
if not numfreq:
numfreq=len(freqgrid)
Params[16]=np.log10(freqgrid[-1]/freqgrid[0])/numfreq
Params[18]=numfreq
s=Get_MW.GET_MW(Params) # calling the main function
I_O=s[1] # observed (at the Earth) intensity, O-mode (sfu)
k_O=s[2] # exp(-tau), O-mode
#I_X=s[3] # observed (at the Earth) intensity, X-mode (sfu)
#k_X=s[4] # exp(-tau), X-mode
return I_O
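# Illustrative usage sketch (not part of the original script): build a grid
# with init_frequency_grid and evaluate the O-mode kappa spectrum on it.
# Parameter values are hypothetical, and frequency-unit handling follows the
# script's own conventions above.
if __name__ == '__main__':
    fmhz = init_frequency_grid(80., 300.)
    flux = gs_kappa_func(fmhz, temp=2.e6, dens=5.e7, kappa=4.0)
    plt.plot(fmhz, flux)
    plt.xlabel('Frequency [MHz]')
    plt.ylabel('O-mode intensity [sfu]')
    plt.savefig('gs_kappa_spectrum.png')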
| gpl-2.0 |
eenchev/idea-note-taking-app | env/lib/python2.7/site-packages/werkzeug/posixemulation.py | 364 | 3519 | # -*- coding: utf-8 -*-
r"""
werkzeug.posixemulation
~~~~~~~~~~~~~~~~~~~~~~~
Provides a POSIX emulation for some features that are relevant to
web applications. The main purpose is to simplify support for
systems such as Windows NT that are not 100% POSIX compatible.
Currently this only implements a :func:`rename` function that
follows POSIX semantics. Eg: if the target file already exists it
will be replaced without asking.
This module was introduced in 0.6.1 and is not a public interface.
It might become one in later versions of Werkzeug.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import sys
import os
import errno
import time
import random
from ._compat import to_unicode
from .filesystem import get_filesystem_encoding
can_rename_open_file = False
if os.name == 'nt': # pragma: no cover
_rename = lambda src, dst: False
_rename_atomic = lambda src, dst: False
try:
import ctypes
_MOVEFILE_REPLACE_EXISTING = 0x1
_MOVEFILE_WRITE_THROUGH = 0x8
_MoveFileEx = ctypes.windll.kernel32.MoveFileExW
def _rename(src, dst):
src = to_unicode(src, get_filesystem_encoding())
dst = to_unicode(dst, get_filesystem_encoding())
if _rename_atomic(src, dst):
return True
retry = 0
rv = False
while not rv and retry < 100:
rv = _MoveFileEx(src, dst, _MOVEFILE_REPLACE_EXISTING |
_MOVEFILE_WRITE_THROUGH)
if not rv:
time.sleep(0.001)
retry += 1
return rv
# new in Vista and Windows Server 2008
_CreateTransaction = ctypes.windll.ktmw32.CreateTransaction
_CommitTransaction = ctypes.windll.ktmw32.CommitTransaction
_MoveFileTransacted = ctypes.windll.kernel32.MoveFileTransactedW
_CloseHandle = ctypes.windll.kernel32.CloseHandle
can_rename_open_file = True
def _rename_atomic(src, dst):
ta = _CreateTransaction(None, 0, 0, 0, 0, 1000, 'Werkzeug rename')
if ta == -1:
return False
try:
retry = 0
rv = False
while not rv and retry < 100:
rv = _MoveFileTransacted(src, dst, None, None,
_MOVEFILE_REPLACE_EXISTING |
_MOVEFILE_WRITE_THROUGH, ta)
if rv:
rv = _CommitTransaction(ta)
break
else:
time.sleep(0.001)
retry += 1
return rv
finally:
_CloseHandle(ta)
except Exception:
pass
def rename(src, dst):
# Try atomic or pseudo-atomic rename
if _rename(src, dst):
return
# Fall back to "move away and replace"
try:
os.rename(src, dst)
except OSError as e:
if e.errno != errno.EEXIST:
raise
old = "%s-%08x" % (dst, random.randint(0, sys.maxint))
os.rename(dst, old)
os.rename(src, dst)
try:
os.unlink(old)
except Exception:
pass
else:
rename = os.rename
can_rename_open_file = True
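# Illustrative usage sketch (not part of Werkzeug): rename() above provides
# POSIX overwrite semantics on every platform, which enables the common
# "write to a temp file, then replace the target" pattern shown here.
def _example_atomic_write(path, data):
    tmp = path + '.tmp'
    with open(tmp, 'w') as f:
        f.write(data)
    rename(tmp, path)  # replaces any existing file at `path`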
| mit |
jcftang/ansible | lib/ansible/modules/cloud/ovirt/ovirt_affinity_labels.py | 12 | 6828 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import traceback
try:
import ovirtsdk4.types as otypes
except ImportError:
pass
from collections import defaultdict
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
BaseModule,
check_sdk,
create_connection,
ovirt_full_argument_spec,
)
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: ovirt_affinity_labels
short_description: Module to manage affinity labels in oVirt
version_added: "2.3"
author: "Ondra Machacek (@machacekondra)"
description:
- "This module manage affinity labels in oVirt. It can also manage assignments
of those labels to hosts and VMs."
options:
name:
description:
- "Name of the the affinity label to manage."
required: true
state:
description:
- "Should the affinity label be present or absent."
choices: ['present', 'absent']
default: present
cluster:
description:
- "Name of the cluster where vms and hosts resides."
vms:
description:
- "List of the VMs names, which should have assigned this affinity label."
hosts:
description:
- "List of the hosts names, which should have assigned this affinity label."
extends_documentation_fragment: ovirt
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Create(if not exists) and assign affinity label to vms vm1 and vm2 and host host1
- ovirt_affinity_labels:
name: mylabel
cluster: mycluster
vms:
- vm1
- vm2
hosts:
- host1
# To detach all VMs from label
- ovirt_affinity_labels:
name: mylabel
cluster: mycluster
vms: []
# Remove affinity label
- ovirt_affinity_labels:
state: absent
name: mylabel
'''
RETURN = '''
id:
description: ID of the affinity label which is managed
returned: On success if affinity label is found.
type: str
sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c
affinity_label:
description: "Dictionary of all the affinity label attributes. Affinity label attributes can be found on your oVirt instance
at following url: https://ovirt.example.com/ovirt-engine/api/model#types/affinity_label."
returned: On success if affinity label is found.
'''
class AffinityLabelsModule(BaseModule):
def build_entity(self):
return otypes.AffinityLabel(name=self._module.params['name'])
def post_create(self, entity):
self.update_check(entity)
def pre_remove(self, entity):
self._module.params['vms'] = []
self._module.params['hosts'] = []
self.update_check(entity)
def _update_label_assignments(self, entity, name, label_obj_type):
objs_service = getattr(self._connection.system_service(), '%s_service' % name)()
if self._module.params[name] is not None:
objs = self._connection.follow_link(getattr(entity, name))
objs_names = defaultdict(list)
for obj in objs:
labeled_entity = objs_service.service(obj.id).get()
if self._module.params['cluster'] is None:
objs_names[labeled_entity.name].append(obj.id)
elif self._connection.follow_link(labeled_entity.cluster).name == self._module.params['cluster']:
objs_names[labeled_entity.name].append(obj.id)
for obj in self._module.params[name]:
if obj not in objs_names:
for obj_id in objs_service.list(
search='name=%s and cluster=%s' % (obj, self._module.params['cluster'])
):
label_service = getattr(self._service.service(entity.id), '%s_service' % name)()
if not self._module.check_mode:
label_service.add(**{
name[:-1]: label_obj_type(id=obj_id.id)
})
self.changed = True
for obj in objs_names:
if obj not in self._module.params[name]:
label_service = getattr(self._service.service(entity.id), '%s_service' % name)()
if not self._module.check_mode:
for obj_id in objs_names[obj]:
label_service.service(obj_id).remove()
self.changed = True
def update_check(self, entity):
self._update_label_assignments(entity, 'vms', otypes.Vm)
self._update_label_assignments(entity, 'hosts', otypes.Host)
return True
def main():
argument_spec = ovirt_full_argument_spec(
state=dict(
choices=['present', 'absent'],
default='present',
),
cluster=dict(default=None),
name=dict(default=None, required=True),
vms=dict(default=None, type='list'),
hosts=dict(default=None, type='list'),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
('state', 'present', ['cluster']),
],
)
check_sdk(module)
try:
connection = create_connection(module.params.pop('auth'))
affinity_labels_service = connection.system_service().affinity_labels_service()
affinity_labels_module = AffinityLabelsModule(
connection=connection,
module=module,
service=affinity_labels_service,
)
state = module.params['state']
if state == 'present':
ret = affinity_labels_module.create()
elif state == 'absent':
ret = affinity_labels_module.remove()
module.exit_json(**ret)
except Exception as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
finally:
connection.close(logout=False)
if __name__ == "__main__":
main()
| gpl-3.0 |
Gateworks/platform-external-chromium_org | tools/telemetry/telemetry/core/discover_unittest.py | 25 | 1836 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from telemetry.core import discover
from telemetry.core import util
class DiscoverTest(unittest.TestCase):
def setUp(self):
self._base_dir = util.GetUnittestDataDir()
self._start_dir = os.path.join(self._base_dir, 'discoverable_classes')
self._base_class = Exception
def testDiscoverClassesBasic(self):
classes = discover.DiscoverClasses(
self._start_dir, self._base_dir, self._base_class)
actual_classes = dict(
(name, cls.__name__) for name, cls in classes.iteritems())
expected_classes = {
'discover_dummyclass': 'DummyException',
'another_discover_dummyclass': 'DummyExceptionImpl2',
}
self.assertEqual(actual_classes, expected_classes)
def testDiscoverClassesWithPattern(self):
classes = discover.DiscoverClasses(
self._start_dir, self._base_dir, self._base_class,
pattern='another*')
actual_classes = dict(
(name, cls.__name__) for name, cls in classes.iteritems())
expected_classes = {
'another_discover_dummyclass': 'DummyExceptionImpl2',
}
self.assertEqual(actual_classes, expected_classes)
def testDiscoverClassesByClassName(self):
classes = discover.DiscoverClasses(
self._start_dir, self._base_dir, self._base_class,
index_by_class_name=True)
actual_classes = dict(
(name, cls.__name__) for name, cls in classes.iteritems())
expected_classes = {
'dummy_exception': 'DummyException',
'dummy_exception_impl1': 'DummyExceptionImpl1',
'dummy_exception_impl2': 'DummyExceptionImpl2',
}
self.assertEqual(actual_classes, expected_classes)
| bsd-3-clause |
andersk/zulip | zerver/lib/cache.py | 2 | 28356 | # See https://zulip.readthedocs.io/en/latest/subsystems/caching.html for docs
import hashlib
import logging
import os
import re
import secrets
import sys
import time
import traceback
from functools import lru_cache, wraps
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
Iterable,
List,
Optional,
Sequence,
Tuple,
TypeVar,
cast,
)
from django.conf import settings
from django.core.cache import cache as djcache
from django.core.cache import caches
from django.core.cache.backends.base import BaseCache
from django.db.models import Q
from django.http import HttpRequest
from zerver.lib.utils import make_safe_digest, statsd, statsd_key
if TYPE_CHECKING:
# These modules have to be imported for type annotations but
# they cannot be imported at runtime due to cyclic dependency.
from zerver.models import Message, Realm, UserProfile
MEMCACHED_MAX_KEY_LENGTH = 250
FuncT = TypeVar("FuncT", bound=Callable[..., object])
logger = logging.getLogger()
class NotFoundInCache(Exception):
pass
remote_cache_time_start = 0.0
remote_cache_total_time = 0.0
remote_cache_total_requests = 0
def get_remote_cache_time() -> float:
return remote_cache_total_time
def get_remote_cache_requests() -> int:
return remote_cache_total_requests
def remote_cache_stats_start() -> None:
global remote_cache_time_start
remote_cache_time_start = time.time()
def remote_cache_stats_finish() -> None:
global remote_cache_total_time
global remote_cache_total_requests
global remote_cache_time_start
remote_cache_total_requests += 1
remote_cache_total_time += time.time() - remote_cache_time_start
def get_or_create_key_prefix() -> str:
if settings.PUPPETEER_TESTS:
# This sets the prefix for the benefit of the Puppeteer tests.
#
# Having a fixed key is OK since we don't support running
# multiple copies of the Puppeteer tests at the same time anyway.
return "puppeteer_tests:"
elif settings.TEST_SUITE:
# The Python tests overwrite KEY_PREFIX on each test, but use
# this codepath as well, just to save running the more complex
# code below for reading the normal key prefix.
return "django_tests_unused:"
# directory `var` should exist in production
os.makedirs(os.path.join(settings.DEPLOY_ROOT, "var"), exist_ok=True)
filename = os.path.join(settings.DEPLOY_ROOT, "var", "remote_cache_prefix")
try:
with open(filename, "x") as f:
prefix = secrets.token_hex(16) + ":"
f.write(prefix + "\n")
except FileExistsError:
tries = 1
while tries < 10:
with open(filename) as f:
prefix = f.readline()[:-1]
if len(prefix) == 33:
break
tries += 1
prefix = ""
time.sleep(0.5)
if not prefix:
print("Could not read remote cache key prefix file")
sys.exit(1)
return prefix
KEY_PREFIX: str = get_or_create_key_prefix()
def bounce_key_prefix_for_testing(test_name: str) -> None:
global KEY_PREFIX
KEY_PREFIX = test_name + ":" + str(os.getpid()) + ":"
# We are taking the hash of the KEY_PREFIX to decrease the size of the key.
# Memcached keys should have a length of less than 250.
KEY_PREFIX = hashlib.sha1(KEY_PREFIX.encode("utf-8")).hexdigest() + ":"
def get_cache_backend(cache_name: Optional[str]) -> BaseCache:
if cache_name is None:
return djcache
return caches[cache_name]
def get_cache_with_key(
keyfunc: Callable[..., str],
cache_name: Optional[str] = None,
) -> Callable[[FuncT], FuncT]:
"""
The main goal of this function is to get a value from the cache, much like
cache_with_key. A cache value can contain any data, including None, so a
NotFoundInCache exception is raised (rather than a sentinel returned) when
the key isn't found in the cache.
"""
def decorator(func: FuncT) -> FuncT:
@wraps(func)
def func_with_caching(*args: object, **kwargs: object) -> object:
key = keyfunc(*args, **kwargs)
try:
val = cache_get(key, cache_name=cache_name)
except InvalidCacheKeyException:
stack_trace = traceback.format_exc()
log_invalid_cache_keys(stack_trace, [key])
val = None
if val is not None:
return val[0]
raise NotFoundInCache()
return cast(FuncT, func_with_caching) # https://github.com/python/mypy/issues/1927
return decorator
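# Illustrative sketch (not part of upstream Zulip): get_cache_with_key only
# reads the cache -- the decorated body is never executed -- so callers catch
# NotFoundInCache on a miss and backfill the cache themselves. The key format
# and helper names below are hypothetical.
def _example_row_cache_key(row_id: int) -> str:
    return f"example_row:{row_id}"
@get_cache_with_key(_example_row_cache_key)
def _get_example_row_from_cache(row_id: int) -> object:
    raise AssertionError("unreachable: get_cache_with_key never calls the body")
def _fetch_example_row(row_id: int, loader: Callable[[int], object]) -> object:
    try:
        return _get_example_row_from_cache(row_id)
    except NotFoundInCache:
        row = loader(row_id)
        cache_set(_example_row_cache_key(row_id), row)
        return row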
def cache_with_key(
keyfunc: Callable[..., str],
cache_name: Optional[str] = None,
timeout: Optional[int] = None,
with_statsd_key: Optional[str] = None,
) -> Callable[[FuncT], FuncT]:
"""Decorator which applies Django caching to a function.
Decorator argument is a function which computes a cache key
from the original function's arguments. You are responsible
for avoiding collisions with other uses of this decorator or
other uses of caching."""
def decorator(func: FuncT) -> FuncT:
@wraps(func)
def func_with_caching(*args: object, **kwargs: object) -> object:
key = keyfunc(*args, **kwargs)
try:
val = cache_get(key, cache_name=cache_name)
except InvalidCacheKeyException:
stack_trace = traceback.format_exc()
log_invalid_cache_keys(stack_trace, [key])
return func(*args, **kwargs)
extra = ""
if cache_name == "database":
extra = ".dbcache"
if with_statsd_key is not None:
metric_key = with_statsd_key
else:
metric_key = statsd_key(key)
status = "hit" if val is not None else "miss"
statsd.incr(f"cache{extra}.{metric_key}.{status}")
# Values are singleton tuples so that we can distinguish
# a result of None from a missing key.
if val is not None:
return val[0]
val = func(*args, **kwargs)
cache_set(key, val, cache_name=cache_name, timeout=timeout)
return val
return cast(FuncT, func_with_caching) # https://github.com/python/mypy/issues/1927
return decorator
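# Illustrative sketch (not part of upstream Zulip): typical cache_with_key
# usage. The key function maps the arguments to a unique, memcached-safe key,
# and the decorated body runs only on a cache miss. Names and the timeout are
# hypothetical.
def _example_count_cache_key(realm_id: int) -> str:
    return f"example_realm_count:{realm_id}"
@cache_with_key(_example_count_cache_key, timeout=3600 * 24 * 7)
def _get_example_realm_count(realm_id: int) -> int:
    return 0  # stand-in for an expensive database aggregation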
class InvalidCacheKeyException(Exception):
pass
def log_invalid_cache_keys(stack_trace: str, key: List[str]) -> None:
logger.warning(
"Invalid cache key used: %s\nStack trace: %s\n",
key,
stack_trace,
)
def validate_cache_key(key: str) -> None:
if not key.startswith(KEY_PREFIX):
key = KEY_PREFIX + key
# Theoretically memcached can handle non-ascii characters
# and only "control" characters are strictly disallowed, see:
# https://github.com/memcached/memcached/blob/master/doc/protocol.txt
# However, limiting the characters we allow in keys simplifies things,
# and anyway we use make_safe_digest when forming some keys to ensure
# the resulting keys fit the regex below.
# The regex checks "all characters between ! and ~ in the ascii table",
# which happens to be the set of all "nice" ascii characters.
if not bool(re.fullmatch(r"([!-~])+", key)):
raise InvalidCacheKeyException("Invalid characters in the cache key: " + key)
if len(key) > MEMCACHED_MAX_KEY_LENGTH:
raise InvalidCacheKeyException(f"Cache key too long: {key} Length: {len(key)}")
def cache_set(
key: str, val: Any, cache_name: Optional[str] = None, timeout: Optional[int] = None
) -> None:
final_key = KEY_PREFIX + key
validate_cache_key(final_key)
remote_cache_stats_start()
cache_backend = get_cache_backend(cache_name)
cache_backend.set(final_key, (val,), timeout=timeout)
remote_cache_stats_finish()
def cache_get(key: str, cache_name: Optional[str] = None) -> Any:
final_key = KEY_PREFIX + key
validate_cache_key(final_key)
remote_cache_stats_start()
cache_backend = get_cache_backend(cache_name)
ret = cache_backend.get(final_key)
remote_cache_stats_finish()
return ret
def cache_get_many(keys: List[str], cache_name: Optional[str] = None) -> Dict[str, Any]:
keys = [KEY_PREFIX + key for key in keys]
for key in keys:
validate_cache_key(key)
remote_cache_stats_start()
ret = get_cache_backend(cache_name).get_many(keys)
remote_cache_stats_finish()
return {key[len(KEY_PREFIX) :]: value for key, value in ret.items()}
def safe_cache_get_many(keys: List[str], cache_name: Optional[str] = None) -> Dict[str, Any]:
"""Variant of cache_get_many that drops any keys that fail
validation, rather than throwing an exception visible to the
caller."""
try:
# Almost always the keys will all be correct, so we just try
# to do normal cache_get_many to avoid the overhead of
# validating all the keys here.
return cache_get_many(keys, cache_name)
except InvalidCacheKeyException:
stack_trace = traceback.format_exc()
good_keys, bad_keys = filter_good_and_bad_keys(keys)
log_invalid_cache_keys(stack_trace, bad_keys)
return cache_get_many(good_keys, cache_name)
def cache_set_many(
items: Dict[str, Any], cache_name: Optional[str] = None, timeout: Optional[int] = None
) -> None:
new_items = {}
for key in items:
new_key = KEY_PREFIX + key
validate_cache_key(new_key)
new_items[new_key] = items[key]
items = new_items
remote_cache_stats_start()
get_cache_backend(cache_name).set_many(items, timeout=timeout)
remote_cache_stats_finish()
def safe_cache_set_many(
items: Dict[str, Any], cache_name: Optional[str] = None, timeout: Optional[int] = None
) -> None:
"""Variant of cache_set_many that drops saving any keys that fail
validation, rather than throwing an exception visible to the
caller."""
try:
# Almost always the keys will all be correct, so we just try
# to do normal cache_set_many to avoid the overhead of
# validating all the keys here.
return cache_set_many(items, cache_name, timeout)
except InvalidCacheKeyException:
stack_trace = traceback.format_exc()
good_keys, bad_keys = filter_good_and_bad_keys(list(items.keys()))
log_invalid_cache_keys(stack_trace, bad_keys)
good_items = {key: items[key] for key in good_keys}
return cache_set_many(good_items, cache_name, timeout)
def cache_delete(key: str, cache_name: Optional[str] = None) -> None:
final_key = KEY_PREFIX + key
validate_cache_key(final_key)
remote_cache_stats_start()
get_cache_backend(cache_name).delete(final_key)
remote_cache_stats_finish()
def cache_delete_many(items: Iterable[str], cache_name: Optional[str] = None) -> None:
keys = [KEY_PREFIX + item for item in items]
for key in keys:
validate_cache_key(key)
remote_cache_stats_start()
get_cache_backend(cache_name).delete_many(keys)
remote_cache_stats_finish()
def filter_good_and_bad_keys(keys: List[str]) -> Tuple[List[str], List[str]]:
good_keys = []
bad_keys = []
for key in keys:
try:
validate_cache_key(key)
good_keys.append(key)
except InvalidCacheKeyException:
bad_keys.append(key)
return good_keys, bad_keys
# Generic_bulk_cached fetch and its helpers. We start with declaring
# a few type variables that help define its interface.
# Type for the cache's keys; will typically be int or str.
ObjKT = TypeVar("ObjKT")
# Type for items to be fetched from the database (e.g. a Django model object)
ItemT = TypeVar("ItemT")
# Type for items to be stored in the cache (e.g. a dictionary serialization).
# Will equal ItemT unless a cache_transformer is specified.
CacheItemT = TypeVar("CacheItemT")
# Type for compressed items for storage in the cache. For
# serializable objects, will be the object; if encoded, bytes.
CompressedItemT = TypeVar("CompressedItemT")
# Required arguments are as follows:
# * object_ids: The list of object ids to look up
# * cache_key_function: object_id => cache key
# * query_function: [object_ids] => [objects from database]
# * setter: Function to call before storing items to cache (e.g. compression)
# * extractor: Function to call on items returned from cache
# (e.g. decompression). Should be the inverse of the setter
# function.
# * id_fetcher: Function mapping an object from database => object_id
# (in case we're using a key more complex than obj.id)
# * cache_transformer: Function mapping an object from database =>
# value for cache (in case the values that we're caching are some
# function of the objects, not the objects themselves)
def generic_bulk_cached_fetch(
cache_key_function: Callable[[ObjKT], str],
query_function: Callable[[List[ObjKT]], Iterable[ItemT]],
object_ids: Sequence[ObjKT],
*,
extractor: Callable[[CompressedItemT], CacheItemT],
setter: Callable[[CacheItemT], CompressedItemT],
id_fetcher: Callable[[ItemT], ObjKT],
cache_transformer: Callable[[ItemT], CacheItemT],
) -> Dict[ObjKT, CacheItemT]:
if len(object_ids) == 0:
# Nothing to fetch.
return {}
cache_keys: Dict[ObjKT, str] = {}
for object_id in object_ids:
cache_keys[object_id] = cache_key_function(object_id)
cached_objects_compressed: Dict[str, Tuple[CompressedItemT]] = safe_cache_get_many(
[cache_keys[object_id] for object_id in object_ids],
)
cached_objects: Dict[str, CacheItemT] = {}
for (key, val) in cached_objects_compressed.items():
cached_objects[key] = extractor(val[0])
needed_ids = [
object_id for object_id in object_ids if cache_keys[object_id] not in cached_objects
]
# Only call query_function if there are some ids to fetch from the database:
if len(needed_ids) > 0:
db_objects = query_function(needed_ids)
else:
db_objects = []
items_for_remote_cache: Dict[str, Tuple[CompressedItemT]] = {}
for obj in db_objects:
key = cache_keys[id_fetcher(obj)]
item = cache_transformer(obj)
items_for_remote_cache[key] = (setter(item),)
cached_objects[key] = item
if len(items_for_remote_cache) > 0:
safe_cache_set_many(items_for_remote_cache)
return {
object_id: cached_objects[cache_keys[object_id]]
for object_id in object_ids
if cache_keys[object_id] in cached_objects
}
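# Illustrative sketch (not part of upstream Zulip): bulk-fetching UserProfile
# rows by id. The rows cache as-is, so extractor/setter are identity
# functions; the helper name is hypothetical, while
# user_profile_by_id_cache_key is defined further down in this module.
def _example_bulk_fetch_user_rows(user_ids: List[int]) -> Dict[int, "UserProfile"]:
    from zerver.models import UserProfile
    return generic_bulk_cached_fetch(
        cache_key_function=user_profile_by_id_cache_key,
        query_function=lambda ids: UserProfile.objects.filter(id__in=ids),
        object_ids=user_ids,
        extractor=lambda obj: obj,
        setter=lambda obj: obj,
        id_fetcher=lambda row: row.id,
        cache_transformer=lambda row: row,
    )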
def transformed_bulk_cached_fetch(
cache_key_function: Callable[[ObjKT], str],
query_function: Callable[[List[ObjKT]], Iterable[ItemT]],
object_ids: Sequence[ObjKT],
*,
id_fetcher: Callable[[ItemT], ObjKT],
cache_transformer: Callable[[ItemT], CacheItemT],
) -> Dict[ObjKT, CacheItemT]:
return generic_bulk_cached_fetch(
cache_key_function,
query_function,
object_ids,
extractor=lambda obj: obj,
setter=lambda obj: obj,
id_fetcher=id_fetcher,
cache_transformer=cache_transformer,
)
def bulk_cached_fetch(
cache_key_function: Callable[[ObjKT], str],
query_function: Callable[[List[ObjKT]], Iterable[ItemT]],
object_ids: Sequence[ObjKT],
*,
id_fetcher: Callable[[ItemT], ObjKT],
) -> Dict[ObjKT, ItemT]:
return transformed_bulk_cached_fetch(
cache_key_function,
query_function,
object_ids,
id_fetcher=id_fetcher,
cache_transformer=lambda obj: obj,
)
def preview_url_cache_key(url: str) -> str:
return f"preview_url:{make_safe_digest(url)}"
def display_recipient_cache_key(recipient_id: int) -> str:
return f"display_recipient_dict:{recipient_id}"
def display_recipient_bulk_get_users_by_id_cache_key(user_id: int) -> str:
# Cache key function for a function for bulk fetching users, used internally
# by display_recipient code.
return "bulk_fetch_display_recipients:" + user_profile_by_id_cache_key(user_id)
def user_profile_cache_key_id(email: str, realm_id: int) -> str:
return f"user_profile:{make_safe_digest(email.strip())}:{realm_id}"
def user_profile_cache_key(email: str, realm: "Realm") -> str:
return user_profile_cache_key_id(email, realm.id)
def user_profile_delivery_email_cache_key(delivery_email: str, realm: "Realm") -> str:
return f"user_profile_by_delivery_email:{make_safe_digest(delivery_email.strip())}:{realm.id}"
def bot_profile_cache_key(email: str) -> str:
return f"bot_profile:{make_safe_digest(email.strip())}"
def user_profile_by_id_cache_key(user_profile_id: int) -> str:
return f"user_profile_by_id:{user_profile_id}"
def user_profile_by_api_key_cache_key(api_key: str) -> str:
return f"user_profile_by_api_key:{api_key}"
realm_user_dict_fields: List[str] = [
"id",
"full_name",
"email",
"avatar_source",
"avatar_version",
"is_active",
"role",
"is_billing_admin",
"is_bot",
"realm_id",
"timezone",
"date_joined",
"bot_owner_id",
"delivery_email",
"bot_type",
"long_term_idle",
]
def realm_user_dicts_cache_key(realm_id: int) -> str:
return f"realm_user_dicts:{realm_id}"
def get_muting_users_cache_key(muted_user_id: int) -> str:
return f"muting_users_list:{muted_user_id}"
def get_realm_used_upload_space_cache_key(realm: "Realm") -> str:
return f"realm_used_upload_space:{realm.id}"
def active_user_ids_cache_key(realm_id: int) -> str:
return f"active_user_ids:{realm_id}"
def active_non_guest_user_ids_cache_key(realm_id: int) -> str:
return f"active_non_guest_user_ids:{realm_id}"
bot_dict_fields: List[str] = [
"api_key",
"avatar_source",
"avatar_version",
"bot_owner_id",
"bot_type",
"default_all_public_streams",
"default_events_register_stream__name",
"default_sending_stream__name",
"email",
"full_name",
"id",
"is_active",
"realm_id",
]
def bot_dicts_in_realm_cache_key(realm: "Realm") -> str:
return f"bot_dicts_in_realm:{realm.id}"
def get_stream_cache_key(stream_name: str, realm_id: int) -> str:
return f"stream_by_realm_and_name:{realm_id}:{make_safe_digest(stream_name.strip().lower())}"
def delete_user_profile_caches(user_profiles: Iterable["UserProfile"]) -> None:
# Imported here to avoid cyclic dependency.
from zerver.lib.users import get_all_api_keys
from zerver.models import is_cross_realm_bot_email
keys = []
for user_profile in user_profiles:
keys.append(user_profile_by_id_cache_key(user_profile.id))
for api_key in get_all_api_keys(user_profile):
keys.append(user_profile_by_api_key_cache_key(api_key))
keys.append(user_profile_cache_key(user_profile.email, user_profile.realm))
keys.append(
user_profile_delivery_email_cache_key(user_profile.delivery_email, user_profile.realm)
)
if user_profile.is_bot and is_cross_realm_bot_email(user_profile.email):
# Handle clearing system bots from their special cache.
keys.append(bot_profile_cache_key(user_profile.email))
cache_delete_many(keys)
def delete_display_recipient_cache(user_profile: "UserProfile") -> None:
from zerver.models import Subscription # We need to import here to avoid cyclic dependency.
recipient_ids = Subscription.objects.filter(user_profile=user_profile)
recipient_ids = recipient_ids.values_list("recipient_id", flat=True)
keys = [display_recipient_cache_key(rid) for rid in recipient_ids]
keys.append(display_recipient_bulk_get_users_by_id_cache_key(user_profile.id))
cache_delete_many(keys)
def changed(kwargs: Any, fields: List[str]) -> bool:
if kwargs.get("update_fields") is None:
# adds/deletes should invalidate the cache
return True
update_fields = set(kwargs["update_fields"])
for f in fields:
if f in update_fields:
return True
return False
# Called by models.py to flush the user_profile cache whenever we save
# a user_profile object
def flush_user_profile(sender: Any, **kwargs: Any) -> None:
user_profile = kwargs["instance"]
delete_user_profile_caches([user_profile])
# Invalidate our active_users_in_realm info dict if any user has changed
# the fields in the dict or become (in)active
if changed(kwargs, realm_user_dict_fields):
cache_delete(realm_user_dicts_cache_key(user_profile.realm_id))
if changed(kwargs, ["is_active"]):
cache_delete(active_user_ids_cache_key(user_profile.realm_id))
cache_delete(active_non_guest_user_ids_cache_key(user_profile.realm_id))
if changed(kwargs, ["role"]):
cache_delete(active_non_guest_user_ids_cache_key(user_profile.realm_id))
if changed(kwargs, ["email", "full_name", "id", "is_mirror_dummy"]):
delete_display_recipient_cache(user_profile)
# Invalidate our bots_in_realm info dict if any bot has
# changed the fields in the dict or become (in)active
if user_profile.is_bot and changed(kwargs, bot_dict_fields):
cache_delete(bot_dicts_in_realm_cache_key(user_profile.realm))
def flush_muting_users_cache(sender: Any, **kwargs: Any) -> None:
mute_object = kwargs["instance"]
cache_delete(get_muting_users_cache_key(mute_object.muted_user_id))
# Called by models.py to flush various caches whenever we save
# a Realm object. The main tricky thing here is that Realm info is
# generally cached indirectly through user_profile objects.
def flush_realm(sender: Any, from_deletion: bool = False, **kwargs: Any) -> None:
realm = kwargs["instance"]
users = realm.get_active_users()
delete_user_profile_caches(users)
if (
from_deletion
or realm.deactivated
or (kwargs["update_fields"] is not None and "string_id" in kwargs["update_fields"])
):
cache_delete(realm_user_dicts_cache_key(realm.id))
cache_delete(active_user_ids_cache_key(realm.id))
cache_delete(bot_dicts_in_realm_cache_key(realm))
cache_delete(realm_alert_words_cache_key(realm))
cache_delete(realm_alert_words_automaton_cache_key(realm))
cache_delete(active_non_guest_user_ids_cache_key(realm.id))
cache_delete(realm_rendered_description_cache_key(realm))
cache_delete(realm_text_description_cache_key(realm))
elif changed(kwargs, ["description"]):
cache_delete(realm_rendered_description_cache_key(realm))
cache_delete(realm_text_description_cache_key(realm))
def realm_alert_words_cache_key(realm: "Realm") -> str:
return f"realm_alert_words:{realm.string_id}"
def realm_alert_words_automaton_cache_key(realm: "Realm") -> str:
return f"realm_alert_words_automaton:{realm.string_id}"
def realm_rendered_description_cache_key(realm: "Realm") -> str:
return f"realm_rendered_description:{realm.string_id}"
def realm_text_description_cache_key(realm: "Realm") -> str:
return f"realm_text_description:{realm.string_id}"
# Called by models.py to flush the stream cache whenever we save a stream
# object.
def flush_stream(sender: Any, **kwargs: Any) -> None:
from zerver.models import UserProfile
stream = kwargs["instance"]
items_for_remote_cache = {}
if kwargs.get("update_fields") is None:
cache_delete(get_stream_cache_key(stream.name, stream.realm_id))
else:
items_for_remote_cache[get_stream_cache_key(stream.name, stream.realm_id)] = (stream,)
cache_set_many(items_for_remote_cache)
if (
kwargs.get("update_fields") is None
or "name" in kwargs["update_fields"]
and UserProfile.objects.filter(
Q(default_sending_stream=stream) | Q(default_events_register_stream=stream)
).exists()
):
cache_delete(bot_dicts_in_realm_cache_key(stream.realm))
def flush_used_upload_space_cache(sender: Any, **kwargs: Any) -> None:
attachment = kwargs["instance"]
if kwargs.get("created") is None or kwargs.get("created") is True:
cache_delete(get_realm_used_upload_space_cache_key(attachment.owner.realm))
def to_dict_cache_key_id(message_id: int) -> str:
return f"message_dict:{message_id}"
def to_dict_cache_key(message: "Message", realm_id: Optional[int] = None) -> str:
return to_dict_cache_key_id(message.id)
def open_graph_description_cache_key(content: bytes, request: HttpRequest) -> str:
return "open_graph_description_path:{}".format(make_safe_digest(request.META["PATH_INFO"]))
def flush_message(sender: Any, **kwargs: Any) -> None:
message = kwargs["instance"]
cache_delete(to_dict_cache_key_id(message.id))
def flush_submessage(sender: Any, **kwargs: Any) -> None:
submessage = kwargs["instance"]
# submessages are not cached directly, they are part of their
# parent messages
message_id = submessage.message_id
cache_delete(to_dict_cache_key_id(message_id))
def ignore_unhashable_lru_cache(
maxsize: int = 128, typed: bool = False
) -> Callable[[FuncT], FuncT]:
"""
This is a wrapper over lru_cache function. It adds following features on
top of lru_cache:
* It will not cache the result of functions with unhashable arguments.
* It will clear cache whenever zerver.lib.cache.KEY_PREFIX changes.
"""
internal_decorator = lru_cache(maxsize=maxsize, typed=typed)
def decorator(user_function: FuncT) -> FuncT:
if settings.DEVELOPMENT and not settings.TEST_SUITE: # nocoverage
# In the development environment, we want every file
# change to refresh the source files from disk.
return user_function
# Casting to Any since we're about to monkey-patch this.
cache_enabled_user_function: Any = internal_decorator(user_function)
def wrapper(*args: object, **kwargs: object) -> object:
if not hasattr(cache_enabled_user_function, "key_prefix"):
cache_enabled_user_function.key_prefix = KEY_PREFIX
if cache_enabled_user_function.key_prefix != KEY_PREFIX:
# Clear cache when cache.KEY_PREFIX changes. This is used in
# tests.
cache_enabled_user_function.cache_clear()
cache_enabled_user_function.key_prefix = KEY_PREFIX
try:
return cache_enabled_user_function(*args, **kwargs)
except TypeError:
# args or kwargs contains an element which is unhashable. In
# this case we don't cache the result.
pass
# Deliberately calling this function from outside of exception
# handler to get a more descriptive traceback. Otherwise traceback
# can include the exception from cached_enabled_user_function as
# well.
return user_function(*args, **kwargs)
setattr(wrapper, "cache_info", cache_enabled_user_function.cache_info)
setattr(wrapper, "cache_clear", cache_enabled_user_function.cache_clear)
return cast(FuncT, wrapper) # https://github.com/python/mypy/issues/1927
return decorator
def dict_to_items_tuple(user_function: Callable[..., Any]) -> Callable[..., Any]:
"""Wrapper that converts any dict args to dict item tuples."""
def dict_to_tuple(arg: Any) -> Any:
if isinstance(arg, dict):
return tuple(sorted(arg.items()))
return arg
def wrapper(*args: Any, **kwargs: Any) -> Any:
new_args = (dict_to_tuple(arg) for arg in args)
return user_function(*new_args, **kwargs)
return wrapper
def items_tuple_to_dict(user_function: Callable[..., Any]) -> Callable[..., Any]:
"""Wrapper that converts any dict items tuple args to dicts."""
def dict_items_to_dict(arg: Any) -> Any:
if isinstance(arg, tuple):
try:
return dict(arg)
except TypeError:
pass
return arg
def wrapper(*args: Any, **kwargs: Any) -> Any:
new_args = (dict_items_to_dict(arg) for arg in args)
new_kwargs = {key: dict_items_to_dict(val) for key, val in kwargs.items()}
return user_function(*new_args, **new_kwargs)
return wrapper
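# Illustrative sketch (not part of upstream Zulip): the three helpers above
# compose so that a function taking a dict argument can still be lru-cached.
# dict_to_items_tuple makes the argument hashable before the cache sees it,
# and items_tuple_to_dict restores it for the real function body.
@dict_to_items_tuple
@ignore_unhashable_lru_cache(maxsize=64)
@items_tuple_to_dict
def _example_summarize_settings(settings: Dict[str, Any]) -> str:
    return ",".join(f"{k}={v}" for k, v in sorted(settings.items()))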
| apache-2.0 |
leoliujie/odoo | addons/l10n_hn/__openerp__.py | 343 | 2260 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2009-2010 Salvatore Josué Trimarchi Pinto <[email protected]>
# (http://trigluu.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#
# This module provides a minimal Honduran chart of accounts that can be use
# to build upon a more complex one. It also includes a chart of taxes and
# the Lempira currency.
#
# This module is based on the Guatemalan chart of accounts:
# Copyright (c) 2009-2010 Soluciones Tecnológicas Prisma S.A. All Rights Reserved.
# José Rodrigo Fernández Menegazzo, Soluciones Tecnológicas Prisma S.A.
# (http://www.solucionesprisma.com)
#
# This module works with OpenERP 6.0 to 8.0
#
{
'name': 'Honduras - Accounting',
'version': '0.1',
'category': 'Localization/Account Charts',
'description': """
This is the base module to manage the accounting chart for Honduras.
====================================================================
Agrega una nomenclatura contable para Honduras. También incluye impuestos y la
moneda Lempira. -- Adds accounting chart for Honduras. It also includes taxes
and the Lempira currency.""",
'author': 'Salvatore Josue Trimarchi Pinto',
'website': 'http://trigluu.com',
'depends': ['base', 'account', 'account_chart'],
'data': [
'account_types.xml',
'account_chart.xml',
'account_tax.xml',
'l10n_hn_base.xml',
],
'demo': [],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
jonfoster/pyxb-upstream-mirror | doc/conf.py | 3 | 6355 | # -*- coding: utf-8 -*-
#
# PyXB documentation build configuration file, created by
# sphinx-quickstart on Tue May 19 03:28:52 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.todo', 'extapi' ]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'PyXB'
copyright = u'2009-2013, Peter A. Bigot'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.2'
# The full version, including alpha/beta/rc tags.
release = '1.2.3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = [ 'W3C', 'api', 'html', 'Images', '_templates' ]
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'PyXBdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'PyXB.tex', u'PyXB Documentation',
u'Peter A. Bigot', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
| apache-2.0 |
Moriadry/tensorflow | tensorflow/python/lib/io/python_io.py | 112 | 1273 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Python functions for directly manipulating TFRecord-formatted files.
See the @{$python/python_io} guide.
@@TFRecordWriter
@@tf_record_iterator
@@TFRecordCompressionType
@@TFRecordOptions
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.lib.io.tf_record import *
# pylint: enable=wildcard-import
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = []
remove_undocumented(__name__, _allowed_symbols)
| apache-2.0 |
z1gm4/desarrollo_web_udp | env/lib/python2.7/site-packages/django/conf/locale/cs/formats.py | 504 | 1702 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. E Y'
TIME_FORMAT = 'G:i'
DATETIME_FORMAT = 'j. E Y G:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y G:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d.%m.%Y', '%d.%m.%y', # '05.01.2006', '05.01.06'
'%d. %m. %Y', '%d. %m. %y', # '5. 1. 2006', '5. 1. 06'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
]
# Kept ISO formats as one is in first position
TIME_INPUT_FORMATS = [
'%H:%M:%S', # '04:30:59'
'%H.%M', # '04.30'
'%H:%M', # '04:30'
]
DATETIME_INPUT_FORMATS = [
'%d.%m.%Y %H:%M:%S', # '05.01.2006 04:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '05.01.2006 04:30:59.000200'
'%d.%m.%Y %H.%M', # '05.01.2006 04.30'
'%d.%m.%Y %H:%M', # '05.01.2006 04:30'
'%d.%m.%Y', # '05.01.2006'
'%d. %m. %Y %H:%M:%S', # '05. 01. 2006 04:30:59'
'%d. %m. %Y %H:%M:%S.%f', # '05. 01. 2006 04:30:59.000200'
'%d. %m. %Y %H.%M', # '05. 01. 2006 04.30'
'%d. %m. %Y %H:%M', # '05. 01. 2006 04:30'
'%d. %m. %Y', # '05. 01. 2006'
'%Y-%m-%d %H.%M', # '2006-01-05 04.30'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
| gpl-3.0 |
akash1808/glance | glance/tests/unit/v2/test_image_tags_resource.py | 18 | 4136 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
import glance.api.v2.image_tags
from glance.common import exception
from glance.tests.unit import base
import glance.tests.unit.utils as unit_test_utils
import glance.tests.unit.v2.test_image_data_resource as image_data_tests
import glance.tests.utils as test_utils
class TestImageTagsController(base.IsolatedUnitTest):
def setUp(self):
super(TestImageTagsController, self).setUp()
self.db = unit_test_utils.FakeDB()
self.controller = glance.api.v2.image_tags.Controller(self.db)
def test_create_tag(self):
request = unit_test_utils.get_fake_request()
self.controller.update(request, unit_test_utils.UUID1, 'dink')
context = request.context
tags = self.db.image_tag_get_all(context, unit_test_utils.UUID1)
self.assertEqual(1, len([tag for tag in tags if tag == 'dink']))
def test_create_too_many_tags(self):
self.config(image_tag_quota=0)
request = unit_test_utils.get_fake_request()
self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
self.controller.update,
request, unit_test_utils.UUID1, 'dink')
def test_create_duplicate_tag_ignored(self):
request = unit_test_utils.get_fake_request()
self.controller.update(request, unit_test_utils.UUID1, 'dink')
self.controller.update(request, unit_test_utils.UUID1, 'dink')
context = request.context
tags = self.db.image_tag_get_all(context, unit_test_utils.UUID1)
self.assertEqual(1, len([tag for tag in tags if tag == 'dink']))
def test_update_tag_of_non_existing_image(self):
request = unit_test_utils.get_fake_request()
self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
request, "abcd", "dink")
def test_delete_tag_forbidden(self):
def fake_get(self):
raise exception.Forbidden()
image_repo = image_data_tests.FakeImageRepo()
image_repo.get = fake_get
def get_fake_repo(self):
return image_repo
self.controller.gateway.get_repo = get_fake_repo
request = unit_test_utils.get_fake_request()
self.assertRaises(webob.exc.HTTPForbidden, self.controller.update,
request, unit_test_utils.UUID1, "ping")
def test_delete_tag(self):
request = unit_test_utils.get_fake_request()
self.controller.delete(request, unit_test_utils.UUID1, 'ping')
def test_delete_tag_not_found(self):
request = unit_test_utils.get_fake_request()
self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
request, unit_test_utils.UUID1, 'what')
def test_delete_tag_of_non_existing_image(self):
request = unit_test_utils.get_fake_request()
self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
request, "abcd", "dink")
class TestImagesSerializer(test_utils.BaseTestCase):
def setUp(self):
super(TestImagesSerializer, self).setUp()
self.serializer = glance.api.v2.image_tags.ResponseSerializer()
def test_create_tag(self):
response = webob.Response()
self.serializer.update(response, None)
self.assertEqual(204, response.status_int)
def test_delete_tag(self):
response = webob.Response()
self.serializer.delete(response, None)
self.assertEqual(204, response.status_int)
| apache-2.0 |
grdlok/UStar-dl | src/youtube_dl/extractor/screencast.py | 12 | 4228 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
compat_parse_qs,
compat_urllib_request,
)
class ScreencastIE(InfoExtractor):
_VALID_URL = r'https?://www\.screencast\.com/t/(?P<id>[a-zA-Z0-9]+)'
_TESTS = [{
'url': 'http://www.screencast.com/t/3ZEjQXlT',
'md5': '917df1c13798a3e96211dd1561fded83',
'info_dict': {
'id': '3ZEjQXlT',
'ext': 'm4v',
'title': 'Color Measurement with Ocean Optics Spectrometers',
'description': 'md5:240369cde69d8bed61349a199c5fb153',
'thumbnail': 're:^https?://.*\.(?:gif|jpg)$',
}
}, {
'url': 'http://www.screencast.com/t/V2uXehPJa1ZI',
'md5': 'e8e4b375a7660a9e7e35c33973410d34',
'info_dict': {
'id': 'V2uXehPJa1ZI',
'ext': 'mov',
'title': 'The Amadeus Spectrometer',
'description': 're:^In this video, our friends at.*To learn more about Amadeus, visit',
'thumbnail': 're:^https?://.*\.(?:gif|jpg)$',
}
}, {
'url': 'http://www.screencast.com/t/aAB3iowa',
'md5': 'dedb2734ed00c9755761ccaee88527cd',
'info_dict': {
'id': 'aAB3iowa',
'ext': 'mp4',
'title': 'Google Earth Export',
'description': 'Provides a demo of a CommunityViz export to Google Earth, one of the 3D viewing options.',
'thumbnail': 're:^https?://.*\.(?:gif|jpg)$',
}
}, {
'url': 'http://www.screencast.com/t/X3ddTrYh',
'md5': '669ee55ff9c51988b4ebc0877cc8b159',
'info_dict': {
'id': 'X3ddTrYh',
'ext': 'wmv',
'title': 'Toolkit 6 User Group Webinar (2014-03-04) - Default Judgment and First Impression',
'description': 'md5:7b9f393bc92af02326a5c5889639eab0',
'thumbnail': 're:^https?://.*\.(?:gif|jpg)$',
}
},
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
video_url = self._html_search_regex(
r'<embed name="Video".*?src="([^"]+)"', webpage,
'QuickTime embed', default=None)
if video_url is None:
flash_vars_s = self._html_search_regex(
r'<param name="flashVars" value="([^"]+)"', webpage, 'flash vars',
default=None)
if not flash_vars_s:
flash_vars_s = self._html_search_regex(
r'<param name="initParams" value="([^"]+)"', webpage, 'flash vars',
default=None)
if flash_vars_s:
flash_vars_s = flash_vars_s.replace(',', '&')
if flash_vars_s:
flash_vars = compat_parse_qs(flash_vars_s)
video_url_raw = compat_urllib_request.quote(
flash_vars['content'][0])
video_url = video_url_raw.replace('http%3A', 'http:')
if video_url is None:
video_meta = self._html_search_meta(
'og:video', webpage, default=None)
if video_meta:
video_url = self._search_regex(
r'src=(.*?)(?:$|&)', video_meta,
'meta tag video URL', default=None)
if video_url is None:
raise ExtractorError('Cannot find video')
title = self._og_search_title(webpage, default=None)
if title is None:
title = self._html_search_regex(
[r'<b>Title:</b> ([^<]*)</div>',
r'class="tabSeperator">></span><span class="tabText">(.*?)<'],
webpage, 'title')
thumbnail = self._og_search_thumbnail(webpage)
description = self._og_search_description(webpage, default=None)
if description is None:
description = self._html_search_meta('description', webpage)
return {
'id': video_id,
'url': video_url,
'title': title,
'description': description,
'thumbnail': thumbnail,
}
| unlicense |
willingc/oh-mainline | vendor/packages/python-social-auth/social/backends/rdio.py | 79 | 2468 | """
Rdio OAuth1 and OAuth2 backends, docs at:
http://psa.matiasaguirre.net/docs/backends/rdio.html
"""
from social.backends.oauth import BaseOAuth1, BaseOAuth2, OAuthAuth
RDIO_API = 'https://www.rdio.com/api/1/'
class BaseRdio(OAuthAuth):
ID_KEY = 'key'
def get_user_details(self, response):
fullname, first_name, last_name = self.get_user_names(
fullname=response['displayName'],
first_name=response['firstName'],
last_name=response['lastName']
)
return {
'username': response['username'],
'fullname': fullname,
'first_name': first_name,
'last_name': last_name
}
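# Illustrative mapping (hypothetical response values, not taken from the
# Rdio docs): {'key': 's123', 'username': 'jdoe', 'displayName': 'Jane Doe',
# 'firstName': 'Jane', 'lastName': 'Doe'} would yield
# {'username': 'jdoe', 'fullname': 'Jane Doe', 'first_name': 'Jane',
# 'last_name': 'Doe'}.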
class RdioOAuth1(BaseRdio, BaseOAuth1):
"""Rdio OAuth authentication backend"""
name = 'rdio-oauth1'
REQUEST_TOKEN_URL = 'http://api.rdio.com/oauth/request_token'
AUTHORIZATION_URL = 'https://www.rdio.com/oauth/authorize'
ACCESS_TOKEN_URL = 'http://api.rdio.com/oauth/access_token'
EXTRA_DATA = [
('key', 'rdio_id'),
('icon', 'rdio_icon_url'),
('url', 'rdio_profile_url'),
('username', 'rdio_username'),
('streamRegion', 'rdio_stream_region'),
]
def user_data(self, access_token, *args, **kwargs):
"""Return user data provided"""
params = {'method': 'currentUser',
'extras': 'username,displayName,streamRegion'}
request = self.oauth_request(access_token, RDIO_API,
params, method='POST')
return self.get_json(request.url, method='POST',
data=request.to_postdata())['result']
class RdioOAuth2(BaseRdio, BaseOAuth2):
name = 'rdio-oauth2'
AUTHORIZATION_URL = 'https://www.rdio.com/oauth2/authorize'
ACCESS_TOKEN_URL = 'https://www.rdio.com/oauth2/token'
ACCESS_TOKEN_METHOD = 'POST'
EXTRA_DATA = [
('key', 'rdio_id'),
('icon', 'rdio_icon_url'),
('url', 'rdio_profile_url'),
('username', 'rdio_username'),
('streamRegion', 'rdio_stream_region'),
('refresh_token', 'refresh_token', True),
('token_type', 'token_type', True),
]
def user_data(self, access_token, *args, **kwargs):
return self.get_json(RDIO_API, method='POST', data={
'method': 'currentUser',
'extras': 'username,displayName,streamRegion',
'access_token': access_token
})['result']
| agpl-3.0 |
roshan/thrift | lib/py/src/protocol/TProtocolDecorator.py | 145 | 1540 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from thrift.protocol.TProtocol import TProtocolBase
from types import MethodType, UnboundMethodType, FunctionType, LambdaType, BuiltinFunctionType, BuiltinMethodType
class TProtocolDecorator():
def __init__(self, protocol):
TProtocolBase(protocol)
self.protocol = protocol
def __getattr__(self, name):
if hasattr(self.protocol, name):
member = getattr(self.protocol, name)
if type(member) in [MethodType, UnboundMethodType, FunctionType, LambdaType, BuiltinFunctionType, BuiltinMethodType]:
return lambda *args, **kwargs: self._wrap(member, args, kwargs)
else:
return member
raise AttributeError(name)
def _wrap(self, func, args, kwargs):
if type(func) == MethodType:
result = func(*args, **kwargs)
else:
result = func(self.protocol, *args, **kwargs)
return result
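# Minimal usage sketch (hypothetical subclass, not part of this module):
# attributes missing on the decorator are resolved on the wrapped protocol,
# and callables are routed through _wrap, so a subclass can intercept every
# protocol call by overriding _wrap.
#
# class LoggingProtocolDecorator(TProtocolDecorator):
#     def _wrap(self, func, args, kwargs):
#         print('calling %s' % getattr(func, '__name__', func))
#         return TProtocolDecorator._wrap(self, func, args, kwargs)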
| apache-2.0 |
hgijeon/the_PLAY | test_gameapi.py | 1 | 2612 | import pygame as gameapi
import pygame.midi as piano
import sys, random
import pygame.locals as apiVar
gameapi.init()
fpsClock = gameapi.time.Clock()
windowSurfaceObj = gameapi.display.set_mode((640, 480))
gameapi.display.set_caption('set_caption')
redColor = gameapi.Color(255,0,0)
greenColor = gameapi.Color(0,255,0)
blueColor = gameapi.Color(0,0,255)
mousex, mousey = 0,0
fontObj = gameapi.font.Font('freesansbold.ttf', 32)
mouseposMsg = ""
keypressMsg = "asdfasdfasdf"
piano.init()
piano_id = piano.get_default_input_id()
print (piano_id)
print (piano.get_count())
print(piano.get_device_info(3))
midiInput = piano.Input(3)
gameapi.fastevent.init()
while True:
windowSurfaceObj.fill(greenColor)
randomColor = gameapi.Color(random.randint(0,255),random.randint(0,255),random.randint(0,255))
gameapi.draw.polygon(windowSurfaceObj, redColor, ((0,0), (10, 10), (10,0)))
gameapi.draw.rect(windowSurfaceObj, redColor, (20, 40, 10, 10))
pixArr = gameapi.PixelArray(windowSurfaceObj)
for x in range(100,200,4):
for y in range(100,200,4):
pixArr[x][y] = redColor
del pixArr
msgSurfaceObj = fontObj.render(keypressMsg, False, blueColor)
msgRectobj = msgSurfaceObj.get_rect()
msgRectobj.topleft = (0,0)
windowSurfaceObj.blit(msgSurfaceObj, msgRectobj)
mouseposSurfaceObj = fontObj.render(mouseposMsg, True, randomColor)
windowSurfaceObj.blit(mouseposSurfaceObj, (mousex, mousey))
while midiInput.poll():
midiEvents = midiInput.read(10)
for e in piano.midis2events(midiEvents, piano_id):
gameapi.fastevent.post(e)
for event in gameapi.fastevent.get():
if event.type == apiVar.QUIT:
gameapi.quit()
sys.exit()
elif event.type == piano.MIDIIN:
print(event)
'''
print (event.data1)
print (event.data2)
print (event.data3)
print (event.timestamp)
print (event.vice_id)
'''
elif event.type == apiVar.MOUSEMOTION:
mousex, mousey = event.pos
mouseposMsg = str((mousex, mousey))
elif event.type == apiVar.KEYDOWN:
if event.key in (apiVar.K_LEFT, apiVar.K_RIGHT, apiVar.K_UP, apiVar.K_DOWN):
keypressMsg = 'Arrow key pressed'
elif event.key == apiVar.K_ESCAPE:
gameapi.event.post(gameapi.event.Event(apiVar.QUIT))
else:
keypressMsg = str(event.key)
gameapi.display.update()
fpsClock.tick(30)
| mit |
amwelch/a10sdk-python | a10sdk/core/logging/logging_host.py | 2 | 2576 | from a10sdk.common.A10BaseClass import A10BaseClass
class Host(A10BaseClass):
"""Class Description::
Set remote syslog host DNS name or ip address.
Class host supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.
:param ipv6addr_list: {"minItems": 1, "items": {"type": "ipv6addr"}, "uniqueItems": true, "array": [{"required": ["host-ipv6"], "properties": {"host-ipv6": {"optional": false, "type": "string", "description": "Set syslog host ipv6 address", "format": "ipv6-address"}, "tcp": {"description": "Use TCP as transport protocol", "partition-visibility": "shared", "default": 0, "type": "number", "format": "flag", "optional": true}, "port": {"description": "Set remote syslog port number", "format": "number", "default": 514, "optional": true, "maximum": 32767, "minimum": 1, "type": "number"}, "use-mgmt-port": {"description": "Use management port for connections", "partition-visibility": "shared", "default": 0, "type": "number", "format": "flag", "optional": true}}}], "type": "array", "$ref": "/axapi/v3/logging/host/ipv6addr/{host-ipv6}"}
:param ipv4addr_list: {"minItems": 1, "items": {"type": "ipv4addr"}, "uniqueItems": true, "array": [{"required": ["host-ipv4"], "properties": {"tcp": {"description": "Use TCP as transport protocol", "partition-visibility": "shared", "default": 0, "type": "number", "format": "flag", "optional": true}, "host-ipv4": {"optional": false, "type": "string", "description": "Set syslog host ip address", "format": "host"}, "port": {"description": "Set remote syslog port number", "format": "number", "default": 514, "optional": true, "maximum": 32767, "minimum": 1, "type": "number"}, "use-mgmt-port": {"description": "Use management port for connections", "partition-visibility": "shared", "default": 0, "type": "number", "format": "flag", "optional": true}}}], "type": "array", "$ref": "/axapi/v3/logging/host/ipv4addr/{host-ipv4}"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/logging/host`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required=[]
self.b_key = "host"
self.a10_url="/axapi/v3/logging/host"
self.DeviceProxy = ""
self.ipv6addr_list = []
self.partition = {}
self.ipv4addr_list = []
for keys, value in kwargs.items():
setattr(self,keys, value)
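# Usage sketch (hypothetical values; the device proxy would come from
# common/device_proxy.py, which is not shown here):
# host = Host(ipv4addr_list=[{"host-ipv4": "10.0.0.5", "port": 514}])
# host.DeviceProxy = device_proxy # enables the REST CRUD operations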
| apache-2.0 |
lshabc1231/noring-kernel | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py | 12980 | 5411 | # SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <[email protected]>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
try:
import wx
except ImportError:
raise ImportError, "You need to install the wxpython lib for this script"
class RootFrame(wx.Frame):
Y_OFFSET = 100
RECT_HEIGHT = 100
RECT_SPACE = 50
EVENT_MARKING_WIDTH = 5
def __init__(self, sched_tracer, title, parent = None, id = -1):
wx.Frame.__init__(self, parent, id, title)
(self.screen_width, self.screen_height) = wx.GetDisplaySize()
self.screen_width -= 10
self.screen_height -= 10
self.zoom = 0.5
self.scroll_scale = 20
self.sched_tracer = sched_tracer
self.sched_tracer.set_root_win(self)
(self.ts_start, self.ts_end) = sched_tracer.interval()
self.update_width_virtual()
self.nr_rects = sched_tracer.nr_rectangles() + 1
self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
# whole window panel
self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))
# scrollable container
self.scroll = wx.ScrolledWindow(self.panel)
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
self.scroll.EnableScrolling(True, True)
self.scroll.SetFocus()
# scrollable drawing area
self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Fit()
self.Fit()
self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)
self.txt = None
self.Show(True)
def us_to_px(self, val):
return val / (10 ** 3) * self.zoom
def px_to_us(self, val):
return (val / self.zoom) * (10 ** 3)
def scroll_start(self):
(x, y) = self.scroll.GetViewStart()
return (x * self.scroll_scale, y * self.scroll_scale)
def scroll_start_us(self):
(x, y) = self.scroll_start()
return self.px_to_us(x)
def paint_rectangle_zone(self, nr, color, top_color, start, end):
offset_px = self.us_to_px(start - self.ts_start)
width_px = self.us_to_px(end - self.ts_start)
offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
width_py = RootFrame.RECT_HEIGHT
dc = self.dc
if top_color is not None:
(r, g, b) = top_color
top_color = wx.Colour(r, g, b)
brush = wx.Brush(top_color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
width_py -= RootFrame.EVENT_MARKING_WIDTH
offset_py += RootFrame.EVENT_MARKING_WIDTH
(r, g, b) = color
color = wx.Colour(r, g, b)
brush = wx.Brush(color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, width_py)
def update_rectangles(self, dc, start, end):
start += self.ts_start
end += self.ts_start
self.sched_tracer.fill_zone(start, end)
def on_paint(self, event):
dc = wx.PaintDC(self.scroll_panel)
self.dc = dc
width = min(self.width_virtual, self.screen_width)
(x, y) = self.scroll_start()
start = self.px_to_us(x)
end = self.px_to_us(x + width)
self.update_rectangles(dc, start, end)
def rect_from_ypixel(self, y):
y -= RootFrame.Y_OFFSET
rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
return -1
return rect
def update_summary(self, txt):
if self.txt:
self.txt.Destroy()
self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))
def on_mouse_down(self, event):
(x, y) = event.GetPositionTuple()
rect = self.rect_from_ypixel(y)
if rect == -1:
return
t = self.px_to_us(x) + self.ts_start
self.sched_tracer.mouse_down(rect, t)
def update_width_virtual(self):
self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)
def __zoom(self, x):
self.update_width_virtual()
(xpos, ypos) = self.scroll.GetViewStart()
xpos = self.us_to_px(x) / self.scroll_scale
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
self.Refresh()
def zoom_in(self):
x = self.scroll_start_us()
self.zoom *= 2
self.__zoom(x)
def zoom_out(self):
x = self.scroll_start_us()
self.zoom /= 2
self.__zoom(x)
def on_key_press(self, event):
key = event.GetRawKeyCode()
if key == ord("+"):
self.zoom_in()
return
if key == ord("-"):
self.zoom_out()
return
key = event.GetKeyCode()
(x, y) = self.scroll.GetViewStart()
if key == wx.WXK_RIGHT:
self.scroll.Scroll(x + 1, y)
elif key == wx.WXK_LEFT:
self.scroll.Scroll(x - 1, y)
elif key == wx.WXK_DOWN:
self.scroll.Scroll(x, y + 1)
elif key == wx.WXK_UP:
self.scroll.Scroll(x, y - 1)
| gpl-2.0 |
xujun10110/MITMf | core/sslstrip/ServerConnectionFactory.py | 26 | 1930 | # Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging
from core.logger import logger
from twisted.internet.protocol import ClientFactory
formatter = logging.Formatter("%(asctime)s [ServerConnectionFactory] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
log = logger().setup_logger("ServerConnectionFactory", formatter)
class ServerConnectionFactory(ClientFactory):
def __init__(self, command, uri, postData, headers, client):
self.command = command
self.uri = uri
self.postData = postData
self.headers = headers
self.client = client
def buildProtocol(self, addr):
return self.protocol(self.command, self.uri, self.postData, self.headers, self.client)
def clientConnectionFailed(self, connector, reason):
log.debug("Server connection failed.")
destination = connector.getDestination()
if (destination.port != 443):
log.debug("Retrying via SSL")
self.client.proxyViaSSL(self.headers['host'], self.command, self.uri, self.postData, self.headers, 443)
else:
try:
self.client.finish()
except:
pass
| gpl-3.0 |
Kvle/ardupilot | mk/PX4/Tools/genmsg/src/genmsg/command_line.py | 217 | 1887 | # Software License Agreement (BSD License)
#
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
def includepath_to_dict(includepath):
"""Convert a list of 'package:path' strings into a dict mapping each package name to its list of include paths."""
search_path = {}
if includepath:
for path in includepath:
key = path[:path.find(':')]
value = path[path.find(':')+1:]
if value:
search_path.setdefault(key, []).append(value)
return search_path
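# Illustrative example (hypothetical paths):
# includepath_to_dict(['std_msgs:/a/msg', 'std_msgs:/b/msg'])
# -> {'std_msgs': ['/a/msg', '/b/msg']}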
| gpl-3.0 |
daniel198730/reposdmgv | proyectofinal/app/personaMain.py | 1 | 1667 | '''
Created on 19/2/2015
@author: PC29
'''
from app import app
from ec.edu.itsae.dao import PersonaDAO
from flask import render_template, request, redirect, url_for
@app.route("/mainPersona")
def personamain():
objR=PersonaDAO.PersonaDAO().reportarPersona()
return render_template("prueba.html", data=objR)
@app.route("/addPersona", methods=['POST'])
def addPersona():
nombre=request.form.get('nombre', type=str)
apaterno=request.form.get('apaterno', type=str)
amaterno=request.form.get('amaterno', type=str)
cedula=request.form.get('cedula', type=str)
fnacimiento=request.form.get('fnacimiento', type=str)
sexo=request.form.get('sexo', type=str)
direccion=request.form.get('direccion', type=str)
celular=request.form.get('celular', type=str)
estado=request.form.get('estado', type=int)
PersonaDAO.PersonaDAO().insertarPersona(nombre, apaterno, amaterno, cedula, fnacimiento, sexo, direccion, celular, estado)
return redirect(url_for('personamain'))
@app.route("/buscarauto")
def buscarPersonaAuto():
nombre=str(request.args.get('term'))
objR=PersonaDAO.PersonaDAO().buscarPersonaNombre(nombre)
return objR
@app.route("/buscarDato")
def buscarPersonaDato():
nombre=str(request.args.get('bnombre'))
objR=PersonaDAO.PersonaDAO().buscarPersonaDato(nombre)
return render_template("prueba.html", data=objR)
return objR | lgpl-2.1 |
venicegeo/eventkit-cloud | eventkit_cloud/utils/stats/geomutils.py | 1 | 3906 | from mapproxy import grid as mapproxy_grid
from eventkit_cloud.tasks.models import ExportRun
import logging
import json
import math
logger = logging.getLogger(__name__)
_dbg_geom_cache_misses = 0
def _create_cache_geom_entry(job):
"""
Constructs a geometry cache entry
:param job: job that contains the geometry
:return: dict with the job's bbox, bbox area, geojson geometry, and geodesic area
"""
orm_geom = job.the_geom
geojson = json.loads(orm_geom.json)
bbox = orm_geom.extent
cache_entry = {
"bbox": bbox,
"bbox_area": get_area_bbox(bbox),
"geometry": geojson,
"area": get_area_geojson(geojson),
}
return cache_entry
def lookup_cache_geometry(run, geom_cache):
"""
Cache area information to avoid repeated and expensive database lookups to Job when requesting
area for ExportTasks, DataProviderTasks, or ExportRuns
:param run: A run
:param geom_cache: Object holding cached values, lookup by run.id
:return:
"""
cache_entry = geom_cache.get(run.id)
if not cache_entry:
global _dbg_geom_cache_misses
_dbg_geom_cache_misses += 1
# Important that we only touch 'job' on cache miss
cache_entry = _create_cache_geom_entry(run.job)
geom_cache[run.id] = cache_entry
return cache_entry
def get_area_geojson(geojson, earth_r=6371):
"""
Given a GeoJSON string or object, return an approximation of its geodesic area in km².
Supports Polygon and MultiPolygon geometries; only the outer ring of each polygon is used, so holes are ignored.
Based on Chamberlain and Duquette's algorithm: https://trs.jpl.nasa.gov/bitstream/handle/2014/41271/07-0286.pdf
:param geojson: GeoJSON selection area
:param earth_r: Earth radius in km
:return: area of geojson ring in square kilometers
"""
def rad(d):
return math.pi * d / 180
if isinstance(geojson, str):
geojson = json.loads(geojson)
if hasattr(geojson, "geometry"):
geojson = geojson["geometry"]
geom_type = geojson["type"].lower()
if geom_type == "polygon":
polys = [geojson["coordinates"]]
elif geom_type == "multipolygon":
polys = geojson["coordinates"]
else:
raise RuntimeError("Invalid geometry type: %s" % geom_type)
a = 0
for poly in polys:
ring = poly[0]
if len(ring) < 4:
continue
ring.append(ring[-2]) # convenient for circular indexing
for i in range(len(ring) - 2):
a += (rad(ring[i + 1][0]) - rad(ring[i - 1][0])) * math.sin(rad(ring[i][1]))
area = abs(a * (earth_r ** 2) / 2)
return area
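# Doctest-style sanity check (illustrative, not part of the original module):
# a one-degree square at the equator covers about 12,364 km^2 for a sphere of
# radius 6371 km.
# >>> box = {"type": "Polygon",
# ...        "coordinates": [[[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]]}
# >>> round(get_area_geojson(box))
# 12364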
def get_area_bbox(bbox):
"""
:param bbox: bounding box tuple (w, s, e, n)
:return: The area of the bounding box
"""
w, s, e, n = bbox
return get_area_geojson({"type": "Polygon", "coordinates": [[[w, s], [e, s], [e, n], [w, n], [w, s]]]})
def get_bbox_intersect(one, two):
"""
Finds the intersection of two bounding boxes in the same SRS
:param one: The first bbox tuple (w, s, e, n)
:param two: The second bbox tuple (w, s, e, n)
:return: A bounding box tuple where one and two overlap, or None if there is no overlap
"""
a_x0, a_y0, a_x1, a_y1 = one
b_x0, b_y0, b_x1, b_y1 = two
if mapproxy_grid.bbox_intersects(one, two):
return max(a_x0, b_x0), max(a_y0, b_y0), min(a_x1, b_x1), min(a_y1, b_y1)
else:
return None
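# Illustrative example: get_bbox_intersect((0, 0, 2, 2), (1, 1, 3, 3))
# returns (1, 1, 2, 2), while disjoint boxes return None.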
def prefetch_geometry_cache(geom_cache):
"""
Populates geom_cache with all geometries information from all Jobs indexed by ExportRun.id
:param geom_cache:
"""
for er in ExportRun.objects.select_related("job").only("id", "job__the_geom").all():
geom_cache[er.id] = _create_cache_geom_entry(er.job)
def get_estimate_cache_key(bbox, srs, min_zoom, max_zoom, slug):
estimate_tuple = (tuple(bbox), int(srs), int(min_zoom), int(max_zoom), str(slug))
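# Note: on Python 3, hash() of str values is randomized per process unless
# PYTHONHASHSEED is set, so these cache keys are only stable within a single
# interpreter run.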
hash_val = hash(estimate_tuple)
return str(hash_val)
| bsd-3-clause |
jmarshallnz/xbmc | tools/EventClients/lib/python/ps3/keymaps.py | 245 | 2329 | # -*- coding: utf-8 -*-
# Copyright (C) 2008-2013 Team XBMC
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# PS3 Remote and Controller Keymaps
keymap_remote = {
"16": 'power' ,#EJECT
"64": None ,#AUDIO
"65": None ,#ANGLE
"63": 'subtitle' ,#SUBTITLE
"0f": None ,#CLEAR
"28": None ,#TIME
"00": 'one' ,#1
"01": 'two' ,#2
"02": 'three' ,#3
"03": 'four' ,#4
"04": 'five' ,#5
"05": 'six' ,#6
"06": 'seven' ,#7
"07": 'eight' ,#8
"08": 'nine' ,#9
"09": 'zero' ,#0
"81": 'mytv' ,#RED
"82": 'mymusic' ,#GREEN
"80": 'mypictures' ,#BLUE
"83": 'myvideo' ,#YELLOW
"70": 'display' ,#DISPLAY
"1a": None ,#TOP MENU
"40": 'menu' ,#POP UP/MENU
"0e": None ,#RETURN
"5c": 'menu' ,#OPTIONS/TRIANGLE
"5d": 'back' ,#BACK/CIRCLE
"5e": 'info' ,#X
"5f": 'title' ,#VIEW/SQUARE
"54": 'up' ,#UP
"55": 'right' ,#RIGHT
"56": 'down' ,#DOWN
"57": 'left' ,#LEFT
"0b": 'select' ,#ENTER
"5a": 'volumeplus' ,#L1
"58": 'volumeminus' ,#L2
"51": 'Mute' ,#L3
"5b": 'pageplus' ,#R1
"59": 'pageminus' ,#R2
"52": None ,#R3
"43": None ,#PLAYSTATION
"50": None ,#SELECT
"53": None ,#START
"33": 'reverse' ,#<-SCAN
"34": 'forward' ,# SCAN->
"30": 'skipminus' ,#PREV
"31": 'skipplus' ,#NEXT
"60": None ,#<-SLOW/STEP
"61": None ,# SLOW/STEP->
"32": 'play' ,#PLAY
"38": 'stop' ,#STOP
"39": 'pause' ,#PAUSE
}
| gpl-2.0 |
WeblateOrg/weblate | weblate/trans/stats.py | 2 | 1465 | #
# Copyright © 2012 - 2021 Michal Čihař <[email protected]>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
def get_project_stats(project):
"""Return stats for project."""
return [
{
"language": str(tup.language),
"code": tup.language.code,
"total": tup.all,
"translated": tup.translated,
"translated_percent": tup.translated_percent,
"total_words": tup.all_words,
"translated_words": tup.translated_words,
"translated_words_percent": tup.translated_words_percent,
"total_chars": tup.all_chars,
"translated_chars": tup.translated_chars,
"translated_chars_percent": tup.translated_chars_percent,
}
for tup in project.stats.get_language_stats()
]
| gpl-3.0 |
ravi-sharma/python-api-library | src/kayako/tests/test_api.py | 3 | 6831 | # -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# Copyright (c) 2011, Evan Leis
#
# Distributed under the terms of the Lesser GNU General Public License (LGPL)
#-----------------------------------------------------------------------------
'''
Created on May 5, 2011
@author: evan
'''
from kayako.tests import KayakoAPITest
class TestKayakoAPI(KayakoAPITest):
def test_init_without_url(self):
from kayako.api import KayakoAPI
from kayako.exception import KayakoInitializationError
self.assertRaises(KayakoInitializationError, KayakoAPI, None, 'key', 'secret')
def test_init_without_key(self):
from kayako.api import KayakoAPI
from kayako.exception import KayakoInitializationError
self.assertRaises(KayakoInitializationError, KayakoAPI, 'url', None, 'secret')
def test_init_without_secret(self):
from kayako.api import KayakoAPI
from kayako.exception import KayakoInitializationError
self.assertRaises(KayakoInitializationError, KayakoAPI, 'url', 'key', None)
def test__sanitize_parameter_list(self):
api = self.api
self.assertEqual(api._sanitize_parameter(['a', 'b', '', None, 'c']), ['a', 'b', 'c'])
def test__sanitize_parameter_number(self):
api = self.api
self.assertEqual(api._sanitize_parameter(123), '123')
def test__sanitize_parameter_none(self):
api = self.api
self.assertEqual(api._sanitize_parameter(None), '')
def test__post_data_none(self):
api = self.api
sanitized = api._sanitize_parameters(data=None)
results = api._post_data(**sanitized)
self.assertEqual(results, 'data=')
def test__post_data_array(self):
api = self.api
sanitized = api._sanitize_parameters(data=['abc', '', None, '123'])
results = api._post_data(**sanitized)
self.assertEqual(results, 'data[]=abc&data[]=123')
def test__post_data_empty_array(self):
api = self.api
sanitized = api._sanitize_parameters(data=['', None])
results = api._post_data(**sanitized)
self.assertEqual(results, 'data[]=')
def test__post_data_date(self):
import time
from datetime import datetime
api = self.api
date = datetime(2011, 5, 11, 12, 42, 46, 977079)
timestamp = int(time.mktime(date.timetuple()))
sanitized = api._sanitize_parameters(date=date)
results = api._post_data(**sanitized)
self.assertEqual(results, 'date=%s' % timestamp)
def test__post_data_FOREVER(self):
from kayako.core.lib import FOREVER
api = self.api
sanitized = api._sanitize_parameters(date=FOREVER)
results = api._post_data(**sanitized)
self.assertEqual(results, 'date=0')
def test__post_data_int(self):
api = self.api
sanitized = api._sanitize_parameters(data=123)
results = api._post_data(**sanitized)
self.assertEqual(results, 'data=123')
def test__post_data_str(self):
api = self.api
sanitized = api._sanitize_parameters(data='abc')
results = api._post_data(**sanitized)
self.assertEqual(results, 'data=abc')
def test__post_data_true(self):
api = self.api
sanitized = api._sanitize_parameters(data=True)
results = api._post_data(**sanitized)
self.assertEqual(results, 'data=1')
def test__post_data_false(self):
api = self.api
sanitized = api._sanitize_parameters(data=False)
results = api._post_data(**sanitized)
self.assertEqual(results, 'data=0')
def test_signature(self):
''' Test the signature generation process '''
import hmac
import base64
import urllib
import hashlib
secretkey = "secretkey"
# A fixed ten-digit salt (in production this would be generated at random)
salt = '1234567890'
# Computes the signature by hashing the salt with the secret key as the key
signature = hmac.new(secretkey, msg=salt, digestmod=hashlib.sha256).digest()
# base64 encode...
encoded_signature = base64.b64encode(signature)
# urlencode...
url_encoded_signature = urllib.quote(encoded_signature)
assert url_encoded_signature == 'VKjt8M54liY6xq1UuhUYH5BFp1RUqHekqytgLPrVEA0%3D'
def test_get(self):
r = self.api._request('/Core/TestAPI', 'GET')
assert r.read()
assert r.getcode() == 200, r.getcode()
r = self.api._request('/Core/TestAPI', 'GET', test='just a test')
assert r.read()
assert r.getcode() == 200, r.getcode()
r = self.api._request('/Core/TestAPI/1', 'GET')
assert r.read()
assert r.getcode() == 200, r.getcode()
r = self.api._request('/Core/TestAPI/1', 'GET', test='just a test')
assert r.read()
assert r.getcode() == 200, r.getcode()
def test_post(self):
r = self.api._request('/Core/TestAPI', 'POST')
assert r.read()
assert r.getcode() == 200, r.getcode()
def test_put(self):
r = self.api._request('/Core/TestAPI/1', 'PUT', x=234)
assert r.read()
assert r.getcode() == 200, r.getcode()
def test_delete(self):
r = self.api._request('/Core/TestAPI/1', 'DELETE')
assert r.read()
assert r.getcode() == 200, r.getcode()
def test_get_department(self):
from kayako.objects import Department
d = self.api.get(Department, 1)
self.assertEqual(d.id, 1)
def test_create_department(self):
from kayako.core.lib import UnsetParameter
from kayako.objects import Department
d = self.api.create(Department)
self.assertEqual(d.id, UnsetParameter)
def test_create_with_kwargs(self):
from kayako.objects import Department
d = self.api.create(Department, title='test_dept')
assert d.title == 'test_dept'
def test_create_with_bad_kwargs(self):
from kayako.objects import Department
self.assertRaises(TypeError, self.api.create, Department, bad_kwarg='bad_kwarg')
def test_invalid_url(self):
from kayako import KayakoAPI
from kayako.exception import KayakoRequestError
api = KayakoAPI('http://this.is.just.a.test.1293847987flsjclksjckn32.com', 'api_key', 'secret_key')
raised = False
try:
api._request('testing testing', 'GET')
except KayakoRequestError, error:
self.log(error)
raised = True
assert raised
def test_ticket_search(self):
assert isinstance(self.api.ticket_search('testonly', ticketid=True), list)
def test_ticket_search_full(self):
assert isinstance(self.api.ticket_search_full('testonly'), list)
| bsd-2-clause |
kylepjohnson/cltk | tests/test_morphology.py | 4 | 28253 | """Test cltk.morphology."""
import unittest
from cltk.core.exceptions import CLTKException
from cltk.morphology.lat import CollatinusDecliner
class TestMorphology(unittest.TestCase):
def test_collatinus_decline(self):
""" Ensure lemmatization works well """
decliner = CollatinusDecliner()
def sort_result(result):
return {key: sorted(val) for key, val in result.items()}
self.maxDiff = None
self.assertEqual(
decliner.decline("via", collatinus_dict=True),
{
1: ["via"],
2: ["via"],
3: ["viam"],
4: ["viae"],
5: ["viae"],
6: ["via"],
7: ["viae"],
8: ["viae"],
9: ["vias"],
10: ["viarum"],
11: ["viis"],
12: ["viis"],
},
"Declination of via should be right",
)
self.assertEqual(
decliner.decline("doctus", collatinus_dict=True),
{
13: ["doctus"],
14: ["docte"],
15: ["doctum"],
16: ["docti"],
17: ["docto"],
18: ["docto"],
19: ["docti"],
20: ["docti"],
21: ["doctos"],
22: ["doctorum"],
23: ["doctis"],
24: ["doctis"],
25: ["docta"],
26: ["docta"],
27: ["doctam"],
28: ["doctae"],
29: ["doctae"],
30: ["docta"],
31: ["doctae"],
32: ["doctae"],
33: ["doctas"],
34: ["doctarum"],
35: ["doctis"],
36: ["doctis"],
37: ["doctum"],
38: ["doctum"],
39: ["doctum"],
40: ["docti"],
41: ["docto"],
42: ["docto"],
43: ["docta"],
44: ["docta"],
45: ["docta"],
46: ["doctorum"],
47: ["doctis"],
48: ["doctis"],
49: ["doctior"],
50: ["doctior"],
51: ["doctiorem"],
52: ["doctioris"],
53: ["doctiori"],
54: ["doctiore"],
55: ["doctiores"],
56: ["doctiores"],
57: ["doctiores"],
58: ["doctiorum"],
59: ["doctioribus"],
60: ["doctioribus"],
61: ["doctior"],
62: ["doctior"],
63: ["doctiorem"],
64: ["doctioris"],
65: ["doctiori"],
66: ["doctiore"],
67: ["doctiores"],
68: ["doctiores"],
69: ["doctiores"],
70: ["doctiorum"],
71: ["doctioribus"],
72: ["doctioribus"],
73: ["doctius"],
74: ["doctius"],
75: ["doctius"],
76: ["doctioris"],
77: ["doctiori"],
78: ["doctiore"],
79: ["doctiora"],
80: ["doctiora"],
81: ["doctiora"],
82: ["doctiorum"],
83: ["doctioribus"],
84: ["doctioribus"],
85: ["doctissimus"],
86: ["doctissime"],
87: ["doctissimum"],
88: ["doctissimi"],
89: ["doctissimo"],
90: ["doctissimo"],
91: ["doctissimi"],
92: ["doctissimi"],
93: ["doctissimos"],
94: ["doctissimorum"],
95: ["doctissimis"],
96: ["doctissimis"],
97: ["doctissima"],
98: ["doctissima"],
99: ["doctissimam"],
100: ["doctissimae"],
101: ["doctissimae"],
102: ["doctissima"],
103: ["doctissimae"],
104: ["doctissimae"],
105: ["doctissimas"],
106: ["doctissimarum"],
107: ["doctissimis"],
108: ["doctissimis"],
109: ["doctissimum"],
110: ["doctissimum"],
111: ["doctissimum"],
112: ["doctissimi"],
113: ["doctissimo"],
114: ["doctissimo"],
115: ["doctissima"],
116: ["doctissima"],
117: ["doctissima"],
118: ["doctissimorum"],
119: ["doctissimis"],
120: ["doctissimis"],
},
"Doctus has three radicals and lots of forms",
)
self.assertEqual(
sort_result(decliner.decline("verbex", collatinus_dict=True)),
{
1: ["berbex", "verbex", "vervex"],
2: ["berbex", "verbex", "vervex"],
3: ["berbecem", "verbecem", "vervecem"],
4: ["berbecis", "verbecis", "vervecis"],
5: ["berbeci", "verbeci", "verveci"],
6: ["berbece", "verbece", "vervece"],
7: ["berbeces", "verbeces", "verveces"],
8: ["berbeces", "verbeces", "verveces"],
9: ["berbeces", "verbeces", "verveces"],
10: ["berbecum", "verbecum", "vervecum"],
11: ["berbecibus", "verbecibus", "vervecibus"],
12: ["berbecibus", "verbecibus", "vervecibus"],
}, # Missing 12 ?
"Verbex has two different roots : checking they are taken into account",
)
self.assertEqual(
sort_result(decliner.decline("vendo", collatinus_dict=True)),
{
121: ["vendo"],
122: ["vendis"],
123: ["vendit"],
124: ["vendimus"],
125: ["venditis"],
126: ["vendunt"],
127: ["vendebam"],
128: ["vendebas"],
129: ["vendebat"],
130: ["vendebamus"],
131: ["vendebatis"],
132: ["vendebant"],
133: ["vendam"],
134: ["vendes"],
135: ["vendet"],
136: ["vendemus"],
137: ["vendetis"],
138: ["vendent"],
139: ["vendavi", "vendidi"],
140: ["vendavisti", "vendidisti"],
141: ["vendavit", "vendidit"],
142: ["vendavimus", "vendidimus"],
143: ["vendavistis", "vendidistis"],
144: ["vendavere", "vendaverunt", "vendidere", "vendiderunt"],
145: ["vendaveram", "vendideram"],
146: ["vendaveras", "vendideras"],
147: ["vendaverat", "vendiderat"],
148: ["vendaveramus", "vendideramus"],
149: ["vendaveratis", "vendideratis"],
150: ["vendaverant", "vendiderant"],
151: ["vendavero", "vendidero"],
152: ["vendaveris", "vendideris"],
153: ["vendaverit", "vendiderit"],
154: ["vendaverimus", "vendiderimus"],
155: ["vendaveritis", "vendideritis"],
156: ["vendaverint", "vendiderint"],
157: ["vendam"],
158: ["vendas"],
159: ["vendat"],
160: ["vendamus"],
161: ["vendatis"],
162: ["vendant"],
163: ["venderem"],
164: ["venderes"],
165: ["venderet"],
166: ["venderemus"],
167: ["venderetis"],
168: ["venderent"],
169: ["vendaverim", "vendiderim"],
170: ["vendaveris", "vendideris"],
171: ["vendaverit", "vendiderit"],
172: ["vendaverimus", "vendiderimus"],
173: ["vendaveritis", "vendideritis"],
174: ["vendaverint", "vendiderint"],
175: ["vendavissem", "vendidissem"],
176: ["vendavisses", "vendidisses"],
177: ["vendavisset", "vendidisset"],
178: ["vendavissemus", "vendidissemus"],
179: ["vendavissetis", "vendidissetis"],
180: ["vendavissent", "vendidissent"],
181: ["vende"],
182: ["vendite"],
183: ["vendito"],
184: ["vendito"],
185: ["venditote"],
186: ["vendunto"],
187: ["vendere"],
188: ["vendavisse", "vendidisse"],
189: ["vendens"],
190: ["vendens"],
191: ["vendentem"],
192: ["vendentis"],
193: ["vendenti"],
194: ["vendente"],
195: ["vendentes"],
196: ["vendentes"],
197: ["vendentes"],
198: ["vendentium", "vendentum"],
199: ["vendentibus"],
200: ["vendentibus"],
201: ["vendens"],
202: ["vendens"],
203: ["vendentem"],
204: ["vendentis"],
205: ["vendenti"],
206: ["vendente"],
207: ["vendentes"],
208: ["vendentes"],
209: ["vendentes"],
210: ["vendentium", "vendentum"],
211: ["vendentibus"],
212: ["vendentibus"],
213: ["vendens"],
214: ["vendens"],
215: ["vendens"],
216: ["vendentis"],
217: ["vendenti"],
218: ["vendente"],
219: ["vendentia"],
220: ["vendentia"],
221: ["vendentia"],
222: ["vendentium", "vendentum"],
223: ["vendentibus"],
224: ["vendentibus"],
225: ["vendaturus", "venditurus"],
226: ["vendature", "venditure"],
227: ["vendaturum", "venditurum"],
228: ["vendaturi", "vendituri"],
229: ["vendaturo", "vendituro"],
230: ["vendaturo", "vendituro"],
231: ["vendaturi", "vendituri"],
232: ["vendaturi", "vendituri"],
233: ["vendaturos", "vendituros"],
234: ["vendaturorum", "venditurorum"],
235: ["vendaturis", "vendituris"],
236: ["vendaturis", "vendituris"],
237: ["vendatura", "venditura"],
238: ["vendatura", "venditura"],
239: ["vendaturam", "vendituram"],
240: ["vendaturae", "venditurae"],
241: ["vendaturae", "venditurae"],
242: ["vendatura", "venditura"],
243: ["vendaturae", "venditurae"],
244: ["vendaturae", "venditurae"],
245: ["vendaturas", "vendituras"],
246: ["vendaturarum", "venditurarum"],
247: ["vendaturis", "vendituris"],
248: ["vendaturis", "vendituris"],
249: ["vendaturum", "venditurum"],
250: ["vendaturum", "venditurum"],
251: ["vendaturum", "venditurum"],
252: ["vendaturi", "vendituri"],
253: ["vendaturo", "vendituro"],
254: ["vendaturo", "vendituro"],
255: ["vendatura", "venditura"],
256: ["vendatura", "venditura"],
257: ["vendatura", "venditura"],
258: ["vendaturorum", "venditurorum"],
259: ["vendaturis", "vendituris"],
260: ["vendaturis", "vendituris"],
261: ["vendendum"],
262: ["vendendi"],
263: ["vendendo"],
264: ["vendendo"],
265: ["vendatum", "venditum"],
266: ["vendatu", "venditu"],
267: ["vendor"],
268: ["vendere", "venderis"],
269: ["venditur"],
270: ["vendimur"],
271: ["vendimini"],
272: ["venduntur"],
273: ["vendebar"],
274: ["vendebare", "vendebaris"],
275: ["vendebatur"],
276: ["vendebamur"],
277: ["vendebamini"],
278: ["vendebantur"],
279: ["vendar"],
280: ["vendere", "venderis"],
281: ["vendetur"],
282: ["vendemur"],
283: ["vendemini"],
284: ["vendentur"],
285: ["vendar"],
286: ["vendare", "vendaris"],
287: ["vendatur"],
288: ["vendamur"],
289: ["vendamini"],
290: ["vendantur"],
291: ["venderer"],
292: ["venderere", "vendereris"],
293: ["venderetur"],
294: ["venderemur"],
295: ["venderemini"],
296: ["venderentur"],
297: ["vendere"],
298: ["vendimini"],
299: ["venditor"],
300: ["venditor"],
301: ["venduntor"],
302: ["vendi"],
303: ["vendatus", "venditus"],
304: ["vendate", "vendite"],
305: ["vendatum", "venditum"],
306: ["vendati", "venditi"],
307: ["vendato", "vendito"],
308: ["vendato", "vendito"],
309: ["vendati", "venditi"],
310: ["vendati", "venditi"],
311: ["vendatos", "venditos"],
312: ["vendatorum", "venditorum"],
313: ["vendatis", "venditis"],
314: ["vendatis", "venditis"],
315: ["vendata", "vendita"],
316: ["vendata", "vendita"],
317: ["vendatam", "venditam"],
318: ["vendatae", "venditae"],
319: ["vendatae", "venditae"],
320: ["vendata", "vendita"],
321: ["vendatae", "venditae"],
322: ["vendatae", "venditae"],
323: ["vendatas", "venditas"],
324: ["vendatarum", "venditarum"],
325: ["vendatis", "venditis"],
326: ["vendatis", "venditis"],
327: ["vendatum", "venditum"],
328: ["vendatum", "venditum"],
329: ["vendatum", "venditum"],
330: ["vendati", "venditi"],
331: ["vendato", "vendito"],
332: ["vendato", "vendito"],
333: ["vendata", "vendita"],
334: ["vendata", "vendita"],
335: ["vendata", "vendita"],
336: ["vendatorum", "venditorum"],
337: ["vendatis", "venditis"],
338: ["vendatis", "venditis"],
339: ["vendendus"],
340: ["vendende"],
341: ["vendendum"],
342: ["vendendi"],
343: ["vendendo"],
344: ["vendendo"],
345: ["vendendi"],
346: ["vendendi"],
347: ["vendendos"],
348: ["vendendorum"],
349: ["vendendis"],
350: ["vendendis"],
351: ["vendenda"],
352: ["vendenda"],
353: ["vendendam"],
354: ["vendendae"],
355: ["vendendae"],
356: ["vendenda"],
357: ["vendendae"],
358: ["vendendae"],
359: ["vendendas"],
360: ["vendendarum"],
361: ["vendendis"],
362: ["vendendis"],
363: ["vendendum"],
364: ["vendendum"],
365: ["vendendum"],
366: ["vendendi"],
367: ["vendendo"],
368: ["vendendo"],
369: ["vendenda"],
370: ["vendenda"],
371: ["vendenda"],
372: ["vendendorum"],
373: ["vendendis"],
374: ["vendendis"],
},
"Check verb vendo declines well",
)
self.assertEqual(
decliner.decline("poesis", collatinus_dict=True),
{
1: ["poesis"],
2: ["poesis"],
3: ["poesem", "poesin", "poesim"],
4: ["poesis", "poeseos"],
5: ["poesi"],
6: ["poese"],
7: ["poeses"],
8: ["poeses"],
9: ["poeses", "poesis"],
10: ["poesium"],
11: ["poesibus"],
12: ["poesibus"],
},
"Duplicity of forms should be accepted",
)
self.assertEqual(
sort_result(decliner.decline("hic", collatinus_dict=True)),
{
13: ["hic", "hice", "hicine"],
15: ["hunc"],
16: ["hujus", "hujusce"],
17: ["huic"],
18: ["hoc", "hocine"],
19: ["hi"],
21: ["hos", "hosce"],
22: ["horum"],
23: ["his", "hisce"],
24: ["his", "hisce"],
25: ["haec", "haeccine", "haece", "haecine"],
27: ["hanc"],
28: ["hujus", "hujusce"],
29: ["huic"],
30: ["hac"],
31: ["hae"],
33: ["has", "hasce"],
34: ["harum"],
35: ["his", "hisce"],
36: ["his", "hisce"],
37: ["hoc", "hocine"],
39: ["hoc", "hocine"],
40: ["hujus", "hujusce"],
41: ["huic"],
42: ["hoc", "hocine"],
43: ["haec", "haeccine", "haecine"],
45: ["haec", "haeccine", "haecine"],
46: ["horum"],
47: ["his", "hisce"],
48: ["his", "hisce"],
},
"Check that suffixes are well added",
)
self.assertEqual(
sort_result(decliner.decline("quicumque", collatinus_dict=True)),
{
13: ["quicumque", "quicunque"],
15: ["quemcumque", "quemcunque"],
16: ["cujuscumque", "cujuscunque", "quojuscumque", "quojuscunque"],
17: ["cuicumque", "cuicunque", "quoicumque", "quoicunque"],
18: ["quocumque", "quocunque"],
19: ["quicumque", "quicunque"],
21: ["quoscumque", "quoscunque"],
22: ["quorumcumque", "quorumcunque"],
23: ["quibuscumque", "quibuscunque"],
24: ["quibuscumque", "quibuscunque"],
25: ["quaecumque", "quaecunque"],
27: ["quamcumque", "quamcunque"],
28: ["cujuscumque", "cujuscunque", "quojuscumque", "quojuscunque"],
29: ["cuicumque", "cuicunque", "quoicumque", "quoicunque"],
30: ["quacumque", "quacunque"],
31: ["quaecumque", "quaecunque"],
33: ["quascumque", "quascunque"],
34: ["quarumcumque", "quarumcunque"],
35: ["quibuscumque", "quibuscunque"],
36: ["quibuscumque", "quibuscunque"],
37: ["quodcumque", "quodcunque"],
39: ["quodcumque", "quodcunque"],
40: ["cujuscumque", "cujuscunque", "quojuscumque", "quojuscunque"],
41: ["cuicumque", "cuicunque", "quoicumque", "quoicunque"],
42: ["quocumque", "quocunque"],
43: ["quaecumque", "quaecunque"],
45: ["quaecumque", "quaecunque"],
46: ["quorumcumque", "quorumcunque"],
47: ["quibuscumque", "quibuscunque"],
48: ["quibuscumque", "quibuscunque"],
},
"Constant suffix should be added",
)
self.assertEqual(
decliner.decline("plerique", collatinus_dict=True),
{
19: ["plerique"],
20: ["plerique"],
21: ["plerosque"],
22: ["plerorumque"],
23: ["plerisque"],
24: ["plerisque"],
31: ["pleraeque"],
32: ["pleraeque"],
33: ["plerasque"],
34: ["plerarumque"],
35: ["plerisque"],
36: ["plerisque"],
43: ["pleraque"],
44: ["pleraque"],
45: ["pleraque"],
46: ["plerorumque"],
47: ["plerisque"],
48: ["plerisque"],
},
"Checking abs is applied correctly",
)
self.assertEqual(
decliner.decline("edo", collatinus_dict=True)[122]
+ decliner.decline("edo", collatinus_dict=True)[163],
["edis", "es"] + ["ederem", "essem"],
"Alternative desisences should be added, even with different root",
)
self.assertEqual(
decliner.decline("aggero2")[0],
("aggero", "v1spia---"),
"Lemma with disambiguation indexes should not fail their declension [aggero and not aggeroo]",
)
def test_collatinus_flatten_decline(self):
""" Ensure that flattening decline result is consistant"""
decliner = CollatinusDecliner()
self.assertEqual(
decliner.decline("via", flatten=True),
[
"via",
"via",
"viam",
"viae",
"viae",
"via",
"viae",
"viae",
"vias",
"viarum",
"viis",
"viis",
],
"Declination of via should be right",
)
self.assertEqual(
decliner.decline("poesis", flatten=True),
[
"poesis",
"poesis",
"poesem",
"poesin",
"poesim",
"poesis",
"poeseos",
"poesi",
"poese",
"poeses",
"poeses",
"poeses",
"poesis",
"poesium",
"poesibus",
"poesibus",
],
"Duplicity of forms should be accepted",
)
def test_collatinus_POS_decline(self):
""" Ensure that POS decline result is consistant"""
decliner = CollatinusDecliner()
self.assertEqual(
decliner.decline("via"),
[
("via", "--s----n-"),
("via", "--s----v-"),
("viam", "--s----a-"),
("viae", "--s----g-"),
("viae", "--s----d-"),
("via", "--s----b-"),
("viae", "--p----n-"),
("viae", "--p----v-"),
("vias", "--p----a-"),
("viarum", "--p----g-"),
("viis", "--p----d-"),
("viis", "--p----b-"),
],
"Declination of via should be right",
)
self.assertEqual(
decliner.decline("poesis"),
[
("poesis", "--s----n-"),
("poesis", "--s----v-"),
("poesem", "--s----a-"),
("poesin", "--s----a-"),
("poesim", "--s----a-"),
("poesis", "--s----g-"),
("poeseos", "--s----g-"),
("poesi", "--s----d-"),
("poese", "--s----b-"),
("poeses", "--p----n-"),
("poeses", "--p----v-"),
("poeses", "--p----a-"),
("poesis", "--p----a-"),
("poesium", "--p----g-"),
("poesibus", "--p----d-"),
("poesibus", "--p----b-"),
],
"Duplicity of forms should be accepted",
)
def test_collatinus_multiple_radicals(self):
coll = CollatinusDecliner()
self.assertEqual(
sorted(coll.decline("sandaraca")[:3], key=lambda x: x[0]),
[
("sandaraca", "--s----n-"),
("sandaracha", "--s----n-"),
("sanderaca", "--s----n-"),
],
)
jajunitas = [form for form, _ in coll.decline("jajunitas")]
self.assertIn("jajunitas", jajunitas)
self.assertIn("jejunitas", jajunitas)
self.assertIn("jajunitatem", jajunitas)
self.assertIn("jejunitatem", jajunitas)
def test_collatinus_raise(self):
""" Unknown lemma should raise exception """
def decline():
decliner = CollatinusDecliner()
decliner.decline("this lemma will never exist")
self.assertRaises(CLTKException, decline)
| mit |
SuperTango/TangoLogger | Uploader/requests/requests/compat.py | 1039 | 1469 | # -*- coding: utf-8 -*-
"""
pythoncompat
"""
from .packages import chardet
import sys
# -------
# Pythons
# -------
# Syntax sugar.
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
try:
import simplejson as json
except (ImportError, SyntaxError):
# simplejson does not support Python 3.2, it throws a SyntaxError
# because of u'...' Unicode literals.
import json
# ---------
# Specifics
# ---------
if is_py2:
from urllib import quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass
from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
from urllib2 import parse_http_list
import cookielib
from Cookie import Morsel
from StringIO import StringIO
from .packages.urllib3.packages.ordered_dict import OrderedDict
builtin_str = str
bytes = str
str = unicode
basestring = basestring
numeric_types = (int, long, float)
elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
from urllib.request import parse_http_list, getproxies, proxy_bypass
from http import cookiejar as cookielib
from http.cookies import Morsel
from io import StringIO
from collections import OrderedDict
builtin_str = str
str = str
bytes = bytes
basestring = (str, bytes)
numeric_types = (int, float)
| gpl-3.0 |
nilqed/root | tutorials/pyroot/first.py | 28 | 1524 | from ROOT import TCanvas, TF1, TPaveLabel, TPad, TText
from ROOT import gROOT
nut = TCanvas( 'nut', 'FirstSession', 100, 10, 700, 900 )
nut.Range( 0, 0, 20, 24 )
nut.SetFillColor( 10 )
nut.SetBorderSize( 2 )
pl = TPaveLabel( 3, 22, 17, 23.7, 'My first PyROOT interactive session', 'br' )
pl.SetFillColor( 18 )
pl.Draw()
t = TText( 0, 0, 'a' )
t.SetTextFont( 62 )
t.SetTextSize( 0.025 )
t.SetTextAlign( 12 )
t.DrawText( 2, 20.3, 'PyROOT provides ROOT bindings for Python, a powerful interpreter.' )
t.DrawText( 2, 19.3, 'Blocks of lines can be entered typographically.' )
t.DrawText( 2, 18.3, 'Previous typed lines can be recalled.' )
t.SetTextFont( 72 )
t.SetTextSize( 0.026 )
t.DrawText( 3, 17, r'>>> x, y = 5, 7' )
t.DrawText( 3, 16, r'>>> import math; x*math.sqrt(y)' )
t.DrawText( 3, 14, r'>>> for i in range(2,7): print "sqrt(%d) = %f" % (i,math.sqrt(i))' )
t.DrawText( 3, 10, r'>>> import ROOT; f1 = ROOT.TF1( "f1", "sin(x)/x", 0, 10 )' )
t.DrawText( 3, 9, r'>>> f1.Draw()' )
t.SetTextFont( 81 )
t.SetTextSize( 0.018 )
t.DrawText( 4, 15, '13.228756555322953' )
t.DrawText( 4, 13.3, 'sqrt(2) = 1.414214' )
t.DrawText( 4, 12.7, 'sqrt(3) = 1.732051' )
t.DrawText( 4, 12.1, 'sqrt(4) = 2.000000' )
t.DrawText( 4, 11.5, 'sqrt(5) = 2.236068' )
t.DrawText( 4, 10.9, 'sqrt(6) = 2.449490' )
pad = TPad( 'pad', 'pad', .2, .05, .8, .35 )
pad.SetFillColor( 42 )
pad.SetFrameFillColor( 33 )
pad.SetBorderSize( 10 )
pad.Draw()
pad.cd()
pad.SetGrid()
f1 = TF1( 'f1', 'sin(x)/x', 0, 10 )
f1.Draw()
nut.cd()
nut.Update()
| lgpl-2.1 |
auready/django | django/contrib/auth/context_processors.py | 3 | 1824 | # PermWrapper and PermLookupDict proxy the permissions system into objects that
# the template system can understand.
class PermLookupDict:
def __init__(self, user, app_label):
self.user, self.app_label = user, app_label
def __repr__(self):
return str(self.user.get_all_permissions())
def __getitem__(self, perm_name):
return self.user.has_perm("%s.%s" % (self.app_label, perm_name))
def __iter__(self):
# To fix 'item in perms.someapp' and __getitem__ interaction we need to
# define __iter__. See #18979 for details.
raise TypeError("PermLookupDict is not iterable.")
def __bool__(self):
return self.user.has_module_perms(self.app_label)
class PermWrapper:
def __init__(self, user):
self.user = user
def __getitem__(self, app_label):
return PermLookupDict(self.user, app_label)
def __iter__(self):
# I am large, I contain multitudes.
raise TypeError("PermWrapper is not iterable.")
def __contains__(self, perm_name):
"""
Lookup by "someapp" or "someapp.someperm" in perms.
"""
if '.' not in perm_name:
# The name refers to module.
return bool(self[perm_name])
app_label, perm_name = perm_name.split('.', 1)
return self[app_label][perm_name]
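# Template usage sketch (the app label "polls" and permission "can_vote" are
# hypothetical names):
# {% if perms.polls %} ... {% endif %}
# {% if perms.polls.can_vote %} ... {% endif %}
# {% if 'polls.can_vote' in perms %} ... {% endif %}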
def auth(request):
"""
Returns context variables required by apps that use Django's authentication
system.
If there is no 'user' attribute in the request, uses AnonymousUser (from
django.contrib.auth).
"""
if hasattr(request, 'user'):
user = request.user
else:
from django.contrib.auth.models import AnonymousUser
user = AnonymousUser()
return {
'user': user,
'perms': PermWrapper(user),
}
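# This processor is enabled through the TEMPLATES setting, e.g.:
# 'OPTIONS': {'context_processors': [
#     'django.contrib.auth.context_processors.auth', ...]}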
| bsd-3-clause |
ZhangXinNan/tensorflow | tensorflow/contrib/tpu/python/tpu/tpu_system_metadata.py | 13 | 5734 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===================================================================
"""TPU system metadata and associated tooling."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import re
from tensorflow.contrib.tpu.python.tpu import tpu
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session as session_lib
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.platform import tf_logging as logging
_PINGING_MASTER_TIMEOUT_IN_MS = 60 * 1000 # 1 min
_RETRY_TIMES = 120
_INITIAL_TPU_SYSTEM_TIMEOUT_IN_MS = 300 * 1000 # 5 mins
_TPU_DEVICE_REG = re.compile(r'.*task:(\d+)/.*device:TPU:(\d+)$')
# _TPUSystemMetadata is used by TPUEstimator to hold TPU configuration,
# including num_cores and num_hosts.
_TPUSystemMetadata = collections.namedtuple('_TPUSystemMetadata', [
'num_cores',
'num_hosts',
'num_of_cores_per_host',
'topology',
'devices',
])
def _query_tpu_system_metadata(master_address, cluster_def=None,
query_topology=False):
"""Automatically detects the TPU system metadata in the system."""
tpu_core_count = 0
devices = []
device_dict = collections.defaultdict(list)
retry_count = 1
while True:
logging.info('Querying Tensorflow master (%s) for TPU system metadata.',
master_address)
try:
with ops.Graph().as_default():
with session_lib.Session(
master_address,
config=get_session_config_with_timeout(
_PINGING_MASTER_TIMEOUT_IN_MS,
cluster_def)) as sess:
devices = sess.list_devices()
for device in devices:
match = _TPU_DEVICE_REG.match(device.name)
if match:
host_id = match.group(1)
core_id = match.group(2)
device_dict[host_id].append(core_id)
tpu_core_count += 1
break
except errors.DeadlineExceededError:
msg = ('Failed to connect to the Tensorflow master. The TPU worker may '
'not be ready (still scheduling) or the Tensorflow master address '
'is incorrect: got (%s).' %
(master_address))
# TODO(xiejw): For local or grpc master we might not need retry logic
# here.
if retry_count <= _RETRY_TIMES:
logging.warning('%s', msg)
logging.warning('Retrying (%d/%d).', retry_count, _RETRY_TIMES)
retry_count += 1
else:
raise ValueError(msg)
num_of_cores_per_host = 0
if tpu_core_count:
num_cores_per_host_set = set(
[len(core_ids) for core_ids in device_dict.values()])
if len(num_cores_per_host_set) != 1:
raise RuntimeError(
'TPU cores on each host is not same. This should not happen!. '
'devices: {}'.format(devices))
num_of_cores_per_host = num_cores_per_host_set.pop()
topology = None
if query_topology:
if not tpu_core_count:
raise RuntimeError(
'Cannot find any TPU cores in the system (master address {}). '
'This usually means the master address is incorrect or the '
'TPU worker has some problems. Available devices: {}'.format(
master_address, devices))
topology = _obtain_topology(master_address, cluster_def)
metadata = _TPUSystemMetadata(
num_cores=tpu_core_count,
num_hosts=len(device_dict),
num_of_cores_per_host=num_of_cores_per_host,
topology=topology,
devices=devices)
if tpu_core_count:
logging.info('Found TPU system:')
logging.info('*** Num TPU Cores: %d', metadata.num_cores)
logging.info('*** Num TPU Workers: %d', metadata.num_hosts)
logging.info('*** Num TPU Cores Per Worker: %d',
metadata.num_of_cores_per_host)
for device in metadata.devices:
logging.info('*** Available Device: %s', device)
else:
logging.info('Failed to find TPU: %s', metadata)
return metadata
def _obtain_topology(master_address, cluster_def):
"""Obtains TPU fabric topology."""
try:
logging.info('Initializing TPU system (master: %s) to fetch topology '
'for model parallelism. This might take a while.',
master_address)
with ops.Graph().as_default():
session_config = get_session_config_with_timeout(
_INITIAL_TPU_SYSTEM_TIMEOUT_IN_MS, cluster_def)
with session_lib.Session(
master_address, config=session_config) as sess:
topology = sess.run(tpu.initialize_system())
return topology
except errors.DeadlineExceededError:
raise ValueError(
'Fail to initialize TPU system with master (%s). '
'Please double check the TPU system is functional.' % (
master_address))
def get_session_config_with_timeout(timeout_in_secs, cluster_def):
"""Returns a session given a timeout and a cluster configuration."""
config = config_pb2.ConfigProto(
operation_timeout_in_ms=timeout_in_secs, cluster_def=cluster_def)
return config
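# Illustrative only (the timeout value and ``master_address`` variable are
# assumptions): build a config that aborts pending ops after 5000 ms, then
# list devices through it.
#
#   config = get_session_config_with_timeout(5000, cluster_def=None)
#   with session_lib.Session(master_address, config=config) as sess:
#     sess.list_devices()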
| apache-2.0 |
samueldotj/TeeRISC-Simulator | tests/configs/realview-switcheroo-full.py | 6 | 2411 | # Copyright (c) 2012 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Sandberg
from m5.objects import *
from arm_generic import *
import switcheroo
root = LinuxArmFSSwitcheroo(
cpu_classes=(AtomicSimpleCPU, TimingSimpleCPU, DerivO3CPU)
).create_root()
# Setup a custom test method that uses the switcheroo tester that
# switches between CPU models.
run_test = switcheroo.run_test
| bsd-3-clause |
YihaoLu/statsmodels | statsmodels/tools/grouputils.py | 25 | 22518 | # -*- coding: utf-8 -*-
"""Tools for working with groups
This provides several functions to work with groups and a Group class that
keeps track of the different representations and has methods to work more
easily with groups.
Author: Josef Perktold,
Author: Nathaniel Smith, recipe for sparse_dummies on scipy user mailing list
Created on Tue Nov 29 15:44:53 2011 : sparse_dummies
Created on Wed Nov 30 14:28:24 2011 : combine_indices
changes: add Group class
Notes
~~~~~
This reverses the class I used before, where the class was for the data and
the group was auxiliary. Here, it is only the group, no data is kept.
sparse_dummies needs checking for corner cases, e.g.
what if a category level has zero elements? This can happen with subset
selection even if the original groups were defined as arange.
Not all methods and options have been tried out yet after refactoring
need more efficient loop if groups are sorted -> see GroupSorted.group_iter
"""
from __future__ import print_function
from statsmodels.compat.python import lrange, lzip, range
import numpy as np
import pandas as pd
from statsmodels.compat.numpy import npc_unique
import statsmodels.tools.data as data_util
from pandas.core.index import Index, MultiIndex
def combine_indices(groups, prefix='', sep='.', return_labels=False):
"""use np.unique to get integer group indices for product, intersection
"""
if isinstance(groups, tuple):
groups = np.column_stack(groups)
else:
groups = np.asarray(groups)
dt = groups.dtype
is2d = (groups.ndim == 2) # need to store
if is2d:
ncols = groups.shape[1]
if not groups.flags.c_contiguous:
groups = np.array(groups, order='C')
groups_ = groups.view([('', groups.dtype)] * groups.shape[1])
else:
groups_ = groups
uni, uni_idx, uni_inv = npc_unique(groups_, return_index=True,
return_inverse=True)
if is2d:
uni = uni.view(dt).reshape(-1, ncols)
# avoiding a view would be
# for t in uni.dtype.fields.values():
# assert (t[0] == dt)
#
# uni.dtype = dt
# uni.shape = (uni.size//ncols, ncols)
if return_labels:
label = [(prefix+sep.join(['%s']*len(uni[0]))) % tuple(ii)
for ii in uni]
return uni_inv, uni_idx, uni, label
else:
return uni_inv, uni_idx, uni
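# A short hedged example of combine_indices on a 2d label array (values
# illustrative):
#
#   >>> g = np.array([[0, 1], [1, 0], [0, 1]])
#   >>> uni_inv, uni_idx, uni = combine_indices(g)
#   >>> uni_inv        # integer code of each row's label pair
#   array([0, 1, 0])
#   >>> uni            # the distinct pairs, one per code
#   array([[0, 1],
#          [1, 0]])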
# written for and used in try_covariance_grouploop.py
def group_sums(x, group, use_bincount=True):
"""simple bincount version, again
group : array, integer
assumed to be consecutive integers
no dtype checking because I want to raise in that case
uses loop over columns of x
for comparison, simple python loop
"""
x = np.asarray(x)
if x.ndim == 1:
x = x[:, None]
elif x.ndim > 2 and use_bincount:
raise ValueError('not implemented yet')
if use_bincount:
# re-label groups or bincount takes too much memory
if np.max(group) > 2 * x.shape[0]:
group = pd.factorize(group)[0]
return np.array([np.bincount(group, weights=x[:, col])
for col in range(x.shape[1])])
else:
uniques = np.unique(group)
result = np.zeros([len(uniques)] + list(x.shape[1:]))
for ii, cat in enumerate(uniques):
            result[ii] = x[group == cat].sum(0)
return result
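# Sketch of the bincount path (illustrative values):
#
#   >>> x = np.arange(8.).reshape(4, 2)
#   >>> g = np.array([0, 0, 1, 1])
#   >>> group_sums(x, g)     # one row per column of x, one column per group
#   array([[  2.,  10.],
#          [  4.,  12.]])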
def group_sums_dummy(x, group_dummy):
"""sum by groups given group dummy variable
group_dummy can be either ndarray or sparse matrix
"""
if data_util._is_using_ndarray_type(group_dummy, None):
return np.dot(x.T, group_dummy)
else: # check for sparse
return x.T * group_dummy
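# Equivalent dense-dummy computation for the example above (assumed values):
#
#   >>> d = (g[:, None] == np.arange(2)).astype(float)
#   >>> group_sums_dummy(x, d)     # same as np.dot(x.T, d)
#   array([[  2.,  10.],
#          [  4.,  12.]])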
def dummy_sparse(groups):
"""create a sparse indicator from a group array with integer labels
Parameters
----------
groups: ndarray, int, 1d (nobs,)
an array of group indicators for each observation. Group levels are
assumed to be defined as consecutive integers, i.e. range(n_groups)
where n_groups is the number of group levels. A group level with no
observations for it will still produce a column of zeros.
Returns
-------
indi : ndarray, int8, 2d (nobs, n_groups)
an indicator array with one row per observation, that has 1 in the
column of the group level for that observation
Examples
--------
>>> g = np.array([0, 0, 2, 1, 1, 2, 0])
>>> indi = dummy_sparse(g)
>>> indi
<7x3 sparse matrix of type '<type 'numpy.int8'>'
with 7 stored elements in Compressed Sparse Row format>
>>> indi.todense()
matrix([[1, 0, 0],
[1, 0, 0],
[0, 0, 1],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[1, 0, 0]], dtype=int8)
current behavior with missing groups
>>> g = np.array([0, 0, 2, 0, 2, 0])
>>> indi = dummy_sparse(g)
>>> indi.todense()
matrix([[1, 0, 0],
[1, 0, 0],
[0, 0, 1],
[1, 0, 0],
[0, 0, 1],
[1, 0, 0]], dtype=int8)
"""
from scipy import sparse
indptr = np.arange(len(groups)+1)
data = np.ones(len(groups), dtype=np.int8)
    indi = sparse.csr_matrix((data, groups, indptr))
return indi
class Group(object):
    def __init__(self, group, name=''):
        # store the raw labels; the dense path of Group.dummy relies on this
        self.group = np.asarray(group)  # TODO: use checks in combine_indices
        self.name = name
        group_int, uni_idx, uni = combine_indices(group)
        # TODO: rename these to something easier to remember
        self.group_int, self.uni_idx, self.uni = group_int, uni_idx, uni
self.n_groups = len(self.uni)
# put this here so they can be overwritten before calling labels
self.separator = '.'
self.prefix = self.name
if self.prefix:
self.prefix = self.prefix + '='
# cache decorator
def counts(self):
return np.bincount(self.group_int)
# cache_decorator
def labels(self):
# is this only needed for product of groups (intersection)?
prefix = self.prefix
uni = self.uni
sep = self.separator
if uni.ndim > 1:
label = [(prefix+sep.join(['%s']*len(uni[0]))) % tuple(ii)
for ii in uni]
else:
label = [prefix + '%s' % ii for ii in uni]
return label
def dummy(self, drop_idx=None, sparse=False, dtype=int):
"""
drop_idx is only available if sparse=False
drop_idx is supposed to index into uni
"""
uni = self.uni
if drop_idx is not None:
idx = lrange(len(uni))
del idx[drop_idx]
uni = uni[idx]
group = self.group
if not sparse:
return (group[:, None] == uni[None, :]).astype(dtype)
else:
return dummy_sparse(self.group_int)
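    # Hedged sketch of the dense path (relies on ``self.group`` stored in
    # ``__init__`` above; values illustrative):
    #
    #   >>> Group(np.array([0, 1, 0])).dummy()
    #   array([[1, 0],
    #          [0, 1],
    #          [1, 0]])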
def interaction(self, other):
if isinstance(other, self.__class__):
other = other.group
return self.__class__((self, other))
def group_sums(self, x, use_bincount=True):
return group_sums(x, self.group_int, use_bincount=use_bincount)
def group_demean(self, x, use_bincount=True):
nobs = float(len(x))
means_g = group_sums(x / nobs, self.group_int,
use_bincount=use_bincount)
x_demeaned = x - means_g[self.group_int] # check reverse_index?
return x_demeaned, means_g
class GroupSorted(Group):
def __init__(self, group, name=''):
        super(GroupSorted, self).__init__(group, name=name)
idx = (np.nonzero(np.diff(group))[0]+1).tolist()
self.groupidx = lzip([0] + idx, idx + [len(group)])
def group_iter(self):
for low, upp in self.groupidx:
yield slice(low, upp)
def lag_indices(self, lag):
"""return the index array for lagged values
        Warning: if lag is larger than the number of observations for an
        individual, then no values for that individual are returned.
TODO: for the unbalanced case, I should get the same truncation for
the array with lag=0. From the return of lag_idx we wouldn't know
which individual is missing.
TODO: do I want the full equivalent of lagmat in tsa?
maxlag or lag or lags.
not tested yet
"""
lag_idx = np.asarray(self.groupidx)[:, 1] - lag # asarray or already?
mask_ok = (lag <= lag_idx)
# still an observation that belongs to the same individual
return lag_idx[mask_ok]
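# Illustrative sketch for a sorted panel with two individuals:
#
#   >>> gs = GroupSorted(np.array([0, 0, 0, 1, 1]))
#   >>> gs.groupidx          # (start, end) bounds per individual
#   [(0, 3), (3, 5)]
#   >>> gs.lag_indices(1)    # position of the lag-1 observation per individual
#   array([2, 4])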
def _is_hierarchical(x):
"""
    Checks if the first item of an array-like object is also array-like.
    If so, we have a MultiIndex and return True; otherwise return False.
"""
item = x[0]
# is there a better way to do this?
if isinstance(item, (list, tuple, np.ndarray, pd.Series, pd.DataFrame)):
return True
else:
return False
def _make_hierarchical_index(index, names):
    return MultiIndex.from_tuples(index, names=names)
def _make_generic_names(index):
n_names = len(index.names)
pad = str(len(str(n_names))) # number of digits
return [("group{0:0"+pad+"}").format(i) for i in range(n_names)]
class Grouping(object):
def __init__(self, index, names=None):
"""
index : index-like
Can be pandas MultiIndex or Index or array-like. If array-like
and is a MultipleIndex (more than one grouping variable),
groups are expected to be in each row. E.g., [('red', 1),
('red', 2), ('green', 1), ('green', 2)]
names : list or str, optional
The names to use for the groups. Should be a str if only
one grouping variable is used.
Notes
-----
If index is already a pandas Index then there is no copy.
"""
if isinstance(index, (Index, MultiIndex)):
if names is not None:
if hasattr(index, 'set_names'): # newer pandas
index.set_names(names, inplace=True)
else:
index.names = names
self.index = index
else: # array-like
if _is_hierarchical(index):
self.index = _make_hierarchical_index(index, names)
else:
self.index = Index(index, name=names)
if names is None:
names = _make_generic_names(self.index)
if hasattr(self.index, 'set_names'):
self.index.set_names(names, inplace=True)
else:
self.index.names = names
self.nobs = len(self.index)
self.nlevels = len(self.index.names)
self.slices = None
@property
def index_shape(self):
if hasattr(self.index, 'levshape'):
return self.index.levshape
else:
return self.index.shape
@property
def levels(self):
if hasattr(self.index, 'levels'):
return self.index.levels
else:
return pd.Categorical(self.index).levels
@property
def labels(self):
# this was index_int, but that's not a very good name...
if hasattr(self.index, 'labels'):
return self.index.labels
else: # pandas version issue here
# Compat code for the labels -> codes change in pandas 0.15
# FIXME: use .codes directly when we don't want to support
# pandas < 0.15
tmp = pd.Categorical(self.index)
try:
labl = tmp.codes
except AttributeError:
                labl = tmp.labels  # Old pandas
return labl[None]
@property
def group_names(self):
return self.index.names
def reindex(self, index=None, names=None):
"""
Resets the index in-place.
"""
        # NOTE: this isn't of much use if the rest of the data doesn't change
        # This needs to reset cache
        if names is None:
            names = self.group_names
        # rebinding ``self`` has no effect for the caller, so reinitialize
        # the instance in place instead
        self.__init__(index, names)
def get_slices(self, level=0):
"""
        Sets the slices attribute to be a list of positional indexers for
        the sorted groups at the given index level. I.e., self.slices[0]
        selects the observations in the first (sorted) group.
"""
# TODO: refactor this
groups = self.index.get_level_values(level).unique()
groups.sort()
if isinstance(self.index, MultiIndex):
self.slices = [self.index.get_loc_level(x, level=level)[0]
for x in groups]
else:
self.slices = [self.index.get_loc(x) for x in groups]
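    # Illustrative call pattern (the exact indexer type returned by
    # ``get_loc`` varies with the pandas version, so no output is shown):
    #
    #   grp = Grouping(pd.Index(['a', 'a', 'b'], name='g'))
    #   grp.get_slices(level=0)
    #   # grp.slices now holds one positional indexer per sorted group value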
def count_categories(self, level=0):
"""
Sets the attribute counts to equal the bincount of the (integer-valued)
labels.
"""
# TODO: refactor this not to set an attribute. Why would we do this?
self.counts = np.bincount(self.labels[level])
def check_index(self, is_sorted=True, unique=True, index=None):
"""Sanity checks"""
        if index is None:
index = self.index
if is_sorted:
test = pd.DataFrame(lrange(len(index)), index=index)
test_sorted = test.sort()
if not test.index.equals(test_sorted.index):
                raise Exception('Data is not sorted')
if unique:
if len(index) != len(index.unique()):
raise Exception('Duplicate index entries')
def sort(self, data, index=None):
"""Applies a (potentially hierarchical) sort operation on a numpy array
or pandas series/dataframe based on the grouping index or a
user-supplied index. Returns an object of the same type as the
original data as well as the matching (sorted) Pandas index.
"""
if index is None:
index = self.index
if data_util._is_using_ndarray_type(data, None):
if data.ndim == 1:
out = pd.Series(data, index=index, copy=True)
out = out.sort_index()
else:
out = pd.DataFrame(data, index=index)
out = out.sort(inplace=False) # copies
return np.array(out), out.index
elif data_util._is_using_pandas(data, None):
out = data
out = out.reindex(index) # copies?
out = out.sort_index()
return out, out.index
else:
msg = 'data must be a Numpy array or a Pandas Series/DataFrame'
raise ValueError(msg)
def transform_dataframe(self, dataframe, function, level=0, **kwargs):
"""Apply function to each column, by group
Assumes that the dataframe already has a proper index"""
if dataframe.shape[0] != self.nobs:
raise Exception('dataframe does not have the same shape as index')
out = dataframe.groupby(level=level).apply(function, **kwargs)
if 1 in out.shape:
return np.ravel(out)
else:
return np.array(out)
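    # Sketch with hypothetical data:
    #
    #   grp = Grouping(pd.Index([0, 0, 1, 1], name='g'))
    #   df = pd.DataFrame({'y': [1., 2., 3., 5.]}, index=grp.index)
    #   grp.transform_dataframe(df, lambda v: v.mean())  # -> array([ 1.5,  4. ])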
def transform_array(self, array, function, level=0, **kwargs):
"""Apply function to each column, by group
"""
if array.shape[0] != self.nobs:
raise Exception('array does not have the same shape as index')
dataframe = pd.DataFrame(array, index=self.index)
return self.transform_dataframe(dataframe, function, level=level,
**kwargs)
def transform_slices(self, array, function, level=0, **kwargs):
"""Apply function to each group. Similar to transform_array but does
not coerce array to a DataFrame and back and only works on a 1D or 2D
numpy array. function is called function(group, group_idx, **kwargs).
"""
array = np.asarray(array)
if array.shape[0] != self.nobs:
raise Exception('array does not have the same shape as index')
# always reset because level is given. need to refactor this.
self.get_slices(level=level)
processed = []
for s in self.slices:
if array.ndim == 2:
subset = array[s, :]
elif array.ndim == 1:
subset = array[s]
processed.append(function(subset, s, **kwargs))
processed = np.array(processed)
return processed.reshape(-1, processed.shape[-1])
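    # ``function`` receives (subset, positional_indexer); a matching callable
    # looks like this (``grp`` and ``arr2d`` are illustrative names):
    #
    #   def group_mean(sub, idx):
    #       return sub.mean(0)
    #
    #   means = grp.transform_slices(arr2d, group_mean)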
    # TODO: this isn't general; needs to be a PanelGrouping object
def dummies_time(self):
self.dummy_sparse(level=1)
return self._dummies
def dummies_groups(self, level=0):
self.dummy_sparse(level=level)
return self._dummies
def dummy_sparse(self, level=0):
"""create a sparse indicator from a group array with integer labels
Parameters
----------
groups: ndarray, int, 1d (nobs,) an array of group indicators for each
observation. Group levels are assumed to be defined as consecutive
integers, i.e. range(n_groups) where n_groups is the number of
group levels. A group level with no observations for it will still
produce a column of zeros.
Returns
-------
indi : ndarray, int8, 2d (nobs, n_groups)
an indicator array with one row per observation, that has 1 in the
column of the group level for that observation
Examples
--------
>>> g = np.array([0, 0, 2, 1, 1, 2, 0])
>>> indi = dummy_sparse(g)
>>> indi
<7x3 sparse matrix of type '<type 'numpy.int8'>'
with 7 stored elements in Compressed Sparse Row format>
>>> indi.todense()
matrix([[1, 0, 0],
[1, 0, 0],
[0, 0, 1],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[1, 0, 0]], dtype=int8)
current behavior with missing groups
>>> g = np.array([0, 0, 2, 0, 2, 0])
>>> indi = dummy_sparse(g)
>>> indi.todense()
matrix([[1, 0, 0],
[1, 0, 0],
[0, 0, 1],
[1, 0, 0],
[0, 0, 1],
[1, 0, 0]], dtype=int8)
"""
from scipy import sparse
groups = self.labels[level]
indptr = np.arange(len(groups)+1)
data = np.ones(len(groups), dtype=np.int8)
self._dummies = sparse.csr_matrix((data, groups, indptr))
if __name__ == '__main__':
# ---------- examples combine_indices
from numpy.testing import assert_equal
np.random.seed(985367)
groups = np.random.randint(0, 2, size=(10, 2))
uv, ux, u, label = combine_indices(groups, return_labels=True)
uv, ux, u, label = combine_indices(groups, prefix='g1,g2=', sep=',',
return_labels=True)
group0 = np.array(['sector0', 'sector1'])[groups[:, 0]]
group1 = np.array(['region0', 'region1'])[groups[:, 1]]
uv, ux, u, label = combine_indices((group0, group1),
prefix='sector,region=',
sep=',',
return_labels=True)
uv, ux, u, label = combine_indices((group0, group1), prefix='', sep='.',
return_labels=True)
group_joint = np.array(label)[uv]
group_joint_expected = np.array(['sector1.region0', 'sector0.region1',
'sector0.region0', 'sector0.region1',
'sector1.region1', 'sector0.region0',
'sector1.region0', 'sector1.region0',
'sector0.region1', 'sector0.region0'],
dtype='|S15')
assert_equal(group_joint, group_joint_expected)
"""
>>> uv
array([2, 1, 0, 0, 1, 0, 2, 0, 1, 0])
>>> label
['sector0.region0', 'sector1.region0', 'sector1.region1']
>>> np.array(label)[uv]
array(['sector1.region1', 'sector1.region0', 'sector0.region0',
'sector0.region0', 'sector1.region0', 'sector0.region0',
'sector1.region1', 'sector0.region0', 'sector1.region0',
'sector0.region0'],
dtype='|S15')
>>> np.column_stack((group0, group1))
array([['sector1', 'region1'],
['sector1', 'region0'],
['sector0', 'region0'],
['sector0', 'region0'],
['sector1', 'region0'],
['sector0', 'region0'],
['sector1', 'region1'],
['sector0', 'region0'],
['sector1', 'region0'],
['sector0', 'region0']],
dtype='|S7')
"""
# ------------- examples sparse_dummies
from scipy import sparse
g = np.array([0, 0, 1, 2, 1, 1, 2, 0])
u = lrange(3)
indptr = np.arange(len(g)+1)
data = np.ones(len(g), dtype=np.int8)
a = sparse.csr_matrix((data, g, indptr))
print(a.todense())
print(np.all(a.todense() == (g[:, None] == np.arange(3)).astype(int)))
x = np.arange(len(g)*3).reshape(len(g), 3, order='F')
print('group means')
print(x.T * a)
print(np.dot(x.T, g[:, None] == np.arange(3)))
print(np.array([np.bincount(g, weights=x[:, col]) for col in range(3)]))
for cat in u:
print(x[g == cat].sum(0))
cc = sparse.csr_matrix([[0, 1, 0, 1, 0, 0, 0, 0, 0],
[1, 0, 1, 0, 1, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 1, 0, 0, 0],
[1, 0, 0, 0, 1, 0, 1, 0, 0],
[0, 1, 0, 1, 0, 1, 0, 1, 0],
[0, 0, 1, 0, 1, 0, 0, 0, 1],
[0, 0, 0, 1, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 1],
[0, 0, 0, 0, 0, 1, 0, 1, 0]])
# ------------- groupsums
print(group_sums(np.arange(len(g)*3*2).reshape(len(g), 3, 2), g,
use_bincount=False).T)
print(group_sums(np.arange(len(g)*3*2).reshape(len(g), 3, 2)[:, :, 0], g))
print(group_sums(np.arange(len(g)*3*2).reshape(len(g), 3, 2)[:, :, 1], g))
# ------------- examples class
x = np.arange(len(g)*3).reshape(len(g), 3, order='F')
mygroup = Group(g)
print(mygroup.group_int)
print(mygroup.group_sums(x))
print(mygroup.labels())
| bsd-3-clause |
mgrygoriev/CloudFerry | tests/cloudferrylib/os/actions/test_keypair_migration.py | 1 | 6887 | # Copyright 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import keystoneclient
import mock
from cloudferrylib.os.compute import keypairs
from tests import test
from cloudferrylib.os.actions import transport_compute_resources as tcr
from cloudferrylib.utils import utils as utl
class KeyPairObjectTestCase(test.TestCase):
def test_key_pair_does_not_include_autoincrement_fields(self):
kp_db = (
"Jan 1st 1970", # created_at
None, # updated_at
None, # deleted_at
"keypair-id", # id
"keypair-name", # name
"user-id", # user_id
"aa:bb:cc:dd:ee:ff", # fingerprint
"public-key-data", # public_key
0, # deleted
)
kp = keypairs.KeyPair.from_tuple(kp_db)
kp_dict = kp.to_dict(allow_auto_fields=False)
self.assertTrue('id' not in kp_dict.keys())
def test_all_fields_are_accessible_through_attributes(self):
kp = keypairs.KeyPair()
try:
for field in kp.FIELDS:
getattr(kp, field)
except AttributeError:
self.fail("KeyPair object must have all fields accessible as "
"attributes")
def test_value_error_is_risen_in_case_db_value_is_incorrect(self):
# user id, fingerprint, public key and deleted keys missing
db_kp = ("datetime", None, None, "id", "keypair name")
self.assertRaises(ValueError, keypairs.KeyPair.from_tuple, db_kp)
db_kp = ("datetime", None, None, "id", "keypair name", "user id",
"fingerprint", "public key", 0, "invalid argument")
self.assertRaises(ValueError, keypairs.KeyPair.from_tuple, db_kp)
def test_fields_are_settable_as_attributes(self):
try:
kp = keypairs.KeyPair()
public_key_value = "random public key"
fingerprint_value = "fingerprint"
deleted_value = 1
kp.public_key = public_key_value
kp.fingerprint = fingerprint_value
kp.deleted = deleted_value
self.assertEqual(kp.public_key, public_key_value)
self.assertEqual(kp.fingerprint, fingerprint_value)
self.assertEqual(kp.deleted, deleted_value)
except AttributeError:
self.fail("Key pair fields must be settable as attributes")
def test_key_pair_has_dict_support(self):
try:
kp = keypairs.KeyPair()
public_key_value = "random public key"
fingerprint_value = "fingerprint"
deleted_value = 1
kp['public_key'] = public_key_value
kp['fingerprint'] = fingerprint_value
kp['deleted'] = deleted_value
self.assertEqual(kp['public_key'], public_key_value)
self.assertEqual(kp['fingerprint'], fingerprint_value)
self.assertEqual(kp['deleted'], deleted_value)
except KeyError:
self.fail("Key pair fields must be settable as dict item")
class KeyPairMigrationTestCase(test.TestCase):
@mock.patch('cloudferrylib.os.identity.keystone.'
'get_dst_user_from_src_user_id')
def test_non_existing_user_does_not_break_migration(self, _):
try:
db_broker = mock.Mock()
db_broker.get_all_keypairs.return_value = [keypairs.KeyPair(),
keypairs.KeyPair()]
tkp = tcr.TransportKeyPairs(init=mock.MagicMock(),
kp_db_broker=db_broker)
tkp.src_cloud = mock.MagicMock()
tkp.dst_cloud = mock.MagicMock()
tkp.cfg = mock.Mock()
tkp.cfg.migrate.skip_orphaned_keypairs = True
src_users = tkp.src_cloud.resources[
utl.IDENTITY_RESOURCE].keystone_client.users
src_users.find.side_effect = keystoneclient.exceptions.NotFound
dst_users = tkp.dst_cloud.resources[
utl.IDENTITY_RESOURCE].keystone_client.users
dst_users.find.side_effect = keystoneclient.exceptions.NotFound
tkp.run()
except Exception as e:
self.fail("Unexpected exception caught: %s" % e)
def test_update_sql_gets_called_for_each_keypair(self):
num_keypairs = 5
db_broker = mock.Mock()
db_broker.get_all_keypairs.return_value = [
keypairs.KeyPair() for _ in xrange(num_keypairs)]
db_broker.store_keypair = mock.Mock()
tkp = tcr.TransportKeyPairs(init=mock.MagicMock(),
kp_db_broker=db_broker)
tkp.src_cloud = mock.MagicMock()
tkp.dst_cloud = mock.MagicMock()
tkp.cfg = mock.Mock()
tkp.cfg.migrate.skip_orphaned_keypairs = True
tkp.run()
self.assertTrue(db_broker.store_keypair.call_count == num_keypairs)
class KeyPairForInstancesTestCase(test.TestCase):
def test_does_nothing_if_no_info_provided(self):
db_broker = mock.Mock()
task = tcr.SetKeyPairsForInstances(init=mock.MagicMock(),
kp_db_broker=db_broker)
task.run()
self.assertFalse(db_broker.add_keypair_to_instance.called)
def test_keypair_is_added_to_instance(self):
db_broker = mock.Mock()
num_instances_with_keys = 5
num_instances_without_keys = 5
instances = {
'instance1%d' % i: {
'instance': {
'key_name': 'key%d' % i,
'user_id': 'user%d' % i
}
} for i in xrange(num_instances_with_keys)
}
instances.update({
'instance2%d' % j: {
'instance': {
'user_id': 'user%d' % j
}
} for j in xrange(num_instances_without_keys)}
)
info = {utl.INSTANCES_TYPE: instances}
task = tcr.SetKeyPairsForInstances(init=mock.MagicMock(),
kp_db_broker=db_broker)
task.run(info=info)
self.assertTrue(db_broker.add_keypair_to_instance.called)
self.assertEqual(db_broker.add_keypair_to_instance.call_count,
num_instances_with_keys)
| apache-2.0 |
AstroPrint/AstroBox | src/ext/sockjs/tornado/transports/jsonp.py | 9 | 3642 | # -*- coding: utf-8 -*-
"""
sockjs.tornado.transports.jsonp
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
JSONP transport implementation.
"""
import logging
from tornado.web import asynchronous
from sockjs.tornado import proto
from sockjs.tornado.transports import pollingbase
from sockjs.tornado.util import bytes_to_str, unquote_plus
LOG = logging.getLogger("tornado.general")
class JSONPTransport(pollingbase.PollingTransportBase):
name = 'jsonp'
@asynchronous
def get(self, session_id):
# Start response
self.handle_session_cookie()
self.disable_cache()
# Grab callback parameter
self.callback = self.get_argument('c', None)
if not self.callback:
self.write('"callback" parameter required')
self.set_status(500)
self.finish()
return
# Get or create session without starting heartbeat
if not self._attach_session(session_id, False):
return
# Might get already detached because connection was closed in on_open
if not self.session:
return
if not self.session.send_queue:
self.session.start_heartbeat()
else:
self.session.flush()
def send_pack(self, message, binary=False):
if binary:
raise Exception('binary not supported for JSONPTransport')
self.active = False
try:
# TODO: Just escape
msg = '%s(%s);\r\n' % (self.callback, proto.json_encode(message))
self.set_header('Content-Type', 'application/javascript; charset=UTF-8')
self.set_header('Content-Length', len(msg))
# TODO: Fix me
self.set_header('Etag', 'dummy')
self.write(msg)
self.flush(callback=self.send_complete)
except IOError:
# If connection dropped, make sure we close offending session instead
# of propagating error all way up.
self.session.delayed_close()
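# Wire-format sketch (illustrative): with callback parameter c=cb and the
# sockjs frame 'a["hi"]', send_pack writes the JSON-escaped frame wrapped in
# the callback:
#
#   cb("a[\"hi\"]");\r\n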
class JSONPSendHandler(pollingbase.PollingTransportBase):
def post(self, session_id):
self.preflight()
self.handle_session_cookie()
self.disable_cache()
session = self._get_session(session_id)
if session is None or session.is_closed:
self.set_status(404)
return
data = bytes_to_str(self.request.body)
ctype = self.request.headers.get('Content-Type', '').lower()
if ctype == 'application/x-www-form-urlencoded':
if not data.startswith('d='):
LOG.exception('jsonp_send: Invalid payload.')
self.write("Payload expected.")
self.set_status(500)
return
data = unquote_plus(data[2:])
if not data:
LOG.debug('jsonp_send: Payload expected.')
self.write("Payload expected.")
self.set_status(500)
return
try:
messages = proto.json_decode(data)
except:
# TODO: Proper error handling
LOG.debug('jsonp_send: Invalid json encoding')
self.write("Broken JSON encoding.")
self.set_status(500)
return
try:
session.on_messages(messages)
except Exception:
LOG.exception('jsonp_send: on_message() failed')
session.close()
self.write('Message handler failed.')
self.set_status(500)
return
self.write('ok')
self.set_header('Content-Type', 'text/plain; charset=UTF-8')
self.set_status(200)
| agpl-3.0 |
EvgeneOskin/taiga-back | taiga/projects/userstories/apps.py | 14 | 5623 | # Copyright (C) 2014 Andrey Antukh <[email protected]>
# Copyright (C) 2014 Jesús Espino <[email protected]>
# Copyright (C) 2014 David Barragán <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.apps import AppConfig
from django.apps import apps
from django.db.models import signals
from taiga.projects import signals as generic_handlers
from taiga.projects.custom_attributes import signals as custom_attributes_handlers
from . import signals as handlers
def connect_userstories_signals():
# Cached prev object version
signals.pre_save.connect(handlers.cached_prev_us,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="cached_prev_us")
# Role Points
signals.post_save.connect(handlers.update_role_points_when_create_or_edit_us,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="update_role_points_when_create_or_edit_us")
# Tasks
signals.post_save.connect(handlers.update_milestone_of_tasks_when_edit_us,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="update_milestone_of_tasks_when_edit_us")
# Open/Close US and Milestone
signals.post_save.connect(handlers.try_to_close_or_open_us_and_milestone_when_create_or_edit_us,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="try_to_close_or_open_us_and_milestone_when_create_or_edit_us")
signals.post_delete.connect(handlers.try_to_close_milestone_when_delete_us,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="try_to_close_milestone_when_delete_us")
# Tags
signals.pre_save.connect(generic_handlers.tags_normalization,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="tags_normalization_user_story")
signals.post_save.connect(generic_handlers.update_project_tags_when_create_or_edit_taggable_item,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="update_project_tags_when_create_or_edit_taggable_item_user_story")
signals.post_delete.connect(generic_handlers.update_project_tags_when_delete_taggable_item,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="update_project_tags_when_delete_taggable_item_user_story")
def connect_userstories_custom_attributes_signals():
signals.post_save.connect(custom_attributes_handlers.create_custom_attribute_value_when_create_user_story,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="create_custom_attribute_value_when_create_user_story")
def connect_all_userstories_signals():
connect_userstories_signals()
connect_userstories_custom_attributes_signals()
def disconnect_userstories_signals():
signals.pre_save.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="cached_prev_us")
signals.post_save.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="update_role_points_when_create_or_edit_us")
signals.post_save.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="update_milestone_of_tasks_when_edit_us")
signals.post_save.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="try_to_close_or_open_us_and_milestone_when_create_or_edit_us")
signals.post_delete.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="try_to_close_milestone_when_delete_us")
signals.pre_save.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="tags_normalization_user_story")
signals.post_save.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="update_project_tags_when_create_or_edit_taggable_item_user_story")
signals.post_delete.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="update_project_tags_when_delete_taggable_item_user_story")
def disconnect_userstories_custom_attributes_signals():
signals.post_save.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="create_custom_attribute_value_when_create_user_story")
def disconnect_all_userstories_signals():
disconnect_userstories_signals()
disconnect_userstories_custom_attributes_signals()
class UserStoriesAppConfig(AppConfig):
name = "taiga.projects.userstories"
verbose_name = "User Stories"
def ready(self):
connect_all_userstories_signals()
| agpl-3.0 |
endlessm/chromium-browser | third_party/angle/src/libANGLE/gen_overlay_fonts.py | 5 | 10022 | #!/usr/bin/env vpython
#
# [VPYTHON:BEGIN]
# wheel: <
# name: "infra/python/wheels/freetype-py/${vpython_platform}"
# version: "version:2.1.0.post1"
# >
# [VPYTHON:END]
# Copyright 2019 The ANGLE Project Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# gen_overlay_fonts.py:
# Code generation for overlay fonts. Should be run if the font file under overlay/ is changed,
# or the font sizes declared in this file are modified. The font is assumed to be monospace.
#   The output contains the ASCII characters in order from ' ' to '~', rendered as
#   images with 3 rows of 32 characters each.
# NOTE: don't run this script directly. Run scripts/run_code_generation.py.
from datetime import date
import sys
if len(sys.argv) < 2:
from freetype import *
out_file_cpp = 'Overlay_font_autogen.cpp'
out_file_h = 'Overlay_font_autogen.h'
font_file = 'overlay/DejaVuSansMono-Bold.ttf'
template_out_file_h = u"""// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using {font_file}.
//
// Copyright {copyright_year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// {out_file_name}:
// Autogenerated overlay font data.
#include "libANGLE/Overlay.h"
namespace gl
{{
namespace overlay
{{
constexpr int kFontCount = {font_count};
constexpr int kFontGlyphWidths[kFontCount] = {{ {font_glyph_widths} }};
constexpr int kFontGlyphHeights[kFontCount] = {{ {font_glyph_heights} }};
constexpr int kFontCharactersPerRow = 32;
constexpr int kFontCharactersPerCol = 3;
constexpr int kFontCharacters = kFontCharactersPerRow * kFontCharactersPerCol;
constexpr int kFontImageWidth = {max_font_width} * kFontCharactersPerRow;
constexpr int kFontImageHeight = {max_font_height} * kFontCharactersPerCol;
{font_layers}
}} // namespace overlay
}} // namespace gl
"""
template_out_file_cpp = u"""// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using images from {font_file}.
//
// Copyright {copyright_year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// {out_file_name}:
// Autogenerated overlay font data.
#include "libANGLE/Overlay.h"
#include "libANGLE/Overlay_font_autogen.h"
#include <numeric>
namespace gl
{{
using namespace overlay;
// Save binary size if the font images are never to be used.
#if ANGLE_ENABLE_OVERLAY
namespace
{{
constexpr int kFontWidths[kFontCount] = {{ {font_layer_widths} }};
constexpr int kFontHeights[kFontCount] = {{ {font_layer_heights} }};
{font_data}
// Returns a bit with the value of the pixel.
template<int kFontWidth, int kFontHeight>
uint32_t GetFontLayerPixel(const uint32_t fontImage[kFontHeight][kFontWidth / 32], int x, int y)
{{
ASSERT(x >= 0 && x < kFontWidth && y >= 0 && y < kFontHeight);
return fontImage[y][x / 32] >> (x % 32) & 1;
}}
inline uint32_t GetFontPixel(int layer, int x, int y)
{{
switch (layer)
{{
{get_font_layer_pixel}
default:
UNREACHABLE();
return 0;
}}
}}
}} // anonymous namespace
void OverlayState::initFontData(uint8_t *fontData) const
{{
constexpr int kFontDataLayerSize = kFontImageWidth * kFontImageHeight;
// Unpack the font bitmap into R8_UNORM format. Border pixels are given a 0.5 value for better
// font visibility.
for (int layer = 0; layer < kFontCount; ++layer)
{{
memset(fontData, 0, kFontDataLayerSize);
for (int y = 0; y < kFontHeights[layer]; ++y)
{{
for (int x = 0; x < kFontWidths[layer]; ++x)
{{
uint32_t src = GetFontPixel(layer, x, y);
uint8_t dstValue = src ? 255 : 0;
fontData[y * kFontImageWidth + x] = dstValue;
}}
}}
fontData += kFontDataLayerSize;
}}
}}
#else
void OverlayState::initFontData(uint8_t *fontData) const
{{
memset(fontData, 0, kFontCount * kFontImageWidth * kFontImageHeight * sizeof(*fontData));
}}
#endif
}} // namespace gl
"""
template_get_font_layer_pixel = u"""case {layer}:
return GetFontLayerPixel<kFontWidths[{layer}], kFontHeights[{layer}]>({font_image}, x, y);
"""
def main():
if len(sys.argv) == 2 and sys.argv[1] == 'inputs':
# disabled because of issues on Windows. http://anglebug.com/3892
# print(font_file)
return
if len(sys.argv) == 2 and sys.argv[1] == 'outputs':
print(','.join([out_file_cpp, out_file_h]))
return
font_defs = [('large', 36), ('medium', 23), ('small', 14)]
chars = ' !"#$%&\'()*+,-./0123456789:;<=>?' + \
'@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_' + \
'`abcdefghijklmnopqrstuvwxyz{|}~ '
output_rows = 3
output_cols = 32
assert (len(chars) == output_rows * output_cols)
font_glyph_widths = []
font_glyph_heights = []
font_layers = []
font_data = []
get_font_layer_pixel = []
current_font_layer = 0
# Load the font file.
face = Face(font_file)
assert (face.is_fixed_width)
for font_name, font_size in font_defs:
# Since the font is fixed width, we can retrieve its size right away.
face.set_char_size(font_size << 6)
glyph_width = face.size.max_advance >> 6
glyph_ascender = face.size.ascender >> 6
glyph_descender = face.size.descender >> 6
glyph_height = glyph_ascender - glyph_descender
font_tag = font_name.capitalize()
font_layer = str(current_font_layer)
font_layer_symbol = 'kFontLayer' + font_tag
font_array_name = 'kFontImage' + font_tag
font_width = 'kFontWidths[' + font_layer_symbol + ']'
font_height = 'kFontHeights[' + font_layer_symbol + ']'
# Font pixels are packed in 32-bit values.
font_array_width = output_cols * glyph_width / 32
font_array_height = output_rows * glyph_height
font_array = [[0] * font_array_width for i in range(font_array_height)]
for charIndex in range(len(chars)):
char = chars[charIndex]
base_x = (charIndex % output_cols) * glyph_width
base_y = (charIndex / output_cols) * glyph_height
# Render the character.
face.load_char(char)
bitmap = face.glyph.bitmap
left = face.glyph.bitmap_left
top = face.glyph.bitmap_top
width = bitmap.width
rows = bitmap.rows
pitch = bitmap.pitch
offset_x = left
offset_y = glyph_height - (top - glyph_descender)
# '#' in the smallest font generates a larger glyph than the "fixed" font width.
if offset_x + width > glyph_width:
offset_x = glyph_width - width
if offset_x < 0:
width += offset_x
offset_x = 0
base_x += offset_x
base_y += offset_y
assert (offset_x + width <= glyph_width)
assert (offset_y + rows <= glyph_height)
# Write the character bitmap in the font image.
for y in range(rows):
for x in range(width):
pixel_value = bitmap.buffer[y * pitch + x]
output_bit = 1 if pixel_value >= 122 else 0
font_array_row = base_y + y
font_array_col = (base_x + x) / 32
font_array_bit = (base_x + x) % 32
font_array[font_array_row][font_array_col] |= output_bit << font_array_bit
# Output the image to a C array.
data = 'constexpr uint32_t ' + font_array_name + '[' + font_height + '][' + font_width + '/32] = {\n'
for y in range(font_array_height):
data += '{'
for x in range(font_array_width):
data += '0x{:08X}, '.format(font_array[y][x])
data += '},\n'
data += '};\n'
font_glyph_widths.append(glyph_width)
font_glyph_heights.append(glyph_height)
font_layers.append('constexpr int ' + font_layer_symbol + ' = ' + font_layer + ';')
font_data.append(data)
get_font_layer_pixel.append(
template_get_font_layer_pixel.format(
layer=font_layer_symbol, font_image=font_array_name))
current_font_layer += 1
with open(out_file_h, 'w') as outfile:
outfile.write(
template_out_file_h.format(
script_name=__file__,
font_file=font_file,
copyright_year=date.today().year,
out_file_name=out_file_h,
font_count=len(font_data),
font_glyph_widths=','.join(map(str, font_glyph_widths)),
font_glyph_heights=','.join(map(str, font_glyph_heights)),
max_font_width=max(font_glyph_widths),
max_font_height=max(font_glyph_heights),
font_layers='\n'.join(font_layers)))
outfile.close()
font_layer_widths = [
'kFontGlyphWidths[' + str(layer) + '] * kFontCharactersPerRow'
for layer in range(len(font_data))
]
font_layer_heights = [
'kFontGlyphHeights[' + str(layer) + '] * kFontCharactersPerCol'
for layer in range(len(font_data))
]
with open(out_file_cpp, 'w') as outfile:
outfile.write(
template_out_file_cpp.format(
script_name=__file__,
font_file=font_file,
copyright_year=date.today().year,
out_file_name=out_file_cpp,
font_layer_widths=','.join(font_layer_widths),
font_layer_heights=','.join(font_layer_heights),
font_data='\n'.join(font_data),
get_font_layer_pixel=''.join(get_font_layer_pixel)))
outfile.close()
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause |
lucasmiqueias/speakerfight-1 | deck/migrations/0001_initial.py | 20 | 3687 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import django_extensions.db.fields
class Migration(migrations.Migration):
dependencies = [
('jury', '__first__'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Event',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=50, verbose_name='Title')),
('slug', django_extensions.db.fields.AutoSlugField(editable=False, populate_from=b'title', max_length=60, blank=True, unique=True, overwrite=True)),
('description', models.TextField(max_length=400, verbose_name='Description')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
('is_published', models.BooleanField(default=False, verbose_name='Publish')),
('allow_public_voting', models.BooleanField(default=True, verbose_name='Allow Public Voting')),
('due_date', models.DateTimeField(null=True, blank=True)),
('author', models.ForeignKey(related_name='events', to=settings.AUTH_USER_MODEL)),
('jury', models.OneToOneField(related_name='event', null=True, blank=True, to='jury.Jury')),
],
options={
'verbose_name': 'Event',
'verbose_name_plural': 'Events',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Proposal',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=50, verbose_name='Title')),
('slug', django_extensions.db.fields.AutoSlugField(editable=False, populate_from=b'title', max_length=60, blank=True, unique=True, overwrite=True)),
('description', models.TextField(max_length=400, verbose_name='Description')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
('is_published', models.BooleanField(default=False, verbose_name='Publish')),
('author', models.ForeignKey(related_name='proposals', to=settings.AUTH_USER_MODEL)),
('event', models.ForeignKey(related_name='proposals', to='deck.Event')),
],
options={
'verbose_name': 'Proposal',
'verbose_name_plural': 'Proposals',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Vote',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('rate', models.SmallIntegerField(blank=True, null=True, verbose_name='Rate Index', choices=[(-1, b'angry'), (0, b'sleepy'), (1, b'sad'), (2, b'happy'), (3, b'laughing')])),
('proposal', models.ForeignKey(related_name='votes', to='deck.Proposal')),
('user', models.ForeignKey(related_name='votes', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Vote',
'verbose_name_plural': 'Votes',
},
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name='vote',
unique_together=set([('proposal', 'user')]),
),
]
| mit |
sonnyhu/scikit-learn | sklearn/externals/joblib/numpy_pickle.py | 37 | 23222 | """Utilities for fast persistence of big data, with optional compression."""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import pickle
import os
import sys
import warnings
try:
from pathlib import Path
except ImportError:
Path = None
from .numpy_pickle_utils import _COMPRESSORS
from .numpy_pickle_utils import BinaryZlibFile
from .numpy_pickle_utils import Unpickler, Pickler
from .numpy_pickle_utils import _read_fileobject, _write_fileobject
from .numpy_pickle_utils import _read_bytes, BUFFER_SIZE
from .numpy_pickle_compat import load_compatibility
from .numpy_pickle_compat import NDArrayWrapper
# For compatibility with old versions of joblib, we need ZNDArrayWrapper
# to be visible in the current namespace.
# Explicitly skipping next line from flake8 as it triggers an F401 warning
# which we don't care.
from .numpy_pickle_compat import ZNDArrayWrapper # noqa
from ._compat import _basestring, PY3_OR_LATER
###############################################################################
# Utility objects for persistence.
class NumpyArrayWrapper(object):
"""An object to be persisted instead of numpy arrays.
This object is used to hack into the pickle machinery and read numpy
array data from our custom persistence format.
More precisely, this object is used for:
* carrying the information of the persisted array: subclass, shape, order,
dtype. Those ndarray metadata are used to correctly reconstruct the array
with low level numpy functions.
* determining if memmap is allowed on the array.
* reading the array bytes from a file.
* reading the array using memorymap from a file.
* writing the array bytes to a file.
Attributes
----------
subclass: numpy.ndarray subclass
Determine the subclass of the wrapped array.
shape: numpy.ndarray shape
Determine the shape of the wrapped array.
order: {'C', 'F'}
Determine the order of wrapped array data. 'C' is for C order, 'F' is
for fortran order.
dtype: numpy.ndarray dtype
Determine the data type of the wrapped array.
allow_mmap: bool
Determine if memory mapping is allowed on the wrapped array.
Default: False.
"""
def __init__(self, subclass, shape, order, dtype, allow_mmap=False):
"""Constructor. Store the useful information for later."""
self.subclass = subclass
self.shape = shape
self.order = order
self.dtype = dtype
self.allow_mmap = allow_mmap
def write_array(self, array, pickler):
"""Write array bytes to pickler file handle.
This function is an adaptation of the numpy write_array function
available in version 1.10.1 in numpy/lib/format.py.
"""
# Set buffer size to 16 MiB to hide the Python loop overhead.
buffersize = max(16 * 1024 ** 2 // array.itemsize, 1)
if array.dtype.hasobject:
# We contain Python objects so we cannot write out the data
# directly. Instead, we will pickle it out with version 2 of the
# pickle protocol.
pickle.dump(array, pickler.file_handle, protocol=2)
else:
for chunk in pickler.np.nditer(array,
flags=['external_loop',
'buffered',
'zerosize_ok'],
buffersize=buffersize,
order=self.order):
pickler.file_handle.write(chunk.tostring('C'))
def read_array(self, unpickler):
"""Read array from unpickler file handle.
This function is an adaptation of the numpy read_array function
available in version 1.10.1 in numpy/lib/format.py.
"""
if len(self.shape) == 0:
count = 1
else:
count = unpickler.np.multiply.reduce(self.shape)
# Now read the actual data.
if self.dtype.hasobject:
# The array contained Python objects. We need to unpickle the data.
array = pickle.load(unpickler.file_handle)
else:
if (not PY3_OR_LATER and
unpickler.np.compat.isfileobj(unpickler.file_handle)):
# In python 2, gzip.GzipFile is considered as a file so one
# can use numpy.fromfile().
# For file objects, use np.fromfile function.
# This function is faster than the memory-intensive
# method below.
array = unpickler.np.fromfile(unpickler.file_handle,
dtype=self.dtype, count=count)
else:
# This is not a real file. We have to read it the
# memory-intensive way.
# crc32 module fails on reads greater than 2 ** 32 bytes,
# breaking large reads from gzip streams. Chunk reads to
# BUFFER_SIZE bytes to avoid issue and reduce memory overhead
# of the read. In non-chunked case count < max_read_count, so
# only one read is performed.
max_read_count = BUFFER_SIZE // min(BUFFER_SIZE,
self.dtype.itemsize)
array = unpickler.np.empty(count, dtype=self.dtype)
for i in range(0, count, max_read_count):
read_count = min(max_read_count, count - i)
read_size = int(read_count * self.dtype.itemsize)
data = _read_bytes(unpickler.file_handle,
read_size, "array data")
array[i:i + read_count] = \
unpickler.np.frombuffer(data, dtype=self.dtype,
count=read_count)
del data
if self.order == 'F':
array.shape = self.shape[::-1]
array = array.transpose()
else:
array.shape = self.shape
return array
def read_mmap(self, unpickler):
"""Read an array using numpy memmap."""
offset = unpickler.file_handle.tell()
if unpickler.mmap_mode == 'w+':
unpickler.mmap_mode = 'r+'
marray = unpickler.np.memmap(unpickler.filename,
dtype=self.dtype,
shape=self.shape,
order=self.order,
mode=unpickler.mmap_mode,
offset=offset)
# update the offset so that it corresponds to the end of the read array
unpickler.file_handle.seek(offset + marray.nbytes)
return marray
def read(self, unpickler):
"""Read the array corresponding to this wrapper.
Use the unpickler to get all information to correctly read the array.
Parameters
----------
unpickler: NumpyUnpickler
Returns
-------
array: numpy.ndarray
"""
# When requested, only use memmap mode if allowed.
if unpickler.mmap_mode is not None and self.allow_mmap:
array = self.read_mmap(unpickler)
else:
array = self.read_array(unpickler)
# Manage array subclass case
if (hasattr(array, '__array_prepare__') and
self.subclass not in (unpickler.np.ndarray,
unpickler.np.memmap)):
# We need to reconstruct another subclass
new_array = unpickler.np.core.multiarray._reconstruct(
self.subclass, (0,), 'b')
return new_array.__array_prepare__(array)
else:
return array
###############################################################################
# Pickler classes
class NumpyPickler(Pickler):
"""A pickler to persist big data efficiently.
The main features of this object are:
* persistence of numpy arrays in a single file.
* optional compression with a special care on avoiding memory copies.
Attributes
----------
fp: file
File object handle used for serializing the input object.
protocol: int
Pickle protocol used. Default is pickle.DEFAULT_PROTOCOL under
python 3, pickle.HIGHEST_PROTOCOL otherwise.
"""
dispatch = Pickler.dispatch.copy()
def __init__(self, fp, protocol=None):
self.file_handle = fp
self.buffered = isinstance(self.file_handle, BinaryZlibFile)
# By default we want a pickle protocol that only changes with
# the major python version and not the minor one
if protocol is None:
protocol = (pickle.DEFAULT_PROTOCOL if PY3_OR_LATER
else pickle.HIGHEST_PROTOCOL)
Pickler.__init__(self, self.file_handle, protocol=protocol)
# delayed import of numpy, to avoid tight coupling
try:
import numpy as np
except ImportError:
np = None
self.np = np
def _create_array_wrapper(self, array):
"""Create and returns a numpy array wrapper from a numpy array."""
order = 'F' if (array.flags.f_contiguous and
not array.flags.c_contiguous) else 'C'
allow_mmap = not self.buffered and not array.dtype.hasobject
wrapper = NumpyArrayWrapper(type(array),
array.shape, order, array.dtype,
allow_mmap=allow_mmap)
return wrapper
def save(self, obj):
"""Subclass the Pickler `save` method.
This is a total abuse of the Pickler class in order to use the numpy
persistence function `save` instead of the default pickle
implementation. The numpy array is replaced by a custom wrapper in the
pickle persistence stack and the serialized array is written right
after in the file. Warning: the file produced does not follow the
pickle format. As such it can not be read with `pickle.load`.
"""
if self.np is not None and type(obj) in (self.np.ndarray,
self.np.matrix,
self.np.memmap):
if type(obj) is self.np.memmap:
# Pickling doesn't work with memmapped arrays
obj = self.np.asanyarray(obj)
# The array wrapper is pickled instead of the real array.
wrapper = self._create_array_wrapper(obj)
Pickler.save(self, wrapper)
# A framer was introduced with pickle protocol 4 and we want to
# ensure the wrapper object is written before the numpy array
# buffer in the pickle file.
# See https://www.python.org/dev/peps/pep-3154/#framing to get
# more information on the framer behavior.
if self.proto >= 4:
self.framer.commit_frame(force=True)
# And then array bytes are written right after the wrapper.
wrapper.write_array(obj, self)
return
return Pickler.save(self, obj)
class NumpyUnpickler(Unpickler):
"""A subclass of the Unpickler to unpickle our numpy pickles.
Attributes
----------
mmap_mode: str
The memorymap mode to use for reading numpy arrays.
file_handle: file_like
File object to unpickle from.
filename: str
Name of the file to unpickle from. It should correspond to file_handle.
This parameter is required when using mmap_mode.
np: module
Reference to numpy module if numpy is installed else None.
"""
dispatch = Unpickler.dispatch.copy()
def __init__(self, filename, file_handle, mmap_mode=None):
# The next line is for backward compatibility with pickle generated
# with joblib versions less than 0.10.
self._dirname = os.path.dirname(filename)
self.mmap_mode = mmap_mode
self.file_handle = file_handle
# filename is required for numpy mmap mode.
self.filename = filename
self.compat_mode = False
Unpickler.__init__(self, self.file_handle)
try:
import numpy as np
except ImportError:
np = None
self.np = np
def load_build(self):
"""Called to set the state of a newly created object.
We capture it to replace our place-holder objects, NDArrayWrapper or
NumpyArrayWrapper, by the array we are interested in. We
replace them directly in the stack of pickler.
NDArrayWrapper is used for backward compatibility with joblib <= 0.9.
"""
Unpickler.load_build(self)
# For backward compatibility, we support NDArrayWrapper objects.
if isinstance(self.stack[-1], (NDArrayWrapper, NumpyArrayWrapper)):
if self.np is None:
raise ImportError("Trying to unpickle an ndarray, "
"but numpy didn't import correctly")
array_wrapper = self.stack.pop()
# If any NDArrayWrapper is found, we switch to compatibility mode,
# this will be used to raise a DeprecationWarning to the user at
# the end of the unpickling.
if isinstance(array_wrapper, NDArrayWrapper):
self.compat_mode = True
self.stack.append(array_wrapper.read(self))
# Be careful to register our new method.
if PY3_OR_LATER:
dispatch[pickle.BUILD[0]] = load_build
else:
dispatch[pickle.BUILD] = load_build
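# Round-trip sketch using the public API built on these picklers (the file
# name is an assumption):
#
#   >>> import numpy as np, joblib
#   >>> joblib.dump({'w': np.arange(3)}, '/tmp/model.pkl.z')
#   ['/tmp/model.pkl.z']
#   >>> joblib.load('/tmp/model.pkl.z')['w']
#   array([0, 1, 2])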
###############################################################################
# Utility functions
def dump(value, filename, compress=0, protocol=None, cache_size=None):
"""Persist an arbitrary Python object into one file.
Parameters
-----------
value: any Python object
The object to store to disk.
filename: str or pathlib.Path
The path of the file in which it is to be stored. The compression
method corresponding to one of the supported filename extensions ('.z',
'.gz', '.bz2', '.xz' or '.lzma') will be used automatically.
compress: int from 0 to 9 or bool or 2-tuple, optional
Optional compression level for the data. 0 or False is no compression.
Higher value means more compression, but also slower read and
write times. Using a value of 3 is often a good compromise.
See the notes for more details.
If compress is True, the compression level used is 3.
If compress is a 2-tuple, the first element must correspond to a string
        among the supported compressors (e.g 'zlib', 'gzip', 'bz2', 'lzma',
'xz'), the second element must be an integer from 0 to 9, corresponding
to the compression level.
protocol: positive int
Pickle protocol, see pickle.dump documentation for more details.
cache_size: positive int, optional
This option is deprecated in 0.10 and has no effect.
Returns
-------
filenames: list of strings
The list of file names in which the data is stored. If
compress is false, each array is stored in a different file.
See Also
--------
joblib.load : corresponding loader
Notes
-----
Memmapping on load cannot be used for compressed files. Thus
using compression can significantly slow down loading. In
    addition, compressed files take extra memory during
    dump and load.
"""
if Path is not None and isinstance(filename, Path):
filename = str(filename)
is_filename = isinstance(filename, _basestring)
is_fileobj = hasattr(filename, "write")
compress_method = 'zlib' # zlib is the default compression method.
if compress is True:
        # If compress is True, use the default compression level of 3.
compress_level = 3
elif isinstance(compress, tuple):
# a 2-tuple was set in compress
if len(compress) != 2:
raise ValueError(
'Compress argument tuple should contain exactly 2 elements: '
'(compress method, compress level), you passed {0}'
.format(compress))
compress_method, compress_level = compress
else:
compress_level = compress
if compress_level is not False and compress_level not in range(10):
        # Raise an error if an invalid compress level is given.
        raise ValueError(
            'Invalid compress level given: "{0}". Possible values are '
            '{1}.'.format(compress_level, list(range(10))))
if compress_method not in _COMPRESSORS:
        # Raise an error if an unsupported compression method is given.
        raise ValueError(
            'Invalid compression method given: "{0}". Possible values are '
            '{1}.'.format(compress_method, _COMPRESSORS))
if not is_filename and not is_fileobj:
# People keep inverting arguments, and the resulting error is
# incomprehensible
raise ValueError(
'Second argument should be a filename or a file-like object, '
'%s (type %s) was given.'
% (filename, type(filename))
)
if is_filename and not isinstance(compress, tuple):
        # If no explicit (method, level) tuple was requested and the filename
        # has an explicit extension, select the corresponding compressor.
if filename.endswith('.z'):
compress_method = 'zlib'
elif filename.endswith('.gz'):
compress_method = 'gzip'
elif filename.endswith('.bz2'):
compress_method = 'bz2'
elif filename.endswith('.lzma'):
compress_method = 'lzma'
elif filename.endswith('.xz'):
compress_method = 'xz'
else:
            # No matching compression method found; unset the variable to
            # make sure no compression level is applied afterwards.
compress_method = None
if compress_method in _COMPRESSORS and compress_level == 0:
            # Default to a compress_level of 3 when none was given via the
            # compress argument.
compress_level = 3
if not PY3_OR_LATER and compress_method in ('lzma', 'xz'):
raise NotImplementedError("{0} compression is only available for "
"python version >= 3.3. You are using "
"{1}.{2}".format(compress_method,
sys.version_info[0],
sys.version_info[1]))
if cache_size is not None:
# Cache size is deprecated starting from version 0.10
warnings.warn("Please do not set 'cache_size' in joblib.dump, "
"this parameter has no effect and will be removed. "
"You used 'cache_size={0}'".format(cache_size),
DeprecationWarning, stacklevel=2)
if compress_level != 0:
with _write_fileobject(filename, compress=(compress_method,
compress_level)) as f:
NumpyPickler(f, protocol=protocol).dump(value)
elif is_filename:
with open(filename, 'wb') as f:
NumpyPickler(f, protocol=protocol).dump(value)
else:
NumpyPickler(filename, protocol=protocol).dump(value)
# If the target container is a file object, nothing is returned.
if is_fileobj:
return
    # For compatibility, the list of created filenames (e.g. with one element
    # after 0.10.0) is returned by default.
return [filename]
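# Illustrative usage sketch (not part of the module): assuming joblib is
# installed, dump and load round-trip an object containing a numpy array;
# the path below is only an example.
#
#     import numpy as np
#     import joblib
#
#     obj = {'weights': np.arange(10)}
#     paths = joblib.dump(obj, '/tmp/obj.pkl.z', compress=('zlib', 3))
#     # paths == ['/tmp/obj.pkl.z']
#     restored = joblib.load('/tmp/obj.pkl.z')
#     assert (restored['weights'] == obj['weights']).all()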
def _unpickle(fobj, filename="", mmap_mode=None):
"""Internal unpickling function."""
# We are careful to open the file handle early and keep it open to
# avoid race-conditions on renames.
# That said, if data is stored in companion files, which can be
# the case with the old persistence format, moving the directory
# will create a race when joblib tries to access the companion
# files.
unpickler = NumpyUnpickler(filename, fobj, mmap_mode=mmap_mode)
obj = None
try:
obj = unpickler.load()
if unpickler.compat_mode:
warnings.warn("The file '%s' has been generated with a "
"joblib version less than 0.10. "
"Please regenerate this pickle file."
% filename,
DeprecationWarning, stacklevel=3)
except UnicodeDecodeError as exc:
# More user-friendly error message
if PY3_OR_LATER:
new_exc = ValueError(
'You may be trying to read with '
'python 3 a joblib pickle generated with python 2. '
'This feature is not supported by joblib.')
new_exc.__cause__ = exc
raise new_exc
# Reraise exception with Python 2
raise
return obj
def load(filename, mmap_mode=None):
"""Reconstruct a Python object from a file persisted with joblib.dump.
Parameters
    ----------
filename: str or pathlib.Path
The path of the file from which to load the object
mmap_mode: {None, 'r+', 'r', 'w+', 'c'}, optional
If not None, the arrays are memory-mapped from the disk. This
mode has no effect for compressed files. Note that in this
        case the reconstructed object might no longer match exactly
the originally pickled object.
Returns
-------
result: any Python object
The object stored in the file.
See Also
--------
joblib.dump : function to save an object
Notes
-----
This function can load numpy array files saved separately during the
dump. If the mmap_mode argument is given, it is passed to np.load and
arrays are loaded as memmaps. As a consequence, the reconstructed
object might not match the original pickled object. Note that if the
file was saved with compression, the arrays cannot be memmaped.
"""
if Path is not None and isinstance(filename, Path):
filename = str(filename)
if hasattr(filename, "read") and hasattr(filename, "seek"):
with _read_fileobject(filename, "", mmap_mode) as fobj:
obj = _unpickle(fobj)
else:
with open(filename, 'rb') as f:
with _read_fileobject(f, filename, mmap_mode) as fobj:
if isinstance(fobj, _basestring):
                    # If the returned file object is a string, this means we
                    # are trying to load a pickle file generated with an old
                    # version of joblib, so we load it with the joblib
                    # compatibility function.
return load_compatibility(fobj)
obj = _unpickle(fobj, filename, mmap_mode)
return obj
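# Illustrative memmap sketch (not part of the module): with an uncompressed
# joblib pickle of a large array on disk, mmap_mode='r' returns a read-only
# numpy.memmap backed by the file instead of an in-memory copy; the path is
# only an example.
#
#     import joblib
#
#     big = joblib.load('/tmp/big_array.pkl', mmap_mode='r')
#     first_row = big[0]  # pages in only the bytes that are touched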
| bsd-3-clause |
jonyroda97/redbot-amigosprovaveis | lib/youtube_dl/extractor/dispeak.py | 23 | 4368 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
parse_duration,
remove_end,
xpath_element,
xpath_text,
)
class DigitallySpeakingIE(InfoExtractor):
_VALID_URL = r'https?://(?:s?evt\.dispeak|events\.digitallyspeaking)\.com/(?:[^/]+/)+xml/(?P<id>[^.]+)\.xml'
_TESTS = [{
# From http://gdcvault.com/play/1023460/Tenacious-Design-and-The-Interface
'url': 'http://evt.dispeak.com/ubm/gdc/sf16/xml/840376_BQRC.xml',
'md5': 'a8efb6c31ed06ca8739294960b2dbabd',
'info_dict': {
'id': '840376_BQRC',
'ext': 'mp4',
'title': 'Tenacious Design and The Interface of \'Destiny\'',
},
}, {
# From http://www.gdcvault.com/play/1014631/Classic-Game-Postmortem-PAC
'url': 'http://events.digitallyspeaking.com/gdc/sf11/xml/12396_1299111843500GMPX.xml',
'only_matching': True,
}, {
# From http://www.gdcvault.com/play/1013700/Advanced-Material
'url': 'http://sevt.dispeak.com/ubm/gdc/eur10/xml/11256_1282118587281VNIT.xml',
'only_matching': True,
}]
def _parse_mp4(self, metadata):
video_formats = []
video_root = None
mp4_video = xpath_text(metadata, './mp4video', default=None)
if mp4_video is not None:
mobj = re.match(r'(?P<root>https?://.*?/).*', mp4_video)
video_root = mobj.group('root')
if video_root is None:
http_host = xpath_text(metadata, 'httpHost', default=None)
if http_host:
video_root = 'http://%s/' % http_host
if video_root is None:
# Hard-coded in http://evt.dispeak.com/ubm/gdc/sf16/custom/player2.js
# Works for GPUTechConf, too
video_root = 'http://s3-2u.digitallyspeaking.com/'
formats = metadata.findall('./MBRVideos/MBRVideo')
if not formats:
return None
for a_format in formats:
stream_name = xpath_text(a_format, 'streamName', fatal=True)
video_path = re.match(r'mp4\:(?P<path>.*)', stream_name).group('path')
url = video_root + video_path
vbr = xpath_text(a_format, 'bitrate')
video_formats.append({
'url': url,
'vbr': int_or_none(vbr),
})
return video_formats
def _parse_flv(self, metadata):
formats = []
akamai_url = xpath_text(metadata, './akamaiHost', fatal=True)
audios = metadata.findall('./audios/audio')
for audio in audios:
formats.append({
'url': 'rtmp://%s/ondemand?ovpfv=1.1' % akamai_url,
'play_path': remove_end(audio.get('url'), '.flv'),
'ext': 'flv',
'vcodec': 'none',
'format_id': audio.get('code'),
})
slide_video_path = xpath_text(metadata, './slideVideo', fatal=True)
formats.append({
'url': 'rtmp://%s/ondemand?ovpfv=1.1' % akamai_url,
'play_path': remove_end(slide_video_path, '.flv'),
'ext': 'flv',
'format_note': 'slide deck video',
'quality': -2,
'preference': -2,
'format_id': 'slides',
})
speaker_video_path = xpath_text(metadata, './speakerVideo', fatal=True)
formats.append({
'url': 'rtmp://%s/ondemand?ovpfv=1.1' % akamai_url,
'play_path': remove_end(speaker_video_path, '.flv'),
'ext': 'flv',
'format_note': 'speaker video',
'quality': -1,
'preference': -1,
'format_id': 'speaker',
})
return formats
def _real_extract(self, url):
video_id = self._match_id(url)
xml_description = self._download_xml(url, video_id)
metadata = xpath_element(xml_description, 'metadata')
video_formats = self._parse_mp4(metadata)
if video_formats is None:
video_formats = self._parse_flv(metadata)
return {
'id': video_id,
'formats': video_formats,
'title': xpath_text(metadata, 'title', fatal=True),
'duration': parse_duration(xpath_text(metadata, 'endTime')),
'creator': xpath_text(metadata, 'speaker'),
}
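# Illustrative usage sketch (not part of the extractor): youtube-dl picks the
# extractor from the URL pattern above; the URL comes from _TESTS and
# download=False keeps the media itself from being fetched.
#
#     from youtube_dl import YoutubeDL
#
#     with YoutubeDL({'quiet': True}) as ydl:
#         info = ydl.extract_info(
#             'http://evt.dispeak.com/ubm/gdc/sf16/xml/840376_BQRC.xml',
#             download=False)
#         print(info['id'], info['title'])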
| gpl-3.0 |
damianam/easybuild-framework | easybuild/toolchains/craypgi.py | 5 | 1596 | ##
# Copyright 2014-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
CrayPGI toolchain: Cray compilers (PGI) and MPI via Cray compiler drivers (PrgEnv-pgi) minus LibSci minus Cray FFTW
:author: Jg Piccinali (CSCS)
"""
from easybuild.toolchains.compiler.craype import CrayPEPGI
from easybuild.toolchains.mpi.craympich import CrayMPICH
from easybuild.tools.toolchain import DUMMY_TOOLCHAIN_NAME
class CrayPGI(CrayPEPGI, CrayMPICH):
"""Compiler toolchain for Cray Programming Environment for Cray Compiling Environment (PGI) (PrgEnv-pgi)."""
NAME = 'CrayPGI'
SUBTOOLCHAIN = DUMMY_TOOLCHAIN_NAME
| gpl-2.0 |
quamilek/django | tests/admin_inlines/tests.py | 118 | 44045 | from __future__ import unicode_literals
import datetime
import warnings
from django.contrib.admin import ModelAdmin, TabularInline
from django.contrib.admin.helpers import InlineAdminForm
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.test import RequestFactory, TestCase, override_settings
from django.utils.encoding import force_text
from .admin import InnerInline, site as admin_site
from .models import (
Author, BinaryTree, Book, Chapter, Child, ChildModel1, ChildModel2,
Fashionista, FootNote, Holder, Holder2, Holder3, Holder4, Inner, Inner2,
Inner3, Inner4Stacked, Inner4Tabular, Novel, OutfitItem, Parent,
ParentModelWithCustomPk, Person, Poll, Profile, ProfileCollection,
Question, Sighting, SomeChildModel, SomeParentModel, Teacher,
)
INLINE_CHANGELINK_HTML = 'class="inlinechangelink">Change</a>'
class TestDataMixin(object):
@classmethod
def setUpTestData(cls):
# password = "secret"
User.objects.create(
pk=100, username='super', first_name='Super', last_name='User', email='[email protected]',
password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
is_staff=True, last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_inlines.urls")
class TestInline(TestDataMixin, TestCase):
def setUp(self):
holder = Holder(dummy=13)
holder.save()
Inner(dummy=42, holder=holder).save()
result = self.client.login(username='super', password='secret')
self.assertEqual(result, True)
self.factory = RequestFactory()
def test_can_delete(self):
"""
can_delete should be passed to inlineformset factory.
"""
holder = Holder.objects.get(dummy=13)
response = self.client.get(
reverse('admin:admin_inlines_holder_change', args=(holder.id,))
)
inner_formset = response.context['inline_admin_formsets'][0].formset
expected = InnerInline.can_delete
actual = inner_formset.can_delete
self.assertEqual(expected, actual, 'can_delete must be equal')
def test_readonly_stacked_inline_label(self):
"""Bug #13174."""
holder = Holder.objects.create(dummy=42)
Inner.objects.create(holder=holder, dummy=42, readonly='')
response = self.client.get(
reverse('admin:admin_inlines_holder_change', args=(holder.id,))
)
self.assertContains(response, '<label>Inner readonly label:</label>')
def test_many_to_many_inlines(self):
"Autogenerated many-to-many inlines are displayed correctly (#13407)"
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# The heading for the m2m inline block uses the right text
self.assertContains(response, '<h2>Author-book relationships</h2>')
# The "add another" label is correct
self.assertContains(response, 'Add another Author\\u002Dbook relationship')
# The '+' is dropped from the autogenerated form prefix (Author_books+)
self.assertContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_primary(self):
person = Person.objects.create(firstname='Imelda')
item = OutfitItem.objects.create(name='Shoes')
# Imelda likes shoes, but can't carry her own bags.
data = {
'shoppingweakness_set-TOTAL_FORMS': 1,
'shoppingweakness_set-INITIAL_FORMS': 0,
'shoppingweakness_set-MAX_NUM_FORMS': 0,
'_save': 'Save',
'person': person.id,
'max_weight': 0,
'shoppingweakness_set-0-item': item.id,
}
response = self.client.post(reverse('admin:admin_inlines_fashionista_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(Fashionista.objects.filter(person__firstname='Imelda')), 1)
def test_tabular_non_field_errors(self):
"""
Ensure that non_field_errors are displayed correctly, including the
right value for colspan. Refs #13510.
"""
data = {
'title_set-TOTAL_FORMS': 1,
'title_set-INITIAL_FORMS': 0,
'title_set-MAX_NUM_FORMS': 0,
'_save': 'Save',
'title_set-0-title1': 'a title',
'title_set-0-title2': 'a different title',
}
response = self.client.post(reverse('admin:admin_inlines_titlecollection_add'), data)
# Here colspan is "4": two fields (title1 and title2), one hidden field and the delete checkbox.
self.assertContains(response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>The two titles must be the same</li></ul></td></tr>')
def test_no_parent_callable_lookup(self):
"""Admin inline `readonly_field` shouldn't invoke parent ModelAdmin callable"""
        # The identically named callable isn't present in the parent
        # ModelAdmin, so rendering of the add view shouldn't explode.
response = self.client.get(reverse('admin:admin_inlines_novel_add'))
self.assertEqual(response.status_code, 200)
# View should have the child inlines section
self.assertContains(response, '<div class="inline-group" id="chapter_set-group">')
def test_callable_lookup(self):
"""Admin inline should invoke local callable when its name is listed in readonly_fields"""
response = self.client.get(reverse('admin:admin_inlines_poll_add'))
self.assertEqual(response.status_code, 200)
# Add parent object view should have the child inlines section
self.assertContains(response, '<div class="inline-group" id="question_set-group">')
# The right callable should be used for the inline readonly_fields
# column cells
self.assertContains(response, '<p>Callable in QuestionInline</p>')
def test_help_text(self):
"""
Ensure that the inlines' model field help texts are displayed when
using both the stacked and tabular layouts.
Ref #8190.
"""
response = self.client.get(reverse('admin:admin_inlines_holder4_add'))
self.assertContains(response, '<p class="help">Awesome stacked help text is awesome.</p>', 4)
self.assertContains(response, '<img src="/static/admin/img/icon-unknown.gif" class="help help-tooltip" width="10" height="10" alt="(Awesome tabular help text is awesome.)" title="Awesome tabular help text is awesome." />', 1)
# ReadOnly fields
response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add'))
self.assertContains(response, '<img src="/static/admin/img/icon-unknown.gif" class="help help-tooltip" width="10" height="10" alt="(Help text for ReadOnlyInline)" title="Help text for ReadOnlyInline" />', 1)
def test_inline_hidden_field_no_column(self):
"""#18263 -- Make sure hidden fields don't get a column in tabular inlines"""
parent = SomeParentModel.objects.create(name='a')
SomeChildModel.objects.create(name='b', position='0', parent=parent)
SomeChildModel.objects.create(name='c', position='1', parent=parent)
response = self.client.get(reverse('admin:admin_inlines_someparentmodel_change', args=(parent.pk,)))
self.assertNotContains(response, '<td class="field-position">')
self.assertContains(response, (
'<input id="id_somechildmodel_set-1-position" '
'name="somechildmodel_set-1-position" type="hidden" value="1" />'))
def test_non_related_name_inline(self):
"""
Ensure that multiple inlines with related_name='+' have correct form
prefixes. Bug #16838.
"""
response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add'))
self.assertContains(response,
'<input type="hidden" name="-1-0-id" id="id_-1-0-id" />', html=True)
self.assertContains(response,
'<input type="hidden" name="-1-0-capo_famiglia" id="id_-1-0-capo_famiglia" />', html=True)
self.assertContains(response,
'<input id="id_-1-0-name" type="text" class="vTextField" '
'name="-1-0-name" maxlength="100" />', html=True)
self.assertContains(response,
'<input type="hidden" name="-2-0-id" id="id_-2-0-id" />', html=True)
self.assertContains(response,
'<input type="hidden" name="-2-0-capo_famiglia" id="id_-2-0-capo_famiglia" />', html=True)
self.assertContains(response,
'<input id="id_-2-0-name" type="text" class="vTextField" '
'name="-2-0-name" maxlength="100" />', html=True)
@override_settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True)
def test_localize_pk_shortcut(self):
"""
Ensure that the "View on Site" link is correct for locales that use
thousand separators
"""
holder = Holder.objects.create(pk=123456789, dummy=42)
inner = Inner.objects.create(pk=987654321, holder=holder, dummy=42, readonly='')
response = self.client.get(reverse('admin:admin_inlines_holder_change', args=(holder.id,)))
inner_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(inner).pk, inner.pk)
self.assertContains(response, inner_shortcut)
def test_custom_pk_shortcut(self):
"""
Ensure that the "View on Site" link is correct for models with a
custom primary key field. Bug #18433.
"""
parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
child1 = ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
child2 = ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',)))
child1_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child1).pk, child1.pk)
child2_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child2).pk, child2.pk)
self.assertContains(response, child1_shortcut)
self.assertContains(response, child2_shortcut)
def test_create_inlines_on_inherited_model(self):
"""
Ensure that an object can be created with inlines when it inherits
another class. Bug #19524.
"""
data = {
'name': 'Martian',
'sighting_set-TOTAL_FORMS': 1,
'sighting_set-INITIAL_FORMS': 0,
'sighting_set-MAX_NUM_FORMS': 0,
'sighting_set-0-place': 'Zone 51',
'_save': 'Save',
}
response = self.client.post(reverse('admin:admin_inlines_extraterrestrial_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Sighting.objects.filter(et__name='Martian').count(), 1)
def test_custom_get_extra_form(self):
bt_head = BinaryTree.objects.create(name="Tree Head")
BinaryTree.objects.create(name="First Child", parent=bt_head)
# The maximum number of forms should respect 'get_max_num' on the
# ModelAdmin
max_forms_input = '<input id="id_binarytree_set-MAX_NUM_FORMS" name="binarytree_set-MAX_NUM_FORMS" type="hidden" value="%d" />'
# The total number of forms will remain the same in either case
total_forms_hidden = '<input id="id_binarytree_set-TOTAL_FORMS" name="binarytree_set-TOTAL_FORMS" type="hidden" value="2" />'
response = self.client.get(reverse('admin:admin_inlines_binarytree_add'))
self.assertContains(response, max_forms_input % 3)
self.assertContains(response, total_forms_hidden)
response = self.client.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,)))
self.assertContains(response, max_forms_input % 2)
self.assertContains(response, total_forms_hidden)
def test_min_num(self):
"""
Ensure that min_num and extra determine number of forms.
"""
class MinNumInline(TabularInline):
model = BinaryTree
min_num = 2
extra = 3
modeladmin = ModelAdmin(BinaryTree, admin_site)
modeladmin.inlines = [MinNumInline]
min_forms = '<input id="id_binarytree_set-MIN_NUM_FORMS" name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="2" />'
total_forms = '<input id="id_binarytree_set-TOTAL_FORMS" name="binarytree_set-TOTAL_FORMS" type="hidden" value="5" />'
request = self.factory.get(reverse('admin:admin_inlines_binarytree_add'))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request)
self.assertContains(response, min_forms)
self.assertContains(response, total_forms)
def test_custom_min_num(self):
"""
Ensure that get_min_num is called and used correctly.
"""
bt_head = BinaryTree.objects.create(name="Tree Head")
BinaryTree.objects.create(name="First Child", parent=bt_head)
class MinNumInline(TabularInline):
model = BinaryTree
extra = 3
def get_min_num(self, request, obj=None, **kwargs):
if obj:
return 5
return 2
modeladmin = ModelAdmin(BinaryTree, admin_site)
modeladmin.inlines = [MinNumInline]
min_forms = '<input id="id_binarytree_set-MIN_NUM_FORMS" name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="%d" />'
total_forms = '<input id="id_binarytree_set-TOTAL_FORMS" name="binarytree_set-TOTAL_FORMS" type="hidden" value="%d" />'
request = self.factory.get(reverse('admin:admin_inlines_binarytree_add'))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request)
self.assertContains(response, min_forms % 2)
self.assertContains(response, total_forms % 5)
request = self.factory.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(bt_head.id))
self.assertContains(response, min_forms % 5)
self.assertContains(response, total_forms % 8)
def test_inline_nonauto_noneditable_pk(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
self.assertContains(response,
'<input id="id_nonautopkbook_set-0-rand_pk" name="nonautopkbook_set-0-rand_pk" type="hidden" />',
html=True)
self.assertContains(response,
'<input id="id_nonautopkbook_set-2-0-rand_pk" name="nonautopkbook_set-2-0-rand_pk" type="hidden" />',
html=True)
def test_inline_editable_pk(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
self.assertContains(response,
'<input class="vIntegerField" id="id_editablepkbook_set-0-manual_pk" name="editablepkbook_set-0-manual_pk" type="text" />',
html=True, count=1)
self.assertContains(response,
'<input class="vIntegerField" id="id_editablepkbook_set-2-0-manual_pk" name="editablepkbook_set-2-0-manual_pk" type="text" />',
html=True, count=1)
def test_stacked_inline_edit_form_contains_has_original_class(self):
holder = Holder.objects.create(dummy=1)
holder.inner_set.create(dummy=1)
response = self.client.get(reverse('admin:admin_inlines_holder_change', args=(holder.pk,)))
self.assertContains(
response,
'<div class="inline-related has_original" id="inner_set-0">',
count=1
)
self.assertContains(
response,
'<div class="inline-related" id="inner_set-1">',
count=1
)
def test_inlines_show_change_link_registered(self):
"Inlines `show_change_link` for registered models when enabled."
holder = Holder4.objects.create(dummy=1)
item1 = Inner4Stacked.objects.create(dummy=1, holder=holder)
item2 = Inner4Tabular.objects.create(dummy=1, holder=holder)
items = (
('inner4stacked', item1.pk),
('inner4tabular', item2.pk),
)
response = self.client.get(reverse('admin:admin_inlines_holder4_change', args=(holder.pk,)))
self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
for model, pk in items:
url = reverse('admin:admin_inlines_%s_change' % model, args=(pk,))
self.assertContains(response, '<a href="%s" %s' % (url, INLINE_CHANGELINK_HTML))
def test_inlines_show_change_link_unregistered(self):
"Inlines `show_change_link` disabled for unregistered models."
parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',)))
self.assertFalse(response.context['inline_admin_formset'].opts.has_registered_model)
self.assertNotContains(response, INLINE_CHANGELINK_HTML)
def test_tabular_inline_show_change_link_false_registered(self):
"Inlines `show_change_link` disabled by default."
poll = Poll.objects.create(name="New poll")
Question.objects.create(poll=poll)
response = self.client.get(reverse('admin:admin_inlines_poll_change', args=(poll.pk,)))
self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
self.assertNotContains(response, INLINE_CHANGELINK_HTML)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_inlines.urls")
class TestInlineMedia(TestDataMixin, TestCase):
def setUp(self):
result = self.client.login(username='super', password='secret')
self.assertEqual(result, True)
def test_inline_media_only_base(self):
holder = Holder(dummy=13)
holder.save()
Inner(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertContains(response, 'my_awesome_admin_scripts.js')
def test_inline_media_only_inline(self):
holder = Holder3(dummy=13)
holder.save()
Inner3(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder3_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertContains(response, 'my_awesome_inline_scripts.js')
def test_all_inline_media(self):
holder = Holder2(dummy=13)
holder.save()
Inner2(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder2_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertContains(response, 'my_awesome_admin_scripts.js')
self.assertContains(response, 'my_awesome_inline_scripts.js')
@override_settings(ROOT_URLCONF="admin_inlines.urls")
class TestInlineAdminForm(TestCase):
def test_immutable_content_type(self):
"""Regression for #9362
The problem depends only on InlineAdminForm and its "original"
argument, so we can safely set the other arguments to None/{}. We just
need to check that the content_type argument of Child isn't altered by
the internals of the inline form."""
sally = Teacher.objects.create(name='Sally')
john = Parent.objects.create(name='John')
joe = Child.objects.create(name='Joe', teacher=sally, parent=john)
iaf = InlineAdminForm(None, None, {}, {}, joe)
parent_ct = ContentType.objects.get_for_model(Parent)
self.assertEqual(iaf.original.content_type, parent_ct)
def test_original_content_type_id_deprecated(self):
"""
#23444 -- Verify a warning is raised when accessing
`original_content_type_id` attribute of `InlineAdminForm` object.
"""
iaf = InlineAdminForm(None, None, {}, {}, None)
poll = Poll.objects.create(name="poll")
iaf2 = InlineAdminForm(None, None, {}, {}, poll)
poll_ct = ContentType.objects.get_for_model(Poll)
with warnings.catch_warnings(record=True) as recorded:
warnings.filterwarnings('always')
with self.assertRaises(AttributeError):
iaf.original_content_type_id
msg = force_text(recorded.pop().message)
self.assertEqual(
msg,
'InlineAdminForm.original_content_type_id is deprecated and will be '
'removed in Django 1.10. If you were using this attribute to construct '
'the "view on site" URL, use the `absolute_url` attribute instead.'
)
self.assertEqual(iaf2.original_content_type_id, poll_ct.id)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_inlines.urls")
class TestInlineProtectedOnDelete(TestDataMixin, TestCase):
def setUp(self):
result = self.client.login(username='super', password='secret')
self.assertEqual(result, True)
def test_deleting_inline_with_protected_delete_does_not_validate(self):
lotr = Novel.objects.create(name='Lord of the rings')
chapter = Chapter.objects.create(novel=lotr, name='Many Meetings')
foot_note = FootNote.objects.create(chapter=chapter, note='yadda yadda')
change_url = reverse('admin:admin_inlines_novel_change', args=(lotr.id,))
response = self.client.get(change_url)
data = {
'name': lotr.name,
'chapter_set-TOTAL_FORMS': 1,
'chapter_set-INITIAL_FORMS': 1,
'chapter_set-MAX_NUM_FORMS': 1000,
'_save': 'Save',
'chapter_set-0-id': chapter.id,
'chapter_set-0-name': chapter.name,
'chapter_set-0-novel': lotr.id,
'chapter_set-0-DELETE': 'on'
}
response = self.client.post(change_url, data)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Deleting chapter %s would require deleting "
"the following protected related objects: foot note %s"
% (chapter, foot_note))
@override_settings(ROOT_URLCONF="admin_inlines.urls")
class TestInlinePermissions(TestCase):
"""
Make sure the admin respects permissions for objects that are edited
inline. Refs #8060.
"""
def setUp(self):
self.user = User(username='admin')
self.user.is_staff = True
self.user.is_active = True
self.user.set_password('secret')
self.user.save()
self.author_ct = ContentType.objects.get_for_model(Author)
self.holder_ct = ContentType.objects.get_for_model(Holder2)
self.book_ct = ContentType.objects.get_for_model(Book)
self.inner_ct = ContentType.objects.get_for_model(Inner2)
# User always has permissions to add and change Authors, and Holders,
# the main (parent) models of the inlines. Permissions on the inlines
# vary per test.
permission = Permission.objects.get(codename='add_author', content_type=self.author_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_author', content_type=self.author_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='add_holder2', content_type=self.holder_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_holder2', content_type=self.holder_ct)
self.user.user_permissions.add(permission)
author = Author.objects.create(pk=1, name='The Author')
book = author.books.create(name='The inline Book')
self.author_change_url = reverse('admin:admin_inlines_author_change', args=(author.id,))
# Get the ID of the automatically created intermediate model for the Author-Book m2m
author_book_auto_m2m_intermediate = Author.books.through.objects.get(author=author, book=book)
self.author_book_auto_m2m_intermediate_id = author_book_auto_m2m_intermediate.pk
holder = Holder2.objects.create(dummy=13)
inner2 = Inner2.objects.create(dummy=42, holder=holder)
self.holder_change_url = reverse('admin:admin_inlines_holder2_change', args=(holder.id,))
self.inner2_id = inner2.id
self.assertEqual(
self.client.login(username='admin', password='secret'),
True)
def test_inline_add_m2m_noperm(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author\\u002DBook Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_add_fk_noperm(self):
response = self.client.get(reverse('admin:admin_inlines_holder2_add'))
# No permissions on Inner2s, so no inline
self.assertNotContains(response, '<h2>Inner2s</h2>')
self.assertNotContains(response, 'Add another Inner2')
self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')
def test_inline_change_m2m_noperm(self):
response = self.client.get(self.author_change_url)
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author\\u002DBook Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_change_fk_noperm(self):
response = self.client.get(self.holder_change_url)
# No permissions on Inner2s, so no inline
self.assertNotContains(response, '<h2>Inner2s</h2>')
self.assertNotContains(response, 'Add another Inner2')
self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')
def test_inline_add_m2m_add_perm(self):
permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# No change permission on Books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author\\u002DBook Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_add_fk_add_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(reverse('admin:admin_inlines_holder2_add'))
# Add permission on inner2s, so we get the inline
self.assertContains(response, '<h2>Inner2s</h2>')
self.assertContains(response, 'Add another Inner2')
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="3" name="inner2_set-TOTAL_FORMS" />', html=True)
def test_inline_change_m2m_add_perm(self):
permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author\\u002DBook Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
self.assertNotContains(response, 'id="id_Author_books-0-DELETE"')
def test_inline_change_m2m_change_perm(self):
permission = Permission.objects.get(codename='change_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# We have change perm on books, so we can add/change/delete inlines
self.assertContains(response, '<h2>Author-book relationships</h2>')
self.assertContains(response, 'Add another Author\\u002Dbook relationship')
self.assertContains(response, '<input type="hidden" id="id_Author_books-TOTAL_FORMS" '
'value="4" name="Author_books-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_Author_books-0-id" '
'value="%i" name="Author_books-0-id" />' % self.author_book_auto_m2m_intermediate_id, html=True)
self.assertContains(response, 'id="id_Author_books-0-DELETE"')
def test_inline_change_fk_add_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Add permission on inner2s, so we can add but not modify existing
self.assertContains(response, '<h2>Inner2s</h2>')
self.assertContains(response, 'Add another Inner2')
# 3 extra forms only, not the existing instance form
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="3" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertNotContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
def test_inline_change_fk_change_perm(self):
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Change permission on inner2s, so we can change existing but not add new
self.assertContains(response, '<h2>Inner2s</h2>')
# Just the one form for existing instances
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="1" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
# max-num 0 means we can't add new ones
self.assertContains(response, '<input type="hidden" id="id_inner2_set-MAX_NUM_FORMS" '
'value="0" name="inner2_set-MAX_NUM_FORMS" />', html=True)
def test_inline_change_fk_add_change_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Add/change perm, so we can add new and change existing
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance and three extra for new
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="4" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
def test_inline_change_fk_change_del_perm(self):
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Change/delete perm on inner2s, so we can change/delete existing
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance only, no new
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="1" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
def test_inline_change_fk_all_perms(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# All perms on inner2s, so we can add/change/delete
self.assertContains(response, '<h2>Inner2s</h2>')
        # One form for the existing instance and three extra for new
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="4" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_inlines.urls")
class SeleniumFirefoxTests(AdminSeleniumWebDriverTestCase):
available_apps = ['admin_inlines'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def setUp(self):
# password = "secret"
User.objects.create(
pk=100, username='super', first_name='Super', last_name='User', email='[email protected]',
password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
is_staff=True, last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
def test_add_stackeds(self):
"""
Ensure that the "Add another XXX" link correctly adds items to the
stacked formset.
"""
self.admin_login(username='super', password='secret')
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_inlines_holder4_add')))
inline_id = '#inner4stacked_set-group'
rows_length = lambda: len(self.selenium.find_elements_by_css_selector(
'%s .dynamic-inner4stacked_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 stacked')
add_button.click()
self.assertEqual(rows_length(), 4)
def test_delete_stackeds(self):
self.admin_login(username='super', password='secret')
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_inlines_holder4_add')))
inline_id = '#inner4stacked_set-group'
rows_length = lambda: len(self.selenium.find_elements_by_css_selector(
'%s .dynamic-inner4stacked_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 stacked')
add_button.click()
add_button.click()
self.assertEqual(rows_length(), 5, msg="sanity check")
for delete_link in self.selenium.find_elements_by_css_selector(
'%s .inline-deletelink' % inline_id):
delete_link.click()
self.assertEqual(rows_length(), 3)
def test_add_inlines(self):
"""
Ensure that the "Add another XXX" link correctly adds items to the
inline form.
"""
self.admin_login(username='super', password='secret')
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_inlines_profilecollection_add')))
# Check that there's only one inline to start with and that it has the
# correct ID.
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')), 1)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[0].get_attribute('id'),
'profile_set-0')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-0 input[name=profile_set-0-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-0 input[name=profile_set-0-last_name]')), 1)
# Add an inline
self.selenium.find_element_by_link_text('Add another Profile').click()
# Check that the inline has been added, that it has the right id, and
# that it contains the right fields.
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')), 2)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[1].get_attribute('id'), 'profile_set-1')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-1 input[name=profile_set-1-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-1 input[name=profile_set-1-last_name]')), 1)
# Let's add another one to be sure
self.selenium.find_element_by_link_text('Add another Profile').click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')), 3)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[2].get_attribute('id'), 'profile_set-2')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-2 input[name=profile_set-2-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-2 input[name=profile_set-2-last_name]')), 1)
# Enter some data and click 'Save'
self.selenium.find_element_by_name('profile_set-0-first_name').send_keys('0 first name 1')
self.selenium.find_element_by_name('profile_set-0-last_name').send_keys('0 last name 2')
self.selenium.find_element_by_name('profile_set-1-first_name').send_keys('1 first name 1')
self.selenium.find_element_by_name('profile_set-1-last_name').send_keys('1 last name 2')
self.selenium.find_element_by_name('profile_set-2-first_name').send_keys('2 first name 1')
self.selenium.find_element_by_name('profile_set-2-last_name').send_keys('2 last name 2')
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
# Check that the objects have been created in the database
self.assertEqual(ProfileCollection.objects.all().count(), 1)
self.assertEqual(Profile.objects.all().count(), 3)
def test_delete_inlines(self):
self.admin_login(username='super', password='secret')
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_inlines_profilecollection_add')))
# Add a few inlines
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'#profile_set-group table tr.dynamic-profile_set')), 5)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-3')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-4')), 1)
# Click on a few delete buttons
self.selenium.find_element_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1 td.delete a').click()
self.selenium.find_element_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2 td.delete a').click()
# Verify that they're gone and that the IDs have been re-sequenced
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'#profile_set-group table tr.dynamic-profile_set')), 3)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)
def test_alternating_rows(self):
self.admin_login(username='super', password='secret')
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_inlines_profilecollection_add')))
# Add a few inlines
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
row_selector = 'form#profilecollection_form tr.dynamic-profile_set'
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
"%s.row1" % row_selector)), 2, msg="Expect two row1 styled rows")
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
"%s.row2" % row_selector)), 1, msg="Expect one row2 styled row")
class SeleniumChromeTests(SeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class SeleniumIETests(SeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
| bsd-3-clause |
dbkaynor/PyCopyMoveTk | auxfiles/Send2Trash/Send2Trash-1.3.0/send2trash/plat_win.py | 2 | 1655 | # Copyright 2013 Hardcoded Software (http://www.hardcoded.net)
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
from __future__ import unicode_literals
from ctypes import windll, Structure, byref, c_uint
from ctypes.wintypes import HWND, UINT, LPCWSTR, BOOL
import os.path as op
from .compat import text_type
shell32 = windll.shell32
SHFileOperationW = shell32.SHFileOperationW
class SHFILEOPSTRUCTW(Structure):
_fields_ = [
("hwnd", HWND),
("wFunc", UINT),
("pFrom", LPCWSTR),
("pTo", LPCWSTR),
("fFlags", c_uint),
("fAnyOperationsAborted", BOOL),
("hNameMappings", c_uint),
("lpszProgressTitle", LPCWSTR),
]
FO_MOVE = 1
FO_COPY = 2
FO_DELETE = 3
FO_RENAME = 4
FOF_MULTIDESTFILES = 1
FOF_SILENT = 4
FOF_NOCONFIRMATION = 16
FOF_ALLOWUNDO = 64
FOF_NOERRORUI = 1024
def send2trash(path):
if not isinstance(path, text_type):
path = text_type(path, 'mbcs')
if not op.isabs(path):
path = op.abspath(path)
fileop = SHFILEOPSTRUCTW()
fileop.hwnd = 0
fileop.wFunc = FO_DELETE
fileop.pFrom = LPCWSTR(path + '\0')
fileop.pTo = None
fileop.fFlags = FOF_ALLOWUNDO | FOF_NOCONFIRMATION | FOF_NOERRORUI | FOF_SILENT
fileop.fAnyOperationsAborted = 0
fileop.hNameMappings = 0
fileop.lpszProgressTitle = None
result = SHFileOperationW(byref(fileop))
if result:
msg = "Couldn't perform operation. Error code: %d" % result
raise OSError(msg)
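# Illustrative usage sketch (not part of the module): on Windows this sends a
# file to the Recycle Bin via the shell instead of deleting it permanently;
# the path below is only an example.
#
#     from send2trash import send2trash
#
#     send2trash('C:\\temp\\obsolete_report.txt')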
| gpl-2.0 |
timonwong/foo_uie_wsh_panel_mod.scintilla | test/simpleTests.py | 4 | 74073 | # -*- coding: utf-8 -*-
# Requires Python 2.7 or later
from __future__ import with_statement
from __future__ import unicode_literals
import codecs, ctypes, os, sys, unittest
if sys.platform == "win32":
import XiteWin as Xite
else:
import XiteQt as Xite
class TestSimple(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def testLength(self):
self.assertEquals(self.ed.Length, 0)
def testAddText(self):
self.ed.AddText(1, b"x")
self.assertEquals(self.ed.Length, 1)
self.assertEquals(self.ed.GetCharAt(0), ord("x"))
self.assertEquals(self.ed.GetStyleAt(0), 0)
self.ed.ClearAll()
self.assertEquals(self.ed.Length, 0)
def testDeleteRange(self):
self.ed.AddText(5, b"abcde")
self.assertEquals(self.ed.Length, 5)
self.ed.DeleteRange(1, 2)
self.assertEquals(self.ed.Length, 3)
self.assertEquals(self.ed.Contents(), b"ade")
def testAddStyledText(self):
self.assertEquals(self.ed.EndStyled, 0)
self.ed.AddStyledText(2, b"x\002")
self.assertEquals(self.ed.Length, 1)
self.assertEquals(self.ed.GetCharAt(0), ord("x"))
self.assertEquals(self.ed.GetStyleAt(0), 2)
self.assertEquals(self.ed.StyledTextRange(0, 1), b"x\002")
self.ed.ClearDocumentStyle()
self.assertEquals(self.ed.Length, 1)
self.assertEquals(self.ed.GetCharAt(0), ord("x"))
self.assertEquals(self.ed.GetStyleAt(0), 0)
self.assertEquals(self.ed.StyledTextRange(0, 1), b"x\0")
def testStyling(self):
self.assertEquals(self.ed.EndStyled, 0)
self.ed.AddStyledText(4, b"x\002y\003")
self.assertEquals(self.ed.StyledTextRange(0, 2), b"x\002y\003")
self.ed.StartStyling(0,0xf)
self.ed.SetStyling(1, 5)
self.assertEquals(self.ed.StyledTextRange(0, 2), b"x\005y\003")
self.ed.StartStyling(0,0xff)
self.ed.SetStylingEx(2, b"\100\101")
self.assertEquals(self.ed.StyledTextRange(0, 2), b"x\100y\101")
def testPosition(self):
self.assertEquals(self.ed.CurrentPos, 0)
self.assertEquals(self.ed.Anchor, 0)
self.ed.AddText(1, b"x")
# Caret has automatically moved
self.assertEquals(self.ed.CurrentPos, 1)
self.assertEquals(self.ed.Anchor, 1)
self.ed.SelectAll()
self.assertEquals(self.ed.CurrentPos, 0)
self.assertEquals(self.ed.Anchor, 1)
self.ed.Anchor = 0
self.assertEquals(self.ed.Anchor, 0)
# Check line positions
self.assertEquals(self.ed.PositionFromLine(0), 0)
self.assertEquals(self.ed.GetLineEndPosition(0), 1)
self.assertEquals(self.ed.PositionFromLine(1), 1)
self.ed.CurrentPos = 1
self.assertEquals(self.ed.Anchor, 0)
self.assertEquals(self.ed.CurrentPos, 1)
def testBeyonEnd(self):
self.ed.AddText(1, b"x")
self.assertEquals(self.ed.GetLineEndPosition(0), 1)
self.assertEquals(self.ed.GetLineEndPosition(1), 1)
self.assertEquals(self.ed.GetLineEndPosition(2), 1)
def testSelection(self):
self.assertEquals(self.ed.CurrentPos, 0)
self.assertEquals(self.ed.Anchor, 0)
self.assertEquals(self.ed.SelectionStart, 0)
self.assertEquals(self.ed.SelectionEnd, 0)
self.ed.AddText(1, b"x")
self.ed.SelectionStart = 0
self.assertEquals(self.ed.CurrentPos, 1)
self.assertEquals(self.ed.Anchor, 0)
self.assertEquals(self.ed.SelectionStart, 0)
self.assertEquals(self.ed.SelectionEnd, 1)
self.ed.SelectionStart = 1
self.assertEquals(self.ed.CurrentPos, 1)
self.assertEquals(self.ed.Anchor, 1)
self.assertEquals(self.ed.SelectionStart, 1)
self.assertEquals(self.ed.SelectionEnd, 1)
self.ed.SelectionEnd = 0
self.assertEquals(self.ed.CurrentPos, 0)
self.assertEquals(self.ed.Anchor, 0)
def testSetSelection(self):
self.ed.AddText(4, b"abcd")
self.ed.SetSel(1, 3)
self.assertEquals(self.ed.SelectionStart, 1)
self.assertEquals(self.ed.SelectionEnd, 3)
result = self.ed.GetSelText(0)
self.assertEquals(result, b"bc\0")
self.ed.ReplaceSel(0, b"1234")
self.assertEquals(self.ed.Length, 6)
self.assertEquals(self.ed.Contents(), b"a1234d")
def testReadOnly(self):
self.ed.AddText(1, b"x")
self.assertEquals(self.ed.ReadOnly, 0)
self.assertEquals(self.ed.Contents(), b"x")
self.ed.ReadOnly = 1
self.assertEquals(self.ed.ReadOnly, 1)
self.ed.AddText(1, b"x")
self.assertEquals(self.ed.Contents(), b"x")
self.ed.ReadOnly = 0
self.ed.AddText(1, b"x")
self.assertEquals(self.ed.Contents(), b"xx")
self.ed.Null()
self.assertEquals(self.ed.Contents(), b"xx")
def testAddLine(self):
data = b"x" * 70 + b"\n"
for i in range(5):
self.ed.AddText(len(data), data)
self.xite.DoEvents()
self.assertEquals(self.ed.LineCount, i + 2)
self.assert_(self.ed.Length > 0)
def testInsertText(self):
data = b"xy"
self.ed.InsertText(0, data)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(data, self.ed.ByteRange(0,2))
self.ed.InsertText(1, data)
# Should now be "xxyy"
self.assertEquals(self.ed.Length, 4)
self.assertEquals(b"xxyy", self.ed.ByteRange(0,4))
def testInsertNul(self):
data = b"\0"
self.ed.AddText(1, data)
self.assertEquals(self.ed.Length, 1)
self.assertEquals(data, self.ed.ByteRange(0,1))
def testUndoRedo(self):
data = b"xy"
self.assertEquals(self.ed.Modify, 0)
self.assertEquals(self.ed.UndoCollection, 1)
self.assertEquals(self.ed.CanRedo(), 0)
self.assertEquals(self.ed.CanUndo(), 0)
self.ed.InsertText(0, data)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.ed.Modify, 1)
self.assertEquals(self.ed.CanRedo(), 0)
self.assertEquals(self.ed.CanUndo(), 1)
self.ed.Undo()
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.ed.Modify, 0)
self.assertEquals(self.ed.CanRedo(), 1)
self.assertEquals(self.ed.CanUndo(), 0)
self.ed.Redo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.ed.Modify, 1)
self.assertEquals(data, self.ed.Contents())
self.assertEquals(self.ed.CanRedo(), 0)
self.assertEquals(self.ed.CanUndo(), 1)
def testUndoSavePoint(self):
data = b"xy"
self.assertEquals(self.ed.Modify, 0)
self.ed.InsertText(0, data)
self.assertEquals(self.ed.Modify, 1)
self.ed.SetSavePoint()
self.assertEquals(self.ed.Modify, 0)
self.ed.InsertText(0, data)
self.assertEquals(self.ed.Modify, 1)
def testUndoCollection(self):
data = b"xy"
self.assertEquals(self.ed.UndoCollection, 1)
self.ed.UndoCollection = 0
self.assertEquals(self.ed.UndoCollection, 0)
self.ed.InsertText(0, data)
self.assertEquals(self.ed.CanRedo(), 0)
self.assertEquals(self.ed.CanUndo(), 0)
self.ed.UndoCollection = 1
def testGetColumn(self):
self.ed.AddText(1, b"x")
self.assertEquals(self.ed.GetColumn(0), 0)
self.assertEquals(self.ed.GetColumn(1), 1)
# Next line caused infinite loop in 1.71
self.assertEquals(self.ed.GetColumn(2), 1)
self.assertEquals(self.ed.GetColumn(3), 1)
def testTabWidth(self):
self.assertEquals(self.ed.TabWidth, 8)
self.ed.AddText(3, b"x\tb")
self.assertEquals(self.ed.GetColumn(0), 0)
self.assertEquals(self.ed.GetColumn(1), 1)
self.assertEquals(self.ed.GetColumn(2), 8)
for col in range(10):
if col == 0:
self.assertEquals(self.ed.FindColumn(0, col), 0)
elif col == 1:
self.assertEquals(self.ed.FindColumn(0, col), 1)
elif col == 8:
self.assertEquals(self.ed.FindColumn(0, col), 2)
elif col == 9:
self.assertEquals(self.ed.FindColumn(0, col), 3)
else:
self.assertEquals(self.ed.FindColumn(0, col), 1)
self.ed.TabWidth = 4
self.assertEquals(self.ed.TabWidth, 4)
self.assertEquals(self.ed.GetColumn(0), 0)
self.assertEquals(self.ed.GetColumn(1), 1)
self.assertEquals(self.ed.GetColumn(2), 4)
def testIndent(self):
self.assertEquals(self.ed.Indent, 0)
self.assertEquals(self.ed.UseTabs, 1)
self.ed.Indent = 8
self.ed.UseTabs = 0
self.assertEquals(self.ed.Indent, 8)
self.assertEquals(self.ed.UseTabs, 0)
self.ed.AddText(3, b"x\tb")
self.assertEquals(self.ed.GetLineIndentation(0), 0)
self.ed.InsertText(0, b" ")
self.assertEquals(self.ed.GetLineIndentation(0), 1)
self.assertEquals(self.ed.GetLineIndentPosition(0), 1)
self.assertEquals(self.ed.Contents(), b" x\tb")
self.ed.SetLineIndentation(0,2)
self.assertEquals(self.ed.Contents(), b" x\tb")
self.assertEquals(self.ed.GetLineIndentPosition(0), 2)
self.ed.UseTabs = 1
self.ed.SetLineIndentation(0,8)
self.assertEquals(self.ed.Contents(), b"\tx\tb")
self.assertEquals(self.ed.GetLineIndentPosition(0), 1)
def testGetCurLine(self):
self.ed.AddText(1, b"x")
data = ctypes.create_string_buffer(b"\0" * 100)
caret = self.ed.GetCurLine(len(data), data)
self.assertEquals(caret, 1)
self.assertEquals(data.value, b"x")
def testGetLine(self):
self.ed.AddText(1, b"x")
data = ctypes.create_string_buffer(b"\0" * 100)
self.ed.GetLine(0, data)
self.assertEquals(data.value, b"x")
def testLineEnds(self):
self.ed.AddText(3, b"x\ny")
self.assertEquals(self.ed.GetLineEndPosition(0), 1)
self.assertEquals(self.ed.GetLineEndPosition(1), 3)
self.assertEquals(self.ed.LineLength(0), 2)
self.assertEquals(self.ed.LineLength(1), 1)
if sys.platform == "win32":
self.assertEquals(self.ed.EOLMode, self.ed.SC_EOL_CRLF)
else:
self.assertEquals(self.ed.EOLMode, self.ed.SC_EOL_LF)
lineEnds = [b"\r\n", b"\r", b"\n"]
for lineEndType in [self.ed.SC_EOL_CR, self.ed.SC_EOL_LF, self.ed.SC_EOL_CRLF]:
self.ed.EOLMode = lineEndType
self.assertEquals(self.ed.EOLMode, lineEndType)
self.ed.ConvertEOLs(lineEndType)
self.assertEquals(self.ed.Contents(), b"x" + lineEnds[lineEndType] + b"y")
self.assertEquals(self.ed.LineLength(0), 1 + len(lineEnds[lineEndType]))
	# Several tests for Unicode line ends U+2028 and U+2029
	def testUnicodeLineEnds(self):
		# Add two lines separated by U+2028 and ensure the text is seen as two lines,
		# then remove U+2028 and check that only one line remains
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
self.ed.AddText(5, b"x\xe2\x80\xa8y")
self.assertEquals(self.ed.LineCount, 2)
self.assertEquals(self.ed.GetLineEndPosition(0), 1)
self.assertEquals(self.ed.GetLineEndPosition(1), 5)
self.assertEquals(self.ed.LineLength(0), 4)
self.assertEquals(self.ed.LineLength(1), 1)
self.ed.TargetStart = 1
self.ed.TargetEnd = 4
self.ed.ReplaceTarget(0, b"")
self.assertEquals(self.ed.LineCount, 1)
self.assertEquals(self.ed.LineLength(0), 2)
self.assertEquals(self.ed.GetLineEndPosition(0), 2)
self.assertEquals(self.ed.LineEndTypesSupported, 1)
def testUnicodeLineEndsWithCodePage0(self):
# Try the Unicode line ends when not in Unicode mode -> should remain 1 line
self.ed.SetCodePage(0)
self.ed.AddText(5, b"x\xe2\x80\xa8y")
self.assertEquals(self.ed.LineCount, 1)
self.ed.AddText(4, b"x\xc2\x85y")
self.assertEquals(self.ed.LineCount, 1)
def testUnicodeLineEndsSwitchToUnicodeAndBack(self):
# Add the Unicode line ends when not in Unicode mode
self.ed.SetCodePage(0)
self.ed.AddText(5, b"x\xe2\x80\xa8y")
self.assertEquals(self.ed.LineCount, 1)
# Into UTF-8 mode - should now be interpreting as two lines
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
self.assertEquals(self.ed.LineCount, 2)
# Back to code page 0 and 1 line
self.ed.SetCodePage(0)
self.assertEquals(self.ed.LineCount, 1)
def testUFragmentedEOLCompletion(self):
# Add 2 starting bytes of UTF-8 line end then complete it
self.ed.ClearAll()
self.ed.AddText(4, b"x\xe2\x80y")
self.assertEquals(self.ed.LineCount, 1)
self.assertEquals(self.ed.GetLineEndPosition(0), 4)
self.ed.SetSel(3,3)
self.ed.AddText(1, b"\xa8")
self.assertEquals(self.ed.Contents(), b"x\xe2\x80\xa8y")
self.assertEquals(self.ed.LineCount, 2)
# Add 1 starting bytes of UTF-8 line end then complete it
self.ed.ClearAll()
self.ed.AddText(3, b"x\xe2y")
self.assertEquals(self.ed.LineCount, 1)
self.assertEquals(self.ed.GetLineEndPosition(0), 3)
self.ed.SetSel(2,2)
self.ed.AddText(2, b"\x80\xa8")
self.assertEquals(self.ed.Contents(), b"x\xe2\x80\xa8y")
self.assertEquals(self.ed.LineCount, 2)
def testUFragmentedEOLStart(self):
# Add end of UTF-8 line end then insert start
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
self.assertEquals(self.ed.LineCount, 1)
self.ed.AddText(4, b"x\x80\xa8y")
self.assertEquals(self.ed.LineCount, 1)
self.ed.SetSel(1,1)
self.ed.AddText(1, b"\xe2")
self.assertEquals(self.ed.LineCount, 2)
def testUBreakApartEOL(self):
		# Add two lines separated by U+2029, then remove and re-add each byte of the text,
		# ensuring that removing any byte of the line end leaves one line and
		# reinserting it restores two lines
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
text = b"x\xe2\x80\xa9y";
self.ed.AddText(5, text)
self.assertEquals(self.ed.LineCount, 2)
for i in range(len(text)):
self.ed.TargetStart = i
self.ed.TargetEnd = i + 1
self.ed.ReplaceTarget(0, b"")
if i in [0, 4]:
# Removing text characters does not change number of lines
self.assertEquals(self.ed.LineCount, 2)
else:
# Removing byte from line end, removes 1 line
self.assertEquals(self.ed.LineCount, 1)
self.ed.TargetEnd = i
self.ed.ReplaceTarget(1, text[i:i+1])
self.assertEquals(self.ed.LineCount, 2)
def testURemoveEOLFragment(self):
# Add UTF-8 line end then delete each byte causing line end to disappear
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
for i in range(3):
self.ed.ClearAll()
self.ed.AddText(5, b"x\xe2\x80\xa8y")
self.assertEquals(self.ed.LineCount, 2)
self.ed.TargetStart = i+1
self.ed.TargetEnd = i+2
self.ed.ReplaceTarget(0, b"")
self.assertEquals(self.ed.LineCount, 1)
	# Several tests for Unicode NEL line ends U+0085
	def testNELLineEnds(self):
		# Add two lines separated by U+0085 and ensure the text is seen as two lines,
		# then remove U+0085 and check that only one line remains
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
self.ed.AddText(4, b"x\xc2\x85y")
self.assertEquals(self.ed.LineCount, 2)
self.assertEquals(self.ed.GetLineEndPosition(0), 1)
self.assertEquals(self.ed.GetLineEndPosition(1), 4)
self.assertEquals(self.ed.LineLength(0), 3)
self.assertEquals(self.ed.LineLength(1), 1)
self.ed.TargetStart = 1
self.ed.TargetEnd = 3
self.ed.ReplaceTarget(0, b"")
self.assertEquals(self.ed.LineCount, 1)
self.assertEquals(self.ed.LineLength(0), 2)
self.assertEquals(self.ed.GetLineEndPosition(0), 2)
def testNELFragmentedEOLCompletion(self):
# Add starting byte of UTF-8 NEL then complete it
self.ed.AddText(3, b"x\xc2y")
self.assertEquals(self.ed.LineCount, 1)
self.assertEquals(self.ed.GetLineEndPosition(0), 3)
self.ed.SetSel(2,2)
self.ed.AddText(1, b"\x85")
self.assertEquals(self.ed.Contents(), b"x\xc2\x85y")
self.assertEquals(self.ed.LineCount, 2)
def testNELFragmentedEOLStart(self):
# Add end of UTF-8 NEL then insert start
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
self.assertEquals(self.ed.LineCount, 1)
self.ed.AddText(4, b"x\x85y")
self.assertEquals(self.ed.LineCount, 1)
self.ed.SetSel(1,1)
self.ed.AddText(1, b"\xc2")
self.assertEquals(self.ed.LineCount, 2)
def testNELBreakApartEOL(self):
		# Add two lines separated by U+0085, then remove and re-add each byte of the text,
		# ensuring that removing any byte of the line end leaves one line and
		# reinserting it restores two lines
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
text = b"x\xc2\x85y";
self.ed.AddText(4, text)
self.assertEquals(self.ed.LineCount, 2)
for i in range(len(text)):
self.ed.TargetStart = i
self.ed.TargetEnd = i + 1
self.ed.ReplaceTarget(0, b"")
if i in [0, 3]:
# Removing text characters does not change number of lines
self.assertEquals(self.ed.LineCount, 2)
else:
# Removing byte from line end, removes 1 line
self.assertEquals(self.ed.LineCount, 1)
self.ed.TargetEnd = i
self.ed.ReplaceTarget(1, text[i:i+1])
self.assertEquals(self.ed.LineCount, 2)
def testNELRemoveEOLFragment(self):
# Add UTF-8 NEL then delete each byte causing line end to disappear
self.ed.SetCodePage(65001)
for i in range(2):
self.ed.ClearAll()
self.ed.AddText(4, b"x\xc2\x85y")
self.assertEquals(self.ed.LineCount, 2)
self.ed.TargetStart = i+1
self.ed.TargetEnd = i+2
self.ed.ReplaceTarget(0, b"")
self.assertEquals(self.ed.LineCount, 1)
def testGoto(self):
self.ed.AddText(5, b"a\nb\nc")
self.assertEquals(self.ed.CurrentPos, 5)
self.ed.GotoLine(1)
self.assertEquals(self.ed.CurrentPos, 2)
self.ed.GotoPos(4)
self.assertEquals(self.ed.CurrentPos, 4)
def testCutCopyPaste(self):
self.ed.AddText(5, b"a1b2c")
self.ed.SetSel(1,3)
self.ed.Cut()
self.xite.DoEvents()
self.assertEquals(self.ed.CanPaste(), 1)
self.ed.SetSel(0, 0)
self.ed.Paste()
self.assertEquals(self.ed.Contents(), b"1ba2c")
self.ed.SetSel(4,5)
self.ed.Copy()
self.ed.SetSel(1,3)
self.ed.Paste()
self.assertEquals(self.ed.Contents(), b"1c2c")
self.ed.SetSel(2,4)
self.ed.Clear()
self.assertEquals(self.ed.Contents(), b"1c")
def testCopyAllowLine(self):
self.xite.DoEvents()
lineEndType = self.ed.EOLMode
self.ed.EOLMode = self.ed.SC_EOL_LF
self.ed.AddText(5, b"a1\nb2")
self.ed.SetSel(1,1)
self.ed.CopyAllowLine()
self.xite.DoEvents()
self.assertEquals(self.ed.CanPaste(), 1)
self.ed.SetSel(0, 0)
self.ed.Paste()
self.ed.EOLMode = lineEndType
self.assertEquals(self.ed.Contents(), b"a1\na1\nb2")
def testDuplicate(self):
self.ed.AddText(3, b"1b2")
self.ed.SetSel(1,2)
self.ed.SelectionDuplicate()
self.assertEquals(self.ed.Contents(), b"1bb2")
def testTransposeLines(self):
self.ed.AddText(8, b"a1\nb2\nc3")
self.ed.SetSel(3,3)
self.ed.LineTranspose()
self.assertEquals(self.ed.Contents(), b"b2\na1\nc3")
def testGetSet(self):
self.ed.SetContents(b"abc")
self.assertEquals(self.ed.TextLength, 3)
result = ctypes.create_string_buffer(b"\0" * 5)
length = self.ed.GetText(4, result)
self.assertEquals(result.value, b"abc")
def testAppend(self):
self.ed.SetContents(b"abc")
self.assertEquals(self.ed.SelectionStart, 0)
self.assertEquals(self.ed.SelectionEnd, 0)
text = b"12"
self.ed.AppendText(len(text), text)
self.assertEquals(self.ed.SelectionStart, 0)
self.assertEquals(self.ed.SelectionEnd, 0)
self.assertEquals(self.ed.Contents(), b"abc12")
def testTarget(self):
self.ed.SetContents(b"abcd")
self.ed.TargetStart = 1
self.ed.TargetEnd = 3
self.assertEquals(self.ed.TargetStart, 1)
self.assertEquals(self.ed.TargetEnd, 3)
rep = b"321"
self.ed.ReplaceTarget(len(rep), rep)
self.assertEquals(self.ed.Contents(), b"a321d")
self.ed.SearchFlags = self.ed.SCFIND_REGEXP
self.assertEquals(self.ed.SearchFlags, self.ed.SCFIND_REGEXP)
searchString = b"\([1-9]+\)"
pos = self.ed.SearchInTarget(len(searchString), searchString)
self.assertEquals(1, pos)
tagString = self.ed.GetTag(1)
self.assertEquals(tagString, b"321")
rep = b"\\1"
self.ed.TargetStart = 0
self.ed.TargetEnd = 0
self.ed.ReplaceTargetRE(len(rep), rep)
self.assertEquals(self.ed.Contents(), b"321a321d")
self.ed.SetSel(4,5)
self.ed.TargetFromSelection()
self.assertEquals(self.ed.TargetStart, 4)
self.assertEquals(self.ed.TargetEnd, 5)
def testTargetEscape(self):
# Checks that a literal \ can be in the replacement. Bug #2959876
self.ed.SetContents(b"abcd")
self.ed.TargetStart = 1
self.ed.TargetEnd = 3
rep = b"\\\\n"
self.ed.ReplaceTargetRE(len(rep), rep)
self.assertEquals(self.ed.Contents(), b"a\\nd")
def testPointsAndPositions(self):
self.ed.AddText(1, b"x")
# Start of text
self.assertEquals(self.ed.PositionFromPoint(0,0), 0)
# End of text
self.assertEquals(self.ed.PositionFromPoint(0,100), 1)
def testLinePositions(self):
text = b"ab\ncd\nef"
nl = b"\n"
if sys.version_info[0] == 3:
nl = ord(b"\n")
self.ed.AddText(len(text), text)
self.assertEquals(self.ed.LineFromPosition(-1), 0)
line = 0
for pos in range(len(text)+1):
self.assertEquals(self.ed.LineFromPosition(pos), line)
if pos < len(text) and text[pos] == nl:
line += 1
def testWordPositions(self):
text = b"ab cd\tef"
self.ed.AddText(len(text), text)
self.assertEquals(self.ed.WordStartPosition(3, 0), 2)
self.assertEquals(self.ed.WordStartPosition(4, 0), 3)
self.assertEquals(self.ed.WordStartPosition(5, 0), 3)
self.assertEquals(self.ed.WordStartPosition(6, 0), 5)
self.assertEquals(self.ed.WordEndPosition(2, 0), 3)
self.assertEquals(self.ed.WordEndPosition(3, 0), 5)
self.assertEquals(self.ed.WordEndPosition(4, 0), 5)
self.assertEquals(self.ed.WordEndPosition(5, 0), 6)
self.assertEquals(self.ed.WordEndPosition(6, 0), 8)
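# Bit flags combined by TestContainerUndo.UndoState() to summarise modify/undo/redo status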
MODI = 1
UNDO = 2
REDO = 4
class TestContainerUndo(unittest.TestCase):
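	""" Tests container actions added with AddUndoAction and how they coalesce with text changes in the undo history """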
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
self.data = b"xy"
def UndoState(self):
return (MODI if self.ed.Modify else 0) | \
(UNDO if self.ed.CanUndo() else 0) | \
(REDO if self.ed.CanRedo() else 0)
def testContainerActNoCoalesce(self):
self.ed.InsertText(0, self.data)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.AddUndoAction(5, 0)
self.ed.Undo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO | REDO)
self.ed.Redo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.Undo()
def testContainerActCoalesce(self):
self.ed.InsertText(0, self.data)
self.ed.AddUndoAction(5, 1)
self.ed.Undo()
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.UndoState(), REDO)
self.ed.Redo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
def testContainerMultiStage(self):
self.ed.InsertText(0, self.data)
self.ed.AddUndoAction(5, 1)
self.ed.AddUndoAction(5, 1)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.Undo()
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.UndoState(), REDO)
self.ed.Redo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.AddUndoAction(5, 1)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.Undo()
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.UndoState(), REDO)
def testContainerMultiStageNoText(self):
self.ed.AddUndoAction(5, 1)
self.ed.AddUndoAction(5, 1)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.Undo()
self.assertEquals(self.UndoState(), REDO)
self.ed.Redo()
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.AddUndoAction(5, 1)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.Undo()
self.assertEquals(self.UndoState(), REDO)
def testContainerActCoalesceEnd(self):
self.ed.AddUndoAction(5, 1)
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.InsertText(0, self.data)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.Undo()
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.UndoState(), REDO)
self.ed.Redo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
def testContainerBetweenInsertAndInsert(self):
self.assertEquals(self.ed.Length, 0)
self.ed.InsertText(0, self.data)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.AddUndoAction(5, 1)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.InsertText(2, self.data)
self.assertEquals(self.ed.Length, 4)
self.assertEquals(self.UndoState(), MODI | UNDO)
# Undoes both insertions and the containerAction in the middle
self.ed.Undo()
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.UndoState(), REDO)
def testContainerNoCoalesceBetweenInsertAndInsert(self):
self.assertEquals(self.ed.Length, 0)
self.ed.InsertText(0, self.data)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.AddUndoAction(5, 0)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.InsertText(2, self.data)
self.assertEquals(self.ed.Length, 4)
self.assertEquals(self.UndoState(), MODI | UNDO)
# Undo last insertion
self.ed.Undo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO | REDO)
# Undo container
self.ed.Undo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO | REDO)
# Undo first insertion
self.ed.Undo()
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.UndoState(), REDO)
def testContainerBetweenDeleteAndDelete(self):
self.ed.InsertText(0, self.data)
self.ed.EmptyUndoBuffer()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), 0)
self.ed.SetSel(2,2)
self.ed.DeleteBack()
self.assertEquals(self.ed.Length, 1)
self.ed.AddUndoAction(5, 1)
self.ed.DeleteBack()
self.assertEquals(self.ed.Length, 0)
# Undoes both deletions and the containerAction in the middle
self.ed.Undo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), REDO)
def testContainerBetweenInsertAndDelete(self):
self.assertEquals(self.ed.Length, 0)
self.ed.InsertText(0, self.data)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.AddUndoAction(5, 1)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.SetSel(0,1)
self.ed.Cut()
self.assertEquals(self.ed.Length, 1)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.Undo() # Only undoes the deletion
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO | REDO)
class TestKeyCommands(unittest.TestCase):
""" These commands are normally assigned to keys and take no arguments """
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def selRange(self):
return self.ed.CurrentPos, self.ed.Anchor
def testLineMove(self):
self.ed.AddText(8, b"x1\ny2\nz3")
self.ed.SetSel(0,0)
self.ed.ChooseCaretX()
self.ed.LineDown()
self.ed.LineDown()
self.assertEquals(self.selRange(), (6, 6))
self.ed.LineUp()
self.assertEquals(self.selRange(), (3, 3))
self.ed.LineDownExtend()
self.assertEquals(self.selRange(), (6, 3))
self.ed.LineUpExtend()
self.ed.LineUpExtend()
self.assertEquals(self.selRange(), (0, 3))
def testCharMove(self):
self.ed.AddText(8, b"x1\ny2\nz3")
self.ed.SetSel(0,0)
self.ed.CharRight()
self.ed.CharRight()
self.assertEquals(self.selRange(), (2, 2))
self.ed.CharLeft()
self.assertEquals(self.selRange(), (1, 1))
self.ed.CharRightExtend()
self.assertEquals(self.selRange(), (2, 1))
self.ed.CharLeftExtend()
self.ed.CharLeftExtend()
self.assertEquals(self.selRange(), (0, 1))
def testWordMove(self):
self.ed.AddText(10, b"a big boat")
self.ed.SetSel(3,3)
self.ed.WordRight()
self.ed.WordRight()
self.assertEquals(self.selRange(), (10, 10))
self.ed.WordLeft()
self.assertEquals(self.selRange(), (6, 6))
self.ed.WordRightExtend()
self.assertEquals(self.selRange(), (10, 6))
self.ed.WordLeftExtend()
self.ed.WordLeftExtend()
self.assertEquals(self.selRange(), (2, 6))
def testHomeEndMove(self):
self.ed.AddText(10, b"a big boat")
self.ed.SetSel(3,3)
self.ed.Home()
self.assertEquals(self.selRange(), (0, 0))
self.ed.LineEnd()
self.assertEquals(self.selRange(), (10, 10))
self.ed.SetSel(3,3)
self.ed.HomeExtend()
self.assertEquals(self.selRange(), (0, 3))
self.ed.LineEndExtend()
self.assertEquals(self.selRange(), (10, 3))
def testStartEndMove(self):
self.ed.AddText(10, b"a\nbig\nboat")
self.ed.SetSel(3,3)
self.ed.DocumentStart()
self.assertEquals(self.selRange(), (0, 0))
self.ed.DocumentEnd()
self.assertEquals(self.selRange(), (10, 10))
self.ed.SetSel(3,3)
self.ed.DocumentStartExtend()
self.assertEquals(self.selRange(), (0, 3))
self.ed.DocumentEndExtend()
self.assertEquals(self.selRange(), (10, 3))
class TestMarkers(unittest.TestCase):
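	""" Tests adding, finding and deleting line markers and per-line state """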
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
self.ed.AddText(5, b"x\ny\nz")
def testMarker(self):
handle = self.ed.MarkerAdd(1,1)
self.assertEquals(self.ed.MarkerLineFromHandle(handle), 1)
self.ed.MarkerDelete(1,1)
self.assertEquals(self.ed.MarkerLineFromHandle(handle), -1)
def testTwiceAddedDelete(self):
handle = self.ed.MarkerAdd(1,1)
self.assertEquals(self.ed.MarkerGet(1), 2)
handle2 = self.ed.MarkerAdd(1,1)
self.assertEquals(self.ed.MarkerGet(1), 2)
self.ed.MarkerDelete(1,1)
self.assertEquals(self.ed.MarkerGet(1), 2)
self.ed.MarkerDelete(1,1)
self.assertEquals(self.ed.MarkerGet(1), 0)
def testMarkerDeleteAll(self):
h1 = self.ed.MarkerAdd(0,1)
h2 = self.ed.MarkerAdd(1,2)
self.assertEquals(self.ed.MarkerLineFromHandle(h1), 0)
self.assertEquals(self.ed.MarkerLineFromHandle(h2), 1)
self.ed.MarkerDeleteAll(1)
self.assertEquals(self.ed.MarkerLineFromHandle(h1), -1)
self.assertEquals(self.ed.MarkerLineFromHandle(h2), 1)
self.ed.MarkerDeleteAll(-1)
self.assertEquals(self.ed.MarkerLineFromHandle(h1), -1)
self.assertEquals(self.ed.MarkerLineFromHandle(h2), -1)
def testMarkerDeleteHandle(self):
handle = self.ed.MarkerAdd(0,1)
self.assertEquals(self.ed.MarkerLineFromHandle(handle), 0)
self.ed.MarkerDeleteHandle(handle)
self.assertEquals(self.ed.MarkerLineFromHandle(handle), -1)
def testMarkerBits(self):
self.assertEquals(self.ed.MarkerGet(0), 0)
self.ed.MarkerAdd(0,1)
self.assertEquals(self.ed.MarkerGet(0), 2)
self.ed.MarkerAdd(0,2)
self.assertEquals(self.ed.MarkerGet(0), 6)
def testMarkerAddSet(self):
self.assertEquals(self.ed.MarkerGet(0), 0)
self.ed.MarkerAddSet(0,5)
self.assertEquals(self.ed.MarkerGet(0), 5)
self.ed.MarkerDeleteAll(-1)
def testMarkerNext(self):
self.assertEquals(self.ed.MarkerNext(0, 2), -1)
h1 = self.ed.MarkerAdd(0,1)
h2 = self.ed.MarkerAdd(2,1)
self.assertEquals(self.ed.MarkerNext(0, 2), 0)
self.assertEquals(self.ed.MarkerNext(1, 2), 2)
self.assertEquals(self.ed.MarkerNext(2, 2), 2)
self.assertEquals(self.ed.MarkerPrevious(0, 2), 0)
self.assertEquals(self.ed.MarkerPrevious(1, 2), 0)
self.assertEquals(self.ed.MarkerPrevious(2, 2), 2)
def testMarkerNegative(self):
self.assertEquals(self.ed.MarkerNext(-1, 2), -1)
def testLineState(self):
self.assertEquals(self.ed.MaxLineState, 0)
self.assertEquals(self.ed.GetLineState(0), 0)
self.assertEquals(self.ed.GetLineState(1), 0)
self.assertEquals(self.ed.GetLineState(2), 0)
self.ed.SetLineState(1, 100)
self.assertNotEquals(self.ed.MaxLineState, 0)
self.assertEquals(self.ed.GetLineState(0), 0)
self.assertEquals(self.ed.GetLineState(1), 100)
self.assertEquals(self.ed.GetLineState(2), 0)
def testSymbolRetrieval(self):
self.ed.MarkerDefine(1,3)
self.assertEquals(self.ed.MarkerSymbolDefined(1), 3)
class TestIndicators(unittest.TestCase):
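	""" Tests indicator styling and filling ranges with indicator values """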
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def testSetIndicator(self):
self.assertEquals(self.ed.IndicGetStyle(0), 1)
self.assertEquals(self.ed.IndicGetFore(0), 0x007f00)
self.ed.IndicSetStyle(0, 2)
self.ed.IndicSetFore(0, 0xff0080)
self.assertEquals(self.ed.IndicGetStyle(0), 2)
self.assertEquals(self.ed.IndicGetFore(0), 0xff0080)
def testIndicatorFill(self):
self.ed.InsertText(0, b"abc")
self.ed.IndicatorCurrent = 3
self.ed.IndicatorFillRange(1,1)
self.assertEquals(self.ed.IndicatorValueAt(3, 0), 0)
self.assertEquals(self.ed.IndicatorValueAt(3, 1), 1)
self.assertEquals(self.ed.IndicatorValueAt(3, 2), 0)
self.assertEquals(self.ed.IndicatorStart(3, 0), 0)
self.assertEquals(self.ed.IndicatorEnd(3, 0), 1)
self.assertEquals(self.ed.IndicatorStart(3, 1), 1)
self.assertEquals(self.ed.IndicatorEnd(3, 1), 2)
self.assertEquals(self.ed.IndicatorStart(3, 2), 2)
self.assertEquals(self.ed.IndicatorEnd(3, 2), 3)
def testIndicatorAtEnd(self):
self.ed.InsertText(0, b"ab")
self.ed.IndicatorCurrent = 3
self.ed.IndicatorFillRange(1,1)
self.assertEquals(self.ed.IndicatorValueAt(3, 0), 0)
self.assertEquals(self.ed.IndicatorValueAt(3, 1), 1)
self.assertEquals(self.ed.IndicatorStart(3, 0), 0)
self.assertEquals(self.ed.IndicatorEnd(3, 0), 1)
self.assertEquals(self.ed.IndicatorStart(3, 1), 1)
self.assertEquals(self.ed.IndicatorEnd(3, 1), 2)
self.ed.DeleteRange(1, 1)
		# Only one character remains and it carries no indicator, so indicator 3 is empty
		self.assertEquals(self.ed.IndicatorValueAt(3, 0), 0)
		# Since the indicator is empty, the remaining calls all return 0
self.assertEquals(self.ed.IndicatorStart(3, 0), 0)
self.assertEquals(self.ed.IndicatorEnd(3, 0), 0)
self.assertEquals(self.ed.IndicatorStart(3, 1), 0)
self.assertEquals(self.ed.IndicatorEnd(3, 1), 0)
class TestScrolling(unittest.TestCase):
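	""" Tests vertical and horizontal scrolling of the view """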
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
# 150 should be enough lines
self.ed.InsertText(0, b"a" * 150 + b"\n" * 150)
def testTop(self):
self.ed.GotoLine(0)
self.assertEquals(self.ed.FirstVisibleLine, 0)
def testLineScroll(self):
self.ed.GotoLine(0)
self.ed.LineScroll(0, 3)
self.assertEquals(self.ed.FirstVisibleLine, 3)
self.ed.LineScroll(0, -2)
self.assertEquals(self.ed.FirstVisibleLine, 1)
self.assertEquals(self.ed.XOffset, 0)
self.ed.LineScroll(10, 0)
self.assertGreater(self.ed.XOffset, 0)
scroll_width = float(self.ed.XOffset) / 10
self.ed.LineScroll(-2, 0)
self.assertEquals(self.ed.XOffset, scroll_width * 8)
def testVisibleLine(self):
self.ed.FirstVisibleLine = 7
self.assertEquals(self.ed.FirstVisibleLine, 7)
class TestSearch(unittest.TestCase):
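	""" Tests FindBytes with plain, case, whole-word and regular-expression flags """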
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
self.ed.InsertText(0, b"a\tbig boat\t")
def testFind(self):
pos = self.ed.FindBytes(0, self.ed.Length, b"zzz", 0)
self.assertEquals(pos, -1)
pos = self.ed.FindBytes(0, self.ed.Length, b"big", 0)
self.assertEquals(pos, 2)
def testFindEmpty(self):
pos = self.ed.FindBytes(0, self.ed.Length, b"", 0)
self.assertEquals(pos, 0)
def testCaseFind(self):
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"big", 0), 2)
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"bIg", 0), 2)
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"bIg",
self.ed.SCFIND_MATCHCASE), -1)
def testWordFind(self):
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"bi", 0), 2)
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"bi",
self.ed.SCFIND_WHOLEWORD), -1)
def testWordStartFind(self):
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"bi", 0), 2)
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"bi",
self.ed.SCFIND_WORDSTART), 2)
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"ig", 0), 3)
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"ig",
self.ed.SCFIND_WORDSTART), -1)
def testREFind(self):
flags = self.ed.SCFIND_REGEXP
self.assertEquals(-1, self.ed.FindBytes(0, self.ed.Length, b"b.g", 0))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"b.g", flags))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"\<b.g\>", flags))
self.assertEquals(-1, self.ed.FindBytes(0, self.ed.Length, b"b[A-Z]g",
flags | self.ed.SCFIND_MATCHCASE))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"b[a-z]g", flags))
self.assertEquals(6, self.ed.FindBytes(0, self.ed.Length, b"b[a-z]*t", flags))
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"^a", flags))
self.assertEquals(10, self.ed.FindBytes(0, self.ed.Length, b"\t$", flags))
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"\([a]\).*\0", flags))
def testPosixREFind(self):
flags = self.ed.SCFIND_REGEXP | self.ed.SCFIND_POSIX
self.assertEquals(-1, self.ed.FindBytes(0, self.ed.Length, b"b.g", 0))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"b.g", flags))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"\<b.g\>", flags))
self.assertEquals(-1, self.ed.FindBytes(0, self.ed.Length, b"b[A-Z]g",
flags | self.ed.SCFIND_MATCHCASE))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"b[a-z]g", flags))
self.assertEquals(6, self.ed.FindBytes(0, self.ed.Length, b"b[a-z]*t", flags))
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"^a", flags))
self.assertEquals(10, self.ed.FindBytes(0, self.ed.Length, b"\t$", flags))
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"([a]).*\0", flags))
def testPhilippeREFind(self):
		# Requires 1.72
flags = self.ed.SCFIND_REGEXP
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"\w", flags))
self.assertEquals(1, self.ed.FindBytes(0, self.ed.Length, b"\W", flags))
self.assertEquals(-1, self.ed.FindBytes(0, self.ed.Length, b"\d", flags))
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"\D", flags))
self.assertEquals(1, self.ed.FindBytes(0, self.ed.Length, b"\s", flags))
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"\S", flags))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"\x62", flags))
def testRENonASCII(self):
self.ed.InsertText(0, b"\xAD")
flags = self.ed.SCFIND_REGEXP
self.assertEquals(-1, self.ed.FindBytes(0, self.ed.Length, b"\\x10", flags))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"\\x09", flags))
self.assertEquals(-1, self.ed.FindBytes(0, self.ed.Length, b"\\xAB", flags))
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"\\xAD", flags))
class TestRepresentations(unittest.TestCase):
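	""" Tests getting, setting and clearing the visible representations of characters """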
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def testGetControl(self):
result = self.ed.GetRepresentation(b"\001")
self.assertEquals(result, b"SOH")
def testClearControl(self):
result = self.ed.GetRepresentation(b"\002")
self.assertEquals(result, b"STX")
self.ed.ClearRepresentation(b"\002")
result = self.ed.GetRepresentation(b"\002")
self.assertEquals(result, b"")
def testSetOhm(self):
ohmSign = b"\xe2\x84\xa6"
ohmExplained = b"U+2126 \xe2\x84\xa6"
self.ed.SetRepresentation(ohmSign, ohmExplained)
result = self.ed.GetRepresentation(ohmSign)
self.assertEquals(result, ohmExplained)
class TestProperties(unittest.TestCase):
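	""" Tests setting and retrieving lexer properties, including $() expansion """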
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def testSet(self):
self.ed.SetProperty(b"test", b"12")
self.assertEquals(self.ed.GetPropertyInt(b"test"), 12)
result = self.ed.GetProperty(b"test")
self.assertEquals(result, b"12")
self.ed.SetProperty(b"test.plus", b"[$(test)]")
result = self.ed.GetPropertyExpanded(b"test.plus")
self.assertEquals(result, b"[12]")
class TestTextMargin(unittest.TestCase):
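	""" Tests margin text, margin styles and extra line ascent/descent """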
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
self.txt = b"abcd"
self.ed.AddText(1, b"x")
def testAscent(self):
lineHeight = self.ed.TextHeight(0)
self.assertEquals(self.ed.ExtraAscent, 0)
self.assertEquals(self.ed.ExtraDescent, 0)
self.ed.ExtraAscent = 1
self.assertEquals(self.ed.ExtraAscent, 1)
self.ed.ExtraDescent = 2
self.assertEquals(self.ed.ExtraDescent, 2)
# Allow line height to recalculate
self.xite.DoEvents()
lineHeightIncreased = self.ed.TextHeight(0)
self.assertEquals(lineHeightIncreased, lineHeight + 2 + 1)
def testTextMargin(self):
self.ed.MarginSetText(0, self.txt)
result = self.ed.MarginGetText(0)
self.assertEquals(result, self.txt)
self.ed.MarginTextClearAll()
def testTextMarginStyle(self):
self.ed.MarginSetText(0, self.txt)
self.ed.MarginSetStyle(0, 33)
self.assertEquals(self.ed.MarginGetStyle(0), 33)
self.ed.MarginTextClearAll()
def testTextMarginStyles(self):
styles = b"\001\002\003\004"
self.ed.MarginSetText(0, self.txt)
self.ed.MarginSetStyles(0, styles)
result = self.ed.MarginGetStyles(0)
self.assertEquals(result, styles)
self.ed.MarginTextClearAll()
def testTextMarginStyleOffset(self):
self.ed.MarginSetStyleOffset(300)
self.assertEquals(self.ed.MarginGetStyleOffset(), 300)
class TestAnnotation(unittest.TestCase):
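	""" Tests line annotations and allocation of extended styles """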
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
self.txt = b"abcd"
self.ed.AddText(1, b"x")
def testTextAnnotation(self):
self.assertEquals(self.ed.AnnotationGetLines(), 0)
self.ed.AnnotationSetText(0, self.txt)
self.assertEquals(self.ed.AnnotationGetLines(), 1)
result = self.ed.AnnotationGetText(0)
self.assertEquals(len(result), 4)
self.assertEquals(result, self.txt)
self.ed.AnnotationClearAll()
def testTextAnnotationStyle(self):
self.ed.AnnotationSetText(0, self.txt)
self.ed.AnnotationSetStyle(0, 33)
self.assertEquals(self.ed.AnnotationGetStyle(0), 33)
self.ed.AnnotationClearAll()
def testTextAnnotationStyles(self):
styles = b"\001\002\003\004"
self.ed.AnnotationSetText(0, self.txt)
self.ed.AnnotationSetStyles(0, styles)
result = self.ed.AnnotationGetStyles(0)
self.assertEquals(result, styles)
self.ed.AnnotationClearAll()
def testExtendedStyles(self):
start0 = self.ed.AllocateExtendedStyles(0)
self.assertEquals(start0, 256)
start1 = self.ed.AllocateExtendedStyles(10)
self.assertEquals(start1, 256)
start2 = self.ed.AllocateExtendedStyles(20)
self.assertEquals(start2, start1 + 10)
# Reset by changing lexer
self.ed.ReleaseAllExtendedStyles()
start0 = self.ed.AllocateExtendedStyles(0)
self.assertEquals(start0, 256)
def testTextAnnotationStyleOffset(self):
self.ed.AnnotationSetStyleOffset(300)
self.assertEquals(self.ed.AnnotationGetStyleOffset(), 300)
def testTextAnnotationVisible(self):
self.assertEquals(self.ed.AnnotationGetVisible(), 0)
self.ed.AnnotationSetVisible(2)
self.assertEquals(self.ed.AnnotationGetVisible(), 2)
self.ed.AnnotationSetVisible(0)
class TestMultiSelection(unittest.TestCase):
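	""" Tests multiple and rectangular selections, including virtual space """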
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
# 3 lines of 3 characters
t = b"xxx\nxxx\nxxx"
self.ed.AddText(len(t), t)
def testSelectionCleared(self):
self.ed.ClearSelections()
self.assertEquals(self.ed.Selections, 1)
self.assertEquals(self.ed.MainSelection, 0)
self.assertEquals(self.ed.GetSelectionNCaret(0), 0)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 0)
def test1Selection(self):
self.ed.SetSelection(1, 2)
self.assertEquals(self.ed.Selections, 1)
self.assertEquals(self.ed.MainSelection, 0)
self.assertEquals(self.ed.GetSelectionNCaret(0), 1)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 2)
self.assertEquals(self.ed.GetSelectionNStart(0), 1)
self.assertEquals(self.ed.GetSelectionNEnd(0), 2)
self.ed.SwapMainAnchorCaret()
self.assertEquals(self.ed.Selections, 1)
self.assertEquals(self.ed.MainSelection, 0)
self.assertEquals(self.ed.GetSelectionNCaret(0), 2)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 1)
def test1SelectionReversed(self):
self.ed.SetSelection(2, 1)
self.assertEquals(self.ed.Selections, 1)
self.assertEquals(self.ed.MainSelection, 0)
self.assertEquals(self.ed.GetSelectionNCaret(0), 2)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 1)
self.assertEquals(self.ed.GetSelectionNStart(0), 1)
self.assertEquals(self.ed.GetSelectionNEnd(0), 2)
def test1SelectionByStartEnd(self):
self.ed.SetSelectionNStart(0, 2)
self.ed.SetSelectionNEnd(0, 3)
self.assertEquals(self.ed.Selections, 1)
self.assertEquals(self.ed.MainSelection, 0)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 2)
self.assertEquals(self.ed.GetSelectionNCaret(0), 3)
self.assertEquals(self.ed.GetSelectionNStart(0), 2)
self.assertEquals(self.ed.GetSelectionNEnd(0), 3)
def test2Selections(self):
self.ed.SetSelection(1, 2)
self.ed.AddSelection(4, 5)
self.assertEquals(self.ed.Selections, 2)
self.assertEquals(self.ed.MainSelection, 1)
self.assertEquals(self.ed.GetSelectionNCaret(0), 1)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 2)
self.assertEquals(self.ed.GetSelectionNCaret(1), 4)
self.assertEquals(self.ed.GetSelectionNAnchor(1), 5)
self.assertEquals(self.ed.GetSelectionNStart(0), 1)
self.assertEquals(self.ed.GetSelectionNEnd(0), 2)
self.ed.MainSelection = 0
self.assertEquals(self.ed.MainSelection, 0)
self.ed.RotateSelection()
self.assertEquals(self.ed.MainSelection, 1)
def testRectangularSelection(self):
self.ed.RectangularSelectionAnchor = 1
self.assertEquals(self.ed.RectangularSelectionAnchor, 1)
self.ed.RectangularSelectionCaret = 10
self.assertEquals(self.ed.RectangularSelectionCaret, 10)
self.assertEquals(self.ed.Selections, 3)
self.assertEquals(self.ed.MainSelection, 2)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 1)
self.assertEquals(self.ed.GetSelectionNCaret(0), 2)
self.assertEquals(self.ed.GetSelectionNAnchor(1), 5)
self.assertEquals(self.ed.GetSelectionNCaret(1), 6)
self.assertEquals(self.ed.GetSelectionNAnchor(2), 9)
self.assertEquals(self.ed.GetSelectionNCaret(2), 10)
def testVirtualSpace(self):
self.ed.SetSelection(3, 7)
self.ed.SetSelectionNCaretVirtualSpace(0, 3)
self.assertEquals(self.ed.GetSelectionNCaretVirtualSpace(0), 3)
self.ed.SetSelectionNAnchorVirtualSpace(0, 2)
self.assertEquals(self.ed.GetSelectionNAnchorVirtualSpace(0), 2)
		# Virtual space is not validated, so it can be set even when not at the end of a line
self.ed.SetSelection(1, 1)
self.ed.SetSelectionNCaretVirtualSpace(0, 3)
self.assertEquals(self.ed.GetSelectionNCaretVirtualSpace(0), 3)
def testRectangularVirtualSpace(self):
self.ed.VirtualSpaceOptions=1
self.ed.RectangularSelectionAnchor = 3
self.assertEquals(self.ed.RectangularSelectionAnchor, 3)
self.ed.RectangularSelectionCaret = 7
self.assertEquals(self.ed.RectangularSelectionCaret, 7)
self.ed.RectangularSelectionAnchorVirtualSpace = 1
self.assertEquals(self.ed.RectangularSelectionAnchorVirtualSpace, 1)
self.ed.RectangularSelectionCaretVirtualSpace = 10
self.assertEquals(self.ed.RectangularSelectionCaretVirtualSpace, 10)
self.assertEquals(self.ed.Selections, 2)
self.assertEquals(self.ed.MainSelection, 1)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 3)
self.assertEquals(self.ed.GetSelectionNAnchorVirtualSpace(0), 1)
self.assertEquals(self.ed.GetSelectionNCaret(0), 3)
self.assertEquals(self.ed.GetSelectionNCaretVirtualSpace(0), 10)
def testRectangularVirtualSpaceOptionOff(self):
# Same as previous test but virtual space option off so no virtual space in result
self.ed.VirtualSpaceOptions=0
self.ed.RectangularSelectionAnchor = 3
self.assertEquals(self.ed.RectangularSelectionAnchor, 3)
self.ed.RectangularSelectionCaret = 7
self.assertEquals(self.ed.RectangularSelectionCaret, 7)
self.ed.RectangularSelectionAnchorVirtualSpace = 1
self.assertEquals(self.ed.RectangularSelectionAnchorVirtualSpace, 1)
self.ed.RectangularSelectionCaretVirtualSpace = 10
self.assertEquals(self.ed.RectangularSelectionCaretVirtualSpace, 10)
self.assertEquals(self.ed.Selections, 2)
self.assertEquals(self.ed.MainSelection, 1)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 3)
self.assertEquals(self.ed.GetSelectionNAnchorVirtualSpace(0), 0)
self.assertEquals(self.ed.GetSelectionNCaret(0), 3)
self.assertEquals(self.ed.GetSelectionNCaretVirtualSpace(0), 0)
def testDropSelectionN(self):
self.ed.SetSelection(1, 2)
# Only one so dropping has no effect
self.ed.DropSelectionN(0)
self.assertEquals(self.ed.Selections, 1)
self.ed.AddSelection(4, 5)
self.assertEquals(self.ed.Selections, 2)
# Outside bounds so no effect
self.ed.DropSelectionN(2)
self.assertEquals(self.ed.Selections, 2)
# Dropping before main so main decreases
self.ed.DropSelectionN(0)
self.assertEquals(self.ed.Selections, 1)
self.assertEquals(self.ed.MainSelection, 0)
self.assertEquals(self.ed.GetSelectionNCaret(0), 4)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 5)
self.ed.AddSelection(10, 11)
self.ed.AddSelection(20, 21)
self.assertEquals(self.ed.Selections, 3)
self.assertEquals(self.ed.MainSelection, 2)
self.ed.MainSelection = 1
# Dropping after main so main does not change
self.ed.DropSelectionN(2)
self.assertEquals(self.ed.MainSelection, 1)
		# Dropping the first selection while it is main wraps main around to the new last
self.ed.AddSelection(30, 31)
self.ed.AddSelection(40, 41)
self.assertEquals(self.ed.Selections, 4)
self.ed.MainSelection = 0
self.ed.DropSelectionN(0)
self.assertEquals(self.ed.MainSelection, 2)
class TestStyleAttributes(unittest.TestCase):
""" These tests are just to ensure that the calls set and retrieve values.
They do not check the visual appearance of the style attributes.
"""
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
self.testColour = 0x171615
self.testFont = b"Georgia"
def tearDown(self):
self.ed.StyleResetDefault()
def testFont(self):
self.ed.StyleSetFont(self.ed.STYLE_DEFAULT, self.testFont)
self.assertEquals(self.ed.StyleGetFont(self.ed.STYLE_DEFAULT), self.testFont)
def testSize(self):
self.ed.StyleSetSize(self.ed.STYLE_DEFAULT, 12)
self.assertEquals(self.ed.StyleGetSize(self.ed.STYLE_DEFAULT), 12)
self.assertEquals(self.ed.StyleGetSizeFractional(self.ed.STYLE_DEFAULT), 12*self.ed.SC_FONT_SIZE_MULTIPLIER)
self.ed.StyleSetSizeFractional(self.ed.STYLE_DEFAULT, 1234)
self.assertEquals(self.ed.StyleGetSizeFractional(self.ed.STYLE_DEFAULT), 1234)
def testBold(self):
self.ed.StyleSetBold(self.ed.STYLE_DEFAULT, 1)
self.assertEquals(self.ed.StyleGetBold(self.ed.STYLE_DEFAULT), 1)
self.assertEquals(self.ed.StyleGetWeight(self.ed.STYLE_DEFAULT), self.ed.SC_WEIGHT_BOLD)
self.ed.StyleSetWeight(self.ed.STYLE_DEFAULT, 530)
self.assertEquals(self.ed.StyleGetWeight(self.ed.STYLE_DEFAULT), 530)
def testItalic(self):
self.ed.StyleSetItalic(self.ed.STYLE_DEFAULT, 1)
self.assertEquals(self.ed.StyleGetItalic(self.ed.STYLE_DEFAULT), 1)
def testUnderline(self):
self.assertEquals(self.ed.StyleGetUnderline(self.ed.STYLE_DEFAULT), 0)
self.ed.StyleSetUnderline(self.ed.STYLE_DEFAULT, 1)
self.assertEquals(self.ed.StyleGetUnderline(self.ed.STYLE_DEFAULT), 1)
def testFore(self):
self.assertEquals(self.ed.StyleGetFore(self.ed.STYLE_DEFAULT), 0)
self.ed.StyleSetFore(self.ed.STYLE_DEFAULT, self.testColour)
self.assertEquals(self.ed.StyleGetFore(self.ed.STYLE_DEFAULT), self.testColour)
def testBack(self):
self.assertEquals(self.ed.StyleGetBack(self.ed.STYLE_DEFAULT), 0xffffff)
self.ed.StyleSetBack(self.ed.STYLE_DEFAULT, self.testColour)
self.assertEquals(self.ed.StyleGetBack(self.ed.STYLE_DEFAULT), self.testColour)
def testEOLFilled(self):
self.assertEquals(self.ed.StyleGetEOLFilled(self.ed.STYLE_DEFAULT), 0)
self.ed.StyleSetEOLFilled(self.ed.STYLE_DEFAULT, 1)
self.assertEquals(self.ed.StyleGetEOLFilled(self.ed.STYLE_DEFAULT), 1)
def testCharacterSet(self):
self.ed.StyleSetCharacterSet(self.ed.STYLE_DEFAULT, self.ed.SC_CHARSET_RUSSIAN)
self.assertEquals(self.ed.StyleGetCharacterSet(self.ed.STYLE_DEFAULT), self.ed.SC_CHARSET_RUSSIAN)
def testCase(self):
self.assertEquals(self.ed.StyleGetCase(self.ed.STYLE_DEFAULT), self.ed.SC_CASE_MIXED)
self.ed.StyleSetCase(self.ed.STYLE_DEFAULT, self.ed.SC_CASE_UPPER)
self.assertEquals(self.ed.StyleGetCase(self.ed.STYLE_DEFAULT), self.ed.SC_CASE_UPPER)
self.ed.StyleSetCase(self.ed.STYLE_DEFAULT, self.ed.SC_CASE_LOWER)
self.assertEquals(self.ed.StyleGetCase(self.ed.STYLE_DEFAULT), self.ed.SC_CASE_LOWER)
def testVisible(self):
self.assertEquals(self.ed.StyleGetVisible(self.ed.STYLE_DEFAULT), 1)
self.ed.StyleSetVisible(self.ed.STYLE_DEFAULT, 0)
self.assertEquals(self.ed.StyleGetVisible(self.ed.STYLE_DEFAULT), 0)
def testChangeable(self):
self.assertEquals(self.ed.StyleGetChangeable(self.ed.STYLE_DEFAULT), 1)
self.ed.StyleSetChangeable(self.ed.STYLE_DEFAULT, 0)
self.assertEquals(self.ed.StyleGetChangeable(self.ed.STYLE_DEFAULT), 0)
def testHotSpot(self):
self.assertEquals(self.ed.StyleGetHotSpot(self.ed.STYLE_DEFAULT), 0)
self.ed.StyleSetHotSpot(self.ed.STYLE_DEFAULT, 1)
self.assertEquals(self.ed.StyleGetHotSpot(self.ed.STYLE_DEFAULT), 1)
class TestCharacterNavigation(unittest.TestCase):
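	""" Tests position navigation over multi-byte UTF-8 characters """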
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
self.ed.SetCodePage(65001)
def tearDown(self):
self.ed.SetCodePage(0)
def testBeforeAfter(self):
t = "aåflﬔ-"
tv = t.encode("UTF-8")
self.ed.SetContents(tv)
pos = 0
for i in range(len(t)-1):
after = self.ed.PositionAfter(pos)
self.assert_(after > i)
back = self.ed.PositionBefore(after)
self.assertEquals(pos, back)
pos = after
def testRelative(self):
# \x61 \xc3\xa5 \xef\xac\x82 \xef\xac\x94 \x2d
t = "aåflﬔ-"
tv = t.encode("UTF-8")
self.ed.SetContents(tv)
self.assertEquals(self.ed.PositionRelative(1, 2), 6)
self.assertEquals(self.ed.PositionRelative(6, -2), 1)
pos = 0
previous = 0
for i in range(1, len(t)):
after = self.ed.PositionRelative(pos, i)
self.assert_(after > pos)
self.assert_(after > previous)
previous = after
pos = len(t)
previous = pos
for i in range(1, len(t)-1):
after = self.ed.PositionRelative(pos, -i)
self.assert_(after < pos)
self.assert_(after < previous)
previous = after
class TestCaseMapping(unittest.TestCase):
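	""" Tests UpperCase and LowerCase in various encodings, including length-changing UTF-8 conversions """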
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def tearDown(self):
self.ed.SetCodePage(0)
self.ed.StyleSetCharacterSet(self.ed.STYLE_DEFAULT, self.ed.SC_CHARSET_DEFAULT)
def testEmpty(self):
# Trying to upper case an empty string caused a crash at one stage
t = b"x"
self.ed.SetContents(t)
self.ed.UpperCase()
self.assertEquals(self.ed.Contents(), b"x")
def testASCII(self):
t = b"x"
self.ed.SetContents(t)
self.ed.SetSel(0,1)
self.ed.UpperCase()
self.assertEquals(self.ed.Contents(), b"X")
def testLatin1(self):
t = "å".encode("Latin-1")
r = "Å".encode("Latin-1")
self.ed.SetContents(t)
self.ed.SetSel(0,1)
self.ed.UpperCase()
self.assertEquals(self.ed.Contents(), r)
def testRussian(self):
if sys.platform == "win32":
self.ed.StyleSetCharacterSet(self.ed.STYLE_DEFAULT, self.ed.SC_CHARSET_RUSSIAN)
else:
self.ed.StyleSetCharacterSet(self.ed.STYLE_DEFAULT, self.ed.SC_CHARSET_CYRILLIC)
t = "Б".encode("Windows-1251")
r = "б".encode("Windows-1251")
self.ed.SetContents(t)
self.ed.SetSel(0,1)
self.ed.LowerCase()
self.assertEquals(self.ed.Contents(), r)
def testUTF(self):
self.ed.SetCodePage(65001)
t = "å".encode("UTF-8")
r = "Å".encode("UTF-8")
self.ed.SetContents(t)
self.ed.SetSel(0,2)
self.ed.UpperCase()
self.assertEquals(self.ed.Contents(), r)
def testUTFDifferentLength(self):
self.ed.SetCodePage(65001)
t = "ı".encode("UTF-8")
r = "I".encode("UTF-8")
self.ed.SetContents(t)
self.assertEquals(self.ed.Length, 2)
self.ed.SetSel(0,2)
self.ed.UpperCase()
self.assertEquals(self.ed.Length, 1)
self.assertEquals(self.ed.Contents(), r)
def testUTFGrows(self):
# This crashed at one point in debug builds due to looking past end of shorter string
self.ed.SetCodePage(65001)
		# ﬖ is a single character ligature taking 3 bytes in UTF-8: EF AC 96
t = 'ﬖﬖ'.encode("UTF-8")
self.ed.SetContents(t)
self.assertEquals(self.ed.Length, 6)
self.ed.SetSel(0,self.ed.Length)
self.ed.UpperCase()
# To convert to upper case the ligature is separated into վ and ն then uppercased to Վ and Ն
# each of which takes 2 bytes in UTF-8: D5 8E D5 86
r = 'ՎՆՎՆ'.encode("UTF-8")
self.assertEquals(self.ed.Length, 8)
self.assertEquals(self.ed.Contents(), r)
self.assertEquals(self.ed.SelectionEnd, self.ed.Length)
def testUTFShrinks(self):
self.ed.SetCodePage(65001)
		# ﬁ is a single character ligature taking 3 bytes in UTF-8: EF AC 81
		t = 'ﬁﬁ'.encode("UTF-8")
self.ed.SetContents(t)
self.assertEquals(self.ed.Length, 6)
self.ed.SetSel(0,self.ed.Length)
self.ed.UpperCase()
# To convert to upper case the ligature is separated into f and i then uppercased to F and I
# each of which takes 1 byte in UTF-8: 46 49
r = 'FIFI'.encode("UTF-8")
self.assertEquals(self.ed.Length, 4)
self.assertEquals(self.ed.Contents(), r)
self.assertEquals(self.ed.SelectionEnd, self.ed.Length)
class TestCaseInsensitiveSearch(unittest.TestCase):
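	""" Tests case-insensitive SearchInTarget in various encodings """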
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def tearDown(self):
self.ed.SetCodePage(0)
self.ed.StyleSetCharacterSet(self.ed.STYLE_DEFAULT, self.ed.SC_CHARSET_DEFAULT)
def testEmpty(self):
text = b" x X"
searchString = b""
self.ed.SetContents(text)
self.ed.TargetStart = 0
self.ed.TargetEnd = self.ed.Length-1
self.ed.SearchFlags = 0
pos = self.ed.SearchInTarget(len(searchString), searchString)
self.assertEquals(0, pos)
def testASCII(self):
text = b" x X"
searchString = b"X"
self.ed.SetContents(text)
self.ed.TargetStart = 0
self.ed.TargetEnd = self.ed.Length-1
self.ed.SearchFlags = 0
pos = self.ed.SearchInTarget(len(searchString), searchString)
self.assertEquals(1, pos)
def testLatin1(self):
text = "Frånd Åå".encode("Latin-1")
searchString = "Å".encode("Latin-1")
self.ed.SetContents(text)
self.ed.TargetStart = 0
self.ed.TargetEnd = self.ed.Length-1
self.ed.SearchFlags = 0
pos = self.ed.SearchInTarget(len(searchString), searchString)
self.assertEquals(2, pos)
def testRussian(self):
self.ed.StyleSetCharacterSet(self.ed.STYLE_DEFAULT, self.ed.SC_CHARSET_RUSSIAN)
text = "=(Б tex б)".encode("Windows-1251")
searchString = "б".encode("Windows-1251")
self.ed.SetContents(text)
self.ed.TargetStart = 0
self.ed.TargetEnd = self.ed.Length-1
self.ed.SearchFlags = 0
pos = self.ed.SearchInTarget(len(searchString), searchString)
self.assertEquals(2, pos)
def testUTF(self):
self.ed.SetCodePage(65001)
text = "Frånd Åå".encode("UTF-8")
searchString = "Å".encode("UTF-8")
self.ed.SetContents(text)
self.ed.TargetStart = 0
self.ed.TargetEnd = self.ed.Length-1
self.ed.SearchFlags = 0
pos = self.ed.SearchInTarget(len(searchString), searchString)
self.assertEquals(2, pos)
def testUTFDifferentLength(self):
		# Searching for a two-byte string finds a single-byte match
		self.ed.SetCodePage(65001)
		# two-byte string "ſ" case-folds to the single-byte "s"
text = "Frånds Ååſ $".encode("UTF-8")
searchString = "ſ".encode("UTF-8")
firstPosition = len("Frånd".encode("UTF-8"))
self.assertEquals(len(searchString), 2)
self.ed.SetContents(text)
self.ed.TargetStart = 0
self.ed.TargetEnd = self.ed.Length-1
self.ed.SearchFlags = 0
pos = self.ed.SearchInTarget(len(searchString), searchString)
self.assertEquals(firstPosition, pos)
self.assertEquals(firstPosition+1, self.ed.TargetEnd)
class TestLexer(unittest.TestCase):
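	""" Tests choosing lexers by number and by name and querying their metadata """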
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def testLexerNumber(self):
self.ed.Lexer = self.ed.SCLEX_CPP
self.assertEquals(self.ed.GetLexer(), self.ed.SCLEX_CPP)
def testLexerName(self):
self.ed.LexerLanguage = b"cpp"
self.assertEquals(self.ed.GetLexer(), self.ed.SCLEX_CPP)
name = self.ed.GetLexerLanguage(0)
self.assertEquals(name, b"cpp")
def testPropertyNames(self):
propertyNames = self.ed.PropertyNames()
self.assertNotEquals(propertyNames, b"")
# The cpp lexer has a boolean property named lexer.cpp.allow.dollars
propNameDollars = b"lexer.cpp.allow.dollars"
propertyType = self.ed.PropertyType(propNameDollars)
self.assertEquals(propertyType, self.ed.SC_TYPE_BOOLEAN)
propertyDescription = self.ed.DescribeProperty(propNameDollars)
self.assertNotEquals(propertyDescription, b"")
def testWordListDescriptions(self):
wordSet = self.ed.DescribeKeyWordSets()
self.assertNotEquals(wordSet, b"")
class TestSubStyles(unittest.TestCase):
''' These tests include knowledge of the current implementation in the cpp lexer
and may have to change when that implementation changes.
Currently supports subStyles for IDENTIFIER 11 and COMMENTDOCKEYWORD 17 '''
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def testInfo(self):
self.ed.Lexer = self.ed.SCLEX_CPP
bases = self.ed.GetSubStyleBases()
self.assertEquals(bases, b"\x0b\x11") # 11, 17
self.assertEquals(self.ed.DistanceToSecondaryStyles(), 0x40)
def testAllocate(self):
firstSubStyle = 0x80 # Current implementation
self.ed.Lexer = self.ed.SCLEX_CPP
self.assertEquals(self.ed.GetStyleFromSubStyle(firstSubStyle), firstSubStyle)
self.assertEquals(self.ed.GetSubStylesStart(self.ed.SCE_C_IDENTIFIER), 0)
self.assertEquals(self.ed.GetSubStylesLength(self.ed.SCE_C_IDENTIFIER), 0)
numSubStyles = 5
subs = self.ed.AllocateSubStyles(self.ed.SCE_C_IDENTIFIER, numSubStyles)
self.assertEquals(subs, firstSubStyle)
self.assertEquals(self.ed.GetSubStylesStart(self.ed.SCE_C_IDENTIFIER), firstSubStyle)
self.assertEquals(self.ed.GetSubStylesLength(self.ed.SCE_C_IDENTIFIER), numSubStyles)
self.assertEquals(self.ed.GetStyleFromSubStyle(subs), self.ed.SCE_C_IDENTIFIER)
self.assertEquals(self.ed.GetStyleFromSubStyle(subs+numSubStyles-1), self.ed.SCE_C_IDENTIFIER)
self.assertEquals(self.ed.GetStyleFromSubStyle(self.ed.SCE_C_IDENTIFIER), self.ed.SCE_C_IDENTIFIER)
# Now free and check same as start
self.ed.FreeSubStyles()
self.assertEquals(self.ed.GetStyleFromSubStyle(subs), subs)
self.assertEquals(self.ed.GetSubStylesStart(self.ed.SCE_C_IDENTIFIER), 0)
self.assertEquals(self.ed.GetSubStylesLength(self.ed.SCE_C_IDENTIFIER), 0)
def testInactive(self):
firstSubStyle = 0x80 # Current implementation
inactiveDistance = self.ed.DistanceToSecondaryStyles()
self.ed.Lexer = self.ed.SCLEX_CPP
numSubStyles = 5
subs = self.ed.AllocateSubStyles(self.ed.SCE_C_IDENTIFIER, numSubStyles)
self.assertEquals(subs, firstSubStyle)
self.assertEquals(self.ed.GetStyleFromSubStyle(subs), self.ed.SCE_C_IDENTIFIER)
self.assertEquals(self.ed.GetStyleFromSubStyle(subs+inactiveDistance), self.ed.SCE_C_IDENTIFIER+inactiveDistance)
self.ed.FreeSubStyles()
def testSecondary(self):
inactiveDistance = self.ed.DistanceToSecondaryStyles()
self.assertEquals(self.ed.GetPrimaryStyleFromStyle(self.ed.SCE_C_IDENTIFIER+inactiveDistance), self.ed.SCE_C_IDENTIFIER)
class TestCallTip(unittest.TestCase):
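	""" Tests showing, positioning and cancelling a call tip """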
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
# 1 line of 4 characters
t = b"fun("
self.ed.AddText(len(t), t)
def testBasics(self):
self.assertEquals(self.ed.CallTipActive(), 0)
self.ed.CallTipShow(1, "fun(int x)")
self.assertEquals(self.ed.CallTipActive(), 1)
self.assertEquals(self.ed.CallTipPosStart(), 4)
self.ed.CallTipSetPosStart(1)
self.assertEquals(self.ed.CallTipPosStart(), 1)
self.ed.CallTipCancel()
self.assertEquals(self.ed.CallTipActive(), 0)
class TestAutoComplete(unittest.TestCase):
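	""" Tests autocompletion list defaults, display, selection and completion """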
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
		# 1 line of 3 characters plus a line end
		t = b"xxx\n"
self.ed.AddText(len(t), t)
def testDefaults(self):
self.assertEquals(self.ed.AutoCGetSeparator(), ord(' '))
self.assertEquals(self.ed.AutoCGetMaxHeight(), 5)
self.assertEquals(self.ed.AutoCGetMaxWidth(), 0)
self.assertEquals(self.ed.AutoCGetTypeSeparator(), ord('?'))
self.assertEquals(self.ed.AutoCGetIgnoreCase(), 0)
self.assertEquals(self.ed.AutoCGetAutoHide(), 1)
self.assertEquals(self.ed.AutoCGetDropRestOfWord(), 0)
def testChangeDefaults(self):
self.ed.AutoCSetSeparator(ord('-'))
self.assertEquals(self.ed.AutoCGetSeparator(), ord('-'))
self.ed.AutoCSetSeparator(ord(' '))
self.ed.AutoCSetMaxHeight(100)
self.assertEquals(self.ed.AutoCGetMaxHeight(), 100)
self.ed.AutoCSetMaxHeight(5)
self.ed.AutoCSetMaxWidth(100)
self.assertEquals(self.ed.AutoCGetMaxWidth(), 100)
self.ed.AutoCSetMaxWidth(0)
self.ed.AutoCSetTypeSeparator(ord('@'))
self.assertEquals(self.ed.AutoCGetTypeSeparator(), ord('@'))
self.ed.AutoCSetTypeSeparator(ord('?'))
self.ed.AutoCSetIgnoreCase(1)
self.assertEquals(self.ed.AutoCGetIgnoreCase(), 1)
self.ed.AutoCSetIgnoreCase(0)
self.ed.AutoCSetAutoHide(0)
self.assertEquals(self.ed.AutoCGetAutoHide(), 0)
self.ed.AutoCSetAutoHide(1)
self.ed.AutoCSetDropRestOfWord(1)
self.assertEquals(self.ed.AutoCGetDropRestOfWord(), 1)
self.ed.AutoCSetDropRestOfWord(0)
def testAutoShow(self):
self.assertEquals(self.ed.AutoCActive(), 0)
self.ed.SetSel(0, 0)
self.ed.AutoCShow(0, b"za defn ghi")
self.assertEquals(self.ed.AutoCActive(), 1)
#~ time.sleep(2)
self.assertEquals(self.ed.AutoCPosStart(), 0)
self.assertEquals(self.ed.AutoCGetCurrent(), 0)
t = self.ed.AutoCGetCurrentText(5)
#~ self.assertEquals(l, 3)
self.assertEquals(t, b"za")
self.ed.AutoCCancel()
self.assertEquals(self.ed.AutoCActive(), 0)
def testAutoShowComplete(self):
self.assertEquals(self.ed.AutoCActive(), 0)
self.ed.SetSel(0, 0)
self.ed.AutoCShow(0, b"za defn ghi")
self.ed.AutoCComplete()
self.assertEquals(self.ed.Contents(), b"zaxxx\n")
self.assertEquals(self.ed.AutoCActive(), 0)
def testAutoShowSelect(self):
self.assertEquals(self.ed.AutoCActive(), 0)
self.ed.SetSel(0, 0)
self.ed.AutoCShow(0, b"za defn ghi")
self.ed.AutoCSelect(0, b"d")
self.ed.AutoCComplete()
self.assertEquals(self.ed.Contents(), b"defnxxx\n")
self.assertEquals(self.ed.AutoCActive(), 0)
def testWriteOnly(self):
""" Checks that setting attributes doesn't crash or change tested behaviour
but does not check that the changed attributes are effective. """
self.ed.AutoCStops(0, b"abcde")
self.ed.AutoCSetFillUps(0, b"1234")
class TestDirectAccess(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def testGapPosition(self):
text = b"abcd"
self.ed.SetContents(text)
self.assertEquals(self.ed.GapPosition, 4)
self.ed.TargetStart = 1
self.ed.TargetEnd = 1
rep = b"-"
self.ed.ReplaceTarget(len(rep), rep)
self.assertEquals(self.ed.GapPosition, 2)
def testCharacterPointerAndRangePointer(self):
text = b"abcd"
self.ed.SetContents(text)
characterPointer = self.ed.CharacterPointer
rangePointer = self.ed.GetRangePointer(0,3)
self.assertEquals(characterPointer, rangePointer)
cpBuffer = ctypes.c_char_p(characterPointer)
self.assertEquals(cpBuffer.value, text)
# Gap will not be moved as already moved for CharacterPointer call
rangePointer = self.ed.GetRangePointer(1,3)
cpBuffer = ctypes.c_char_p(rangePointer)
self.assertEquals(cpBuffer.value, text[1:])
class TestWordChars(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def tearDown(self):
self.ed.SetCharsDefault()
def _setChars(self, charClass, chars):
""" Wrapper to call self.ed.Set*Chars with the right type
@param charClass {str} the character class, "word", "space", etc.
@param chars {iterable of int} characters to set
"""
if sys.version_info.major == 2:
# Python 2, use latin-1 encoded str
unichars = (unichr(x) for x in chars if x != 0)
# can't use literal u"", that's a syntax error in Py3k
			# unicode() doesn't exist in Py3k, but we never run it there
result = unicode("").join(unichars).encode("latin-1")
else:
# Python 3, use bytes()
result = bytes(x for x in chars if x != 0)
meth = getattr(self.ed, "Set%sChars" % (charClass.capitalize()))
return meth(None, result)
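	# Usage sketch: the testCustom* methods below call, e.g.,
	# self._setChars("word", range(1, 0x100, 2)) to make every odd
	# byte value a word character.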
def assertCharSetsEqual(self, first, second, *args, **kwargs):
""" Assert that the two character sets are equal.
		If either set is an iterable of numbers, convert it to chars
first. """
first_set = set()
for c in first:
first_set.add(chr(c) if isinstance(c, int) else c)
second_set = set()
for c in second:
second_set.add(chr(c) if isinstance(c, int) else c)
return self.assertEqual(first_set, second_set, *args, **kwargs)
def testDefaultWordChars(self):
# check that the default word chars are as expected
import string
data = self.ed.GetWordChars(None)
expected = set(string.digits + string.ascii_letters + '_') | \
set(chr(x) for x in range(0x80, 0x100))
self.assertCharSetsEqual(data, expected)
def testDefaultWhitespaceChars(self):
# check that the default whitespace chars are as expected
import string
data = self.ed.GetWhitespaceChars(None)
expected = (set(chr(x) for x in (range(0, 0x20))) | set(' ')) - \
set(['\r', '\n'])
self.assertCharSetsEqual(data, expected)
def testDefaultPunctuationChars(self):
# check that the default punctuation chars are as expected
import string
data = self.ed.GetPunctuationChars(None)
expected = set(chr(x) for x in range(0x20, 0x80)) - \
set(string.ascii_letters + string.digits + "\r\n_ ")
self.assertCharSetsEqual(data, expected)
def testCustomWordChars(self):
# check that setting things to whitespace chars makes them not words
self._setChars("whitespace", range(1, 0x100))
data = self.ed.GetWordChars(None)
expected = set()
self.assertCharSetsEqual(data, expected)
# and now set something to make sure that works too
expected = set(range(1, 0x100, 2))
self._setChars("word", expected)
data = self.ed.GetWordChars(None)
self.assertCharSetsEqual(data, expected)
def testCustomWhitespaceChars(self):
# check setting whitespace chars to non-default values
self._setChars("word", range(1, 0x100))
# we can't change chr(0) from being anything but whitespace
expected = set([0])
data = self.ed.GetWhitespaceChars(None)
self.assertCharSetsEqual(data, expected)
# now try to set it to something custom
expected = set(range(1, 0x100, 2)) | set([0])
self._setChars("whitespace", expected)
data = self.ed.GetWhitespaceChars(None)
self.assertCharSetsEqual(data, expected)
def testCustomPunctuationChars(self):
# check setting punctuation chars to non-default values
self._setChars("word", range(1, 0x100))
expected = set()
data = self.ed.GetPunctuationChars(0)
self.assertEquals(set(data), expected)
# now try to set it to something custom
expected = set(range(1, 0x100, 1))
self._setChars("punctuation", expected)
data = self.ed.GetPunctuationChars(None)
self.assertCharSetsEqual(data, expected)
class TestExplicitTabStops(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
# 2 lines of 4 characters
self.t = b"fun(\nint)"
self.ed.AddText(len(self.t), self.t)
def testAddingAndClearing(self):
self.assertEquals(self.ed.GetNextTabStop(0,0), 0)
# Add a tab stop at 7
self.ed.AddTabStop(0, 7)
# Check added
self.assertEquals(self.ed.GetNextTabStop(0,0), 7)
# Check does not affect line 1
self.assertEquals(self.ed.GetNextTabStop(1,0), 0)
# Add a tab stop at 18
self.ed.AddTabStop(0, 18)
# Check added
self.assertEquals(self.ed.GetNextTabStop(0,0), 7)
self.assertEquals(self.ed.GetNextTabStop(0,7), 18)
# Check does not affect line 1
self.assertEquals(self.ed.GetNextTabStop(1,0), 0)
self.assertEquals(self.ed.GetNextTabStop(1,7), 0)
# Add a tab stop between others at 13
self.ed.AddTabStop(0, 13)
# Check added
self.assertEquals(self.ed.GetNextTabStop(0,0), 7)
self.assertEquals(self.ed.GetNextTabStop(0,7), 13)
self.assertEquals(self.ed.GetNextTabStop(0,13), 18)
# Check does not affect line 1
self.assertEquals(self.ed.GetNextTabStop(1,0), 0)
self.assertEquals(self.ed.GetNextTabStop(1,7), 0)
self.ed.ClearTabStops(0)
# Check back to original state
self.assertEquals(self.ed.GetNextTabStop(0,0), 0)
def testLineInsertionDeletion(self):
# Add a tab stop at 7 on line 1
self.ed.AddTabStop(1, 7)
# Check added
self.assertEquals(self.ed.GetNextTabStop(1,0), 7)
# More text at end
self.ed.AddText(len(self.t), self.t)
self.assertEquals(self.ed.GetNextTabStop(0,0), 0)
self.assertEquals(self.ed.GetNextTabStop(1,0), 7)
self.assertEquals(self.ed.GetNextTabStop(2,0), 0)
self.assertEquals(self.ed.GetNextTabStop(3,0), 0)
# Another 2 lines before explicit line moves the explicit tab stop
data = b"x\ny\n"
self.ed.InsertText(4, data)
self.assertEquals(self.ed.GetNextTabStop(0,0), 0)
self.assertEquals(self.ed.GetNextTabStop(1,0), 0)
self.assertEquals(self.ed.GetNextTabStop(2,0), 0)
self.assertEquals(self.ed.GetNextTabStop(3,0), 7)
self.assertEquals(self.ed.GetNextTabStop(4,0), 0)
self.assertEquals(self.ed.GetNextTabStop(5,0), 0)
# Undo moves the explicit tab stop back
self.ed.Undo()
self.assertEquals(self.ed.GetNextTabStop(0,0), 0)
self.assertEquals(self.ed.GetNextTabStop(1,0), 7)
self.assertEquals(self.ed.GetNextTabStop(2,0), 0)
self.assertEquals(self.ed.GetNextTabStop(3,0), 0)
if __name__ == '__main__':
uu = Xite.main("simpleTests")
#~ for x in sorted(uu.keys()):
#~ print(x, uu[x])
#~ print()
| isc |
qedsoftware/commcare-hq | corehq/apps/app_manager/tests/test_translations.py | 1 | 2099 | # coding=utf-8
import os
from lxml import etree
from django.test import TestCase
from corehq.apps.app_manager.models import Application
from corehq.apps.app_manager.tests.util import SuiteMixin
from corehq.apps.app_manager.translations import escape_output_value
import commcare_translations
class AppManagerTranslationsTest(TestCase, SuiteMixin):
root = os.path.dirname(__file__)
file_path = ('data', 'suite')
def test_escape_output_value(self):
test_cases = [
('hello', '<value>hello</value>'),
            ('abc < def > abc', '<value>abc &lt; def &gt; abc</value>'),
("bee's knees", "<value>bee's knees</value>"),
            ('unfortunate <xml expression', '<value>unfortunate &lt;xml expression</value>'),
(u'क्लिक', '<value>क्लिक</value>'),
            ('&#39', '<value>&amp;#39</value>'),
('question1 is <output value="/data/question1" vellum:value="#form/question1"/> !',
'<value>question1 is <output value="/data/question1" vellum:value="#form/question1"/> !</value>'),
            ('Here is a ref <output value="/data/no_media"/> with some "trailing" text & that\'s some bad < xml.',
             '<value>Here is a ref <output value="/data/no_media"/> with some "trailing" text &amp; that\'s some bad &lt; xml.</value>')
]
for input, expected_output in test_cases:
self.assertEqual(expected_output, etree.tostring(escape_output_value(input)))
def test_language_names(self):
app_json = self.get_json('app')
app_json['langs'] = ['en', 'fra', 'hin', 'pol']
app = Application.wrap(app_json)
app.create_suite()
app_strings = app.create_app_strings('default')
app_strings_dict = commcare_translations.loads(app_strings)
self.assertEqual(app_strings_dict['en'], 'English')
self.assertEqual(app_strings_dict['fra'], u'Français')
self.assertEqual(app_strings_dict['hin'], u'हिंदी')
self.assertEqual(app_strings_dict['pol'], 'polski')
| bsd-3-clause |
uannight/reposan | plugin.video.tvalacarta/lib/youtube_dl/extractor/swrmediathek.py | 64 | 4375 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
parse_duration,
int_or_none,
determine_protocol,
)
class SWRMediathekIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?swrmediathek\.de/(?:content/)?player\.htm\?show=(?P<id>[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12})'
_TESTS = [{
'url': 'http://swrmediathek.de/player.htm?show=849790d0-dab8-11e3-a953-0026b975f2e6',
'md5': '8c5f6f0172753368547ca8413a7768ac',
'info_dict': {
'id': '849790d0-dab8-11e3-a953-0026b975f2e6',
'ext': 'mp4',
'title': 'SWR odysso',
'description': 'md5:2012e31baad36162e97ce9eb3f157b8a',
'thumbnail': r're:^http:.*\.jpg$',
'duration': 2602,
'upload_date': '20140515',
'uploader': 'SWR Fernsehen',
'uploader_id': '990030',
},
}, {
'url': 'http://swrmediathek.de/player.htm?show=0e1a8510-ddf2-11e3-9be3-0026b975f2e6',
'md5': 'b10ab854f912eecc5a6b55cd6fc1f545',
'info_dict': {
'id': '0e1a8510-ddf2-11e3-9be3-0026b975f2e6',
'ext': 'mp4',
'title': 'Nachtcafé - Alltagsdroge Alkohol - zwischen Sektempfang und Komasaufen',
'description': 'md5:e0a3adc17e47db2c23aab9ebc36dbee2',
'thumbnail': r're:http://.*\.jpg',
'duration': 5305,
'upload_date': '20140516',
'uploader': 'SWR Fernsehen',
'uploader_id': '990030',
},
'skip': 'redirect to http://swrmediathek.de/index.htm?hinweis=swrlink',
}, {
'url': 'http://swrmediathek.de/player.htm?show=bba23e10-cb93-11e3-bf7f-0026b975f2e6',
'md5': '4382e4ef2c9d7ce6852535fa867a0dd3',
'info_dict': {
'id': 'bba23e10-cb93-11e3-bf7f-0026b975f2e6',
'ext': 'mp3',
'title': 'Saša Stanišic: Vor dem Fest',
'description': 'md5:5b792387dc3fbb171eb709060654e8c9',
'thumbnail': r're:http://.*\.jpg',
'duration': 3366,
'upload_date': '20140520',
'uploader': 'SWR 2',
'uploader_id': '284670',
},
'skip': 'redirect to http://swrmediathek.de/index.htm?hinweis=swrlink',
}]
def _real_extract(self, url):
video_id = self._match_id(url)
video = self._download_json(
'http://swrmediathek.de/AjaxEntry?ekey=%s' % video_id,
video_id, 'Downloading video JSON')
attr = video['attr']
title = attr['entry_title']
media_type = attr.get('entry_etype')
formats = []
for entry in video.get('sub', []):
if entry.get('name') != 'entry_media':
continue
entry_attr = entry.get('attr', {})
f_url = entry_attr.get('val2')
if not f_url:
continue
codec = entry_attr.get('val0')
if codec == 'm3u8':
formats.extend(self._extract_m3u8_formats(
f_url, video_id, 'mp4', 'm3u8_native',
m3u8_id='hls', fatal=False))
elif codec == 'f4m':
formats.extend(self._extract_f4m_formats(
f_url + '?hdcore=3.7.0', video_id,
f4m_id='hds', fatal=False))
else:
formats.append({
'format_id': determine_protocol({'url': f_url}),
'url': f_url,
'quality': int_or_none(entry_attr.get('val1')),
'vcodec': codec if media_type == 'Video' else 'none',
'acodec': codec if media_type == 'Audio' else None,
})
self._sort_formats(formats)
upload_date = None
entry_pdatet = attr.get('entry_pdatet')
if entry_pdatet:
upload_date = entry_pdatet[:-4]
return {
'id': video_id,
'title': title,
'description': attr.get('entry_descl'),
'thumbnail': attr.get('entry_image_16_9'),
'duration': parse_duration(attr.get('entry_durat')),
'upload_date': upload_date,
'uploader': attr.get('channel_title'),
'uploader_id': attr.get('channel_idkey'),
'formats': formats,
}
| gpl-2.0 |
HydrelioxGitHub/home-assistant | tests/components/sensor/test_rest.py | 2 | 12631 | """The tests for the REST sensor platform."""
import unittest
from pytest import raises
from unittest.mock import patch, Mock
import requests
from requests.exceptions import Timeout, MissingSchema, RequestException
import requests_mock
from homeassistant.exceptions import PlatformNotReady
from homeassistant.setup import setup_component
import homeassistant.components.sensor as sensor
import homeassistant.components.sensor.rest as rest
from homeassistant.helpers.config_validation import template
from tests.common import get_test_home_assistant, assert_setup_component
import pytest
class TestRestSensorSetup(unittest.TestCase):
"""Tests for setting up the REST sensor platform."""
def setUp(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def tearDown(self):
"""Stop everything that was started."""
self.hass.stop()
def test_setup_missing_config(self):
"""Test setup with configuration missing required entries."""
with assert_setup_component(0):
assert setup_component(self.hass, sensor.DOMAIN, {
'sensor': {'platform': 'rest'}})
def test_setup_missing_schema(self):
"""Test setup with resource missing schema."""
with pytest.raises(MissingSchema):
rest.setup_platform(self.hass, {
'platform': 'rest',
'resource': 'localhost',
'method': 'GET'
}, None)
@patch('requests.Session.send',
side_effect=requests.exceptions.ConnectionError())
def test_setup_failed_connect(self, mock_req):
"""Test setup when connection error occurs."""
with raises(PlatformNotReady):
rest.setup_platform(self.hass, {
'platform': 'rest',
'resource': 'http://localhost',
}, lambda devices, update=True: None)
@patch('requests.Session.send', side_effect=Timeout())
def test_setup_timeout(self, mock_req):
"""Test setup when connection timeout occurs."""
with raises(PlatformNotReady):
rest.setup_platform(self.hass, {
'platform': 'rest',
'resource': 'http://localhost',
}, lambda devices, update=True: None)
@requests_mock.Mocker()
def test_setup_minimum(self, mock_req):
"""Test setup with minimum configuration."""
mock_req.get('http://localhost', status_code=200)
with assert_setup_component(1, 'sensor'):
assert setup_component(self.hass, 'sensor', {
'sensor': {
'platform': 'rest',
'resource': 'http://localhost'
}
})
assert 2 == mock_req.call_count
@requests_mock.Mocker()
def test_setup_get(self, mock_req):
"""Test setup with valid configuration."""
mock_req.get('http://localhost', status_code=200)
with assert_setup_component(1, 'sensor'):
assert setup_component(self.hass, 'sensor', {
'sensor': {
'platform': 'rest',
'resource': 'http://localhost',
'method': 'GET',
'value_template': '{{ value_json.key }}',
'name': 'foo',
'unit_of_measurement': 'MB',
'verify_ssl': 'true',
'timeout': 30,
'authentication': 'basic',
'username': 'my username',
'password': 'my password',
'headers': {'Accept': 'application/json'}
}
})
assert 2 == mock_req.call_count
@requests_mock.Mocker()
def test_setup_post(self, mock_req):
"""Test setup with valid configuration."""
mock_req.post('http://localhost', status_code=200)
with assert_setup_component(1, 'sensor'):
assert setup_component(self.hass, 'sensor', {
'sensor': {
'platform': 'rest',
'resource': 'http://localhost',
'method': 'POST',
'value_template': '{{ value_json.key }}',
'payload': '{ "device": "toaster"}',
'name': 'foo',
'unit_of_measurement': 'MB',
'verify_ssl': 'true',
'timeout': 30,
'authentication': 'basic',
'username': 'my username',
'password': 'my password',
'headers': {'Accept': 'application/json'}
}
})
assert 2 == mock_req.call_count
class TestRestSensor(unittest.TestCase):
"""Tests for REST sensor platform."""
def setUp(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.initial_state = 'initial_state'
self.rest = Mock('rest.RestData')
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(
'{ "key": "' + self.initial_state + '" }'))
self.name = 'foo'
self.unit_of_measurement = 'MB'
self.device_class = None
self.value_template = template('{{ value_json.key }}')
self.value_template.hass = self.hass
self.force_update = False
self.sensor = rest.RestSensor(
self.hass, self.rest, self.name, self.unit_of_measurement,
self.device_class, self.value_template, [], self.force_update
)
def tearDown(self):
"""Stop everything that was started."""
self.hass.stop()
def update_side_effect(self, data):
"""Side effect function for mocking RestData.update()."""
self.rest.data = data
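    # Note: update_side_effect(...) is invoked immediately wherever it is
    # passed, seeding self.rest.data with the canned payload; its None return
    # value is what actually becomes the Mock's side_effect.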
def test_name(self):
"""Test the name."""
assert self.name == self.sensor.name
def test_unit_of_measurement(self):
"""Test the unit of measurement."""
assert self.unit_of_measurement == self.sensor.unit_of_measurement
def test_force_update(self):
"""Test the unit of measurement."""
assert self.force_update == self.sensor.force_update
def test_state(self):
"""Test the initial state."""
self.sensor.update()
assert self.initial_state == self.sensor.state
def test_update_when_value_is_none(self):
"""Test state gets updated to unknown when sensor returns no data."""
self.rest.update = Mock(
'rest.RestData.update', side_effect=self.update_side_effect(None))
self.sensor.update()
assert self.sensor.state is None
assert not self.sensor.available
def test_update_when_value_changed(self):
"""Test state gets updated when sensor returns a new status."""
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(
'{ "key": "updated_state" }'))
self.sensor.update()
assert 'updated_state' == self.sensor.state
assert self.sensor.available
def test_update_with_no_template(self):
"""Test update when there is no value template."""
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(
'plain_state'))
self.sensor = rest.RestSensor(self.hass, self.rest, self.name,
self.unit_of_measurement,
self.device_class, None, [],
self.force_update)
self.sensor.update()
assert 'plain_state' == self.sensor.state
assert self.sensor.available
def test_update_with_json_attrs(self):
"""Test attributes get extracted from a JSON result."""
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(
'{ "key": "some_json_value" }'))
self.sensor = rest.RestSensor(self.hass, self.rest, self.name,
self.unit_of_measurement,
self.device_class, None, ['key'],
self.force_update)
self.sensor.update()
assert 'some_json_value' == \
self.sensor.device_state_attributes['key']
@patch('homeassistant.components.sensor.rest._LOGGER')
def test_update_with_json_attrs_no_data(self, mock_logger):
"""Test attributes when no JSON result fetched."""
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(None))
self.sensor = rest.RestSensor(self.hass, self.rest, self.name,
self.unit_of_measurement,
self.device_class, None, ['key'],
self.force_update)
self.sensor.update()
assert {} == self.sensor.device_state_attributes
assert mock_logger.warning.called
@patch('homeassistant.components.sensor.rest._LOGGER')
def test_update_with_json_attrs_not_dict(self, mock_logger):
"""Test attributes get extracted from a JSON result."""
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(
'["list", "of", "things"]'))
self.sensor = rest.RestSensor(self.hass, self.rest, self.name,
self.unit_of_measurement,
self.device_class, None, ['key'],
self.force_update)
self.sensor.update()
assert {} == self.sensor.device_state_attributes
assert mock_logger.warning.called
@patch('homeassistant.components.sensor.rest._LOGGER')
def test_update_with_json_attrs_bad_JSON(self, mock_logger):
"""Test attributes get extracted from a JSON result."""
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(
'This is text rather than JSON data.'))
self.sensor = rest.RestSensor(self.hass, self.rest, self.name,
self.unit_of_measurement,
self.device_class, None, ['key'],
self.force_update)
self.sensor.update()
assert {} == self.sensor.device_state_attributes
assert mock_logger.warning.called
assert mock_logger.debug.called
def test_update_with_json_attrs_and_template(self):
"""Test attributes get extracted from a JSON result."""
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(
'{ "key": "json_state_updated_value" }'))
self.sensor = rest.RestSensor(self.hass, self.rest, self.name,
self.unit_of_measurement,
self.device_class,
self.value_template, ['key'],
self.force_update)
self.sensor.update()
assert 'json_state_updated_value' == self.sensor.state
assert 'json_state_updated_value' == \
self.sensor.device_state_attributes['key'], \
self.force_update
class TestRestData(unittest.TestCase):
"""Tests for RestData."""
def setUp(self):
"""Set up things to be run when tests are started."""
self.method = "GET"
self.resource = "http://localhost"
self.verify_ssl = True
self.timeout = 10
self.rest = rest.RestData(
self.method, self.resource, None, None, None, self.verify_ssl,
self.timeout)
@requests_mock.Mocker()
def test_update(self, mock_req):
"""Test update."""
mock_req.get('http://localhost', text='test data')
self.rest.update()
assert 'test data' == self.rest.data
@patch('requests.Session', side_effect=RequestException)
def test_update_request_exception(self, mock_req):
"""Test update when a request exception occurs."""
self.rest.update()
assert self.rest.data is None
| apache-2.0 |
MaxTyutyunnikov/lino | lino/modlib/cal/__init__.py | 1 | 1851 | # -*- coding: UTF-8 -*-
## Copyright 2011-2013 Luc Saffre
## This file is part of the Lino project.
## Lino is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 3 of the License, or
## (at your option) any later version.
## Lino is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with Lino; if not, see <http://www.gnu.org/licenses/>.
"""
This module turns Lino into a basic calendar client.
When using this app, you will probably also want to set
:settings:`use_extensible` to True.
"""
#~ class SiteMixin(object):
#~ """
    #~ Class methods and attributes added to a Site by this module.
#~ """
#~ def get_reminder_generators_by_user(self,user):
#~ """
#~ Override this per application to return a list of
        #~ reminder generators from all models for a given user.
        #~ A reminder generator is an object that has an `update_reminders`
#~ method.
#~ """
#~ return []
#~ def get_todo_tables(self,ar):
#~ """
#~ Return or yield a list of tables that should be empty
#~ """
#~ from django.db.models import loading
#~ for mod in loading.get_apps():
#~ meth = getattr(mod,'get_todo_tables',None)
#~ if meth is not None:
#~ dblogger.debug("Running %s of %s", methname, mod.__name__)
#~ for i in meth(self,ar):
#~ yield i
| gpl-3.0 |
josephsnyder/VistA | Scripts/DefaultKIDSBuildInstaller.py | 1 | 30883 | #---------------------------------------------------------------------------
# Copyright 2012-2019 The Open Source Electronic Health Record Alliance
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#---------------------------------------------------------------------------
from __future__ import division
from __future__ import print_function
from builtins import object
from past.utils import old_div
import sys
import os
import re
import tempfile
import shutil
import argparse
import glob
from PatchInfoParser import installNameToDirName
from VistATestClient import VistATestClientFactory, createTestClientArgParser
from LoggerManager import logger, initConsoleLogging
from VistAPackageInfoFetcher import VistAPackageInfoFetcher
from VistAGlobalImport import VistAGlobalImport, DEFAULT_GLOBAL_IMPORT_TIMEOUT
from ExternalDownloader import obtainKIDSBuildFileBySha1
from ConvertToExternalData import readSha1SumFromSha1File
from ConvertToExternalData import isValidExternalDataFileName
from ConvertToExternalData import isValidGlobalFileSuffix, isValidGlobalSha1Suffix
from ConvertToExternalData import getSha1HashFromExternalDataFileName
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
DEFAULT_CACHE_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, "../"))
from VistAMenuUtil import VistAMenuUtil
DEFAULT_INSTALL_DUZ = 17 # VistA user, "USER,SEVENTEEN"
CHECK_INSTALLATION_PROGRESS_TIMEOUT = 7200 # 7200 seconds or 120 minutes
GLOBAL_IMPORT_BYTE_PER_SEC = 0.5*1024*1024 # import speed is 0.5 MiB per sec
""" Default Installer for KIDS Build """
class DefaultKIDSBuildInstaller(object):
#---------------------------------------------------------------------------#
# Class Constants
#---------------------------------------------------------------------------#
""" A list of tuple, defined the action list corresponding to KIDS Build
questions that might need to act.
each tuple should have three items.
first item: KIDS Menu option text
second item: default answer, use \"\" for default
third item: bool flag to indicate whether to break out of the menu loop
If more menu options is needed, please either add extra option
in the subclass if just specific to that KIDS Build, or add it here if
it is a general question
"""
KIDS_MENU_OPTION_ACTION_LIST = [
("Want to continue installing this build\?","YES", False),
("Enter the Coordinator for Mail Group", "POSTMASTER", False),
("Want KIDS to Rebuild Menu Trees Upon Completion of Install\?",
"", False),
("Want KIDS to INHIBIT LOGONs during the install?",
"NO", False),
("Want to DISABLE Scheduled Options, Menu Options, and Protocols\?",
"NO", False),
("Delay Install \(Minutes\): \(0\-60\):", "0", False),
("do you want to include disabled components\?", "NO", False),
("DEVICE:", None, True)
]
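  # Example of the convention documented above: the entry
  #   ("DEVICE:", None, True)
  # means: on the "DEVICE:" prompt, send nothing and break out of the loop.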
""" A list of tuple, defined the action list corresponding to KIDS Build
questions that might need to act.
each tuple should have three items.
first item: KIDS Menu option text
second item: default answer, use \"\" for default
third item: bool flag to indicate whether to break out of the menu loop
If more menu options is needed, please either add extra option
in the subclass if just specific to that KIDS Build, or add it here if
it is a general question
"""
KIDS_LOAD_QUESTION_ACTION_LIST = [
("OK to continue with Load","YES", False),
("Want to Continue with Load\?","YES", False),
("Select Installation ","?", True),
("Want to continue installing this build\?","YES", False),
("Want to RUN the Environment Check Routine\? YES//","YES",False)
]
""" option action list for Exit KIDS menu, similar struct as above """
EXIT_KIDS_MENU_ACTION_LIST = [
("Select Installation ", "", False),
("Select Kernel Installation & Distribution System ", "", False),
("Select Programmer Options ", "", False),
("Select Systems Manager Menu ", "", False),
("Do you really want to halt\?", "YES", True)
]
KIDS_FILE_PATH_MAX_LEN = 75 # this might need to be fixed in VistA XPD
#---------------------------------------------------------------------------#
# Class Methods
#---------------------------------------------------------------------------#
""" Constructor
@kidsFile: the absolute path to KIDS Build file
@kidsInstallName: the install name for the KIDS Build
@seqNo: seqNo of the KIDS Build, default is None
@logFile: logFile to store the log information for VistA interaction
@multiBuildList: a python list of install names, only applies to
a multibuilds KIDS Build
@duz: the applier's VistA DUZ, default is set to 17, in VistA FOIA
it is USER SEVENTEEN
@**kargs: any extra information that might be needed
"""
def __init__(self, kidsFile, kidsInstallName, seqNo=None, logFile=None,
multiBuildList = None, duz = DEFAULT_INSTALL_DUZ, **kargs):
assert os.path.exists(kidsFile), ("kids file does not exist %s" % kidsFile)
self._origKidsFile = kidsFile
if len(kidsFile) >= self.KIDS_FILE_PATH_MAX_LEN:
destFilename = os.path.basename(kidsFile)
tempDir = tempfile.gettempdir()
if isValidExternalDataFileName(kidsFile):
# if read directly from inplace, need to replace the name with hash
destFilename = getSha1HashFromExternalDataFileName(kidsFile)
while (len(tempDir)+len(destFilename)+1) >= self.KIDS_FILE_PATH_MAX_LEN:
tempDir = os.path.split(tempDir)[0]
dest = os.path.join(tempDir, destFilename)
shutil.copy(kidsFile, dest)
self._kidsFile = os.path.normpath(dest)
logger.info("new kids file is %s" % self._kidsFile)
else:
self._kidsFile = os.path.normpath(kidsFile)
self._kidsInstallName = kidsInstallName
self._logFile = logFile
self._duz = duz
self._updatePackageLink = False
self._multiBuildList = multiBuildList
# store all the globals files associated with KIDS"
self._globalFiles = None
if "globals" in kargs:
self._globalFiles = kargs['globals']
self._tgOutputDir = None
if "printTG" in kargs:
self._tgOutputDir = kargs['printTG']
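  # Construction sketch (the file paths and install name here are
  # hypothetical, shown only to illustrate the constructor arguments):
  #   installer = DefaultKIDSBuildInstaller(
  #       "/tmp/LR_52_334.KIDs", "LR*5.2*334", seqNo="288",
  #       logFile="/tmp/install.log", globals=["/tmp/LR_52_334.GLBs"])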
""" set up the log for VistA connection
@connection: a connection from a VistATestClient
"""
def __setupLogFile__(self, connection):
if self._logFile:
connection.logfile = open(self._logFile, "ab")
else:
connection.logfile = sys.stdout
""" Go to KIDS Main Menu
      Always start with ready state (wait for prompt)
"""
def __gotoKIDSMainMenu__(self, vistATestClient):
menuUtil = VistAMenuUtil(self._duz)
menuUtil.gotoKidsMainMenu(vistATestClient)
""" load the KIDS build distribution file via menu
must be called while in KIDS Main Menu
"""
def __loadKIDSBuild__(self, connection):
connection.send("Installation\r")
connection.expect("Select Installation ")
connection.send("1\r") # load the distribution
connection.expect("Enter a Host File:")
connection.send(self._kidsFile+"\r")
""" Answer all the KIDS install questions
"""
def __handleKIDSInstallQuestions__(self, connection, connection2=None):
connection.send("Install\r")
connection.expect("Select INSTALL NAME:")
connection.send(self._kidsInstallName+"\r")
""" handle any questions lastconnection general KIDS installation questions"""
result = self.handleKIDSInstallQuestions(connection)
if not result:
return False
kidsMenuActionLst = self.KIDS_MENU_OPTION_ACTION_LIST
while True:
index = connection.expect([x[0] for x in kidsMenuActionLst])
if index > 0:
sendCmd = kidsMenuActionLst[index][1]
if sendCmd != None:
connection.send("%s\r" % sendCmd)
if kidsMenuActionLst[index][2]:
break
else:
connection.send("")
return True
""" restart the previous installation
"""
def restartInstallation(self, vistATestClient):
logger.warn("restart the previous installation for %s" %
self._kidsInstallName)
connection = vistATestClient.getConnection()
self.__gotoKIDSMainMenu__(vistATestClient)
self.__selectRestartInstallOption__(connection)
index = connection.expect(["DEVICE: ", "Select INSTALL NAME: "])
if index == 0:
self.__installationCommon__(vistATestClient)
return True
else:
logger.error("Restart install %s failed" % self._kidsInstallName)
""" go back to KIDS main menu first """
connection.send('\r')
connection.expect("Select Installation ")
connection.send('\r')
""" try to unload a distribution first """
result = self.unloadDistribution(vistATestClient, False)
if not result:
logger.error("Unload Distribution %s failed" % self._kidsInstallName)
return self.normalInstallation(vistATestClient)
""" go to the restart KIDS build option """
def __selectRestartInstallOption__(self, connection):
connection.send("Installation\r")
connection.expect("Select Installation ")
connection.send("Restart Install of\r") # restart install of package(s)
connection.expect("Select INSTALL NAME: ")
connection.send(self._kidsInstallName+"\r")
""" go to the unload a distribution option """
def __selectUnloadDistributionOption__(self, connection):
#connection.expect("Select Kernel Installation & Distribution System ")
connection.send("installation\r")
connection.expect("Select Installation ")
connection.send("Unload a Distribution\r")
connection.expect("Select INSTALL NAME: ")
connection.send(self._kidsInstallName+"\r")
""" unload a previous loaded distribution """
def unloadDistribution(self, vistATestClient, waitForPrompt=True):
connection = vistATestClient.getConnection()
logger.info("Unload distribution for %s" % self._kidsInstallName)
if waitForPrompt:
self.__gotoKIDSMainMenu__(vistATestClient)
self.__selectUnloadDistributionOption__(connection)
index = connection.expect([
"Want to continue with the Unload of this Distribution\? NO// ",
"Select INSTALL NAME: "])
if index == 1:
connection.send('\r')
self.__exitKIDSMenu__(vistATestClient)
return False
connection.send('YES\r')
self.__exitKIDSMenu__(vistATestClient)
return True
""" Do a fresh load and installation """
def normalInstallation(self, vistATestClient, vistATestClient2=None, reinst=True):
logger.info("Start installing %s" % self._kidsInstallName)
connection = vistATestClient.getConnection()
if vistATestClient2:
connection2 = vistATestClient2.getConnection()
self.__gotoKIDSMainMenu__(vistATestClient)
self.__loadKIDSBuild__(connection)
result = self.__handleKIDSLoadOptions__(connection, reinst)
if not result:
logger.error("Error handling KIDS Load Options %s, %s" %
(self._kidsInstallName, self._kidsFile))
return False
if self._tgOutputDir:
if self._multiBuildList is None:
self.__printTransportGlobal__(vistATestClient,[self._kidsInstallName],self._tgOutputDir)
else:
self.__printTransportGlobal__(vistATestClient,self._multiBuildList,self._tgOutputDir)
if vistATestClient2:
result = self.__handleKIDSInstallQuestions__(connection, connection2)
else:
result = self.__handleKIDSInstallQuestions__(connection)
if not result:
result = self.unloadDistribution(vistATestClient, False)
if not result:
logger.error("Unload %s failed" % self._kidsInstallName)
return False
return self.normalInstallation(vistATestClient, vistATestClient2, reinst)
self.__installationCommon__(vistATestClient)
return True
""" common shared workflow in KIDS installation process """
def __installationCommon__(self, vistATestClient):
connection = vistATestClient.getConnection()
self.setupDevice(connection)
self.__checkInstallationProgress__(connection)
self.__exitKIDSMenu__(vistATestClient)
self.extraFixWork(vistATestClient)
""" Handle options during load KIDS distribution section """
def __handleKIDSLoadOptions__(self, connection, reinst):
loadOptionActionList = self.KIDS_LOAD_QUESTION_ACTION_LIST[:]
""" make sure install completed is the last one """
loadOptionActionList.append(
(self._kidsInstallName + " Install Completed", None))
while True:
index = connection.expect([x[0] for x in loadOptionActionList], 120)
if index == len(loadOptionActionList) - 1:
if not reinst:
return False
else:
connection.send("%s\r" % (loadOptionActionList[index][1]))
if loadOptionActionList[index][2]:
break
return True
""" Exit the KIDS Menu option.
Make sure the VistA connection is in the ready state (wait for prompt)
"""
def __exitKIDSMenu__(self, vistATestClient):
exitMenuActionList = self.EXIT_KIDS_MENU_ACTION_LIST[:]
connection = vistATestClient.getConnection()
""" add wait for prompt """
exitMenuActionList.append((vistATestClient.getPrompt(), "\r", True))
expectList = [x[0] for x in exitMenuActionList]
while True:
idx = connection.expect(expectList,120)
connection.send("%s\r" % exitMenuActionList[idx][1])
if exitMenuActionList[idx][2]:
break
""" Checking the current status of the KIDS build
"""
def __checkInstallationProgress__(self, connection):
KIDS_BUILD_STATUS_ACTION_LIST = [
("Running Pre-Install Routine:",self.runPreInstallationRoutine,False),
("Running Post-Install Routine:",self.runPostInstallationRoutine,False),
("Begin Post-Install:",None,False),
("Starting Menu Rebuild:", None , False),
("Installing Routines:", None , False),
("Installing Data:", None , False),
("Menu Rebuild Complete:", None , False),
("Installing PACKAGE COMPONENTS:", None ,False),
("Send mail to: ", self.handleSendMailToOptions, False),
("Select Installation ", self.handleInstallError, True),
("Install Completed", self.installCompleted, True)
]
""" Bulid the status update action list """
statusActionList = []
installName = self._kidsInstallName
if self._multiBuildList:
for item in self._multiBuildList:
statusActionList.append(
(re.escape("Install Started for %s :" %item), None, False))
statusActionList.append(
(re.escape("%s Installed." % item), None, False))
else:
statusActionList.append(
(re.escape("Install Started for %s :" % installName),
None, False))
statusActionList.append(
(re.escape("%s Installed." % installName), None, False))
statusActionList.extend(KIDS_BUILD_STATUS_ACTION_LIST)
expectList = [x[0] for x in statusActionList]
while True:
index = connection.expect(expectList, CHECK_INSTALLATION_PROGRESS_TIMEOUT)
status = expectList[index].replace("\\","")
logger.info(status)
callback = statusActionList[index][1]
if callback:
callback(connection, status=status)
if statusActionList[index][2]:
break
else:
continue
""" This is the entry point of KIDS installer
It defines the workflow of KIDS installation process
      @reinst: whether to re-install the KIDS build, default is False
@return, True if no error, otherwise False
"""
def runInstallation(self, vistATestClient, vistATestClient2=None, reinst=False):
connection = vistATestClient.getConnection()
self.__setupLogFile__(connection)
infoFetcher = VistAPackageInfoFetcher(vistATestClient)
installStatus = infoFetcher.getInstallationStatus(self._kidsInstallName)
""" select KIDS installation workflow based on install status """
if infoFetcher.isInstallCompleted(installStatus):
logger.warn("install %s is already completed!" %
self._kidsInstallName)
if not reinst:
return True
# run pre-installation preparation
self.preInstallationWork(vistATestClient)
if infoFetcher.isInstallStarted(installStatus):
return self.restartInstallation(vistATestClient)
return self.normalInstallation(vistATestClient,vistATestClient2, reinst)
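  # Call sketch, as exercised by main() below:
  #   installer.runInstallation(testClient, reinst=False)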
def __printTGlobalChecksums__(self,testClient,installname,outputDir):
connection = testClient.getConnection()
connection.expect("Select Installation")
connection.send("Verify Checksums\r")
connection.expect("Select INSTALL NAME")
connection.send(installname +"\r")
connection.expect("Want each Routine Listed with Checksums")
connection.send("YES\r")
connection.expect("DEVICE")
connection.send("HFS\r")
connection.expect("HOST FILE NAME")
logfile=os.path.join(outputDir,installNameToDirName(installname)+"Checksums.log")
if testClient.isCache():
logfile=os.path.normpath(logfile)
connection.send(logfile+"\r")
connection.expect("PARAMETERS")
if testClient.isCache():
connection.send("\r")
else:
connection.send("NEWVERSION:NOREADONLY:VARIABLE\r")
index = connection.expect(["Select Installation","overwrite it"],600)
if index == 0:
connection.send("?\r")
else:
connection.send('\r')
def __printTGlobalSummary__(self,testClient,installname,outputDir):
connection = testClient.getConnection()
connection.expect("Select Installation")
connection.send("Print Transport Global\r")
connection.expect("Select INSTALL NAME")
connection.send(installname +"\r")
connection.expect("What to Print")
connection.send('2\r')
connection.expect("DEVICE")
connection.send("HFS\r")
connection.expect("HOST FILE NAME")
logfile=os.path.join(outputDir,installNameToDirName(installname)+"Print.log")
if testClient.isCache():
logfile=os.path.normpath(logfile)
connection.send(logfile+"\r")
connection.expect("PARAMETERS")
if testClient.isCache():
connection.send("\r")
else:
connection.send("NEWVERSION:NOREADONLY:VARIABLE\r")
index = connection.expect(["Select Installation","overwrite it"],600)
if index == 0:
connection.send("?\r")
else:
connection.send('\r')
def __printTGlobalCompare__(self,testClient,installname,outputDir):
connection = testClient.getConnection()
connection.expect("Select Installation")
connection.send("Compare Transport Global\r")
connection.expect("Select INSTALL NAME")
connection.send(installname +"\r")
connection.expect("Type of Compare")
connection.send("1\r")
connection.expect("DEVICE")
connection.send("HFS\r")
connection.expect("HOST FILE NAME")
logfile=os.path.join(outputDir,installNameToDirName(installname)+"Compare.log")
if testClient.isCache():
logfile=os.path.normpath(logfile)
connection.send(logfile+"\r")
connection.expect("PARAMETERS")
if testClient.isCache():
connection.send("\r")
else:
connection.send("NEWVERSION:NOREADONLY:VARIABLE\r")
index = connection.expect(["Select Installation","overwrite it"],600)
if index == 0:
connection.send("?\r")
else:
connection.send('\r')
''' Print out the checksums and the summary of the transport global '''
def __printTransportGlobal__(self,testClient,installNameList,outputDir):
for installName in installNameList:
self.__printTGlobalChecksums__(testClient,installName,outputDir)
self.__printTGlobalSummary__(testClient,installName,outputDir)
self.__printTGlobalCompare__(testClient,installName,outputDir)
#---------------------------------------------------------------------------#
# Public override methods sections
#---------------------------------------------------------------------------#
""" Set up the KIDS installation result output device
default is to use HOME device
      if you want to use a different device, please override this method
"""
def setupDevice(self, connection):
connection.send("HOME;82;999\r")
""" intended to be implemented by subclass
this is to handle any build related questions that
      come up before the general KIDS questions
default implementation is to check the error condition
"""
def handleKIDSInstallQuestions(self, connection, **kargs):
errorCheckTimeout = 5 # 5 seconds
try:
connection.expect("\*\*INSTALL FILE IS CORRUPTED\*\*",errorCheckTimeout)
logger.error("%s:INSTALL FILE IS CORRUPTED" % self._kidsInstallName)
connection.expect("Select Installation ", errorCheckTimeout)
connection.send('\r')
return False
except Exception as ex:
return True
""" intended to be implemented by subclass
      answer questions related to the pre-install routine
"""
def runPreInstallationRoutine(self, connection, **kargs):
pass
""" intended to be implemented by subclass
      answer questions related to the post-install routine
"""
def runPostInstallationRoutine(self, connection, **kargs):
pass
""" intended to be implemented by subclass """
def extraFixWork(self, vistATestClient):
pass
""" default action for Send Mail To option
please override or enhance it if more action is needed
"""
def handleSendMailToOptions(self, connection, **kargs):
connection.send("\r")
connection.expect("Select basket to send to: ")
connection.send("\r")
connection.expect("Send ")
connection.send("\r")
""" default action for install completed
please override or enhance it if more action is needed
"""
def installCompleted(self, connection, **kargs):
    extraInfo = connection.before
logger.debug(extraInfo)
if re.search("No link to PACKAGE file", extraInfo):
self._updatePackageLink = True
logger.warn("You might have to update KIDS build %s to link"
" to Package file" %
(self._kidsInstallName))
""" default action for installation error
please override or enhance it if more action is needed
"""
def handleInstallError(self, connection, **kargs):
logger.error("Installation failed for %s" % self._kidsInstallName)
connection.send("\r")
""" default action for pre-installation preperation.
right now it is just to import the globals file under
the same directory as the KIDs directory
please override or enhance it if more action is needed
"""
def preInstallationWork(self, vistATestClient, **kargs):
""" ignore the multi-build patch for now """
if self._multiBuildList is not None:
return
globalFiles = self.__getGlobalFileList__()
if globalFiles is None or len(globalFiles) == 0:
return
globalImport = VistAGlobalImport()
for glbFile in globalFiles:
logger.info("Import global file %s" % (glbFile))
fileSize = os.path.getsize(glbFile)
importTimeout = DEFAULT_GLOBAL_IMPORT_TIMEOUT
importTimeout += int(old_div(fileSize,GLOBAL_IMPORT_BYTE_PER_SEC))
globalImport.importGlobal(vistATestClient, glbFile, timeout=importTimeout)
#---------------------------------------------------------------------------#
# Utilities Functions
#---------------------------------------------------------------------------#
""" utility function to find the all global files ends with GLB/s """
def __getGlobalFileList__(self):
globalFiles = []
if self._globalFiles is None or len(self._globalFiles) == 0:
return globalFiles
for gFile in self._globalFiles:
if isValidGlobalFileSuffix(gFile):
globalFiles.append(gFile)
continue
if isValidGlobalSha1Suffix(gFile): # external file
sha1Sum = readSha1SumFromSha1File(gFile)
(result, path) = obtainKIDSBuildFileBySha1(gFile,
sha1Sum,
DEFAULT_CACHE_DIR)
if not result:
logger.error("Could not obtain global file for %s" % gFile)
raise Exception("Error getting global file for %s" % gFile)
globalFiles.append(path)
if len(globalFiles) > 0:
logger.info("global file lists %s" % globalFiles)
return globalFiles
""" utility function to find the name associated the DUZ """
def getPersonNameByDuz(inputDuz, vistAClient):
logger.info ("inputDuz is %s" % inputDuz)
""" user Kernel User API """
connection = vistAClient.getConnection()
menuUtil = VistAMenuUtil(duz=1)
menuUtil.gotoSystemMenu(vistAClient)
connection.send('Prog\r')
connection.expect('Select Programmer Options')
connection.send('^\r')
menuUtil.exitSystemMenu(vistAClient)
vistAClient.waitForPrompt()
connection.send('W $$NAME^XUSER(%s)\r' % inputDuz)
connection.expect('\)') # get rid of the echo
vistAClient.waitForPrompt()
  result = connection.before.strip(' \r\n')
connection.send('\r')
return result
""" function to add an entry to PACAKGE HISTORY """
def addPackagePatchHistory(packageName, version, seqNo,
patchNo, vistAClient, inputDuz):
logger.info("Adding %s, %s, %s, %s to Package Patch history" %
(packageName, version, seqNo, patchNo))
connection = vistAClient.getConnection()
menuUtil = VistAMenuUtil(duz=1)
menuUtil.gotoFileManEditEnterEntryMenu(vistAClient)
connection.send("9.4\r") # package file
connection.expect("EDIT WHICH FIELD: ")
connection.send("VERSION\r")
connection.expect("EDIT WHICH VERSION SUB-FIELD: ")
connection.send("PATCH APPLICATION HISTORY\r")
connection.expect("EDIT WHICH PATCH APPLICATION HISTORY SUB-FIELD: ")
connection.send("ALL\r")
connection.expect("THEN EDIT VERSION SUB-FIELD: ")
connection.send("\r")
connection.expect("THEN EDIT FIELD: ")
connection.send("\r")
connection.expect("Select PACKAGE NAME: ")
connection.send("%s\r" % packageName)
connection.expect("Select VERSION: %s//" % version)
connection.send("\r")
connection.expect("Select PATCH APPLICATION HISTORY: ")
connection.send("%s SEQ #%s\r" % (patchNo, seqNo))
connection.expect("Are you adding .*\? No//")
connection.send("YES\r")
connection.expect("DATE APPLIED: ")
connection.send("T\r")
connection.expect("APPLIED BY: ")
connection.send("`%s\r" % inputDuz)
connection.expect("DESCRIPTION:")
connection.send("\r")
connection.expect("Select PATCH APPLICATION HISTORY: ")
connection.send("\r")
connection.expect("Select PACKAGE NAME: ")
connection.send("\r")
menuUtil.exitFileManMenu(vistAClient)
""" class KIDSInstallerFactory
create KIDS installer via Factory methods
"""
class KIDSInstallerFactory(object):
installerDict = {}
@staticmethod
def createKIDSInstaller(kidsFile, kidsInstallName,
seqNo=None, logFile=None,
multiBuildList=None, duz=DEFAULT_INSTALL_DUZ,
**kargs):
return KIDSInstallerFactory.installerDict.get(
kidsInstallName,
DefaultKIDSBuildInstaller)(kidsFile,
kidsInstallName,
seqNo, logFile,
multiBuildList, duz,
**kargs)
@staticmethod
def registerKidsInstaller(kidsInstallName, kidsInstaller):
KIDSInstallerFactory.installerDict[kidsInstallName] = kidsInstaller
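# Factory usage sketch ("MyKidsInstaller" is a hypothetical subclass of
# DefaultKIDSBuildInstaller registered for one specific install name):
#   KIDSInstallerFactory.registerKidsInstaller("LR*5.2*334", MyKidsInstaller)
#   installer = KIDSInstallerFactory.createKIDSInstaller(kidsFile, "LR*5.2*334")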
""" Test code """
def createTestClient():
testClientParser = createTestClientArgParser()
parser = argparse.ArgumentParser(description='Default KIDS Installer',
parents=[testClientParser])
result = parser.parse_args();
print (result)
testClient = VistATestClientFactory.createVistATestClientWithArgs(result)
return testClient
def testAddPackagePatchHistory():
testClient = createTestClient()
with testClient:
addPackagePatchHistory("LAB SERVICE", "5.2", "288", "334",
testClient, 17)
""" Test Function getPersonNameByDuz """
def testGetPersonNameByDuz():
testClient = createTestClient()
initConsoleLogging()
with testClient:
result = getPersonNameByDuz(1, testClient)
print ("Name is [%s]" % result)
""" main entry """
def main():
testClientParser = createTestClientArgParser()
parser = argparse.ArgumentParser(description='Default KIDS Installer',
parents=[testClientParser])
parser.add_argument('kidsFile', help='path to KIDS Build file')
parser.add_argument('-l', '--logFile', default=None, help='path to logFile')
parser.add_argument('-r', '--reinstall', default=False, action='store_true',
help='whether re-install the KIDS even it is already installed')
parser.add_argument('-t', '--tglobalprint', default=None,
help='folder to hold a printout of Transport global information')
parser.add_argument('-g', '--globalFiles', default=None, nargs='*',
help='list of global files that need to import')
parser.add_argument('-d', '--duz', default=DEFAULT_INSTALL_DUZ, type=int,
help='installer\'s VistA instance\'s DUZ')
result = parser.parse_args();
print (result)
testClient = VistATestClientFactory.createVistATestClientWithArgs(result)
assert testClient
initConsoleLogging()
with testClient:
kidsFile = os.path.abspath(result.kidsFile)
from KIDSBuildParser import KIDSBuildParser
kidsParser = KIDSBuildParser(None)
kidsParser.unregisterSectionHandler(KIDSBuildParser.ROUTINE_SECTION)
kidsParser.parseKIDSBuild(kidsFile)
installNameList = kidsParser.installNameList
installName = installNameList[0]
multiBuildList = installNameList
if len(installNameList) == 1:
multiBuildList = None
defaultKidsInstall = DefaultKIDSBuildInstaller(kidsFile,
installName,
logFile=result.logFile,
multiBuildList=multiBuildList,
duz = result.duz,
globals=result.globalFiles,
printTG=result.tglobalprint)
    # reinst is the third parameter; pass it by keyword so it is not
    # mistaken for the optional second test client
    defaultKidsInstall.runInstallation(testClient, reinst=result.reinstall)
if __name__ == "__main__":
main()
| apache-2.0 |
jxta/cc | vendor/Twisted-10.0.0/twisted/web/test/test_domhelpers.py | 53 | 11063 | # -*- test-case-name: twisted.web.test.test_domhelpers -*-
# Copyright (c) 2001-2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Specific tests for (some of) the methods in L{twisted.web.domhelpers}.
"""
from xml.dom import minidom
from twisted.trial.unittest import TestCase
from twisted.web import microdom
from twisted.web import domhelpers
class DOMHelpersTestsMixin:
"""
A mixin for L{TestCase} subclasses which defines test methods for
domhelpers functionality based on a DOM creation function provided by a
subclass.
"""
dom = None
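    # Concrete subclasses below supply the DOM implementation under test:
    # MicroDOMHelpersTests sets ``dom = microdom`` and MiniDOMHelpersTests
    # sets ``dom = minidom``.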
def test_getElementsByTagName(self):
doc1 = self.dom.parseString('<foo/>')
actual=domhelpers.getElementsByTagName(doc1, 'foo')[0].nodeName
expected='foo'
self.assertEquals(actual, expected)
el1=doc1.documentElement
actual=domhelpers.getElementsByTagName(el1, 'foo')[0].nodeName
self.assertEqual(actual, expected)
doc2_xml='<a><foo in="a"/><b><foo in="b"/></b><c><foo in="c"/></c><foo in="d"/><foo in="ef"/><g><foo in="g"/><h><foo in="h"/></h></g></a>'
doc2 = self.dom.parseString(doc2_xml)
tag_list=domhelpers.getElementsByTagName(doc2, 'foo')
actual=''.join([node.getAttribute('in') for node in tag_list])
expected='abcdefgh'
self.assertEquals(actual, expected)
el2=doc2.documentElement
tag_list=domhelpers.getElementsByTagName(el2, 'foo')
actual=''.join([node.getAttribute('in') for node in tag_list])
self.assertEqual(actual, expected)
doc3_xml='''
<a><foo in="a"/>
<b><foo in="b"/>
<d><foo in="d"/>
<g><foo in="g"/></g>
<h><foo in="h"/></h>
</d>
<e><foo in="e"/>
<i><foo in="i"/></i>
</e>
</b>
<c><foo in="c"/>
<f><foo in="f"/>
<j><foo in="j"/></j>
</f>
</c>
</a>'''
doc3 = self.dom.parseString(doc3_xml)
tag_list=domhelpers.getElementsByTagName(doc3, 'foo')
actual=''.join([node.getAttribute('in') for node in tag_list])
expected='abdgheicfj'
self.assertEquals(actual, expected)
el3=doc3.documentElement
tag_list=domhelpers.getElementsByTagName(el3, 'foo')
actual=''.join([node.getAttribute('in') for node in tag_list])
self.assertEqual(actual, expected)
doc4_xml='<foo><bar></bar><baz><foo/></baz></foo>'
doc4 = self.dom.parseString(doc4_xml)
actual=domhelpers.getElementsByTagName(doc4, 'foo')
root=doc4.documentElement
expected=[root, root.childNodes[-1].childNodes[0]]
self.assertEquals(actual, expected)
actual=domhelpers.getElementsByTagName(root, 'foo')
self.assertEqual(actual, expected)
def test_gatherTextNodes(self):
doc1 = self.dom.parseString('<a>foo</a>')
actual=domhelpers.gatherTextNodes(doc1)
expected='foo'
self.assertEqual(actual, expected)
actual=domhelpers.gatherTextNodes(doc1.documentElement)
self.assertEqual(actual, expected)
doc2_xml='<a>a<b>b</b><c>c</c>def<g>g<h>h</h></g></a>'
doc2 = self.dom.parseString(doc2_xml)
actual=domhelpers.gatherTextNodes(doc2)
expected='abcdefgh'
self.assertEqual(actual, expected)
actual=domhelpers.gatherTextNodes(doc2.documentElement)
self.assertEqual(actual, expected)
doc3_xml=('<a>a<b>b<d>d<g>g</g><h>h</h></d><e>e<i>i</i></e></b>' +
'<c>c<f>f<j>j</j></f></c></a>')
doc3 = self.dom.parseString(doc3_xml)
actual=domhelpers.gatherTextNodes(doc3)
expected='abdgheicfj'
self.assertEqual(actual, expected)
actual=domhelpers.gatherTextNodes(doc3.documentElement)
self.assertEqual(actual, expected)
def test_clearNode(self):
doc1 = self.dom.parseString('<a><b><c><d/></c></b></a>')
a_node=doc1.documentElement
domhelpers.clearNode(a_node)
self.assertEqual(
a_node.toxml(),
self.dom.Element('a').toxml())
doc2 = self.dom.parseString('<a><b><c><d/></c></b></a>')
b_node=doc2.documentElement.childNodes[0]
domhelpers.clearNode(b_node)
actual=doc2.documentElement.toxml()
expected = self.dom.Element('a')
expected.appendChild(self.dom.Element('b'))
self.assertEqual(actual, expected.toxml())
def test_get(self):
doc1 = self.dom.parseString('<a><b id="bar"/><c class="foo"/></a>')
node=domhelpers.get(doc1, "foo")
actual=node.toxml()
expected = self.dom.Element('c')
expected.setAttribute('class', 'foo')
self.assertEqual(actual, expected.toxml())
node=domhelpers.get(doc1, "bar")
actual=node.toxml()
expected = self.dom.Element('b')
expected.setAttribute('id', 'bar')
self.assertEqual(actual, expected.toxml())
self.assertRaises(domhelpers.NodeLookupError,
domhelpers.get,
doc1,
"pzork")
def test_getIfExists(self):
doc1 = self.dom.parseString('<a><b id="bar"/><c class="foo"/></a>')
node=domhelpers.getIfExists(doc1, "foo")
actual=node.toxml()
expected = self.dom.Element('c')
expected.setAttribute('class', 'foo')
self.assertEqual(actual, expected.toxml())
node=domhelpers.getIfExists(doc1, "pzork")
self.assertIdentical(node, None)
def test_getAndClear(self):
doc1 = self.dom.parseString('<a><b id="foo"><c></c></b></a>')
node=domhelpers.getAndClear(doc1, "foo")
actual=node.toxml()
expected = self.dom.Element('b')
expected.setAttribute('id', 'foo')
self.assertEqual(actual, expected.toxml())
def test_locateNodes(self):
doc1 = self.dom.parseString('<a><b foo="olive"><c foo="olive"/></b><d foo="poopy"/></a>')
node_list=domhelpers.locateNodes(
doc1.childNodes, 'foo', 'olive', noNesting=1)
actual=''.join([node.toxml() for node in node_list])
expected = self.dom.Element('b')
expected.setAttribute('foo', 'olive')
c = self.dom.Element('c')
c.setAttribute('foo', 'olive')
expected.appendChild(c)
self.assertEqual(actual, expected.toxml())
node_list=domhelpers.locateNodes(
doc1.childNodes, 'foo', 'olive', noNesting=0)
actual=''.join([node.toxml() for node in node_list])
self.assertEqual(actual, expected.toxml() + c.toxml())
def test_getParents(self):
doc1 = self.dom.parseString('<a><b><c><d/></c><e/></b><f/></a>')
node_list = domhelpers.getParents(
doc1.childNodes[0].childNodes[0].childNodes[0])
actual = ''.join([node.tagName for node in node_list
if hasattr(node, 'tagName')])
self.assertEqual(actual, 'cba')
def test_findElementsWithAttribute(self):
doc1 = self.dom.parseString('<a foo="1"><b foo="2"/><c foo="1"/><d/></a>')
node_list = domhelpers.findElementsWithAttribute(doc1, 'foo')
actual = ''.join([node.tagName for node in node_list])
self.assertEqual(actual, 'abc')
node_list = domhelpers.findElementsWithAttribute(doc1, 'foo', '1')
actual = ''.join([node.tagName for node in node_list])
self.assertEqual(actual, 'ac')
def test_findNodesNamed(self):
doc1 = self.dom.parseString('<doc><foo/><bar/><foo>a</foo></doc>')
node_list = domhelpers.findNodesNamed(doc1, 'foo')
actual = len(node_list)
self.assertEqual(actual, 2)
# NOT SURE WHAT THESE ARE SUPPOSED TO DO..
# def test_RawText FIXME
# def test_superSetAttribute FIXME
# def test_superPrependAttribute FIXME
# def test_superAppendAttribute FIXME
# def test_substitute FIXME
def test_escape(self):
j='this string " contains many & characters> xml< won\'t like'
expected='this string &quot; contains many &amp; characters&gt; xml&lt; won\'t like'
self.assertEqual(domhelpers.escape(j), expected)
def test_unescape(self):
j='this string &quot; has &&amp; entities &gt; &lt; and some characters xml won\'t like<'
expected='this string " has && entities > < and some characters xml won\'t like<'
self.assertEqual(domhelpers.unescape(j), expected)
def test_getNodeText(self):
"""
L{getNodeText} returns the concatenation of all the text data at or
beneath the node passed to it.
"""
node = self.dom.parseString('<foo><bar>baz</bar><bar>quux</bar></foo>')
self.assertEqual(domhelpers.getNodeText(node), "bazquux")
class MicroDOMHelpersTests(DOMHelpersTestsMixin, TestCase):
dom = microdom
def test_gatherTextNodesDropsWhitespace(self):
"""
Microdom discards whitespace-only text nodes, so L{gatherTextNodes}
returns only the text from nodes which had non-whitespace characters.
"""
doc4_xml='''<html>
<head>
</head>
<body>
stuff
</body>
</html>
'''
doc4 = self.dom.parseString(doc4_xml)
actual = domhelpers.gatherTextNodes(doc4)
expected = '\n stuff\n '
self.assertEqual(actual, expected)
actual = domhelpers.gatherTextNodes(doc4.documentElement)
self.assertEqual(actual, expected)
def test_textEntitiesNotDecoded(self):
"""
Microdom does not decode entities in text nodes.
"""
doc5_xml='<x>Souffl&amp;</x>'
doc5 = self.dom.parseString(doc5_xml)
actual=domhelpers.gatherTextNodes(doc5)
expected='Souffl&amp;'
self.assertEqual(actual, expected)
actual=domhelpers.gatherTextNodes(doc5.documentElement)
self.assertEqual(actual, expected)
class MiniDOMHelpersTests(DOMHelpersTestsMixin, TestCase):
dom = minidom
def test_textEntitiesDecoded(self):
"""
Minidom does decode entities in text nodes.
"""
doc5_xml='<x>Souffl&amp;</x>'
doc5 = self.dom.parseString(doc5_xml)
actual=domhelpers.gatherTextNodes(doc5)
expected='Souffl&'
self.assertEqual(actual, expected)
actual=domhelpers.gatherTextNodes(doc5.documentElement)
self.assertEqual(actual, expected)
def test_getNodeUnicodeText(self):
"""
L{domhelpers.getNodeText} returns a C{unicode} string when text
nodes are represented in the DOM with unicode, whether or not there
are non-ASCII characters present.
"""
node = self.dom.parseString("<foo>bar</foo>")
text = domhelpers.getNodeText(node)
self.assertEqual(text, u"bar")
self.assertIsInstance(text, unicode)
node = self.dom.parseString(u"<foo>\N{SNOWMAN}</foo>".encode('utf-8'))
text = domhelpers.getNodeText(node)
self.assertEqual(text, u"\N{SNOWMAN}")
self.assertIsInstance(text, unicode)
| apache-2.0 |
PaulKinlan/cli-caniuse | site/app/scripts/bower_components/jsrepl-build/extern/python/closured/lib/python2.7/json/encoder.py | 103 | 16014 | """Implementation of JSONEncoder
"""
import re
try:
from _json import encode_basestring_ascii as c_encode_basestring_ascii
except ImportError:
c_encode_basestring_ascii = None
try:
from _json import make_encoder as c_make_encoder
except ImportError:
c_make_encoder = None
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
ESCAPE_DCT = {
'\\': '\\\\',
'"': '\\"',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t',
}
for i in range(0x20):
ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
#ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
# Assume this produces an infinity on all machines (probably not guaranteed)
INFINITY = float('1e66666')
FLOAT_REPR = repr
def encode_basestring(s):
"""Return a JSON representation of a Python string
"""
def replace(match):
return ESCAPE_DCT[match.group(0)]
return '"' + ESCAPE.sub(replace, s) + '"'
def py_encode_basestring_ascii(s):
"""Return an ASCII-only JSON representation of a Python string
"""
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
s = s.decode('utf-8')
def replace(match):
s = match.group(0)
try:
return ESCAPE_DCT[s]
except KeyError:
n = ord(s)
if n < 0x10000:
return '\\u{0:04x}'.format(n)
#return '\\u%04x' % (n,)
else:
# surrogate pair
n -= 0x10000
s1 = 0xd800 | ((n >> 10) & 0x3ff)
s2 = 0xdc00 | (n & 0x3ff)
return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
#return '\\u%04x\\u%04x' % (s1, s2)
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
encode_basestring_ascii = (
c_encode_basestring_ascii or py_encode_basestring_ascii)
class JSONEncoder(object):
"""Extensible JSON <http://json.org> encoder for Python data structures.
Supports the following objects and types by default:
+-------------------+---------------+
| Python | JSON |
+===================+===============+
| dict | object |
+-------------------+---------------+
| list, tuple | array |
+-------------------+---------------+
| str, unicode | string |
+-------------------+---------------+
| int, long, float | number |
+-------------------+---------------+
| True | true |
+-------------------+---------------+
| False | false |
+-------------------+---------------+
| None | null |
+-------------------+---------------+
To extend this to recognize other objects, subclass and implement a
``.default()`` method that returns a serializable object for ``o`` if
possible; otherwise it should call the superclass implementation (to
raise ``TypeError``).
"""
item_separator = ', '
key_separator = ': '
def __init__(self, skipkeys=False, ensure_ascii=True,
check_circular=True, allow_nan=True, sort_keys=False,
indent=None, separators=None, encoding='utf-8', default=None):
"""Constructor for JSONEncoder, with sensible defaults.
If skipkeys is false, then it is a TypeError to attempt
encoding of keys that are not str, int, long, float or None. If
skipkeys is True, such items are simply skipped.
If ensure_ascii is true, the output is guaranteed to be str
objects with all incoming unicode characters escaped. If
ensure_ascii is false, the output will be a unicode object.
If check_circular is true, then lists, dicts, and custom encoded
objects will be checked for circular references during encoding to
prevent an infinite recursion (which would cause an OverflowError).
Otherwise, no such check takes place.
If allow_nan is true, then NaN, Infinity, and -Infinity will be
encoded as such. This behavior is not JSON specification compliant,
but is consistent with most JavaScript based encoders and decoders.
Otherwise, it will be a ValueError to encode such floats.
If sort_keys is true, then the output of dictionaries will be
sorted by key; this is useful for regression tests to ensure
that JSON serializations can be compared on a day-to-day basis.
If indent is a non-negative integer, then JSON array
elements and object members will be pretty-printed with that
indent level. An indent level of 0 will only insert newlines.
None is the most compact representation.
If specified, separators should be a (item_separator, key_separator)
tuple. The default is (', ', ': '). To get the most compact JSON
representation you should specify (',', ':') to eliminate whitespace.
If specified, default is a function that gets called for objects
that can't otherwise be serialized. It should return a JSON encodable
version of the object or raise a ``TypeError``.
If encoding is not None, then all input strings will be
transformed into unicode using that encoding prior to JSON-encoding.
The default is UTF-8.
"""
self.skipkeys = skipkeys
self.ensure_ascii = ensure_ascii
self.check_circular = check_circular
self.allow_nan = allow_nan
self.sort_keys = sort_keys
self.indent = indent
if separators is not None:
self.item_separator, self.key_separator = separators
if default is not None:
self.default = default
self.encoding = encoding
def default(self, o):
"""Implement this method in a subclass such that it returns
a serializable object for ``o``, or calls the base implementation
(to raise a ``TypeError``).
For example, to support arbitrary iterators, you could
implement default like this::
def default(self, o):
try:
iterable = iter(o)
except TypeError:
pass
else:
return list(iterable)
return JSONEncoder.default(self, o)
"""
raise TypeError(repr(o) + " is not JSON serializable")
def encode(self, o):
"""Return a JSON string representation of a Python data structure.
>>> JSONEncoder().encode({"foo": ["bar", "baz"]})
'{"foo": ["bar", "baz"]}'
"""
# This is for extremely simple cases and benchmarks.
if isinstance(o, basestring):
if isinstance(o, str):
_encoding = self.encoding
if (_encoding is not None
and not (_encoding == 'utf-8')):
o = o.decode(_encoding)
if self.ensure_ascii:
return encode_basestring_ascii(o)
else:
return encode_basestring(o)
# This doesn't pass the iterator directly to ''.join() because the
# exceptions aren't as detailed. The list call should be roughly
# equivalent to the PySequence_Fast that ''.join() would do.
chunks = self.iterencode(o, _one_shot=True)
if not isinstance(chunks, (list, tuple)):
chunks = list(chunks)
return ''.join(chunks)
def iterencode(self, o, _one_shot=False):
"""Encode the given object and yield each string
representation as available.
For example::
for chunk in JSONEncoder().iterencode(bigobject):
mysocket.write(chunk)
"""
if self.check_circular:
markers = {}
else:
markers = None
if self.ensure_ascii:
_encoder = encode_basestring_ascii
else:
_encoder = encode_basestring
if self.encoding != 'utf-8':
def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
if isinstance(o, str):
o = o.decode(_encoding)
return _orig_encoder(o)
def floatstr(o, allow_nan=self.allow_nan,
_repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
# Check for specials. Note that this type of test is processor
# and/or platform-specific, so do tests which don't depend on the
# internals.
if o != o:
text = 'NaN'
elif o == _inf:
text = 'Infinity'
elif o == _neginf:
text = '-Infinity'
else:
return _repr(o)
if not allow_nan:
raise ValueError(
"Out of range float values are not JSON compliant: " +
repr(o))
return text
if (_one_shot and c_make_encoder is not None
and self.indent is None and not self.sort_keys):
_iterencode = c_make_encoder(
markers, self.default, _encoder, self.indent,
self.key_separator, self.item_separator, self.sort_keys,
self.skipkeys, self.allow_nan)
else:
_iterencode = _make_iterencode(
markers, self.default, _encoder, self.indent, floatstr,
self.key_separator, self.item_separator, self.sort_keys,
self.skipkeys, _one_shot)
return _iterencode(o, 0)
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
_key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
## HACK: hand-optimized bytecode; turn globals into locals
ValueError=ValueError,
basestring=basestring,
dict=dict,
float=float,
id=id,
int=int,
isinstance=isinstance,
list=list,
long=long,
str=str,
tuple=tuple,
):
def _iterencode_list(lst, _current_indent_level):
if not lst:
yield '[]'
return
if markers is not None:
markerid = id(lst)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = lst
buf = '['
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
separator = _item_separator + newline_indent
buf += newline_indent
else:
newline_indent = None
separator = _item_separator
first = True
for value in lst:
if first:
first = False
else:
buf = separator
if isinstance(value, basestring):
yield buf + _encoder(value)
elif value is None:
yield buf + 'null'
elif value is True:
yield buf + 'true'
elif value is False:
yield buf + 'false'
elif isinstance(value, (int, long)):
yield buf + str(value)
elif isinstance(value, float):
yield buf + _floatstr(value)
else:
yield buf
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + (' ' * (_indent * _current_indent_level))
yield ']'
if markers is not None:
del markers[markerid]
def _iterencode_dict(dct, _current_indent_level):
if not dct:
yield '{}'
return
if markers is not None:
markerid = id(dct)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = dct
yield '{'
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
item_separator = _item_separator + newline_indent
yield newline_indent
else:
newline_indent = None
item_separator = _item_separator
first = True
if _sort_keys:
items = sorted(dct.items(), key=lambda kv: kv[0])
else:
items = dct.iteritems()
for key, value in items:
if isinstance(key, basestring):
pass
# JavaScript is weakly typed for these, so it makes sense to
# also allow them. Many encoders seem to do something like this.
elif isinstance(key, float):
key = _floatstr(key)
elif key is True:
key = 'true'
elif key is False:
key = 'false'
elif key is None:
key = 'null'
elif isinstance(key, (int, long)):
key = str(key)
elif _skipkeys:
continue
else:
raise TypeError("key " + repr(key) + " is not a string")
if first:
first = False
else:
yield item_separator
yield _encoder(key)
yield _key_separator
if isinstance(value, basestring):
yield _encoder(value)
elif value is None:
yield 'null'
elif value is True:
yield 'true'
elif value is False:
yield 'false'
elif isinstance(value, (int, long)):
yield str(value)
elif isinstance(value, float):
yield _floatstr(value)
else:
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + (' ' * (_indent * _current_indent_level))
yield '}'
if markers is not None:
del markers[markerid]
def _iterencode(o, _current_indent_level):
if isinstance(o, basestring):
yield _encoder(o)
elif o is None:
yield 'null'
elif o is True:
yield 'true'
elif o is False:
yield 'false'
elif isinstance(o, (int, long)):
yield str(o)
elif isinstance(o, float):
yield _floatstr(o)
elif isinstance(o, (list, tuple)):
for chunk in _iterencode_list(o, _current_indent_level):
yield chunk
elif isinstance(o, dict):
for chunk in _iterencode_dict(o, _current_indent_level):
yield chunk
else:
if markers is not None:
markerid = id(o)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = o
o = _default(o)
for chunk in _iterencode(o, _current_indent_level):
yield chunk
if markers is not None:
del markers[markerid]
return _iterencode
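# Editor's note: a minimal usage sketch (not part of the original module). It
# exercises the two extension points documented above -- a ``default()`` hook
# for otherwise unserializable objects and compact separators. The ``set``
# handling shown here is illustrative, not part of the stdlib API.
if __name__ == '__main__':
    class SetEncoder(JSONEncoder):
        def default(self, o):
            # Serialize sets as sorted lists; defer everything else to the
            # base class so it raises TypeError as documented.
            if isinstance(o, set):
                return sorted(o)
            return JSONEncoder.default(self, o)

    compact = SetEncoder(separators=(',', ':'))
    print(compact.encode({'tags': set(['json', 'encoder'])}))
    # prints: {"tags":["encoder","json"]}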
| apache-2.0 |
halfcrazy/sqlalchemy | lib/sqlalchemy/dialects/mysql/mysqlconnector.py | 59 | 5323 | # mysql/mysqlconnector.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: mysql+mysqlconnector
:name: MySQL Connector/Python
:dbapi: myconnpy
:connectstring: mysql+mysqlconnector://<user>:<password>@\
<host>[:<port>]/<dbname>
:url: http://dev.mysql.com/downloads/connector/python/
Unicode
-------
Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.
"""
from .base import (MySQLDialect, MySQLExecutionContext,
MySQLCompiler, MySQLIdentifierPreparer,
BIT)
from ... import util
import re
class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
def get_lastrowid(self):
return self.cursor.lastrowid
class MySQLCompiler_mysqlconnector(MySQLCompiler):
def visit_mod_binary(self, binary, operator, **kw):
if self.dialect._mysqlconnector_double_percents:
return self.process(binary.left, **kw) + " %% " + \
self.process(binary.right, **kw)
else:
return self.process(binary.left, **kw) + " % " + \
self.process(binary.right, **kw)
def post_process_text(self, text):
if self.dialect._mysqlconnector_double_percents:
return text.replace('%', '%%')
else:
return text
def escape_literal_column(self, text):
if self.dialect._mysqlconnector_double_percents:
return text.replace('%', '%%')
else:
return text
class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):
def _escape_identifier(self, value):
value = value.replace(self.escape_quote, self.escape_to_quote)
if self.dialect._mysqlconnector_double_percents:
return value.replace("%", "%%")
else:
return value
class _myconnpyBIT(BIT):
def result_processor(self, dialect, coltype):
"""MySQL-connector already converts mysql bits, so."""
return None
class MySQLDialect_mysqlconnector(MySQLDialect):
driver = 'mysqlconnector'
supports_unicode_binds = True
supports_sane_rowcount = True
supports_sane_multi_rowcount = True
supports_native_decimal = True
default_paramstyle = 'format'
execution_ctx_cls = MySQLExecutionContext_mysqlconnector
statement_compiler = MySQLCompiler_mysqlconnector
preparer = MySQLIdentifierPreparer_mysqlconnector
colspecs = util.update_copy(
MySQLDialect.colspecs,
{
BIT: _myconnpyBIT,
}
)
@util.memoized_property
def supports_unicode_statements(self):
return util.py3k or self._mysqlconnector_version_info > (2, 0)
@classmethod
def dbapi(cls):
from mysql import connector
return connector
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
opts.update(url.query)
util.coerce_kw_type(opts, 'buffered', bool)
util.coerce_kw_type(opts, 'raise_on_warnings', bool)
# unfortunately, MySQL/connector python refuses to release a
# cursor without reading fully, so non-buffered isn't an option
opts.setdefault('buffered', True)
# FOUND_ROWS must be set in ClientFlag to enable
# supports_sane_rowcount.
if self.dbapi is not None:
try:
from mysql.connector.constants import ClientFlag
client_flags = opts.get(
'client_flags', ClientFlag.get_default())
client_flags |= ClientFlag.FOUND_ROWS
opts['client_flags'] = client_flags
except Exception:
pass
return [[], opts]
@util.memoized_property
def _mysqlconnector_version_info(self):
if self.dbapi and hasattr(self.dbapi, '__version__'):
m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
self.dbapi.__version__)
if m:
return tuple(
int(x)
for x in m.group(1, 2, 3)
if x is not None)
@util.memoized_property
def _mysqlconnector_double_percents(self):
return not util.py3k and self._mysqlconnector_version_info < (2, 0)
def _get_server_version_info(self, connection):
dbapi_con = connection.connection
version = dbapi_con.get_server_version()
return tuple(version)
def _detect_charset(self, connection):
return connection.connection.charset
def _extract_error_code(self, exception):
return exception.errno
def is_disconnect(self, e, connection, cursor):
errnos = (2006, 2013, 2014, 2045, 2055, 2048)
exceptions = (self.dbapi.OperationalError, self.dbapi.InterfaceError)
if isinstance(e, exceptions):
return e.errno in errnos or \
"MySQL Connection not available." in str(e)
else:
return False
def _compat_fetchall(self, rp, charset=None):
return rp.fetchall()
def _compat_fetchone(self, rp, charset=None):
return rp.fetchone()
dialect = MySQLDialect_mysqlconnector
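# Editor's note: an illustrative connection sketch (not part of the original
# module), following the connectstring form documented in the module
# docstring. Credentials, host, and database name are placeholders; no
# connection is opened until the engine is first used.
if __name__ == '__main__':
    from sqlalchemy import create_engine
    engine = create_engine(
        "mysql+mysqlconnector://scott:tiger@localhost:3306/test"
        "?raise_on_warnings=true")
    print(engine.dialect.driver)  # -> mysqlconnector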
| mit |
ApplauseOSS/djangoevents | djangoevents/__init__.py | 1 | 1816 | import warnings
from eventsourcing.domain.model.entity import EventSourcedEntity
from eventsourcing.domain.model.entity import entity_mutator
from eventsourcing.domain.model.entity import singledispatch
from eventsourcing.domain.model.decorators import subscribe_to
from eventsourcing.domain.model.events import publish as es_publish
from eventsourcing.domain.model.events import subscribe
from eventsourcing.domain.model.events import unsubscribe
from eventsourcing.infrastructure.event_sourced_repo import EventSourcedRepository
from .domain import BaseEntity
from .domain import BaseAggregate
from .domain import DomainEvent
from .app import EventSourcingWithDjango
from .exceptions import EventSchemaError
from .schema import validate_event
from .settings import is_validation_enabled
default_app_config = 'djangoevents.apps.AppConfig'
__all__ = [
'DomainEvent',
'EventSourcedEntity',
'EventSourcedRepository',
'entity_mutator',
'singledispatch',
'publish',
'store_event',
'subscribe',
'unsubscribe',
'subscribe_to',
'BaseEntity',
'BaseAggregate',
'EventSourcingWithDjango'
]
def publish(event):
warnings.warn("`publish` is deprecated. Please switch to: `store_event`.", DeprecationWarning)
return es_publish(event)
def store_event(event, force_validate=False):
"""
Store an event to the service's event journal. Optionally validates event
schema if one is provided.
`force_validate` - enforces event schema validation even if configuration disables it globally.
"""
if is_validation_enabled() or force_validate:
is_valid = validate_event(event)
if not is_valid:
msg = "Event: {} does not match its schema.".format(event)
raise EventSchemaError(msg)
return es_publish(event)
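# Editor's note: a minimal usage sketch (not part of the original module).
# ``TodoCreated`` and its constructor arguments are hypothetical stand-ins for
# a DomainEvent subclass defined by the host service:
#
#     event = TodoCreated(entity_id='todo-1', entity_version=0)
#     store_event(event)                       # validates only if enabled globally
#     store_event(event, force_validate=True)  # always validates against schema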
| mit |
4ON91/KnickKnacks | Boolean Algebra Notes/LogicGates.py | 1 | 10004 | import copy
import csv
import os
class Gate:
def __init__(self, Sockets):
self.Sockets = Sockets
self.Inputs = []
self.UniqueInputs = ""
def canPass(self):
return(True)
def getInput(self, I):
if( (type(I) == Input) &
(I.sym().casefold() not in self.UniqueInputs.casefold()) ):
self.UniqueInputs += I.sym()
self.Inputs.append(I.On)
class Input:
def __init__(self, Symbol, On):
self.Symbol = Symbol.upper()[:1]
self.On = On
self.Position = (int, int)
def sym(self):
if(self.On):
return(self.Symbol.upper())
else:
return(self.Symbol.lower())
def csym(self):
return(self.Symbol.casefold())
def __repr__(self):
return(self.sym())
def __invert__(self):
if(self.On):
self.On = False
else:
self.On = True
def canPass(self):
return(False)
def canContinue(self, I):
return(True)
class Output:
def canPass(self):
return(True)
def canContinue(self, I):
return(True)
class AND(Gate):
def canContinue(self, I):
self.getInput(I)
if((True in self.Inputs)&
(False not in self.Inputs)&
(len(self.Inputs) >= self.Sockets)):
return(True)
else:
return(False)
class NAND(Gate):
def canContinue(self, I):
self.getInput(I)
if((False in self.Inputs)&
(True not in self.Inputs)&
(len(self.Inputs) >= self.Sockets)):
return(True)
else:
return(False)
class OR(Gate):
def canContinue(self, I):
self.getInput(I)
if( (len(self.Inputs) >= self.Sockets) &
(True in self.Inputs) ):
return(True)
else:
return(False)
class NOR(Gate):
def canContinue(self, I):
self.getInput(I)
if( (len(self.Inputs) >= self.Sockets) &
(False in self.Inputs) ):
return(True)
else:
return(False)
class INVERT:
def canPass(self):
return(True)
def canContinue(self, I):
~I
return(True)
class CircuitPath:
def __init__(self, Passable):
self.Passable = Passable
def canPass(self):
return(self.Passable)
def canContinue(self, I):
return(True)
def SwitchStateList(NumberOfSwitches):
binary_string = ""
i = 0
Switches = NumberOfSwitches
Switch_States = []
while( len(binary_string) <= NumberOfSwitches ):
binary_string = str(bin(i))[2:]
i += 1
Switch_States.append(("{:>0%s}"%str(Switches)).format(binary_string))
Switch_States.pop(-1)
return(Switch_States)
def ANDList(NumberOfSwitches):
a = list("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
binary_string = ""
i = 0
Switches = NumberOfSwitches
Switch_States = []
while( len(binary_string) <= NumberOfSwitches ):
binary_string = ("{:>0%s}"%str(Switches)).format(str(bin(i))[2:])
b = ""
for x in range(0, len(binary_string)):
if(int(binary_string[x]) == 0):
b += a[x].lower()
else:
b += a[x].upper()
i += 1
Switch_States.append(b)
Switch_States.pop(-1)
return(Switch_States)
def RunCircuit(file):
OP1 = OR(1)
OP2 = OR(2)
OP3 = OR(3)
ON1 = NOR(1)
ON2 = NOR(2)
ON3 = NOR(3)
AP1 = AND(1)
AP2 = AND(2)
AP3 = AND(3)
AN1 = NAND(1)
AN2 = NAND(2)
AN3 = NAND(3)
CP0 = CircuitPath(False)
CP1 = CircuitPath(True)
I00 = Input("A", False)
I01 = Input("B", True)
I02 = Input("C", True)
OUT = Output()
INV = INVERT()
Circuit_Array = [line for line in csv.reader(open(file, "r"))]
for y in range(0, len(Circuit_Array)):
for x in range(0, len(Circuit_Array[0])):
exec("Circuit_Array[y][x] = " + Circuit_Array[y][x])
Circuit = copy.deepcopy(Circuit_Array)
Row = len(Circuit)-1
Col = len(Circuit[0])-1
Integers = []
Input_List = []
for y in range(0, len(Circuit)):
for x in range(0, len(Circuit[0])):
if(type(Circuit[y][x]) == Input):
Circuit[y][x].Position = (x,y)
Input_List.append(Circuit[y][x])
def BoolMove(Tile, Direction):
if(Tile.canPass()):
return(Direction)
else:
return("")
def GetDirection(Position, Direction):
X, Y = Position
if(Direction == "N"):
X, Y = X, Y-1
if(Direction == "E"):
X, Y = X+1, Y
if(Direction == "S"):
X, Y = X, Y+1
if(Direction == "W"):
X, Y = X-1, Y
return((X, Y))
def FindOutput(Input, CurrentPosition, Directions, Map, Length, Path, Globals):
X, Y = CurrentPosition
while(True):
if len(Directions) >= 2:
for Direction in Directions:
FindOutput(Input, (X,Y), Direction, copy.deepcopy(Map), Length, copy.deepcopy(Path), Globals)
return
Map[Y][X] = CP0
if( Globals[Y][X].canContinue(Input) ):
pass
else:
Integers.append([0, Input.sym(), Length, Path])
return
if(len(Directions) > 0):
Path.append(Directions)
X, Y = GetDirection((X,Y), Directions)
if( type(Globals[Y][X]) == Output):
Integers.append([1, Input.sym(), Length, Path])
return
Directions = ""
if(Y-1 >= 0):
Directions += BoolMove(Map[Y-1][X], "N")
if(X+1 <= Col):
Directions += BoolMove(Map[Y][X+1], "E")
if(Y+1 <= Row):
Directions += BoolMove(Map[Y+1][X], "S")
if(X-1 >= 0):
Directions += BoolMove(Map[Y][X-1], "W")
if len(Directions) == 0:
Integers.append([0, Input.sym(), Length, Path])
return
Length += 1
Input_List.sort(key = Input.csym)
for I in Input_List:
FindOutput(I, I.Position, "", copy.deepcopy(Circuit), 0, [], Circuit_Array)
return(Integers)
EmulatedCircuit = RunCircuit("T01.txt")
for line in EmulatedCircuit:
print(line)
"""
C * ( (A*B) + (a*B) )
C * ( (A*b) + a )
A * ( (B*c) + (b*C) + (a*B) ) * B
C * ( (B*C*a) + (a * (B+C)) )
A - 835
Simplifying circuit
ab + aB + Ab
a*(B+b) + Ab
a*(1) + Ab
a + Ab
(A+aB)*(B+bA)
(A*B) + (A*bA) + (aB * B) + (aB*bA)
AB + Ab + aB + aBbA (Switches can't be on and off at the same time so we get rid of aBbA)
AB + Ab + aB + 0
AB + Ab + aB
A*(B+b) + aB (We simplify the equation now by grouping like terms)
A(B+b) + aB (and again; Switches can't be on and off at the same time so we get rid of Bb)
A + aB (and we're left with this)
ABc + ABC + aBC
AB(c+C) + aBC = (ABc + ABC + aBC, but simplified)
AB(1) + aBC (Adding a switch's opposite to itself is equal to '1')
AB + aBC (A switch multiplied by 1 is equal to itself)
B(A + aC)
abC + aBC + AbC + ABC
bC(Aa) + BC(Aa)
bC(1) + BC(1)
bC + BC
C(Bb) = bC + BC
C(1)
C
0
1
10
11
100
101
110
111
1000
1001
1010
1011
1100
1101
1110
1111
Ac + a(B+C) + AB(C+b)
Ac + aB + aC + ABC + ABb
Ac + aB + aC + ABC + A(0) ( A switch multiplied by its opposite is equal to '0')
Ac + aB + aC + ABC
A(c+BC) + aB + aC (Rule 17: A + aB = A+B)
A(c+B) + aB + aC
Ac + AB + aB + aC
Ac + B(A+a)
Ac + B + aC (Simplify until you have a set of unique variables)
AbC + AB(aC) + BC(bA)
AbC + ABa + ABC + BCb + BCA
AbC + 0*B + ABC + 0*C + ABC
AbC + ABC + ABC (ABC + ABC = ABC)
AbC + ABC
AC(b+B)
AC(1)
AC
HEM 11 46 105
835
1
ab + aB
a(b + B)
a
2
aB + AB + ab
a(B+b) + AB
a + AB
3
ab + Ab + b(A+a)
ab + Ab + b(1)
ab +Ab + b
b(Aa) + b
b(1) + b
b + b
b
4
Ab + A(B+b) + AB
Ab + AB + Ab + AB
Ab + Ab = Ab
AB + AB = AB
Ab + AB
A(Bb)
A
5
(A+AB)*(B+BA)
(AB) + (A*AB) + (AB*B) + (AB*AB)
AB + (A*A)B + A(B*B) + AB
AB + A(B) + A(B) + AB
AB
6
abC + aBC + AbC
bC(a + A) + aBC
bC(a + A) + aBC
bC(1) + aBC
bC + aBC
C(b + aB)
7
Abc + ABC + aBC
Abc + BC(A+a)
Abc + BC
8
abc + aBC + Abc
bc(a+A) + aBC
bc + aBC
9
abc + abC + Abc + AbC
ab(c+C) + Ab(c+C)
ab + Ab
b(A+a)
b
10
AbC + ABC + ABc + aBc
AbC + ABC + Bc(A+a)
AbC + ABC + Bc
AC(b+B) + Bc
AC + Bc
11
C(AB+Ab) + c(ab+aB)
ABC + AbC + abc + aBc
AC(B+b) + ac(b+B)
AC + ac
12
c(ab + AB + Ab) + A(BC + bC)
abc + ABc + Abc + ABC + AbC
abc + A(Bc + bC) + A(bc+BC)
abc + A + A
abc + A -shallow simplification
c(ab + AB + Ab) + A(BC+ bC)
abc + ABc + Abc + ABC + AbC
bc(a+A) ABc + ABC + AbC
bc + ABc + ABC + AbC
bc + AB(c+C) + AbC
bc + AB + AbC
b(c + AC) + AB
b(c+A) + AB
bc + Ab + AB
bc + A(b+B)
bc + A -deeper simplification
A + bc
AbC * aBc
11.4 106 De Morgan's laws
____ __
(Ab+C)*(a+Bc)
t1: (a+B)*c = ac + Bc
t2: a + (b+C) = a + b + C
(ac+Bc)*(a+b+C)
aac + abc + acC + aBc + Bbc + BcC
ac + abc + 0 + aBc + 0 + 0
ac + abc + aBc
ac(B+b) + ac
ac + ac
ac
__ ___
(aB)+(a+B)
(A+b)+A*b
A+Ab+b (A+AB) = A, regardless of any of the variable's states.
A+b
HEM 11.4 E47 107
1
__
(ab)*(aB)
(ab)*(A+b)
Aab + abb
0 + ab
ab
2 __ __
(A+BC)+(AB+C) = a+b+C
((A+b)*c) + (a+b+C)
Ac+bc+a+b+C
(a+Ac)+(b+bc)+C
a+b+C
3
_____ __
(aB+Bc)*(Ab)
((A+b)*(b+C))*(a+B)
(Ab+AC+bb+bC)*(a+B)
Aab+ABb+AaC+ABC+abb+Bbb+abC+BbC
0+0+0+ABC+ab+0+abC+0
ABC+ab+abC (ab + abC = ab, by absorption)
ABC + ab
4
__ __ __
(Ab+Bc)+(aB)
(a+B+b+C)+(A+b)
a+B+b+C+A+b
(A+a)+(B+b)+C
1 + 1 + C
(C+1) + 1
1 + 1 = 1 (in Boolean algebra, x + 1 = 1)
5
__ __ __
(Ab+aC)*(aBC) = a(b+c)
(a+B+A+c)*(a*(b+c))
(a+B+A+c)*(ab+ac)
aab+aac+aBb+aBc+Aab+Aac+abc+acc
ab+ac+0+aBc+0+0+abc+ac
ab+ac+aBc+abc+ac
(ac+ac)+(ab+aBc)+(ac+acb)
ac+ab+ac
ac+ab
a(b+c)
"""
| mit |
PrashntS/scikit-learn | examples/calibration/plot_compare_calibration.py | 241 | 5008 | """
========================================
Comparison of Calibration of Classifiers
========================================
Well calibrated classifiers are probabilistic classifiers for which the output
of the predict_proba method can be directly interpreted as a confidence level.
For instance a well calibrated (binary) classifier should classify the samples
such that among the samples to which it gave a predict_proba value close to
0.8, approx. 80% actually belong to the positive class.
LogisticRegression returns well calibrated predictions as it directly
optimizes log-loss. In contrast, the other methods return biased probabilities,
with different biases per method:
* GaussianNaiveBayes tends to push probabilities to 0 or 1 (note the counts in
the histograms). This is mainly because it makes the assumption that features
are conditionally independent given the class, which is not the case in this
dataset which contains 2 redundant features.
* RandomForestClassifier shows the opposite behavior: the histograms show
peaks at approx. 0.2 and 0.9 probability, while probabilities close to 0 or 1
are very rare. An explanation for this is given by Niculescu-Mizil and Caruana
[1]: "Methods such as bagging and random forests that average predictions from
a base set of models can have difficulty making predictions near 0 and 1
because variance in the underlying base models will bias predictions that
should be near zero or one away from these values. Because predictions are
restricted to the interval [0,1], errors caused by variance tend to be one-
sided near zero and one. For example, if a model should predict p = 0 for a
case, the only way bagging can achieve this is if all bagged trees predict
zero. If we add noise to the trees that bagging is averaging over, this noise
will cause some trees to predict values larger than 0 for this case, thus
moving the average prediction of the bagged ensemble away from 0. We observe
this effect most strongly with random forests because the base-level trees
trained with random forests have relatively high variance due to feature
subsetting." As a result, the calibration curve shows a characteristic sigmoid
shape, indicating that the classifier could trust its "intuition" more and
typically return probabilities closer to 0 or 1.
* Support Vector Classification (SVC) shows an even more sigmoid curve than
the RandomForestClassifier, which is typical for maximum-margin methods
(compare Niculescu-Mizil and Caruana [1]), which focus on hard samples
that are close to the decision boundary (the support vectors).
.. topic:: References:
.. [1] Predicting Good Probabilities with Supervised Learning,
A. Niculescu-Mizil & R. Caruana, ICML 2005
"""
print(__doc__)
# Author: Jan Hendrik Metzen <[email protected]>
# License: BSD Style.
import numpy as np
np.random.seed(0)
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.naive_bayes import GaussianNB
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier
from sklearn.svm import LinearSVC
from sklearn.calibration import calibration_curve
X, y = datasets.make_classification(n_samples=100000, n_features=20,
n_informative=2, n_redundant=2)
train_samples = 100 # Samples used for training the models
X_train = X[:train_samples]
X_test = X[train_samples:]
y_train = y[:train_samples]
y_test = y[train_samples:]
# Create classifiers
lr = LogisticRegression()
gnb = GaussianNB()
svc = LinearSVC(C=1.0)
rfc = RandomForestClassifier(n_estimators=100)
###############################################################################
# Plot calibration plots
plt.figure(figsize=(10, 10))
ax1 = plt.subplot2grid((3, 1), (0, 0), rowspan=2)
ax2 = plt.subplot2grid((3, 1), (2, 0))
ax1.plot([0, 1], [0, 1], "k:", label="Perfectly calibrated")
for clf, name in [(lr, 'Logistic'),
(gnb, 'Naive Bayes'),
(svc, 'Support Vector Classification'),
(rfc, 'Random Forest')]:
clf.fit(X_train, y_train)
if hasattr(clf, "predict_proba"):
prob_pos = clf.predict_proba(X_test)[:, 1]
else: # use decision function
prob_pos = clf.decision_function(X_test)
prob_pos = \
(prob_pos - prob_pos.min()) / (prob_pos.max() - prob_pos.min())
fraction_of_positives, mean_predicted_value = \
calibration_curve(y_test, prob_pos, n_bins=10)
ax1.plot(mean_predicted_value, fraction_of_positives, "s-",
label="%s" % (name, ))
ax2.hist(prob_pos, range=(0, 1), bins=10, label=name,
histtype="step", lw=2)
ax1.set_ylabel("Fraction of positives")
ax1.set_ylim([-0.05, 1.05])
ax1.legend(loc="lower right")
ax1.set_title('Calibration plots (reliability curve)')
ax2.set_xlabel("Mean predicted value")
ax2.set_ylabel("Count")
ax2.legend(loc="upper center", ncol=2)
plt.tight_layout()
plt.show()
| bsd-3-clause |
Ophiuchus1312/enigma2-master | lib/python/Tools/Downloader.py | 3 | 1714 | from twisted.web import client
from twisted.internet import reactor, defer
from twisted.python import failure
class HTTPProgressDownloader(client.HTTPDownloader):
def __init__(self, url, outfile, headers=None):
client.HTTPDownloader.__init__(self, url, outfile, headers=headers, agent="STB_BOX HTTP Downloader")
self.status = None
self.progress_callback = None
self.deferred = defer.Deferred()
def noPage(self, reason):
if self.status == "304":
print reason.getErrorMessage()
client.HTTPDownloader.page(self, "")
else:
client.HTTPDownloader.noPage(self, reason)
def gotHeaders(self, headers):
if self.status == "200":
if headers.has_key("content-length"):
self.totalbytes = int(headers["content-length"][0])
else:
self.totalbytes = 0
self.currentbytes = 0.0
return client.HTTPDownloader.gotHeaders(self, headers)
def pagePart(self, packet):
if self.status == "200":
self.currentbytes += len(packet)
if self.totalbytes and self.progress_callback:
self.progress_callback(self.currentbytes, self.totalbytes)
return client.HTTPDownloader.pagePart(self, packet)
def pageEnd(self):
return client.HTTPDownloader.pageEnd(self)
class downloadWithProgress:
def __init__(self, url, outputfile, contextFactory=None, *args, **kwargs):
scheme, host, port, path = client._parse(url)
self.factory = HTTPProgressDownloader(url, outputfile, *args, **kwargs)
self.connection = reactor.connectTCP(host, port, self.factory)
def start(self):
return self.factory.deferred
def stop(self):
print "[stop]"
self.connection.disconnect()
def addProgress(self, progress_callback):
print "[addProgress]"
self.factory.progress_callback = progress_callback
| gpl-2.0 |
pbaesse/Sissens | lib/python2.7/site-packages/eventlet/support/dns/query.py | 2 | 22949 | # Copyright (C) 2003-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Talk to a DNS server."""
from __future__ import generators
import errno
import select
import socket
import struct
import sys
import time
import dns.exception
import dns.inet
import dns.name
import dns.message
import dns.rcode
import dns.rdataclass
import dns.rdatatype
from ._compat import long, string_types
if sys.version_info > (3,):
select_error = OSError
else:
select_error = select.error
# Function used to create a socket. Can be overridden if needed in special
# situations.
socket_factory = socket.socket
class UnexpectedSource(dns.exception.DNSException):
"""A DNS query response came from an unexpected address or port."""
class BadResponse(dns.exception.FormError):
"""A DNS query response does not respond to the question asked."""
class TransferError(dns.exception.DNSException):
"""A zone transfer response got a non-zero rcode."""
def __init__(self, rcode):
message = 'Zone transfer error: %s' % dns.rcode.to_text(rcode)
super(TransferError, self).__init__(message)
self.rcode = rcode
def _compute_expiration(timeout):
if timeout is None:
return None
else:
return time.time() + timeout
# This module can use either poll() or select() as the "polling backend".
#
# A backend function takes an fd, bools for readability, writablity, and
# error detection, and a timeout.
def _poll_for(fd, readable, writable, error, timeout):
"""Poll polling backend."""
event_mask = 0
if readable:
event_mask |= select.POLLIN
if writable:
event_mask |= select.POLLOUT
if error:
event_mask |= select.POLLERR
pollable = select.poll()
pollable.register(fd, event_mask)
if timeout:
event_list = pollable.poll(long(timeout * 1000))
else:
event_list = pollable.poll()
return bool(event_list)
def _select_for(fd, readable, writable, error, timeout):
"""Select polling backend."""
rset, wset, xset = [], [], []
if readable:
rset = [fd]
if writable:
wset = [fd]
if error:
xset = [fd]
if timeout is None:
(rcount, wcount, xcount) = select.select(rset, wset, xset)
else:
(rcount, wcount, xcount) = select.select(rset, wset, xset, timeout)
return bool((rcount or wcount or xcount))
def _wait_for(fd, readable, writable, error, expiration):
# Use the selected polling backend to wait for any of the specified
# events. An "expiration" absolute time is converted into a relative
# timeout.
done = False
while not done:
if expiration is None:
timeout = None
else:
timeout = expiration - time.time()
if timeout <= 0.0:
raise dns.exception.Timeout
try:
if not _polling_backend(fd, readable, writable, error, timeout):
raise dns.exception.Timeout
except select_error as e:
if e.args[0] != errno.EINTR:
raise e
done = True
def _set_polling_backend(fn):
# Internal API. Do not use.
global _polling_backend
_polling_backend = fn
if hasattr(select, 'poll'):
# Prefer poll() on platforms that support it because it has no
# limits on the maximum value of a file descriptor (plus it will
# be more efficient for high values).
_polling_backend = _poll_for
else:
_polling_backend = _select_for
def _wait_for_readable(s, expiration):
_wait_for(s, True, False, True, expiration)
def _wait_for_writable(s, expiration):
_wait_for(s, False, True, True, expiration)
def _addresses_equal(af, a1, a2):
# Convert the first value of the tuple, which is a textual format
# address into binary form, so that we are not confused by different
# textual representations of the same address
try:
n1 = dns.inet.inet_pton(af, a1[0])
n2 = dns.inet.inet_pton(af, a2[0])
except dns.exception.SyntaxError:
return False
return n1 == n2 and a1[1:] == a2[1:]
def _destination_and_source(af, where, port, source, source_port):
# Apply defaults and compute destination and source tuples
# suitable for use in connect(), sendto(), or bind().
if af is None:
try:
af = dns.inet.af_for_address(where)
except Exception:
af = dns.inet.AF_INET
if af == dns.inet.AF_INET:
destination = (where, port)
if source is not None or source_port != 0:
if source is None:
source = '0.0.0.0'
source = (source, source_port)
elif af == dns.inet.AF_INET6:
destination = (where, port, 0, 0)
if source is not None or source_port != 0:
if source is None:
source = '::'
source = (source, source_port, 0, 0)
return (af, destination, source)
def send_udp(sock, what, destination, expiration=None):
"""Send a DNS message to the specified UDP socket.
*sock*, a ``socket``.
*what*, a ``binary`` or ``dns.message.Message``, the message to send.
*destination*, a destination tuple appropriate for the address family
of the socket, specifying where to send the query.
*expiration*, a ``float`` or ``None``, the absolute time at which
a timeout exception should be raised. If ``None``, no timeout will
occur.
Returns an ``(int, float)`` tuple of bytes sent and the sent time.
"""
if isinstance(what, dns.message.Message):
what = what.to_wire()
_wait_for_writable(sock, expiration)
sent_time = time.time()
n = sock.sendto(what, destination)
return (n, sent_time)
def receive_udp(sock, destination, expiration=None,
ignore_unexpected=False, one_rr_per_rrset=False,
keyring=None, request_mac=b''):
"""Read a DNS message from a UDP socket.
*sock*, a ``socket``.
*destination*, a destination tuple appropriate for the address family
of the socket, specifying where the associated query was sent.
*expiration*, a ``float`` or ``None``, the absolute time at which
a timeout exception should be raised. If ``None``, no timeout will
occur.
*ignore_unexpected*, a ``bool``. If ``True``, ignore responses from
unexpected sources.
*one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
RRset.
*keyring*, a ``dict``, the keyring to use for TSIG.
*request_mac*, a ``binary``, the MAC of the request (for TSIG).
Raises if the message is malformed, if network errors occur, or if
there is a timeout.
Returns a ``dns.message.Message`` object.
"""
wire = b''
while 1:
_wait_for_readable(sock, expiration)
(wire, from_address) = sock.recvfrom(65535)
if _addresses_equal(sock.family, from_address, destination) or \
(dns.inet.is_multicast(destination[0]) and
from_address[1:] == destination[1:]):
break
if not ignore_unexpected:
raise UnexpectedSource('got a response from '
'%s instead of %s' % (from_address,
destination))
received_time = time.time()
r = dns.message.from_wire(wire, keyring=keyring, request_mac=request_mac,
one_rr_per_rrset=one_rr_per_rrset)
return (r, received_time)
def udp(q, where, timeout=None, port=53, af=None, source=None, source_port=0,
ignore_unexpected=False, one_rr_per_rrset=False):
"""Return the response obtained after sending a query via UDP.
*q*, a ``dns.message.message``, the query to send
*where*, a ``text`` containing an IPv4 or IPv6 address, where
to send the message.
*timeout*, a ``float`` or ``None``, the number of seconds to wait before the
query times out. If ``None``, the default, wait forever.
*port*, an ``int``, the port to send the message to. The default is 53.
*af*, an ``int``, the address family to use. The default is ``None``,
which causes the address family to be inferred from the form of
*where*. If the inference attempt fails, AF_INET is used. This
parameter is historical; you need never set it.
*source*, a ``text`` containing an IPv4 or IPv6 address, specifying
the source address. The default is the wildcard address.
*source_port*, an ``int``, the port from which to send the message.
The default is 0.
*ignore_unexpected*, a ``bool``. If ``True``, ignore responses from
unexpected sources.
*one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
RRset.
Returns a ``dns.message.Message``.
"""
wire = q.to_wire()
(af, destination, source) = _destination_and_source(af, where, port,
source, source_port)
s = socket_factory(af, socket.SOCK_DGRAM, 0)
received_time = None
sent_time = None
try:
expiration = _compute_expiration(timeout)
s.setblocking(0)
if source is not None:
s.bind(source)
(_, sent_time) = send_udp(s, wire, destination, expiration)
(r, received_time) = receive_udp(s, destination, expiration,
ignore_unexpected, one_rr_per_rrset,
q.keyring, q.mac)
finally:
if sent_time is None or received_time is None:
response_time = 0
else:
response_time = received_time - sent_time
s.close()
r.time = response_time
if not q.is_response(r):
raise BadResponse
return r
def _net_read(sock, count, expiration):
"""Read the specified number of bytes from sock. Keep trying until we
either get the desired amount, or we hit EOF.
A Timeout exception will be raised if the operation is not completed
by the expiration time.
"""
s = b''
while count > 0:
_wait_for_readable(sock, expiration)
n = sock.recv(count)
if n == b'':
raise EOFError
count = count - len(n)
s = s + n
return s
def _net_write(sock, data, expiration):
"""Write the specified data to the socket.
A Timeout exception will be raised if the operation is not completed
by the expiration time.
"""
current = 0
l = len(data)
while current < l:
_wait_for_writable(sock, expiration)
current += sock.send(data[current:])
def send_tcp(sock, what, expiration=None):
"""Send a DNS message to the specified TCP socket.
*sock*, a ``socket``.
*what*, a ``binary`` or ``dns.message.Message``, the message to send.
*expiration*, a ``float`` or ``None``, the absolute time at which
a timeout exception should be raised. If ``None``, no timeout will
occur.
Returns an ``(int, float)`` tuple of bytes sent and the sent time.
"""
if isinstance(what, dns.message.Message):
what = what.to_wire()
l = len(what)
# copying the wire into tcpmsg is inefficient, but lets us
# avoid writev() or doing a short write that would get pushed
# onto the net
tcpmsg = struct.pack("!H", l) + what
_wait_for_writable(sock, expiration)
sent_time = time.time()
_net_write(sock, tcpmsg, expiration)
return (len(tcpmsg), sent_time)
def receive_tcp(sock, expiration=None, one_rr_per_rrset=False,
keyring=None, request_mac=b''):
"""Read a DNS message from a TCP socket.
*sock*, a ``socket``.
*expiration*, a ``float`` or ``None``, the absolute time at which
a timeout exception should be raised. If ``None``, no timeout will
occur.
*one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
RRset.
*keyring*, a ``dict``, the keyring to use for TSIG.
*request_mac*, a ``binary``, the MAC of the request (for TSIG).
Raises if the message is malformed, if network errors occur, or if
there is a timeout.
Returns a ``dns.message.Message`` object.
"""
ldata = _net_read(sock, 2, expiration)
(l,) = struct.unpack("!H", ldata)
wire = _net_read(sock, l, expiration)
received_time = time.time()
r = dns.message.from_wire(wire, keyring=keyring, request_mac=request_mac,
one_rr_per_rrset=one_rr_per_rrset)
return (r, received_time)
def _connect(s, address):
try:
s.connect(address)
except socket.error:
(ty, v) = sys.exc_info()[:2]
if hasattr(v, 'errno'):
v_err = v.errno
else:
v_err = v[0]
if v_err not in [errno.EINPROGRESS, errno.EWOULDBLOCK, errno.EALREADY]:
raise v
def tcp(q, where, timeout=None, port=53, af=None, source=None, source_port=0,
one_rr_per_rrset=False):
"""Return the response obtained after sending a query via TCP.
*q*, a ``dns.message.message``, the query to send
*where*, a ``text`` containing an IPv4 or IPv6 address, where
to send the message.
*timeout*, a ``float`` or ``None``, the number of seconds to wait before the
query times out. If ``None``, the default, wait forever.
*port*, an ``int``, the port to send the message to. The default is 53.
*af*, an ``int``, the address family to use. The default is ``None``,
which causes the address family to be inferred from the form of
*where*. If the inference attempt fails, AF_INET is used. This
parameter is historical; you need never set it.
*source*, a ``text`` containing an IPv4 or IPv6 address, specifying
the source address. The default is the wildcard address.
*source_port*, an ``int``, the port from which to send the message.
The default is 0.
*one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
RRset.
Returns a ``dns.message.Message``.
"""
wire = q.to_wire()
(af, destination, source) = _destination_and_source(af, where, port,
source, source_port)
s = socket_factory(af, socket.SOCK_STREAM, 0)
begin_time = None
received_time = None
try:
expiration = _compute_expiration(timeout)
s.setblocking(0)
begin_time = time.time()
if source is not None:
s.bind(source)
_connect(s, destination)
send_tcp(s, wire, expiration)
(r, received_time) = receive_tcp(s, expiration, one_rr_per_rrset,
q.keyring, q.mac)
finally:
if begin_time is None or received_time is None:
response_time = 0
else:
response_time = received_time - begin_time
s.close()
r.time = response_time
if not q.is_response(r):
raise BadResponse
return r
def xfr(where, zone, rdtype=dns.rdatatype.AXFR, rdclass=dns.rdataclass.IN,
timeout=None, port=53, keyring=None, keyname=None, relativize=True,
af=None, lifetime=None, source=None, source_port=0, serial=0,
use_udp=False, keyalgorithm=dns.tsig.default_algorithm):
"""Return a generator for the responses to a zone transfer.
*where*, a ``text`` containing an IPv4 or IPv6 address, where
to send the message.
*zone*, a ``dns.name.Name`` or ``text``, the name of the zone to transfer.
*rdtype*, an ``int`` or ``text``, the type of zone transfer. The
default is ``dns.rdatatype.AXFR``. ``dns.rdatatype.IXFR`` can be
used to do an incremental transfer instead.
*rdclass*, an ``int`` or ``text``, the class of the zone transfer.
The default is ``dns.rdataclass.IN``.
*timeout*, a ``float``, the number of seconds to wait for each
response message. If None, the default, wait forever.
*port*, an ``int``, the port to send the message to. The default is 53.
*keyring*, a ``dict``, the keyring to use for TSIG.
*keyname*, a ``dns.name.Name`` or ``text``, the name of the TSIG
key to use.
*relativize*, a ``bool``. If ``True``, all names in the zone will be
relativized to the zone origin. It is essential that the
relativize setting matches the one specified to
``dns.zone.from_xfr()`` if using this generator to make a zone.
*af*, an ``int``, the address family to use. The default is ``None``,
which causes the address family to be inferred from the form of
*where*. If the inference attempt fails, AF_INET is used. This
parameter is historical; you need never set it.
*lifetime*, a ``float``, the total number of seconds to spend
doing the transfer. If ``None``, the default, then there is no
limit on the time the transfer may take.
*source*, a ``text`` containing an IPv4 or IPv6 address, specifying
the source address. The default is the wildcard address.
*source_port*, an ``int``, the port from which to send the message.
The default is 0.
*serial*, an ``int``, the SOA serial number to use as the base for
an IXFR diff sequence (only meaningful if *rdtype* is
``dns.rdatatype.IXFR``).
*use_udp*, a ``bool``. If ``True``, use UDP (only meaningful for IXFR).
*keyalgorithm*, a ``dns.name.Name`` or ``text``, the TSIG algorithm to use.
Raises on errors, and so does the generator.
Returns a generator of ``dns.message.Message`` objects.
"""
if isinstance(zone, string_types):
zone = dns.name.from_text(zone)
if isinstance(rdtype, string_types):
rdtype = dns.rdatatype.from_text(rdtype)
q = dns.message.make_query(zone, rdtype, rdclass)
if rdtype == dns.rdatatype.IXFR:
rrset = dns.rrset.from_text(zone, 0, 'IN', 'SOA',
'. . %u 0 0 0 0' % serial)
q.authority.append(rrset)
if keyring is not None:
q.use_tsig(keyring, keyname, algorithm=keyalgorithm)
wire = q.to_wire()
(af, destination, source) = _destination_and_source(af, where, port,
source, source_port)
if use_udp:
if rdtype != dns.rdatatype.IXFR:
raise ValueError('cannot do a UDP AXFR')
s = socket_factory(af, socket.SOCK_DGRAM, 0)
else:
s = socket_factory(af, socket.SOCK_STREAM, 0)
s.setblocking(0)
if source is not None:
s.bind(source)
expiration = _compute_expiration(lifetime)
_connect(s, destination)
l = len(wire)
if use_udp:
_wait_for_writable(s, expiration)
s.send(wire)
else:
tcpmsg = struct.pack("!H", l) + wire
_net_write(s, tcpmsg, expiration)
done = False
delete_mode = True
expecting_SOA = False
soa_rrset = None
if relativize:
origin = zone
oname = dns.name.empty
else:
origin = None
oname = zone
tsig_ctx = None
first = True
while not done:
mexpiration = _compute_expiration(timeout)
if mexpiration is None or mexpiration > expiration:
mexpiration = expiration
if use_udp:
_wait_for_readable(s, expiration)
(wire, from_address) = s.recvfrom(65535)
else:
ldata = _net_read(s, 2, mexpiration)
(l,) = struct.unpack("!H", ldata)
wire = _net_read(s, l, mexpiration)
is_ixfr = (rdtype == dns.rdatatype.IXFR)
r = dns.message.from_wire(wire, keyring=q.keyring, request_mac=q.mac,
xfr=True, origin=origin, tsig_ctx=tsig_ctx,
multi=True, first=first,
one_rr_per_rrset=is_ixfr)
rcode = r.rcode()
if rcode != dns.rcode.NOERROR:
raise TransferError(rcode)
tsig_ctx = r.tsig_ctx
first = False
answer_index = 0
if soa_rrset is None:
if not r.answer or r.answer[0].name != oname:
raise dns.exception.FormError(
"No answer or RRset not for qname")
rrset = r.answer[0]
if rrset.rdtype != dns.rdatatype.SOA:
raise dns.exception.FormError("first RRset is not an SOA")
answer_index = 1
soa_rrset = rrset.copy()
if rdtype == dns.rdatatype.IXFR:
if soa_rrset[0].serial <= serial:
#
# We're already up-to-date.
#
done = True
else:
expecting_SOA = True
#
# Process SOAs in the answer section (other than the initial
# SOA in the first message).
#
for rrset in r.answer[answer_index:]:
if done:
raise dns.exception.FormError("answers after final SOA")
if rrset.rdtype == dns.rdatatype.SOA and rrset.name == oname:
if expecting_SOA:
if rrset[0].serial != serial:
raise dns.exception.FormError(
"IXFR base serial mismatch")
expecting_SOA = False
elif rdtype == dns.rdatatype.IXFR:
delete_mode = not delete_mode
#
# If this SOA RRset is equal to the first we saw then we're
# finished. If this is an IXFR we also check that we're seeing
# the record in the expected part of the response.
#
if rrset == soa_rrset and \
(rdtype == dns.rdatatype.AXFR or
(rdtype == dns.rdatatype.IXFR and delete_mode)):
done = True
elif expecting_SOA:
#
# We made an IXFR request and are expecting another
# SOA RR, but saw something else, so this must be an
# AXFR response.
#
rdtype = dns.rdatatype.AXFR
expecting_SOA = False
if done and q.keyring and not r.had_tsig:
raise dns.exception.FormError("missing TSIG")
yield r
s.close()
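if __name__ == '__main__':
    # Editor's note: an illustrative sketch (not part of the original module).
    # 192.0.2.53 is a documentation-range placeholder nameserver, so the query
    # below is expected to time out rather than be answered.
    q = dns.message.make_query('example.com.', dns.rdatatype.A)
    try:
        print(udp(q, '192.0.2.53', timeout=2))
    except dns.exception.Timeout:
        print('query timed out')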
| gpl-3.0 |