repo_name | ref | path | copies | content
---|---|---|---|---
kyoren/https-github.com-h2oai-h2o-3 | refs/heads/master | py2/h2o_os_util.py | 30 |
import os
import subprocess
import getpass
import psutil  # used at module level by kill_process_tree
def kill_process_tree(pid, including_parent=True):
    parent = psutil.Process(pid)
    # psutil 2.x renamed get_children() to children(); try the new name first
    # (mirrors the version shim used in show_h2o_processes below)
    try:
        children = parent.children(recursive=True)
    except AttributeError:
        children = parent.get_children(recursive=True)
    for child in children:
        child.kill()
if including_parent:
parent.kill()
def kill_child_processes():
me = os.getpid()
kill_process_tree(me, including_parent=False)
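# Example (hypothetical usage): a test teardown can call
#   kill_child_processes()
# to reap anything the test spawned without killing the runner itself.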
# Since we hang if the hosts file has bad IP addresses, it's nice to give
# the user simple, obvious feedback when running with -v
# and machines are down or the hosts definition has bad IPs.
# FIX! currently not used
def ping_host_if_verbose(host):
# if (h2o.verbose)
if 1==1:
username = getpass.getuser()
# if username=='jenkins' or username=='kevin' or username=='michal':
if username=='jenkins' or username=='kevin':
ping = subprocess.Popen( ["ping", "-c", "4", host])
ping.communicate()
def check_port_group(base_port):
# Only enable if useful for debug
if 1==1:
username = getpass.getuser()
# if username=='jenkins' or username=='kevin' or username=='michal':
if username=='jenkins':
            # assumes you want to know about 2 ports starting at base_port
            # can't use the -p option (show PID/program name); we're not root
command1Split = ['netstat', '-an']
command2Split = ['egrep']
            # space after each port so egrep only matches whole port numbers (no submatches)
command2Split.append("(%s | %s)" % (base_port, base_port+1) )
command3Split = ['wc','-l']
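            # the pipeline below is equivalent to:
            #   netstat -an | egrep '(<base_port> | <base_port+1>)'
            # (command3Split builds a 'wc -l' stage that is never wired into the pipe)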
print "Checking 2 ports starting at ", base_port
print ' '.join(command2Split)
# use netstat thru subprocess
p1 = subprocess.Popen(command1Split, stdout=subprocess.PIPE)
p2 = subprocess.Popen(command2Split, stdin=p1.stdout, stdout=subprocess.PIPE)
output = p2.communicate()[0]
print output
# We should use psutil here, since everyone has it installed
# and it should work on Windows too.
def show_h2o_processes():
# Only enable if useful for debug
if 1==0:
username = getpass.getuser()
h2oFound = False
users = set()
h2oUsers = set()
# if username=='jenkins' or username=='kevin' or username=='michal':
if username=='jenkins' or username=='kevin':
import psutil
# print "get_users:", psutil.get_users()
            print "total physical dram:", psutil.TOTAL_PHYMEM/(1024*1024*1024), "GB"
print "max cpu threads:", psutil.NUM_CPUS
print "\nReporting on h2o"
users = set()
h2oUsers = set()
h2oFound = False
for p in psutil.process_iter():
h2oProcess = False
# hack.
# psutil 2.x needs function reference
# psutil 1.x needs object reference
if hasattr(p.name, '__call__'):
pname = p.name()
pcmdline = p.cmdline()
                    # the user name might be unknown here, due to LXC?
try:
pusername = p.username()
except:
pusername = "Unknown-maybe-LXC-user"
pstatus = p.status()
else:
pname = p.name
pcmdline = p.cmdline
try:
pusername = p.username
except:
pusername = "Unknown-maybe-LXC-user"
pstatus = p.status
if hasattr(p.pid, '__call__'):
ppid = p.pid()
else:
ppid = p.pid
if 'java' in pname:
users.add(pusername)
                # now iterate through the cmdline, to see if it's got 'h2o'
for c in pcmdline:
if 'h2o' in c:
h2oProcess = True
h2oUsers.add(pusername)
break
if h2oProcess:
h2oFound = True
print "\n#**********************************************"
print p
                    # the process could disappear while we're looking
                    # (e.g. a short-lived h2o version-check java process)
try:
print "pid:", ppid
print "cmdline:", pcmdline
# AccessDenied problem?
# print p.getcwd()
print "status:", pstatus
print "username:", pusername
print "cpu_percent:", p.get_cpu_percent(interval=1.0)
print "memory_percent:", p.get_memory_percent()
print p.get_memory_info()
# AccessDenied problem
# print p.get_io_counters()
# AccessDenied problem
# p.get_open_files()
# AccessDenied problem
# print p.get_connections()
except:
pass
if h2oFound:
print "\n\n#**********************************************************************************************"
else:
print "No h2o processes found."
print "\nusers running java:", list(users)
print "users running h2o java:", list(h2oUsers)
|
puttarajubr/commcare-hq | refs/heads/master | corehq/apps/export/tests/__init__.py | 2 | from corehq.apps.export.tests.test_form_schema import *
|
ngonzalvez/sentry | refs/heads/master | src/sentry/middleware/env.py | 12 | from __future__ import absolute_import
from django.conf import settings
from django.core.urlresolvers import reverse
from sentry.app import env
class SentryEnvMiddleware(object):
def process_request(self, request):
# HACK: bootstrap some env crud if we haven't yet
if not settings.SENTRY_URL_PREFIX:
settings.SENTRY_URL_PREFIX = request.build_absolute_uri(reverse('sentry')).strip('/')
# bind request to env
env.request = request
|
xindus40223115/w16b_test | refs/heads/master | static/Brython3.1.3-20150514-095342/Lib/unittest/test/test_discovery.py | 785 | import os
import re
import sys
import unittest
class TestableTestProgram(unittest.TestProgram):
module = '__main__'
exit = True
defaultTest = failfast = catchbreak = buffer = None
verbosity = 1
progName = ''
testRunner = testLoader = None
def __init__(self):
pass
class TestDiscovery(unittest.TestCase):
# Heavily mocked tests so I can avoid hitting the filesystem
def test_get_name_from_path(self):
loader = unittest.TestLoader()
loader._top_level_dir = '/foo'
name = loader._get_name_from_path('/foo/bar/baz.py')
self.assertEqual(name, 'bar.baz')
if not __debug__:
# asserts are off
return
with self.assertRaises(AssertionError):
loader._get_name_from_path('/bar/baz.py')
def test_find_tests(self):
loader = unittest.TestLoader()
original_listdir = os.listdir
def restore_listdir():
os.listdir = original_listdir
original_isfile = os.path.isfile
def restore_isfile():
os.path.isfile = original_isfile
original_isdir = os.path.isdir
def restore_isdir():
os.path.isdir = original_isdir
path_lists = [['test1.py', 'test2.py', 'not_a_test.py', 'test_dir',
'test.foo', 'test-not-a-module.py', 'another_dir'],
['test3.py', 'test4.py', ]]
os.listdir = lambda path: path_lists.pop(0)
self.addCleanup(restore_listdir)
def isdir(path):
return path.endswith('dir')
os.path.isdir = isdir
self.addCleanup(restore_isdir)
def isfile(path):
# another_dir is not a package and so shouldn't be recursed into
            return not path.endswith('dir') and 'another_dir' not in path
os.path.isfile = isfile
self.addCleanup(restore_isfile)
loader._get_module_from_name = lambda path: path + ' module'
loader.loadTestsFromModule = lambda module: module + ' tests'
top_level = os.path.abspath('/foo')
loader._top_level_dir = top_level
suite = list(loader._find_tests(top_level, 'test*.py'))
expected = [name + ' module tests' for name in
('test1', 'test2')]
expected.extend([('test_dir.%s' % name) + ' module tests' for name in
('test3', 'test4')])
self.assertEqual(suite, expected)
def test_find_tests_with_package(self):
loader = unittest.TestLoader()
original_listdir = os.listdir
def restore_listdir():
os.listdir = original_listdir
original_isfile = os.path.isfile
def restore_isfile():
os.path.isfile = original_isfile
original_isdir = os.path.isdir
def restore_isdir():
os.path.isdir = original_isdir
directories = ['a_directory', 'test_directory', 'test_directory2']
path_lists = [directories, [], [], []]
os.listdir = lambda path: path_lists.pop(0)
self.addCleanup(restore_listdir)
os.path.isdir = lambda path: True
self.addCleanup(restore_isdir)
os.path.isfile = lambda path: os.path.basename(path) not in directories
self.addCleanup(restore_isfile)
class Module(object):
paths = []
load_tests_args = []
def __init__(self, path):
self.path = path
self.paths.append(path)
if os.path.basename(path) == 'test_directory':
def load_tests(loader, tests, pattern):
self.load_tests_args.append((loader, tests, pattern))
return 'load_tests'
self.load_tests = load_tests
def __eq__(self, other):
return self.path == other.path
loader._get_module_from_name = lambda name: Module(name)
def loadTestsFromModule(module, use_load_tests):
if use_load_tests:
raise self.failureException('use_load_tests should be False for packages')
return module.path + ' module tests'
loader.loadTestsFromModule = loadTestsFromModule
loader._top_level_dir = '/foo'
# this time no '.py' on the pattern so that it can match
# a test package
suite = list(loader._find_tests('/foo', 'test*'))
# We should have loaded tests from the test_directory package by calling load_tests
# and directly from the test_directory2 package
self.assertEqual(suite,
['load_tests', 'test_directory2' + ' module tests'])
self.assertEqual(Module.paths, ['test_directory', 'test_directory2'])
# load_tests should have been called once with loader, tests and pattern
self.assertEqual(Module.load_tests_args,
[(loader, 'test_directory' + ' module tests', 'test*')])
def test_discover(self):
loader = unittest.TestLoader()
original_isfile = os.path.isfile
original_isdir = os.path.isdir
def restore_isfile():
os.path.isfile = original_isfile
os.path.isfile = lambda path: False
self.addCleanup(restore_isfile)
orig_sys_path = sys.path[:]
def restore_path():
sys.path[:] = orig_sys_path
self.addCleanup(restore_path)
full_path = os.path.abspath(os.path.normpath('/foo'))
with self.assertRaises(ImportError):
loader.discover('/foo/bar', top_level_dir='/foo')
self.assertEqual(loader._top_level_dir, full_path)
self.assertIn(full_path, sys.path)
os.path.isfile = lambda path: True
os.path.isdir = lambda path: True
def restore_isdir():
os.path.isdir = original_isdir
self.addCleanup(restore_isdir)
_find_tests_args = []
def _find_tests(start_dir, pattern):
_find_tests_args.append((start_dir, pattern))
return ['tests']
loader._find_tests = _find_tests
loader.suiteClass = str
suite = loader.discover('/foo/bar/baz', 'pattern', '/foo/bar')
top_level_dir = os.path.abspath('/foo/bar')
start_dir = os.path.abspath('/foo/bar/baz')
self.assertEqual(suite, "['tests']")
self.assertEqual(loader._top_level_dir, top_level_dir)
self.assertEqual(_find_tests_args, [(start_dir, 'pattern')])
self.assertIn(top_level_dir, sys.path)
def test_discover_with_modules_that_fail_to_import(self):
loader = unittest.TestLoader()
listdir = os.listdir
os.listdir = lambda _: ['test_this_does_not_exist.py']
isfile = os.path.isfile
os.path.isfile = lambda _: True
orig_sys_path = sys.path[:]
def restore():
os.path.isfile = isfile
os.listdir = listdir
sys.path[:] = orig_sys_path
self.addCleanup(restore)
suite = loader.discover('.')
self.assertIn(os.getcwd(), sys.path)
self.assertEqual(suite.countTestCases(), 1)
test = list(list(suite)[0])[0] # extract test from suite
with self.assertRaises(ImportError):
test.test_this_does_not_exist()
def test_command_line_handling_parseArgs(self):
program = TestableTestProgram()
args = []
def do_discovery(argv):
args.extend(argv)
program._do_discovery = do_discovery
program.parseArgs(['something', 'discover'])
self.assertEqual(args, [])
program.parseArgs(['something', 'discover', 'foo', 'bar'])
self.assertEqual(args, ['foo', 'bar'])
def test_command_line_handling_discover_by_default(self):
program = TestableTestProgram()
program.module = None
self.called = False
def do_discovery(argv):
self.called = True
self.assertEqual(argv, [])
program._do_discovery = do_discovery
program.parseArgs(['something'])
self.assertTrue(self.called)
def test_command_line_handling_discover_by_default_with_options(self):
program = TestableTestProgram()
program.module = None
args = ['something', '-v', '-b', '-v', '-c', '-f']
self.called = False
def do_discovery(argv):
self.called = True
self.assertEqual(argv, args[1:])
program._do_discovery = do_discovery
program.parseArgs(args)
self.assertTrue(self.called)
def test_command_line_handling_do_discovery_too_many_arguments(self):
class Stop(Exception):
pass
def usageExit():
raise Stop
program = TestableTestProgram()
program.usageExit = usageExit
with self.assertRaises(Stop):
# too many args
program._do_discovery(['one', 'two', 'three', 'four'])
def test_command_line_handling_do_discovery_calls_loader(self):
program = TestableTestProgram()
class Loader(object):
args = []
def discover(self, start_dir, pattern, top_level_dir):
self.args.append((start_dir, pattern, top_level_dir))
return 'tests'
program._do_discovery(['-v'], Loader=Loader)
self.assertEqual(program.verbosity, 2)
self.assertEqual(program.test, 'tests')
self.assertEqual(Loader.args, [('.', 'test*.py', None)])
Loader.args = []
program = TestableTestProgram()
program._do_discovery(['--verbose'], Loader=Loader)
self.assertEqual(program.test, 'tests')
self.assertEqual(Loader.args, [('.', 'test*.py', None)])
Loader.args = []
program = TestableTestProgram()
program._do_discovery([], Loader=Loader)
self.assertEqual(program.test, 'tests')
self.assertEqual(Loader.args, [('.', 'test*.py', None)])
Loader.args = []
program = TestableTestProgram()
program._do_discovery(['fish'], Loader=Loader)
self.assertEqual(program.test, 'tests')
self.assertEqual(Loader.args, [('fish', 'test*.py', None)])
Loader.args = []
program = TestableTestProgram()
program._do_discovery(['fish', 'eggs'], Loader=Loader)
self.assertEqual(program.test, 'tests')
self.assertEqual(Loader.args, [('fish', 'eggs', None)])
Loader.args = []
program = TestableTestProgram()
program._do_discovery(['fish', 'eggs', 'ham'], Loader=Loader)
self.assertEqual(program.test, 'tests')
self.assertEqual(Loader.args, [('fish', 'eggs', 'ham')])
Loader.args = []
program = TestableTestProgram()
program._do_discovery(['-s', 'fish'], Loader=Loader)
self.assertEqual(program.test, 'tests')
self.assertEqual(Loader.args, [('fish', 'test*.py', None)])
Loader.args = []
program = TestableTestProgram()
program._do_discovery(['-t', 'fish'], Loader=Loader)
self.assertEqual(program.test, 'tests')
self.assertEqual(Loader.args, [('.', 'test*.py', 'fish')])
Loader.args = []
program = TestableTestProgram()
program._do_discovery(['-p', 'fish'], Loader=Loader)
self.assertEqual(program.test, 'tests')
self.assertEqual(Loader.args, [('.', 'fish', None)])
self.assertFalse(program.failfast)
self.assertFalse(program.catchbreak)
Loader.args = []
program = TestableTestProgram()
program._do_discovery(['-p', 'eggs', '-s', 'fish', '-v', '-f', '-c'],
Loader=Loader)
self.assertEqual(program.test, 'tests')
self.assertEqual(Loader.args, [('fish', 'eggs', None)])
self.assertEqual(program.verbosity, 2)
self.assertTrue(program.failfast)
self.assertTrue(program.catchbreak)
def test_detect_module_clash(self):
class Module(object):
__file__ = 'bar/foo.py'
sys.modules['foo'] = Module
full_path = os.path.abspath('foo')
original_listdir = os.listdir
original_isfile = os.path.isfile
original_isdir = os.path.isdir
def cleanup():
os.listdir = original_listdir
os.path.isfile = original_isfile
os.path.isdir = original_isdir
del sys.modules['foo']
if full_path in sys.path:
sys.path.remove(full_path)
self.addCleanup(cleanup)
def listdir(_):
return ['foo.py']
def isfile(_):
return True
def isdir(_):
return True
os.listdir = listdir
os.path.isfile = isfile
os.path.isdir = isdir
loader = unittest.TestLoader()
mod_dir = os.path.abspath('bar')
expected_dir = os.path.abspath('foo')
msg = re.escape(r"'foo' module incorrectly imported from %r. Expected %r. "
"Is this module globally installed?" % (mod_dir, expected_dir))
self.assertRaisesRegex(
ImportError, '^%s$' % msg, loader.discover,
start_dir='foo', pattern='foo.py'
)
self.assertEqual(sys.path[0], full_path)
def test_discovery_from_dotted_path(self):
loader = unittest.TestLoader()
tests = [self]
expectedPath = os.path.abspath(os.path.dirname(unittest.test.__file__))
self.wasRun = False
def _find_tests(start_dir, pattern):
self.wasRun = True
self.assertEqual(start_dir, expectedPath)
return tests
loader._find_tests = _find_tests
suite = loader.discover('unittest.test')
self.assertTrue(self.wasRun)
self.assertEqual(suite._tests, tests)
if __name__ == '__main__':
unittest.main()
|
Lekanich/intellij-community | refs/heads/master | python/lib/Lib/site-packages/django/contrib/auth/tests/views.py | 71 | import os
import re
import urllib
from django.conf import settings
from django.contrib.auth import SESSION_KEY, REDIRECT_FIELD_NAME
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.sites.models import Site
from django.contrib.auth.models import User
from django.test import TestCase
from django.core import mail
from django.core.urlresolvers import reverse
from django.http import QueryDict
class AuthViewsTestCase(TestCase):
"""
    Helper base class for all the following test cases.
"""
fixtures = ['authtestdata.json']
urls = 'django.contrib.auth.tests.urls'
def setUp(self):
self.old_LANGUAGES = settings.LANGUAGES
self.old_LANGUAGE_CODE = settings.LANGUAGE_CODE
settings.LANGUAGES = (('en', 'English'),)
settings.LANGUAGE_CODE = 'en'
self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
settings.TEMPLATE_DIRS = (
os.path.join(os.path.dirname(__file__), 'templates'),
)
def tearDown(self):
settings.LANGUAGES = self.old_LANGUAGES
settings.LANGUAGE_CODE = self.old_LANGUAGE_CODE
settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS
def login(self, password='password'):
response = self.client.post('/login/', {
'username': 'testclient',
'password': password
}
)
self.assertEquals(response.status_code, 302)
self.assert_(response['Location'].endswith(settings.LOGIN_REDIRECT_URL))
self.assert_(SESSION_KEY in self.client.session)
class PasswordResetTest(AuthViewsTestCase):
def test_email_not_found(self):
"Error is raised if the provided email address isn't currently registered"
response = self.client.get('/password_reset/')
self.assertEquals(response.status_code, 200)
response = self.client.post('/password_reset/', {'email': '[email protected]'})
self.assertContains(response, "That e-mail address doesn't have an associated user account")
self.assertEquals(len(mail.outbox), 0)
def test_email_found(self):
"Email is sent if a valid email address is provided for password reset"
response = self.client.post('/password_reset/', {'email': '[email protected]'})
self.assertEquals(response.status_code, 302)
self.assertEquals(len(mail.outbox), 1)
self.assert_("http://" in mail.outbox[0].body)
self.assertEquals(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
def test_email_found_custom_from(self):
"Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
response = self.client.post('/password_reset_from_email/', {'email': '[email protected]'})
self.assertEquals(response.status_code, 302)
self.assertEquals(len(mail.outbox), 1)
self.assertEquals("[email protected]", mail.outbox[0].from_email)
def _test_confirm_start(self):
# Start by creating the email
response = self.client.post('/password_reset/', {'email': '[email protected]'})
self.assertEquals(response.status_code, 302)
self.assertEquals(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assert_(urlmatch is not None, "No URL found in sent email")
return urlmatch.group(), urlmatch.groups()[0]
def test_confirm_valid(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertEquals(response.status_code, 200)
self.assert_("Please enter your new password" in response.content)
def test_confirm_invalid(self):
url, path = self._test_confirm_start()
# Let's munge the token in the path, but keep the same length,
# in case the URLconf will reject a different length.
path = path[:-5] + ("0"*4) + path[-1]
response = self.client.get(path)
self.assertEquals(response.status_code, 200)
self.assert_("The password reset link was invalid" in response.content)
def test_confirm_invalid_user(self):
        # Ensure that we get a 200 response for a non-existent user, not a 404
response = self.client.get('/reset/123456-1-1/')
self.assertEquals(response.status_code, 200)
self.assert_("The password reset link was invalid" in response.content)
def test_confirm_invalid_post(self):
# Same as test_confirm_invalid, but trying
# to do a POST instead.
url, path = self._test_confirm_start()
path = path[:-5] + ("0"*4) + path[-1]
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2':' anewpassword'})
# Check the password has not been changed
u = User.objects.get(email='[email protected]')
self.assert_(not u.check_password("anewpassword"))
def test_confirm_complete(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
# It redirects us to a 'complete' page:
self.assertEquals(response.status_code, 302)
# Check the password has been changed
u = User.objects.get(email='[email protected]')
self.assert_(u.check_password("anewpassword"))
# Check we can't use the link again
response = self.client.get(path)
self.assertEquals(response.status_code, 200)
self.assert_("The password reset link was invalid" in response.content)
def test_confirm_different_passwords(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2':' x'})
self.assertEquals(response.status_code, 200)
self.assert_("The two password fields didn't match" in response.content)
class ChangePasswordTest(AuthViewsTestCase):
def fail_login(self, password='password'):
response = self.client.post('/login/', {
'username': 'testclient',
'password': password
}
)
self.assertEquals(response.status_code, 200)
self.assert_("Please enter a correct username and password. Note that both fields are case-sensitive." in response.content)
def logout(self):
response = self.client.get('/logout/')
def test_password_change_fails_with_invalid_old_password(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'donuts',
'new_password1': 'password1',
'new_password2': 'password1',
}
)
self.assertEquals(response.status_code, 200)
self.assert_("Your old password was entered incorrectly. Please enter it again." in response.content)
def test_password_change_fails_with_mismatched_passwords(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'donuts',
}
)
self.assertEquals(response.status_code, 200)
self.assert_("The two password fields didn't match." in response.content)
def test_password_change_succeeds(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
}
)
self.assertEquals(response.status_code, 302)
self.assert_(response['Location'].endswith('/password_change/done/'))
self.fail_login()
self.login(password='password1')
class LoginTest(AuthViewsTestCase):
def test_current_site_in_context_after_login(self):
response = self.client.get(reverse('django.contrib.auth.views.login'))
self.assertEquals(response.status_code, 200)
site = Site.objects.get_current()
self.assertEquals(response.context['site'], site)
self.assertEquals(response.context['site_name'], site.name)
self.assert_(isinstance(response.context['form'], AuthenticationForm),
'Login form is not an AuthenticationForm')
def test_security_check(self, password='password'):
login_url = reverse('django.contrib.auth.views.login')
# Those URLs should not pass the security check
for bad_url in ('http://example.com',
'https://example.com',
                        'ftp://example.com',
'//example.com'):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': urllib.quote(bad_url)
}
response = self.client.post(nasty_url, {
'username': 'testclient',
'password': password,
}
)
self.assertEquals(response.status_code, 302)
self.assertFalse(bad_url in response['Location'],
"%s should be blocked" % bad_url)
# These URLs *should* still pass the security check
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
                         '/view?param=ftp://example.com',
'view/?param=//example.com',
'https:///',
'//testserver/'):
safe_url = '%(url)s?%(next)s=%(good_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'good_url': urllib.quote(good_url)
}
response = self.client.post(safe_url, {
'username': 'testclient',
'password': password,
}
)
self.assertEquals(response.status_code, 302)
self.assertTrue(good_url in response['Location'],
"%s should be allowed" % good_url)
class LoginURLSettings(AuthViewsTestCase):
urls = 'django.contrib.auth.tests.urls'
def setUp(self):
super(LoginURLSettings, self).setUp()
self.old_LOGIN_URL = settings.LOGIN_URL
def tearDown(self):
super(LoginURLSettings, self).tearDown()
settings.LOGIN_URL = self.old_LOGIN_URL
def get_login_required_url(self, login_url):
settings.LOGIN_URL = login_url
response = self.client.get('/login_required/')
self.assertEquals(response.status_code, 302)
return response['Location']
def test_standard_login_url(self):
login_url = '/login/'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = '/login_required/'
self.assertEqual(login_required_url,
'http://testserver%s?%s' % (login_url, querystring.urlencode('/')))
def test_remote_login_url(self):
login_url = 'http://remote.example.com/login'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = 'http://testserver/login_required/'
self.assertEqual(login_required_url,
'%s?%s' % (login_url, querystring.urlencode('/')))
def test_https_login_url(self):
login_url = 'https:///login/'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = 'http://testserver/login_required/'
self.assertEqual(login_required_url,
'%s?%s' % (login_url, querystring.urlencode('/')))
def test_login_url_with_querystring(self):
login_url = '/login/?pretty=1'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('pretty=1', mutable=True)
querystring['next'] = '/login_required/'
self.assertEqual(login_required_url, 'http://testserver/login/?%s' %
querystring.urlencode('/'))
def test_remote_login_url_with_next_querystring(self):
login_url = 'http://remote.example.com/login/'
login_required_url = self.get_login_required_url('%s?next=/default/' %
login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = 'http://testserver/login_required/'
self.assertEqual(login_required_url, '%s?%s' % (login_url,
querystring.urlencode('/')))
class LogoutTest(AuthViewsTestCase):
urls = 'django.contrib.auth.tests.urls'
def confirm_logged_out(self):
self.assert_(SESSION_KEY not in self.client.session)
def test_logout_default(self):
"Logout without next_page option renders the default template"
self.login()
response = self.client.get('/logout/')
self.assertEquals(200, response.status_code)
self.assert_('Logged out' in response.content)
self.confirm_logged_out()
def test_14377(self):
# Bug 14377
self.login()
response = self.client.get('/logout/')
self.assertTrue('site' in response.context)
def test_logout_with_next_page_specified(self):
"Logout with next_page option given redirects to specified resource"
self.login()
response = self.client.get('/logout/next_page/')
self.assertEqual(response.status_code, 302)
self.assert_(response['Location'].endswith('/somewhere/'))
self.confirm_logged_out()
def test_logout_with_redirect_argument(self):
"Logout with query string redirects to specified resource"
self.login()
response = self.client.get('/logout/?next=/login/')
self.assertEqual(response.status_code, 302)
self.assert_(response['Location'].endswith('/login/'))
self.confirm_logged_out()
def test_logout_with_custom_redirect_argument(self):
"Logout with custom query string redirects to specified resource"
self.login()
response = self.client.get('/logout/custom_query/?follow=/somewhere/')
self.assertEqual(response.status_code, 302)
self.assert_(response['Location'].endswith('/somewhere/'))
self.confirm_logged_out()
|
radiasoft/pykern | refs/heads/master | tests/pkcompat_test.py | 1 | # -*- coding: utf-8 -*-
u"""pytest for :mod:`pykern.pkcompat`
:copyright: Copyright (c) 2015 Bivio Software, Inc. All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from __future__ import absolute_import, division, print_function
import locale
import os
import pytest
import six
def setup_module():
"""Set locale so can test expected outputs.
TODO(robnagler) should test multiple locales.
"""
# Setting locale as a tuple doesn't work. Not clear this is cross-platform
os.environ['LANG'] = 'en_US.UTF-8'
    locale.setlocale(locale.LC_ALL, '')  # '' applies the LANG value set above
def test_from_bytes():
from pykern import pkcompat
from pykern.pkunit import pkeq
b = pkcompat.to_bytes('你好')
s = pkcompat.from_bytes(b)
pkeq(s, '你好')
pkeq(b, b'\xe4\xbd\xa0\xe5\xa5\xbd')
if six.PY2:
pkeq(b, s)
else:
pkeq(False, b == s)
def test_locale_str_1():
"""Verify proper conversions"""
from pykern import pkcompat
s = pkcompat.locale_str(b'\xc2\xb0')
if six.PY2:
assert isinstance(s, unicode), \
'When locale_str is converted in PY2, it should return unicode'
else:
assert isinstance(s, str), \
'When locale_str is converted in not PY2, it should return str'
assert u'°' == s, \
'Conversion should be same as literal unicode value'
if six.PY2:
before = unicode(b'\xc2\xb0', 'utf8')
assert before == pkcompat.locale_str(before), \
'When string is already unicode, conversion yields same string'
before = str(123)
assert unicode(before) == pkcompat.locale_str(before), \
'When string is already unicode, conversion yields same string'
before = str(None)
assert unicode(before) == pkcompat.locale_str(before), \
'When string is already unicode, conversion yields same string'
else:
before = str(123)
assert before == pkcompat.locale_str(before), \
'When string is already unicode, conversion yields same string'
before = str(None)
assert before == pkcompat.locale_str(before), \
'When string is already unicode, conversion yields same string'
def test_locale_str_2():
"""Invalid utf8"""
from pykern import pkcompat
with pytest.raises(UnicodeDecodeError):
        #TODO(robnagler) set the locale?
pkcompat.locale_str(b'\x80')
def test_unicode_unescape():
from pykern import pkcompat
assert '\n' == pkcompat.unicode_unescape(r'\n')
|
fujunwei/chromium-crosswalk | refs/heads/master | tools/telemetry/telemetry/__init__.py | 49 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A library for cross-platform browser tests."""
import sys
# Ensure Python >= 2.7.
if sys.version_info < (2, 7):
print >> sys.stderr, 'Need Python 2.7 or greater.'
sys.exit(-1)
from telemetry.util import global_hooks
global_hooks.InstallHooks()
|
msmolens/girder | refs/heads/master | tests/js_coverage_tool.py | 3 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2013 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
"""
This module is used for managing javascript coverage data output by the tests
running in the phantom environment. It is used to reset the coverage, as well
as to combine and report it once all of the tests have been run.
"""
import argparse
import collections
import glob
import os
import six
import sys
import time
import xml.etree.cElementTree as ET
def reset(args):
"""
This simply deletes all of the intermediate coverage result files in the
specified directory.
"""
files = glob.glob(os.path.join(args.coverage_dir, '*.cvg'))
for file in files:
os.remove(file)
def combine_report(args):
"""
Combine all of the intermediate coverage files from each js test, and then
report them into the desired output format(s).
"""
if not os.path.exists(args.coverage_dir):
raise Exception('Coverage directory %s does not exist.' %
args.coverage_dir)
# Step 1: Read and combine intermediate reports
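    # Each intermediate .cvg file is a simple line-oriented format (inferred
    # from the parsing below):
    #   F<path>            switches the current source file
    #   L<lineno> <hits>   records a line number and its hit count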
combined = collections.defaultdict(lambda: collections.defaultdict(int))
currentSource = None
files = glob.glob(os.path.join(args.coverage_dir, '*.cvg'))
for file in files:
skip = False
with open(file) as f:
for line in f:
if line[0] == 'F':
path = line[1:].strip()
currentSource = combined[path]
skip = args.skipCore and path.startswith('clients')
elif not skip and line[0] == 'L':
lineNum, hit = [int(x) for x in line[1:].split()]
currentSource[lineNum] |= bool(hit)
# Step 2: Calculate final aggregate and per-file coverage statistics
stats = {
'totalSloc': 0,
'totalHits': 0,
'files': {}
}
for file, lines in six.viewitems(combined):
hits, sloc = 0, 0
for lineNum, hit in six.viewitems(lines):
sloc += 1
hits += hit
stats['totalSloc'] += sloc
stats['totalHits'] += hits
stats['files'][file] = {
'sloc': sloc,
'hits': hits
}
# Step 3: Generate the report
report(args, combined, stats)
def safe_divide(numerator, denominator):
"""
Return numerator / denominator or 0 if denominator <= 0.
"""
numerator = float(numerator)
denominator = float(denominator)
if denominator > 0:
return numerator / denominator
else:
return 0
def report(args, combined, stats):
"""
Generate a cobertura-compliant XML coverage report in the current working
directory.
"""
percent = safe_divide(stats['totalHits'], stats['totalSloc']) * 100
print('Overall total: %s / %s (%.2f%%)' % (
stats['totalHits'], stats['totalSloc'], percent))
coverageEl = ET.Element('coverage', {
'branch-rate': '0',
'line-rate': str(percent / 100),
'version': '3.6',
'timestamp': str(int(time.time()))
})
packagesEl = ET.SubElement(coverageEl, 'packages')
packageEl = ET.SubElement(packagesEl, 'package', {
'branch-rate': '0',
'complexity': '0',
'line-rate': str(percent / 100),
'name': ''
})
classesEl = ET.SubElement(packageEl, 'classes')
for file, data in six.viewitems(combined):
lineRate = safe_divide(stats['files'][file]['hits'],
stats['files'][file]['sloc'])
classEl = ET.SubElement(classesEl, 'class', {
'branch-rate': '0',
'complexity': '0',
'line-rate': str(lineRate),
'filename': file,
'name': file
})
linesEl = ET.SubElement(classEl, 'lines')
ET.SubElement(classEl, 'methods')
for lineNum, hit in six.viewitems(data):
ET.SubElement(linesEl, 'line', {
'number': str(lineNum),
'hits': str(hit)
})
tree = ET.ElementTree(coverageEl)
tree.write('js_coverage.xml')
if percent < args.threshold:
print('FAIL: Coverage below threshold (%s%%)' % args.threshold)
sys.exit(1)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--threshold', type=int, help='The minimum required '
'coverage level, as a percent.', default=0)
parser.add_argument('--source', help='The root directory of the source '
'repository')
parser.add_argument(
'--include-core', dest='skipCore', help='Include core JS files in '
'the coverage calculations', action='store_false')
parser.add_argument(
'--skip-core', dest='skipCore', help='Skip core JS files in the '
'coverage calculations', action='store_true')
parser.set_defaults(skipCore=True)
parser.add_argument('task', help='The task to perform.',
choices=['reset', 'combine_report',
'combine_report_skip'])
parser.add_argument('coverage_dir', help='The directory containing the '
'intermediate coverage files.')
args = parser.parse_args()
if args.task == 'reset':
reset(args)
elif args.task == 'combine_report':
combine_report(args)
|
yamahata/python-tackerclient | refs/heads/tackerclient | tackerclient/shell.py | 1 | # Copyright 2012 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Command-line interface to the Tacker APIs
"""
from __future__ import print_function
import argparse
import logging
import os
import sys
from cliff import app
from cliff import commandmanager
from tackerclient.common import clientmanager
from tackerclient.common import exceptions as exc
from tackerclient.common import utils
from tackerclient.openstack.common.gettextutils import _
from tackerclient.openstack.common import strutils
from tackerclient.tacker.v1_0 import extension
from tackerclient.tacker.v1_0.vm import device
from tackerclient.tacker.v1_0.vm import device_template
from tackerclient.tacker.v1_0.vm import service_instance
from tackerclient.version import __version__
VERSION = '1.0'
TACKER_API_VERSION = '1.0'
def run_command(cmd, cmd_parser, sub_argv):
_argv = sub_argv
index = -1
values_specs = []
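    # A bare '--' splits the argv: everything before it goes through the
    # argparse parser, while everything from the '--' onwards is kept aside
    # and handed to the command as raw value specs.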
if '--' in sub_argv:
index = sub_argv.index('--')
_argv = sub_argv[:index]
values_specs = sub_argv[index:]
known_args, _values_specs = cmd_parser.parse_known_args(_argv)
cmd.values_specs = (index == -1 and _values_specs or values_specs)
return cmd.run(known_args)
def env(*_vars, **kwargs):
"""Search for the first defined of possibly many env vars.
Returns the first environment variable defined in vars, or
returns the default defined in kwargs.
"""
for v in _vars:
value = os.environ.get(v, None)
if value:
return value
return kwargs.get('default', '')
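# Example (illustrative variable names): prefer OS_USERNAME, fall back to
# USER, and default to '' when neither is set:
#   username = env('OS_USERNAME', 'USER')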
COMMAND_V1 = {
'ext-list': extension.ListExt,
'ext-show': extension.ShowExt,
'device-template-create': device_template.CreateDeviceTemplate,
'device-template-list': device_template.ListDeviceTemplate,
'device-template-show': device_template.ShowDeviceTemplate,
'device-template-update': device_template.UpdateDeviceTemplate,
'device-template-delete': device_template.DeleteDeviceTemplate,
'service-instance-create': service_instance.CreateServiceInstance,
'service-instance-list': service_instance.ListServiceInstance,
'service-instance-show': service_instance.ShowServiceInstance,
'service-instance-update': service_instance.UpdateServiceInstance,
'service-instance-delete': service_instance.DeleteServiceInstance,
'device-create': device.CreateDevice,
'device-list': device.ListDevice,
'device-show': device.ShowDevice,
'device-update': device.UpdateDevice,
'device-delete': device.DeleteDevice,
}
COMMANDS = {'1.0': COMMAND_V1}
class HelpAction(argparse.Action):
"""Provide a custom action so the -h and --help options
to the main app will print a list of the commands.
The commands are determined by checking the CommandManager
instance, passed in as the "default" value for the action.
"""
def __call__(self, parser, namespace, values, option_string=None):
outputs = []
max_len = 0
app = self.default
parser.print_help(app.stdout)
app.stdout.write(_('\nCommands for API v%s:\n') % app.api_version)
command_manager = app.command_manager
for name, ep in sorted(command_manager):
factory = ep.load()
cmd = factory(self, None)
one_liner = cmd.get_description().split('\n')[0]
outputs.append((name, one_liner))
max_len = max(len(name), max_len)
for (name, one_liner) in outputs:
app.stdout.write(' %s %s\n' % (name.ljust(max_len), one_liner))
sys.exit(0)
class TackerShell(app.App):
# verbose logging levels
WARNING_LEVEL = 0
INFO_LEVEL = 1
DEBUG_LEVEL = 2
CONSOLE_MESSAGE_FORMAT = '%(message)s'
DEBUG_MESSAGE_FORMAT = '%(levelname)s: %(name)s %(message)s'
log = logging.getLogger(__name__)
def __init__(self, apiversion):
super(TackerShell, self).__init__(
description=__doc__.strip(),
version=VERSION,
command_manager=commandmanager.CommandManager('tacker.cli'), )
self.commands = COMMANDS
for k, v in self.commands[apiversion].items():
self.command_manager.add_command(k, v)
# This is instantiated in initialize_app() only when using
# password flow auth
self.auth_client = None
self.api_version = apiversion
def build_option_parser(self, description, version):
"""Return an argparse option parser for this application.
Subclasses may override this method to extend
the parser with more global options.
:param description: full description of the application
:paramtype description: str
:param version: version number for the application
:paramtype version: str
"""
parser = argparse.ArgumentParser(
description=description,
add_help=False, )
parser.add_argument(
'--version',
action='version',
version=__version__, )
parser.add_argument(
'-v', '--verbose', '--debug',
action='count',
dest='verbose_level',
default=self.DEFAULT_VERBOSE_LEVEL,
help=_('Increase verbosity of output and show tracebacks on'
' errors. You can repeat this option.'))
parser.add_argument(
'-q', '--quiet',
action='store_const',
dest='verbose_level',
const=0,
help=_('Suppress output except warnings and errors'))
parser.add_argument(
'-h', '--help',
action=HelpAction,
nargs=0,
default=self, # tricky
help=_("Show this help message and exit"))
# Global arguments
parser.add_argument(
'--os-auth-strategy', metavar='<auth-strategy>',
default=env('OS_AUTH_STRATEGY', default='keystone'),
help=_('Authentication strategy (Env: OS_AUTH_STRATEGY'
', default keystone). For now, any other value will'
' disable the authentication'))
parser.add_argument(
'--os_auth_strategy',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-auth-url', metavar='<auth-url>',
default=env('OS_AUTH_URL'),
help=_('Authentication URL (Env: OS_AUTH_URL)'))
parser.add_argument(
'--os_auth_url',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-tenant-name', metavar='<auth-tenant-name>',
default=env('OS_TENANT_NAME'),
help=_('Authentication tenant name (Env: OS_TENANT_NAME)'))
parser.add_argument(
'--os_tenant_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-tenant-id', metavar='<auth-tenant-id>',
default=env('OS_TENANT_ID'),
help=_('Authentication tenant ID (Env: OS_TENANT_ID)'))
parser.add_argument(
'--os-username', metavar='<auth-username>',
default=utils.env('OS_USERNAME'),
help=_('Authentication username (Env: OS_USERNAME)'))
parser.add_argument(
'--os_username',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-user-id', metavar='<auth-user-id>',
default=env('OS_USER_ID'),
help=_('Authentication user ID (Env: OS_USER_ID)'))
parser.add_argument(
'--os-password', metavar='<auth-password>',
default=utils.env('OS_PASSWORD'),
help=_('Authentication password (Env: OS_PASSWORD)'))
parser.add_argument(
'--os_password',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-region-name', metavar='<auth-region-name>',
default=env('OS_REGION_NAME'),
help=_('Authentication region name (Env: OS_REGION_NAME)'))
parser.add_argument(
'--os_region_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-token', metavar='<token>',
default=env('OS_TOKEN'),
help=_('Defaults to env[OS_TOKEN]'))
parser.add_argument(
'--os_token',
help=argparse.SUPPRESS)
parser.add_argument(
'--service-type', metavar='<service-type>',
default=env('OS_NETWORK_SERVICE_TYPE', default='network'),
help=_('Defaults to env[OS_NETWORK_SERVICE_TYPE] or network.'))
parser.add_argument(
'--endpoint-type', metavar='<endpoint-type>',
default=env('OS_ENDPOINT_TYPE', default='publicURL'),
help=_('Defaults to env[OS_ENDPOINT_TYPE] or publicURL.'))
parser.add_argument(
'--os-url', metavar='<url>',
default=env('OS_URL'),
help=_('Defaults to env[OS_URL]'))
parser.add_argument(
'--os_url',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-cacert',
metavar='<ca-certificate>',
default=env('OS_CACERT', default=None),
help=_("Specify a CA bundle file to use in "
"verifying a TLS (https) server certificate. "
"Defaults to env[OS_CACERT]"))
parser.add_argument(
'--insecure',
action='store_true',
default=env('TACKERCLIENT_INSECURE', default=False),
help=_("Explicitly allow tackerclient to perform \"insecure\" "
"SSL (https) requests. The server's certificate will "
"not be verified against any certificate authorities. "
"This option should be used with caution."))
return parser
def _bash_completion(self):
"""Prints all of the commands and options for bash-completion."""
commands = set()
options = set()
for option, _action in self.parser._option_string_actions.items():
options.add(option)
for command_name, command in self.command_manager:
commands.add(command_name)
cmd_factory = command.load()
cmd = cmd_factory(self, None)
cmd_parser = cmd.get_parser('')
for option, _action in cmd_parser._option_string_actions.items():
options.add(option)
print(' '.join(commands | options))
def run(self, argv):
"""Equivalent to the main program for the application.
:param argv: input arguments and options
:paramtype argv: list of str
"""
try:
index = 0
command_pos = -1
help_pos = -1
help_command_pos = -1
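            # Scan argv once, remembering where the first subcommand, the
            # first -h/--help flag, and the first bare 'help' appear, so that
            # 'cmd --help' can be rewritten as 'help cmd' below.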
for arg in argv:
if arg == 'bash-completion':
self._bash_completion()
return 0
if arg in self.commands[self.api_version]:
if command_pos == -1:
command_pos = index
elif arg in ('-h', '--help'):
if help_pos == -1:
help_pos = index
elif arg == 'help':
if help_command_pos == -1:
help_command_pos = index
index = index + 1
if command_pos > -1 and help_pos > command_pos:
argv = ['help', argv[command_pos]]
if help_command_pos > -1 and command_pos == -1:
argv[help_command_pos] = '--help'
self.options, remainder = self.parser.parse_known_args(argv)
self.configure_logging()
self.interactive_mode = not remainder
self.initialize_app(remainder)
except Exception as err:
if self.options.verbose_level == self.DEBUG_LEVEL:
self.log.exception(unicode(err))
raise
else:
self.log.error(unicode(err))
return 1
result = 1
if self.interactive_mode:
_argv = [sys.argv[0]]
sys.argv = _argv
result = self.interact()
else:
result = self.run_subcommand(remainder)
return result
def run_subcommand(self, argv):
subcommand = self.command_manager.find_command(argv)
cmd_factory, cmd_name, sub_argv = subcommand
cmd = cmd_factory(self, self.options)
err = None
result = 1
try:
self.prepare_to_run_command(cmd)
full_name = (cmd_name
if self.interactive_mode
else ' '.join([self.NAME, cmd_name])
)
cmd_parser = cmd.get_parser(full_name)
return run_command(cmd, cmd_parser, sub_argv)
except Exception as err:
if self.options.verbose_level == self.DEBUG_LEVEL:
self.log.exception(unicode(err))
else:
self.log.error(unicode(err))
try:
self.clean_up(cmd, result, err)
except Exception as err2:
if self.options.verbose_level == self.DEBUG_LEVEL:
self.log.exception(unicode(err2))
else:
self.log.error(_('Could not clean up: %s'), unicode(err2))
if self.options.verbose_level == self.DEBUG_LEVEL:
raise
else:
try:
self.clean_up(cmd, result, None)
except Exception as err3:
if self.options.verbose_level == self.DEBUG_LEVEL:
self.log.exception(unicode(err3))
else:
self.log.error(_('Could not clean up: %s'), unicode(err3))
return result
def authenticate_user(self):
"""Make sure the user has provided all of the authentication
info we need.
"""
if self.options.os_auth_strategy == 'keystone':
if self.options.os_token or self.options.os_url:
# Token flow auth takes priority
if not self.options.os_token:
raise exc.CommandError(
_("You must provide a token via"
" either --os-token or env[OS_TOKEN]"))
if not self.options.os_url:
raise exc.CommandError(
_("You must provide a service URL via"
" either --os-url or env[OS_URL]"))
else:
# Validate password flow auth
if (not self.options.os_username
and not self.options.os_user_id):
raise exc.CommandError(
_("You must provide a username or user ID via"
" --os-username, env[OS_USERNAME] or"
" --os-user_id, env[OS_USER_ID]"))
if not self.options.os_password:
raise exc.CommandError(
_("You must provide a password via"
" either --os-password or env[OS_PASSWORD]"))
if (not self.options.os_tenant_name
and not self.options.os_tenant_id):
raise exc.CommandError(
_("You must provide a tenant_name or tenant_id via"
" --os-tenant-name, env[OS_TENANT_NAME]"
" --os-tenant-id, or via env[OS_TENANT_ID]"))
if not self.options.os_auth_url:
raise exc.CommandError(
_("You must provide an auth url via"
" either --os-auth-url or via env[OS_AUTH_URL]"))
else: # not keystone
if not self.options.os_url:
raise exc.CommandError(
_("You must provide a service URL via"
" either --os-url or env[OS_URL]"))
self.client_manager = clientmanager.ClientManager(
token=self.options.os_token,
url=self.options.os_url,
auth_url=self.options.os_auth_url,
tenant_name=self.options.os_tenant_name,
tenant_id=self.options.os_tenant_id,
username=self.options.os_username,
user_id=self.options.os_user_id,
password=self.options.os_password,
region_name=self.options.os_region_name,
api_version=self.api_version,
auth_strategy=self.options.os_auth_strategy,
service_type=self.options.service_type,
endpoint_type=self.options.endpoint_type,
insecure=self.options.insecure,
ca_cert=self.options.os_cacert,
log_credentials=True)
return
def initialize_app(self, argv):
"""Global app init bits:
* set up API versions
* validate authentication info
"""
super(TackerShell, self).initialize_app(argv)
self.api_version = {'network': self.api_version}
# If the user is not asking for help, make sure they
# have given us auth.
cmd_name = None
if argv:
cmd_info = self.command_manager.find_command(argv)
cmd_factory, cmd_name, sub_argv = cmd_info
if self.interactive_mode or cmd_name != 'help':
self.authenticate_user()
def clean_up(self, cmd, result, err):
self.log.debug('clean_up %s', cmd.__class__.__name__)
if err:
self.log.debug(_('Got an error: %s'), unicode(err))
def configure_logging(self):
"""Create logging handlers for any log output."""
root_logger = logging.getLogger('')
# Set up logging to a file
root_logger.setLevel(logging.DEBUG)
# Send higher-level messages to the console via stderr
console = logging.StreamHandler(self.stderr)
console_level = {self.WARNING_LEVEL: logging.WARNING,
self.INFO_LEVEL: logging.INFO,
self.DEBUG_LEVEL: logging.DEBUG,
}.get(self.options.verbose_level, logging.DEBUG)
console.setLevel(console_level)
if logging.DEBUG == console_level:
formatter = logging.Formatter(self.DEBUG_MESSAGE_FORMAT)
else:
formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT)
console.setFormatter(formatter)
root_logger.addHandler(console)
return
def main(argv=sys.argv[1:]):
try:
return TackerShell(TACKER_API_VERSION).run(map(strutils.safe_decode,
argv))
except exc.TackerClientException:
return 1
except Exception as e:
print(unicode(e))
return 1
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
|
bryanveloso/avalonstar-tv | refs/heads/master | avalonstar/settings/development.py | 1 | # -*- coding: utf-8 -*-
from configurations import values
from .base import Base as Settings
class Development(Settings):
MIDDLEWARE_CLASSES = Settings.MIDDLEWARE_CLASSES
# Site Configuration.
# --------------------------------------------------------------------------
ALLOWED_HOSTS = ['*']
# Debug Settings.
# --------------------------------------------------------------------------
DEBUG = values.BooleanValue(True)
# Static File Configuration.
# --------------------------------------------------------------------------
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Media Storage Configuration.
# --------------------------------------------------------------------------
CDN_DOMAIN = 'http://avalonstar-tv.s3.amazonaws.com'
MEDIA_URL = '%s/' % (CDN_DOMAIN)
# django-cors-headers
# --------------------------------------------------------------------------
CORS_ORIGIN_ALLOW_ALL = True
|
linjoahow/W16_test1 | refs/heads/master | static/Brython3.1.3-20150514-095342/Lib/multiprocessing/dummy/__init__.py | 693 | #
# Support for the API of the multiprocessing package using threads
#
# multiprocessing/dummy/__init__.py
#
# Copyright (c) 2006-2008, R Oudkerk
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of author nor the names of any contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
__all__ = [
'Process', 'current_process', 'active_children', 'freeze_support',
'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition',
'Event', 'Barrier', 'Queue', 'Manager', 'Pipe', 'Pool', 'JoinableQueue'
]
#
# Imports
#
import threading
import sys
import weakref
#brython fix me
#import array
from multiprocessing.dummy.connection import Pipe
from threading import Lock, RLock, Semaphore, BoundedSemaphore
from threading import Event, Condition, Barrier
from queue import Queue
#
#
#
class DummyProcess(threading.Thread):
def __init__(self, group=None, target=None, name=None, args=(), kwargs={}):
threading.Thread.__init__(self, group, target, name, args, kwargs)
self._pid = None
self._children = weakref.WeakKeyDictionary()
self._start_called = False
self._parent = current_process()
def start(self):
assert self._parent is current_process()
self._start_called = True
if hasattr(self._parent, '_children'):
self._parent._children[self] = None
threading.Thread.start(self)
@property
def exitcode(self):
if self._start_called and not self.is_alive():
return 0
else:
return None
#
#
#
Process = DummyProcess
current_process = threading.current_thread
current_process()._children = weakref.WeakKeyDictionary()
def active_children():
children = current_process()._children
for p in list(children):
if not p.is_alive():
children.pop(p, None)
return list(children)
def freeze_support():
pass
#
#
#
class Namespace(object):
def __init__(self, **kwds):
self.__dict__.update(kwds)
def __repr__(self):
items = list(self.__dict__.items())
temp = []
for name, value in items:
if not name.startswith('_'):
temp.append('%s=%r' % (name, value))
temp.sort()
return 'Namespace(%s)' % str.join(', ', temp)
dict = dict
list = list
#brython fix me
#def Array(typecode, sequence, lock=True):
# return array.array(typecode, sequence)
class Value(object):
def __init__(self, typecode, value, lock=True):
self._typecode = typecode
self._value = value
def _get(self):
return self._value
def _set(self, value):
self._value = value
value = property(_get, _set)
def __repr__(self):
return '<%r(%r, %r)>'%(type(self).__name__,self._typecode,self._value)
def Manager():
return sys.modules[__name__]
def shutdown():
pass
def Pool(processes=None, initializer=None, initargs=()):
from multiprocessing.pool import ThreadPool
return ThreadPool(processes, initializer, initargs)
JoinableQueue = Queue
|
bluedynamics/node.ext.python | refs/heads/master | src/node/ext/python/goparser.py | 1 | # -*- coding: utf-8 -*-
# Copyright 2009, BlueDynamics Alliance - http://bluedynamics.com
# GNU General Public License Version 2
# Georg Gogo. BERNHARD [email protected]
#
import os
import re
import ast
import _ast
import sys
import compiler
class metanode(object):
def __init__(
self,
parent,
astnode,
sourcelines = None,
startline = None,
endline = None,
indent = None,
offset = None,
stuff = None,
        ):
        """Stores additional info about an ast node.
"""
self.parent = parent
self.children = []
self.astnode = astnode
self.sourcelines = sourcelines
self.startline = startline
self.endline = endline
self.indent = indent
self.stuff = stuff
self.offset = offset
if parent != None:
parent.children.append(self)
self.correct()
    def get_sourcelines(self):
        """Returns the lines of code associated with the node.
        """
        return self.sourcelines[self.startline:self.endline+1]
def strip_comments(self, sourceline):
"""Returns a line without comments, rstripped.
"""
stripped = re.sub('("""|\'\'\'|"|\'|)(.*?)\\1.*?#(.*)', '\\1\\2\\1',
sourceline).rstrip()
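        # (the regex above captures an optional quote pair and its contents,
        # then drops everything from an unquoted '#' onwards, so a '#' inside
        # a quoted string survives)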
return stripped
def is_empty(self, sourceline):
"""Tests if a source line contains just whitespace.
"""
if sourceline.strip() == '':
return True
return False
def remove_trailing_blanklines(self):
s = self.startline
e = self.endline
while e > s and self.is_empty(self.sourcelines[e]):
e -= 1
self.endline = e
    def handle_upside_down_ness(self):
        """Corrects startlines that happen to come after endlines.
        """
if self.startline > self.endline:
self.startline, self.endline = self.endline, self.startline
def correct_docstrings(self):
"""Fixes endlines of docstrings.
"""
quotings = ['"""', "'''", "'", '"']
found = None
e = self.endline
while self.is_empty(self.strip_comments(self.sourcelines[e])):
e -= 1
lastline = self.sourcelines[e]
for quoting in quotings:
if lastline.rfind(quoting) != -1:
found = quoting
break
self.quoting = found
s = self.startline
e = self.endline
block = '\n'.join(self.sourcelines[s:e + 1])
while s >= 0 and len(re.findall(found, block)) <= 1:
s -= 1
block = '\n'.join(self.sourcelines[s:e + 1])
self.startline = s
def correct_decorators(self):
"""Decorators should not include the function definition start,
function definitions should not include the decorators.
"""
for i in xrange(self.startline, self.endline+1):
if len(re.findall("^\s*def\s", self.sourcelines[i])) == 1:
self.endline = i - 1
self.parent.startline = i
break
def correct_col_offset(self):
"""Fixes col_offset issues where it would be -1 for multiline strings.
"""
blanks = re.findall("^\s*", self.sourcelines[self.startline])[0]
self.astnode.col_offset = len(blanks)
    def correct(self):
        """Fixes ast issues.
        """
self.handle_upside_down_ness()
self.remove_trailing_blanklines()
# Deal with wrong start for Docstrings:
if isinstance(self.astnode, _ast.Expr) and \
isinstance(self.astnode.value, _ast.Str):
self.correct_docstrings()
# Deal with decorator line numbers:
if (isinstance(self.astnode, _ast.Call) and \
isinstance(self.parent.astnode, _ast.FunctionDef)) \
or (isinstance(self.astnode, _ast.Name) and \
isinstance(self.parent.astnode, _ast.FunctionDef)):
self.correct_decorators()
# Multiline expressions have wrong col_offset
if isinstance(self.astnode, _ast.Expr) and \
self.astnode.col_offset < 0:
self.correct_col_offset()
def codelines(self):
"""Returns the lines of code that are associated with the node.
"""
return self.sourcelines[self.startline:self.endline+1]
def __repr__(self):
"""Returns a nodes representation.
"""
return "%s (%s-%s)" % ( \
self.astnode.__class__.__name__,
self.startline+self.offset,
self.endline+self.offset,
)
def dump(self):
"""Nice for debugging.
"""
print "--- %d (%d) %s (parent: %s)" % (
self.indent,
self.astnode.col_offset,
repr(self),
repr(self.parent),
)
# print "--- %d (%d)/%03d-%03d/ %s (parent: %s)" % (
# self.indent,
# self.astnode.col_offset,
# self.startline+self.offset,
# self.endline+self.offset,
# # self.astnode.__class__.__name__,
# repr(self),
# repr(self.parent),
# )
# # import pdb;pdb.set_trace()
# print "--- %d (%d) %s" % (
# self.indent,
# self.astnode.col_offset,
# # self.astnode.__class__.__name__,
# repr(self.astnode),
# ),
# for field in self.astnode._fields:
# print "%s:%s " % (field, repr(getattr(self.astnode, field, '-')),),
# print "Parent:", repr(self.parent)
for l in xrange(self.startline, self.endline+1):
print "%03d:%s" % (l + self.offset, repr(self.sourcelines[l])[1:-1])
class GoParser(object):
def __init__(self, source, filename):
"""Creates a parser object.
"""
self.source = source
self.filename = filename
self.removeblanks()
self.nodes = []
    def walk(self, parent, nodes, start, end, ind):
        """Iterates over the nodes of the abstract syntax tree.
        """
# try:
nodecount = len(nodes)
# except TypeError:
# # print "avoiding %s - no lineno - break!" % repr(nodes)
# return
for i in xrange(nodecount):
current = nodes[i]
if not hasattr(current, 'lineno'):
# print "avoiding %s - no lineno - break!" % repr(current)
continue
if i < (nodecount - 1):
if nodes[i + 1].lineno != current.lineno:
nend = nodes[i + 1].lineno - 1
else:
nend = nodes[i + 1].lineno
else:
nend = end
start = current.lineno
mnode = metanode(
parent=parent,
astnode=current,
sourcelines=self.lines,
startline=start,
endline=nend,
indent=ind,
offset=self.offset,
stuff=None,
)
mnode.dump()
            if parent is None:
self.nodes.append(mnode)
next_set = []
for field in current._fields:
next_item = getattr(current, field, None)
                if isinstance(next_item, list):
for item in getattr(current, field, []):
if hasattr(item, 'lineno'):
next_set.append([item.lineno, item])
next_set.sort()
next_set = [i[1] for i in next_set]
self.walk(mnode, next_set, start, nend, ind + 1)
# if hasattr(current, 'body'):
# self.walk(current.body, start, nend, ind + 1)
# # if hasattr(current, 'handlers'):
# # self.walk(current.handlers, start, nend, ind + 1)
def removeblanks(self):
"""Removes trailing blank lines and rstrips source code. This
function sets the offset to use to correct the indexing.
"""
# Strip trailing blanks in lines
self.lines = [i.rstrip() for i in self.source.split(os.linesep)]
self.source = os.linesep.join(self.lines)
# Count number of lines before removing heading blanks
before = len(self.lines)
# Remove heading blanks
self.source = self.source.lstrip()
# Count number of lines after removing heading blanks
self.lines = self.source.split(os.linesep)
after = len(self.lines)
# Remove trailing blanks lines
self.source = self.source.rstrip()
self.lines = self.source.split(os.linesep)
self.startline = 0
self.offset = (before - after)
self.endline = len(self.lines)
def parsegen(self):
"""Reads the input file, parses it and calls a generator method on
each node.
"""
astt = ast.parse(self.source, self.filename)
self.lines = [''] + self.lines
self.walk(None, astt.body, 1, self.endline, 0)
def main(filename):
"""The module can be called with a filename of a python file for testing.
"""
fileinst = open(filename,'r')
source = fileinst.read()
fileinst.close()
P = GoParser(source, filename)
P.parsegen()
print repr(P.nodes)
if __name__ == '__main__':
if len(sys.argv) == 1:
filename = __file__
else:
filename = sys.argv[1]
main(filename)
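# Example invocation (assuming this module is saved as goparser.py):
#
#     $ python goparser.py some_module.py
#
# This parses some_module.py, dumps each AST node with its corrected
# start/end lines and source, and finally prints the top-level node list.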
|
BorisJeremic/Real-ESSI-Examples | refs/heads/master | analytic_solution/test_cases/Contact/Stress_Based_Contact_Verification/HardContact_ElPPlShear/Area/A_1e-2/Normalized_Shear_Stress_Plot.py | 48 | #!/usr/bin/python
import h5py
import matplotlib.pyplot as plt
import matplotlib as mpl
import sys
import numpy as np
plt.rcParams.update({'font.size': 28})
# set tick width
mpl.rcParams['xtick.major.size'] = 10
mpl.rcParams['xtick.major.width'] = 5
mpl.rcParams['xtick.minor.size'] = 10
mpl.rcParams['xtick.minor.width'] = 5
plt.rcParams['xtick.labelsize']=24
mpl.rcParams['ytick.major.size'] = 10
mpl.rcParams['ytick.major.width'] = 5
mpl.rcParams['ytick.minor.size'] = 10
mpl.rcParams['ytick.minor.width'] = 5
plt.rcParams['ytick.labelsize']=24
###############################################################
## Analytical Solution
###############################################################
# Go over each feioutput and plot each one.
thefile = "Analytical_Solution_Shear.feioutput";
finput = h5py.File(thefile)
# Read the time and displacement
times = finput["time"][:]
shear_strain_x = finput["/Model/Elements/Element_Outputs"][4,:]
shear_strain_y = finput["/Model/Elements/Element_Outputs"][5,:]
shear_stress_x = finput["/Model/Elements/Element_Outputs"][7,:]
shear_stress_y = finput["/Model/Elements/Element_Outputs"][8,:]
normal_stress = -finput["/Model/Elements/Element_Outputs"][9,:]
shear_strain = np.sqrt(shear_strain_x*shear_strain_x + shear_strain_y*shear_strain_y)
shear_stress = np.sqrt(shear_stress_x*shear_stress_x + shear_stress_y*shear_stress_y)
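# Only the x components are used below, overriding the magnitudes computed
# above (presumably because the tangential load is applied along x).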
shear_stress = shear_stress_x
shear_strain = shear_strain_x
# Configure the figure filename, according to the input filename.
outfig=thefile.replace("_","-")
outfigname=outfig.replace("h5.feioutput","pdf")
# Plot the figure. Add labels and titles.
plt.figure(figsize=(12,10))
plt.plot(shear_strain*5, shear_stress/normal_stress, '-r', label='Analytical Solution', linewidth=4)
plt.xlabel(r"Shear Displacement $\Delta_t [mm]$")
plt.ylabel(r"Normalized Shear Stress $\tau/\sigma_n$")
###############################################################
## Numerical Solution
###############################################################
# Go over each feioutput and plot each one.
thefile = "Monotonic_Contact_Behaviour_Adding_Tangential_Load.h5.feioutput";
finput = h5py.File(thefile)
# Read the time and displacement
times = finput["time"][:]
shear_strain_x = finput["/Model/Elements/Element_Outputs"][4,:]
shear_strain_y = finput["/Model/Elements/Element_Outputs"][5,:]
shear_stress_x = finput["/Model/Elements/Element_Outputs"][7,:]
shear_stress_y = finput["/Model/Elements/Element_Outputs"][8,:]
normal_stress = -finput["/Model/Elements/Element_Outputs"][9,:]
shear_strain = np.sqrt(shear_strain_x*shear_strain_x + shear_strain_y*shear_strain_y)
shear_stress = np.sqrt(shear_stress_x*shear_stress_x + shear_stress_y*shear_stress_y)
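# As above, keep only the x components for plotting.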
shear_stress = shear_stress_x
shear_strain = shear_strain_x
# Configure the figure filename, according to the input filename.
outfig=thefile.replace("_","-")
outfigname=outfig.replace("h5.feioutput","pdf")
# Plot the figure. Add labels and titles.
plt.plot(shear_strain*5, shear_stress/normal_stress, '-k', label='Numerical Solution', linewidth=4)
plt.xlabel(r"Shear Displacement $\Delta_t [mm]$")
plt.ylabel(r"Normalized Shear Stress $\tau/\sigma_n$")
########################################################
# # axes = plt.gca()
# # axes.set_xlim([-7,7])
# # axes.set_ylim([-1,1])
outfigname = "Normalized_Shear_Stress.pdf";
legend = plt.legend()
legend.get_frame().set_linewidth(0.0)
legend.get_frame().set_facecolor('none')
plt.savefig(outfigname, bbox_inches='tight')
# plt.show()
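# The analytical and numerical blocks above differ only in the input file; a
# possible refactoring (a sketch, not part of the original script):
#
#     def read_shear(path):
#         out = h5py.File(path)["/Model/Elements/Element_Outputs"]
#         return out[4,:], out[7,:], -out[9,:]  # strain_x, stress_x, normal
#
#     strain, stress, normal = read_shear("Analytical_Solution_Shear.feioutput")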
|
p4datasystems/CarnotKE | refs/heads/master | jyhton/lib-python/2.7/encodings/shift_jis.py | 816 | #
# shift_jis.py: Python Unicode Codec for SHIFT_JIS
#
# Written by Hye-Shik Chang <[email protected]>
#
import _codecs_jp, codecs
import _multibytecodec as mbc
codec = _codecs_jp.getcodec('shift_jis')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='shift_jis',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
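# Typical use goes through the codecs registry rather than this module
# directly, e.g. (illustrative):
#
#     import codecs
#     codecs.lookup('shift_jis').name       # -> 'shift_jis'
#     u'\u65e5\u672c'.encode('shift_jis')   # -> '\x93\xfa\x96\x7b'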
|
kura/blackhole | refs/heads/master | tests/test_worker_child_communication.py | 1 | # -*- coding: utf-8 -*-
# (The MIT License)
#
# Copyright (c) 2013-2020 Kura
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the 'Software'), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import asyncio
import socket
import time
import pytest
from blackhole.control import server
from blackhole.worker import Worker
from ._utils import ( # noqa: F401; isort:skip
Args,
cleandir,
create_config,
create_file,
reset,
)
try:
import uvloop # noqa
except ImportError:
pass
@pytest.mark.usefixtures("reset", "cleandir")
@pytest.mark.asyncio
@pytest.mark.slow
async def test_worker_ping_pong(unused_tcp_port, event_loop):
aserver = server("127.0.0.1", unused_tcp_port, socket.AF_INET)
started = time.monotonic()
worker = Worker("1", [aserver], loop=event_loop)
assert worker._started is True
await asyncio.sleep(35)
worker.stop()
assert worker._started is False
assert worker.ping > started
assert worker.ping_count == 2
aserver["sock"].close()
@pytest.mark.usefixtures("reset", "cleandir")
@pytest.mark.asyncio
@pytest.mark.slow
async def test_restart(unused_tcp_port, event_loop):
aserver = server("127.0.0.1", unused_tcp_port, socket.AF_INET)
started = time.monotonic()
worker = Worker("1", [aserver], loop=event_loop)
assert worker._started is True
await asyncio.sleep(25)
worker.ping = time.monotonic() - 120
old_pid = worker.pid
await asyncio.sleep(15)
assert worker.pid is not old_pid
worker.stop()
assert worker._started is False
assert worker.ping > started
assert worker.ping_count == 0
aserver["sock"].close()
|
sfpprxy/py-reminder | refs/heads/master | libs/contrib/geoa/form.py | 44 | from flask_admin.model.form import converts
from flask_admin.contrib.sqla.form import AdminModelConverter as SQLAAdminConverter
from .fields import GeoJSONField
class AdminModelConverter(SQLAAdminConverter):
@converts('Geography', 'Geometry')
def convert_geom(self, column, field_args, **extra):
field_args['geometry_type'] = column.type.geometry_type
field_args['srid'] = column.type.srid
field_args['session'] = self.session
return GeoJSONField(**field_args)
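# Intended wiring (sketch with a hypothetical model view): a SQLAlchemy
# ModelView picks this converter up via `model_form_converter`, so Geometry
# and Geography columns render as GeoJSON map fields:
#
#     class CityView(ModelView):
#         model_form_converter = AdminModelConverter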
|
justajeffy/arsenalsuite | refs/heads/master | cpp/lib/PyQt4/pyuic/uic/autoconnect.py | 11 | from PyQt4 import QtCore
from itertools import ifilter
def is_autoconnect_slot((name, attr)):
return callable(attr) and name.startswith("on_")
def signals(child):
meta = child.metaObject()
for idx in xrange(meta.methodOffset(),
meta.methodOffset() + meta.methodCount()):
methodinfo = meta.method(idx)
if methodinfo.methodType() == QtCore.QMetaMethod.Signal:
yield methodinfo
def connectSlotsByName(ui_impl):
for name, slot in ifilter(is_autoconnect_slot,
ui_impl.__class__.__dict__.iteritems()):
try:
# is it safe to assume that there are
# never underscores in signals?
idx = name.rindex("_")
objectname, signalname = name[3:idx], name[idx+1:]
child = ui_impl.findChild(QtCore.QObject, objectname)
            assert child is not None
for signal in signals(child):
if signal.signature().startswith(signalname):
QtCore.QObject.connect(child,
QtCore.SIGNAL(signal.signature()),
getattr(ui_impl, name))
break
except:
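            # The method name didn't match the on_<object>_<signal> pattern,
            # or the child lookup/connect failed; skip it silently.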
pass
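# Naming convention illustrated (hypothetical widget and handler names):
#
#     class MyDialog(QtGui.QDialog, Ui_Dialog):
#         def on_okButton_clicked(self):
#             ...   # auto-connected to the clicked() signal of okButton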
|
Mhynlo/SickRage | refs/heads/master | lib/enzyme/exceptions.py | 76 | # -*- coding: utf-8 -*-
__all__ = ['Error', 'MalformedMKVError', 'ParserError', 'ReadError', 'SizeError']
class Error(Exception):
"""Base class for enzyme exceptions"""
pass
class MalformedMKVError(Error):
"""Wrong or malformed element found"""
pass
class ParserError(Error):
"""Base class for exceptions in parsers"""
pass
class ReadError(ParserError):
"""Unable to correctly read"""
pass
class SizeError(ParserError):
"""Mismatch between the type of the element and the size of its data"""
pass
|
TEDICpy/write-it | refs/heads/master | mailit/migrations/0003_auto__add_field_mailittemplate_content_html_template.py | 2 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'MailItTemplate.content_html_template'
db.add_column(u'mailit_mailittemplate', 'content_html_template',
self.gf('django.db.models.fields.TextField')(default='Hello {{ person }}: <br />\nYou have a new message: <br />\n<strong>subject:</strong> {{ subject }} <br />\n<strong>content:</strong> {{ content }} <br />\n\n\nIf you want to see all the other messages please visit {{ writeit_url }}.<br />\nSeeya<br />\n--<br /><br />\nYou writeIt and we deliverit.'),
keep_default=False)
def backwards(self, orm):
# Deleting field 'MailItTemplate.content_html_template'
db.delete_column(u'mailit_mailittemplate', 'content_html_template')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contactos.contact': {
'Meta': {'object_name': 'Contact'},
'contact_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contactos.ContactType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_bounced': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'contacts'", 'to': u"orm['auth.User']"}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['popit.Person']"}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '512'})
},
u'contactos.contacttype': {
'Meta': {'object_name': 'ContactType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'mailit.bouncedmessagerecord': {
'Meta': {'object_name': 'BouncedMessageRecord'},
'bounce_text': ('django.db.models.fields.TextField', [], {}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'outbound_message': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['nuntium.OutboundMessage']", 'unique': 'True'})
},
u'mailit.mailittemplate': {
'Meta': {'object_name': 'MailItTemplate'},
'content_html_template': ('django.db.models.fields.TextField', [], {'default': "'Hello {{ person }}: <br />\\nYou have a new message: <br />\\n<strong>subject:</strong> {{ subject }} <br />\\n<strong>content:</strong> {{ content }} <br />\\n\\n\\nIf you want to see all the other messages please visit {{ writeit_url }}.<br />\\nSeeya<br />\\n--<br /><br />\\nYou writeIt and we deliverit.'"}),
'content_template': ('django.db.models.fields.TextField', [], {'default': "'Hello {{ person }}:\\nYou have a new message:\\nsubject: {{ subject }} \\ncontent: {{ content }}\\n\\n\\nIf you want to see all the other messages please visit {{ writeit_url }}.\\nSeeya\\n--\\nYou writeIt and we deliverit.'"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'subject_template': ('django.db.models.fields.CharField', [], {'default': "'[WriteIT] Message: %(subject)s'", 'max_length': '255'}),
'writeitinstance': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'mailit_template'", 'unique': 'True', 'to': u"orm['nuntium.WriteItInstance']"})
},
u'nuntium.membership': {
'Meta': {'object_name': 'Membership'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['popit.Person']"}),
'writeitinstance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['nuntium.WriteItInstance']"})
},
u'nuntium.message': {
'Meta': {'object_name': 'Message'},
'author_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'author_name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'confirmated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'content': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderated': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'writeitinstance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['nuntium.WriteItInstance']"})
},
u'nuntium.outboundmessage': {
'Meta': {'object_name': 'OutboundMessage'},
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contactos.Contact']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['nuntium.Message']"}),
'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': "'10'"})
},
u'nuntium.writeitinstance': {
'Meta': {'object_name': 'WriteItInstance'},
'allow_messages_using_form': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'autoconfirm_api_messages': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderation_needed_in_all_messages': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notify_owner_when_new_answer': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'writeitinstances'", 'to': u"orm['auth.User']"}),
'persons': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'writeit_instances'", 'symmetrical': 'False', 'through': u"orm['nuntium.Membership']", 'to': u"orm['popit.Person']"}),
'rate_limiter': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': "'name'", 'unique_with': '()'})
},
u'popit.apiinstance': {
'Meta': {'object_name': 'ApiInstance'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'url': ('popit.fields.ApiInstanceURLField', [], {'unique': 'True', 'max_length': '200'})
},
u'popit.person': {
'Meta': {'object_name': 'Person'},
'api_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['popit.ApiInstance']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'popit_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'popit_url': ('popit.fields.PopItURLField', [], {'default': "''", 'max_length': '200', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'})
}
}
complete_apps = ['mailit'] |
supergentle/migueltutorial | refs/heads/master | flask/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree.py | 915 | from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type
import re
from . import _base
from .. import ihatexml
from .. import constants
from ..constants import namespaces
from ..utils import moduleFactoryFactory
tag_regexp = re.compile("{([^}]*)}(.*)")
def getETreeBuilder(ElementTreeImplementation, fullTree=False):
ElementTree = ElementTreeImplementation
ElementTreeCommentType = ElementTree.Comment("asd").tag
class Element(_base.Node):
def __init__(self, name, namespace=None):
self._name = name
self._namespace = namespace
self._element = ElementTree.Element(self._getETreeTag(name,
namespace))
if namespace is None:
self.nameTuple = namespaces["html"], self._name
else:
self.nameTuple = self._namespace, self._name
self.parent = None
self._childNodes = []
self._flags = []
def _getETreeTag(self, name, namespace):
if namespace is None:
etree_tag = name
else:
etree_tag = "{%s}%s" % (namespace, name)
return etree_tag
def _setName(self, name):
self._name = name
self._element.tag = self._getETreeTag(self._name, self._namespace)
def _getName(self):
return self._name
name = property(_getName, _setName)
def _setNamespace(self, namespace):
self._namespace = namespace
self._element.tag = self._getETreeTag(self._name, self._namespace)
def _getNamespace(self):
return self._namespace
namespace = property(_getNamespace, _setNamespace)
def _getAttributes(self):
return self._element.attrib
def _setAttributes(self, attributes):
# Delete existing attributes first
# XXX - there may be a better way to do this...
for key in list(self._element.attrib.keys()):
del self._element.attrib[key]
for key, value in attributes.items():
if isinstance(key, tuple):
name = "{%s}%s" % (key[2], key[1])
else:
name = key
self._element.set(name, value)
attributes = property(_getAttributes, _setAttributes)
def _getChildNodes(self):
return self._childNodes
def _setChildNodes(self, value):
del self._element[:]
self._childNodes = []
for element in value:
self.insertChild(element)
childNodes = property(_getChildNodes, _setChildNodes)
def hasContent(self):
"""Return true if the node has children or text"""
return bool(self._element.text or len(self._element))
def appendChild(self, node):
self._childNodes.append(node)
self._element.append(node._element)
node.parent = self
def insertBefore(self, node, refNode):
index = list(self._element).index(refNode._element)
self._element.insert(index, node._element)
node.parent = self
def removeChild(self, node):
self._element.remove(node._element)
node.parent = None
def insertText(self, data, insertBefore=None):
if not(len(self._element)):
if not self._element.text:
self._element.text = ""
self._element.text += data
elif insertBefore is None:
# Insert the text as the tail of the last child element
if not self._element[-1].tail:
self._element[-1].tail = ""
self._element[-1].tail += data
else:
# Insert the text before the specified node
children = list(self._element)
index = children.index(insertBefore._element)
if index > 0:
if not self._element[index - 1].tail:
self._element[index - 1].tail = ""
self._element[index - 1].tail += data
else:
if not self._element.text:
self._element.text = ""
self._element.text += data
def cloneNode(self):
element = type(self)(self.name, self.namespace)
for name, value in self.attributes.items():
element.attributes[name] = value
return element
        def reparentChildren(self, newParent):
            if newParent.childNodes:
                # Guard against None tail/text before concatenating.
                if not newParent.childNodes[-1]._element.tail:
                    newParent.childNodes[-1]._element.tail = ""
                if self._element.text is not None:
                    newParent.childNodes[-1]._element.tail += self._element.text
else:
if not newParent._element.text:
newParent._element.text = ""
if self._element.text is not None:
newParent._element.text += self._element.text
self._element.text = ""
_base.Node.reparentChildren(self, newParent)
class Comment(Element):
def __init__(self, data):
# Use the superclass constructor to set all properties on the
# wrapper element
self._element = ElementTree.Comment(data)
self.parent = None
self._childNodes = []
self._flags = []
def _getData(self):
return self._element.text
def _setData(self, value):
self._element.text = value
data = property(_getData, _setData)
class DocumentType(Element):
def __init__(self, name, publicId, systemId):
Element.__init__(self, "<!DOCTYPE>")
self._element.text = name
self.publicId = publicId
self.systemId = systemId
def _getPublicId(self):
return self._element.get("publicId", "")
def _setPublicId(self, value):
if value is not None:
self._element.set("publicId", value)
publicId = property(_getPublicId, _setPublicId)
def _getSystemId(self):
return self._element.get("systemId", "")
def _setSystemId(self, value):
if value is not None:
self._element.set("systemId", value)
systemId = property(_getSystemId, _setSystemId)
class Document(Element):
def __init__(self):
Element.__init__(self, "DOCUMENT_ROOT")
class DocumentFragment(Element):
def __init__(self):
Element.__init__(self, "DOCUMENT_FRAGMENT")
def testSerializer(element):
rv = []
def serializeElement(element, indent=0):
if not(hasattr(element, "tag")):
element = element.getroot()
if element.tag == "<!DOCTYPE>":
if element.get("publicId") or element.get("systemId"):
publicId = element.get("publicId") or ""
systemId = element.get("systemId") or ""
rv.append("""<!DOCTYPE %s "%s" "%s">""" %
(element.text, publicId, systemId))
else:
rv.append("<!DOCTYPE %s>" % (element.text,))
elif element.tag == "DOCUMENT_ROOT":
rv.append("#document")
if element.text is not None:
rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
if element.tail is not None:
raise TypeError("Document node cannot have tail")
if hasattr(element, "attrib") and len(element.attrib):
raise TypeError("Document node cannot have attributes")
elif element.tag == ElementTreeCommentType:
rv.append("|%s<!-- %s -->" % (' ' * indent, element.text))
else:
assert isinstance(element.tag, text_type), \
"Expected unicode, got %s, %s" % (type(element.tag), element.tag)
nsmatch = tag_regexp.match(element.tag)
if nsmatch is None:
name = element.tag
else:
ns, name = nsmatch.groups()
prefix = constants.prefixes[ns]
name = "%s %s" % (prefix, name)
rv.append("|%s<%s>" % (' ' * indent, name))
if hasattr(element, "attrib"):
attributes = []
for name, value in element.attrib.items():
nsmatch = tag_regexp.match(name)
if nsmatch is not None:
ns, name = nsmatch.groups()
prefix = constants.prefixes[ns]
attr_string = "%s %s" % (prefix, name)
else:
attr_string = name
attributes.append((attr_string, value))
for name, value in sorted(attributes):
rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
if element.text:
rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
indent += 2
for child in element:
serializeElement(child, indent)
if element.tail:
rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail))
serializeElement(element, 0)
return "\n".join(rv)
def tostring(element):
"""Serialize an element and its child nodes to a string"""
rv = []
filter = ihatexml.InfosetFilter()
def serializeElement(element):
if isinstance(element, ElementTree.ElementTree):
element = element.getroot()
if element.tag == "<!DOCTYPE>":
if element.get("publicId") or element.get("systemId"):
publicId = element.get("publicId") or ""
systemId = element.get("systemId") or ""
rv.append("""<!DOCTYPE %s PUBLIC "%s" "%s">""" %
(element.text, publicId, systemId))
else:
rv.append("<!DOCTYPE %s>" % (element.text,))
elif element.tag == "DOCUMENT_ROOT":
if element.text is not None:
rv.append(element.text)
if element.tail is not None:
raise TypeError("Document node cannot have tail")
if hasattr(element, "attrib") and len(element.attrib):
raise TypeError("Document node cannot have attributes")
for child in element:
serializeElement(child)
elif element.tag == ElementTreeCommentType:
rv.append("<!--%s-->" % (element.text,))
else:
# This is assumed to be an ordinary element
if not element.attrib:
rv.append("<%s>" % (filter.fromXmlName(element.tag),))
else:
attr = " ".join(["%s=\"%s\"" % (
filter.fromXmlName(name), value)
for name, value in element.attrib.items()])
rv.append("<%s %s>" % (element.tag, attr))
if element.text:
rv.append(element.text)
for child in element:
serializeElement(child)
rv.append("</%s>" % (element.tag,))
if element.tail:
rv.append(element.tail)
serializeElement(element)
return "".join(rv)
class TreeBuilder(_base.TreeBuilder):
documentClass = Document
doctypeClass = DocumentType
elementClass = Element
commentClass = Comment
fragmentClass = DocumentFragment
implementation = ElementTreeImplementation
def testSerializer(self, element):
return testSerializer(element)
def getDocument(self):
if fullTree:
return self.document._element
else:
if self.defaultNamespace is not None:
return self.document._element.find(
"{%s}html" % self.defaultNamespace)
else:
return self.document._element.find("html")
def getFragment(self):
return _base.TreeBuilder.getFragment(self)._element
return locals()
getETreeModule = moduleFactoryFactory(getETreeBuilder)
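# Typical access is indirect, via the treebuilders registry, e.g.:
#
#     from pip._vendor.html5lib import treebuilders
#     TreeBuilder = treebuilders.getTreeBuilder("etree")
#
# which resolves to the module produced by getETreeModule(ElementTree).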
|
jpaton/xen-4.1-LJX1 | refs/heads/master | tools/python/xen/xm/dumppolicy.py | 49 | #============================================================================
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
# Copyright (C) 2006 International Business Machines Corp.
# Author: Reiner Sailer <[email protected]>
#============================================================================
"""Display currently enforced policy (low-level hypervisor representation).
"""
import os
import sys
import base64
import tempfile
import commands
from xen.util.xsm.xsm import XSMError, err, dump_policy, dump_policy_file
from xen.xm.opts import OptionError
from xen.xm import main as xm_main
from xen.xm.main import server
from xen.util import xsconstants
DOM0_UUID = "00000000-0000-0000-0000-000000000000"
def help():
return """
Retrieve and print currently enforced hypervisor policy information
(low-level)."""
def main(argv):
if len(argv) != 1:
raise OptionError("No arguments expected.")
if xm_main.serverType == xm_main.SERVER_XEN_API:
try:
bin_pol = server.xenapi.ACMPolicy.get_enforced_binary()
if bin_pol:
dom0_ssid = server.xenapi.ACMPolicy.get_VM_ssidref(DOM0_UUID)
bin = base64.b64decode(bin_pol)
try:
fd, filename = tempfile.mkstemp(suffix=".bin")
os.write(fd, bin)
os.close(fd)
dump_policy_file(filename, dom0_ssid)
finally:
os.unlink(filename)
else:
err("No policy is installed.")
except Exception, e:
err("An error occurred getting the running policy: %s" % str(e))
else:
dump_policy()
if __name__ == '__main__':
try:
main(sys.argv)
except Exception, e:
sys.stderr.write('Error: %s\n' % str(e))
sys.exit(-1)
|
wbc2010/django1.2.5 | refs/heads/master | tests/regressiontests/makemessages/tests.py | 49 | import os
import re
from subprocess import Popen, PIPE
def find_command(cmd, path=None, pathext=None):
if path is None:
path = os.environ.get('PATH', []).split(os.pathsep)
if isinstance(path, basestring):
path = [path]
# check if there are funny path extensions for executables, e.g. Windows
if pathext is None:
pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD').split(os.pathsep)
# don't use extensions if the command ends with one of them
for ext in pathext:
if cmd.endswith(ext):
pathext = ['']
break
# check if we find the command on PATH
for p in path:
f = os.path.join(p, cmd)
if os.path.isfile(f):
return f
for ext in pathext:
fext = f + ext
if os.path.isfile(fext):
return fext
return None
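# Example (results are machine-dependent):
#
#     find_command('xgettext')      # -> '/usr/bin/xgettext', or None
#     find_command('xgettext.exe')  # already has an extension, so PATHEXT
#                                   # suffixes are not tried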
# checks if it can find xgettext on the PATH and
# imports the extraction tests if yes
xgettext_cmd = find_command('xgettext')
if xgettext_cmd:
p = Popen('%s --version' % xgettext_cmd, shell=True, stdout=PIPE, stderr=PIPE, close_fds=os.name != 'nt', universal_newlines=True)
output = p.communicate()[0]
match = re.search(r'(?P<major>\d+)\.(?P<minor>\d+)', output)
if match:
xversion = (int(match.group('major')), int(match.group('minor')))
if xversion >= (0, 15):
from extraction import *
del p
|
Jumpscale/web | refs/heads/master | pythonlib/werkzeug/testsuite/contrib/wrappers.py | 146 | # -*- coding: utf-8 -*-
"""
werkzeug.testsuite.contrib.wrappers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Tests for the contrib wrappers module.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement
import unittest
from werkzeug.testsuite import WerkzeugTestCase
from werkzeug.contrib import wrappers
from werkzeug import routing
from werkzeug.wrappers import Request, Response
class WrappersTestCase(WerkzeugTestCase):
def test_reverse_slash_behavior(self):
class MyRequest(wrappers.ReverseSlashBehaviorRequestMixin, Request):
pass
req = MyRequest.from_values('/foo/bar', 'http://example.com/test')
assert req.url == 'http://example.com/test/foo/bar'
assert req.path == 'foo/bar'
assert req.script_root == '/test/'
# make sure the routing system works with the slashes in
# reverse order as well.
map = routing.Map([routing.Rule('/foo/bar', endpoint='foo')])
adapter = map.bind_to_environ(req.environ)
assert adapter.match() == ('foo', {})
adapter = map.bind(req.host, req.script_root)
assert adapter.match(req.path) == ('foo', {})
def test_dynamic_charset_request_mixin(self):
class MyRequest(wrappers.DynamicCharsetRequestMixin, Request):
pass
env = {'CONTENT_TYPE': 'text/html'}
req = MyRequest(env)
assert req.charset == 'latin1'
env = {'CONTENT_TYPE': 'text/html; charset=utf-8'}
req = MyRequest(env)
assert req.charset == 'utf-8'
env = {'CONTENT_TYPE': 'application/octet-stream'}
req = MyRequest(env)
assert req.charset == 'latin1'
assert req.url_charset == 'latin1'
MyRequest.url_charset = 'utf-8'
env = {'CONTENT_TYPE': 'application/octet-stream'}
req = MyRequest(env)
assert req.charset == 'latin1'
assert req.url_charset == 'utf-8'
def return_ascii(x):
return "ascii"
env = {'CONTENT_TYPE': 'text/plain; charset=x-weird-charset'}
req = MyRequest(env)
req.unknown_charset = return_ascii
assert req.charset == 'ascii'
assert req.url_charset == 'utf-8'
def test_dynamic_charset_response_mixin(self):
class MyResponse(wrappers.DynamicCharsetResponseMixin, Response):
default_charset = 'utf-7'
resp = MyResponse(mimetype='text/html')
assert resp.charset == 'utf-7'
resp.charset = 'utf-8'
assert resp.charset == 'utf-8'
assert resp.mimetype == 'text/html'
assert resp.mimetype_params == {'charset': 'utf-8'}
resp.mimetype_params['charset'] = 'iso-8859-15'
assert resp.charset == 'iso-8859-15'
resp.set_data(u'Hällo Wörld')
assert b''.join(resp.iter_encoded()) == \
u'Hällo Wörld'.encode('iso-8859-15')
del resp.headers['content-type']
try:
resp.charset = 'utf-8'
except TypeError as e:
pass
else:
assert False, 'expected type error on charset setting without ct'
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(WrappersTestCase))
return suite
|
HackFisher/depot_tools | refs/heads/master | third_party/gsutil/gslib/commands/getcors.py | 51 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import xml
from gslib.command import Command
from gslib.command import COMMAND_NAME
from gslib.command import COMMAND_NAME_ALIASES
from gslib.command import CONFIG_REQUIRED
from gslib.command import FILE_URIS_OK
from gslib.command import MAX_ARGS
from gslib.command import MIN_ARGS
from gslib.command import PROVIDER_URIS_OK
from gslib.command import SUPPORTED_SUB_ARGS
from gslib.command import URIS_START_ARG
from gslib.exception import CommandException
from gslib.help_provider import HELP_NAME
from gslib.help_provider import HELP_NAME_ALIASES
from gslib.help_provider import HELP_ONE_LINE_SUMMARY
from gslib.help_provider import HELP_TEXT
from gslib.help_provider import HelpType
from gslib.help_provider import HELP_TYPE
_detailed_help_text = ("""
<B>SYNOPSIS</B>
gsutil getcors uri
<B>DESCRIPTION</B>
Gets the Cross-Origin Resource Sharing (CORS) configuration for a given
bucket. This command is supported for buckets only, not objects and you
can get the CORS settings for only one bucket at a time. The output from
getcors can be redirected into a file, edited and then updated via the
setcors sub-command. The CORS configuration is expressed by an XML document
with the following structure:
<?xml version="1.0" ?>
<CorsConfig>
<Cors>
<Origins>
<Origin>origin1.example.com</Origin>
</Origins>
<Methods>
<Method>GET</Method>
</Methods>
<ResponseHeaders>
<ResponseHeader>Content-Type</ResponseHeader>
</ResponseHeaders>
</Cors>
</CorsConfig>
For more info about CORS, see http://www.w3.org/TR/cors/.
""")
class GetCorsCommand(Command):
"""Implementation of gsutil getcors command."""
# Command specification (processed by parent class).
command_spec = {
# Name of command.
COMMAND_NAME : 'getcors',
# List of command name aliases.
COMMAND_NAME_ALIASES : [],
# Min number of args required by this command.
MIN_ARGS : 1,
# Max number of args required by this command, or NO_MAX.
MAX_ARGS : 1,
# Getopt-style string specifying acceptable sub args.
SUPPORTED_SUB_ARGS : '',
# True if file URIs acceptable for this command.
FILE_URIS_OK : False,
# True if provider-only URIs acceptable for this command.
PROVIDER_URIS_OK : False,
# Index in args of first URI arg.
URIS_START_ARG : 0,
# True if must configure gsutil before running command.
CONFIG_REQUIRED : True,
}
help_spec = {
# Name of command or auxiliary help info for which this help applies.
HELP_NAME : 'getcors',
# List of help name aliases.
HELP_NAME_ALIASES : [],
# Type of help)
HELP_TYPE : HelpType.COMMAND_HELP,
# One line summary of this help.
HELP_ONE_LINE_SUMMARY : 'Get a bucket\'s CORS XML document',
# The full help text.
HELP_TEXT : _detailed_help_text,
}
# Command entry point.
def RunCommand(self):
# Wildcarding is allowed but must resolve to just one bucket.
uris = list(self.WildcardIterator(self.args[0]).IterUris())
if len(uris) == 0:
raise CommandException('No URIs matched')
if len(uris) != 1:
raise CommandException('%s matched more than one URI, which is not\n'
'allowed by the %s command' % (self.args[0], self.command_name))
uri = uris[0]
if not uri.names_bucket():
raise CommandException('"%s" command must specify a bucket' %
self.command_name)
cors = uri.get_cors(False, self.headers)
# Pretty-print the XML to make it more easily human editable.
parsed_xml = xml.dom.minidom.parseString(cors.to_xml().encode('utf-8'))
sys.stdout.write(parsed_xml.toprettyxml(indent=' '))
return 0
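# Example session (bucket name hypothetical):
#
#     $ gsutil getcors gs://example-bucket > cors.xml
#     $ # edit cors.xml, then re-apply it with:
#     $ gsutil setcors cors.xml gs://example-bucket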
|
ammarkhann/FinalSeniorCode | refs/heads/master | lib/python2.7/site-packages/pandas/tests/plotting/test_hist_method.py | 6 | # coding: utf-8
""" Test cases for .hist method """
import pytest
from pandas import Series, DataFrame
import pandas.util.testing as tm
from pandas.util.testing import slow
import numpy as np
from numpy.random import randn
from pandas.plotting._core import grouped_hist
from pandas.tests.plotting.common import (TestPlotBase, _check_plot_works)
tm._skip_module_if_no_mpl()
class TestSeriesPlots(TestPlotBase):
def setup_method(self, method):
TestPlotBase.setup_method(self, method)
import matplotlib as mpl
mpl.rcdefaults()
self.ts = tm.makeTimeSeries()
self.ts.name = 'ts'
@slow
def test_hist_legacy(self):
_check_plot_works(self.ts.hist)
_check_plot_works(self.ts.hist, grid=False)
_check_plot_works(self.ts.hist, figsize=(8, 10))
# _check_plot_works adds an ax so catch warning. see GH #13188
with tm.assert_produces_warning(UserWarning):
_check_plot_works(self.ts.hist, by=self.ts.index.month)
with tm.assert_produces_warning(UserWarning):
_check_plot_works(self.ts.hist, by=self.ts.index.month, bins=5)
fig, ax = self.plt.subplots(1, 1)
_check_plot_works(self.ts.hist, ax=ax)
_check_plot_works(self.ts.hist, ax=ax, figure=fig)
_check_plot_works(self.ts.hist, figure=fig)
tm.close()
fig, (ax1, ax2) = self.plt.subplots(1, 2)
_check_plot_works(self.ts.hist, figure=fig, ax=ax1)
_check_plot_works(self.ts.hist, figure=fig, ax=ax2)
with pytest.raises(ValueError):
self.ts.hist(by=self.ts.index, figure=fig)
@slow
def test_hist_bins_legacy(self):
df = DataFrame(np.random.randn(10, 2))
ax = df.hist(bins=2)[0][0]
assert len(ax.patches) == 2
@slow
def test_hist_layout(self):
df = self.hist_df
with pytest.raises(ValueError):
df.height.hist(layout=(1, 1))
with pytest.raises(ValueError):
df.height.hist(layout=[1, 1])
@slow
def test_hist_layout_with_by(self):
df = self.hist_df
# _check_plot_works adds an `ax` kwarg to the method call
# so we get a warning about an axis being cleared, even
        # though we don't explicitly pass one, see GH #13188
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(df.height.hist, by=df.gender,
layout=(2, 1))
self._check_axes_shape(axes, axes_num=2, layout=(2, 1))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(df.height.hist, by=df.gender,
layout=(3, -1))
self._check_axes_shape(axes, axes_num=2, layout=(3, 1))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(df.height.hist, by=df.category,
layout=(4, 1))
self._check_axes_shape(axes, axes_num=4, layout=(4, 1))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(
df.height.hist, by=df.category, layout=(2, -1))
self._check_axes_shape(axes, axes_num=4, layout=(2, 2))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(
df.height.hist, by=df.category, layout=(3, -1))
self._check_axes_shape(axes, axes_num=4, layout=(3, 2))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(
df.height.hist, by=df.category, layout=(-1, 4))
self._check_axes_shape(axes, axes_num=4, layout=(1, 4))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(
df.height.hist, by=df.classroom, layout=(2, 2))
self._check_axes_shape(axes, axes_num=3, layout=(2, 2))
axes = df.height.hist(by=df.category, layout=(4, 2), figsize=(12, 7))
self._check_axes_shape(
axes, axes_num=4, layout=(4, 2), figsize=(12, 7))
@slow
def test_hist_no_overlap(self):
from matplotlib.pyplot import subplot, gcf
x = Series(randn(2))
y = Series(randn(2))
subplot(121)
x.hist()
subplot(122)
y.hist()
fig = gcf()
axes = fig.axes if self.mpl_ge_1_5_0 else fig.get_axes()
assert len(axes) == 2
@slow
def test_hist_by_no_extra_plots(self):
df = self.hist_df
axes = df.height.hist(by=df.gender) # noqa
assert len(self.plt.get_fignums()) == 1
@slow
def test_plot_fails_when_ax_differs_from_figure(self):
from pylab import figure
fig1 = figure()
fig2 = figure()
ax1 = fig1.add_subplot(111)
with pytest.raises(AssertionError):
self.ts.hist(ax=ax1, figure=fig2)
class TestDataFramePlots(TestPlotBase):
@slow
def test_hist_df_legacy(self):
from matplotlib.patches import Rectangle
with tm.assert_produces_warning(UserWarning):
_check_plot_works(self.hist_df.hist)
# make sure layout is handled
df = DataFrame(randn(100, 3))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(df.hist, grid=False)
self._check_axes_shape(axes, axes_num=3, layout=(2, 2))
assert not axes[1, 1].get_visible()
df = DataFrame(randn(100, 1))
_check_plot_works(df.hist)
# make sure layout is handled
df = DataFrame(randn(100, 6))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(df.hist, layout=(4, 2))
self._check_axes_shape(axes, axes_num=6, layout=(4, 2))
# make sure sharex, sharey is handled
with tm.assert_produces_warning(UserWarning):
_check_plot_works(df.hist, sharex=True, sharey=True)
# handle figsize arg
with tm.assert_produces_warning(UserWarning):
_check_plot_works(df.hist, figsize=(8, 10))
# check bins argument
with tm.assert_produces_warning(UserWarning):
_check_plot_works(df.hist, bins=5)
# make sure xlabelsize and xrot are handled
ser = df[0]
xf, yf = 20, 18
xrot, yrot = 30, 40
axes = ser.hist(xlabelsize=xf, xrot=xrot, ylabelsize=yf, yrot=yrot)
self._check_ticks_props(axes, xlabelsize=xf, xrot=xrot,
ylabelsize=yf, yrot=yrot)
xf, yf = 20, 18
xrot, yrot = 30, 40
axes = df.hist(xlabelsize=xf, xrot=xrot, ylabelsize=yf, yrot=yrot)
self._check_ticks_props(axes, xlabelsize=xf, xrot=xrot,
ylabelsize=yf, yrot=yrot)
tm.close()
# make sure kwargs to hist are handled
ax = ser.hist(normed=True, cumulative=True, bins=4)
        # height of the last bin must be 1.0
rects = [x for x in ax.get_children() if isinstance(x, Rectangle)]
tm.assert_almost_equal(rects[-1].get_height(), 1.0)
tm.close()
ax = ser.hist(log=True)
# scale of y must be 'log'
self._check_ax_scales(ax, yaxis='log')
tm.close()
# propagate attr exception from matplotlib.Axes.hist
with pytest.raises(AttributeError):
ser.hist(foo='bar')
@slow
def test_hist_layout(self):
df = DataFrame(randn(100, 3))
layout_to_expected_size = (
{'layout': None, 'expected_size': (2, 2)}, # default is 2x2
{'layout': (2, 2), 'expected_size': (2, 2)},
{'layout': (4, 1), 'expected_size': (4, 1)},
{'layout': (1, 4), 'expected_size': (1, 4)},
{'layout': (3, 3), 'expected_size': (3, 3)},
{'layout': (-1, 4), 'expected_size': (1, 4)},
{'layout': (4, -1), 'expected_size': (4, 1)},
{'layout': (-1, 2), 'expected_size': (2, 2)},
{'layout': (2, -1), 'expected_size': (2, 2)}
)
for layout_test in layout_to_expected_size:
axes = df.hist(layout=layout_test['layout'])
expected = layout_test['expected_size']
self._check_axes_shape(axes, axes_num=3, layout=expected)
# layout too small for all 4 plots
with pytest.raises(ValueError):
df.hist(layout=(1, 1))
# invalid format for layout
with pytest.raises(ValueError):
df.hist(layout=(1,))
with pytest.raises(ValueError):
df.hist(layout=(-1, -1))
@slow
# GH 9351
def test_tight_layout(self):
if self.mpl_ge_2_0_1:
df = DataFrame(randn(100, 3))
_check_plot_works(df.hist)
self.plt.tight_layout()
tm.close()
class TestDataFrameGroupByPlots(TestPlotBase):
@slow
def test_grouped_hist_legacy(self):
from matplotlib.patches import Rectangle
df = DataFrame(randn(500, 2), columns=['A', 'B'])
df['C'] = np.random.randint(0, 4, 500)
df['D'] = ['X'] * 500
axes = grouped_hist(df.A, by=df.C)
self._check_axes_shape(axes, axes_num=4, layout=(2, 2))
tm.close()
axes = df.hist(by=df.C)
self._check_axes_shape(axes, axes_num=4, layout=(2, 2))
tm.close()
# group by a key with single value
axes = df.hist(by='D', rot=30)
self._check_axes_shape(axes, axes_num=1, layout=(1, 1))
self._check_ticks_props(axes, xrot=30)
tm.close()
# make sure kwargs to hist are handled
xf, yf = 20, 18
xrot, yrot = 30, 40
axes = grouped_hist(df.A, by=df.C, normed=True, cumulative=True,
bins=4, xlabelsize=xf, xrot=xrot,
ylabelsize=yf, yrot=yrot)
        # height of the last bin must be 1.0
for ax in axes.ravel():
rects = [x for x in ax.get_children() if isinstance(x, Rectangle)]
height = rects[-1].get_height()
tm.assert_almost_equal(height, 1.0)
self._check_ticks_props(axes, xlabelsize=xf, xrot=xrot,
ylabelsize=yf, yrot=yrot)
tm.close()
axes = grouped_hist(df.A, by=df.C, log=True)
# scale of y must be 'log'
self._check_ax_scales(axes, yaxis='log')
tm.close()
# propagate attr exception from matplotlib.Axes.hist
with pytest.raises(AttributeError):
grouped_hist(df.A, by=df.C, foo='bar')
with tm.assert_produces_warning(FutureWarning):
df.hist(by='C', figsize='default')
@slow
def test_grouped_hist_legacy2(self):
n = 10
weight = Series(np.random.normal(166, 20, size=n))
height = Series(np.random.normal(60, 10, size=n))
with tm.RNGContext(42):
gender_int = np.random.choice([0, 1], size=n)
df_int = DataFrame({'height': height, 'weight': weight,
'gender': gender_int})
gb = df_int.groupby('gender')
axes = gb.hist()
assert len(axes) == 2
assert len(self.plt.get_fignums()) == 2
tm.close()
@slow
def test_grouped_hist_layout(self):
df = self.hist_df
pytest.raises(ValueError, df.hist, column='weight', by=df.gender,
layout=(1, 1))
pytest.raises(ValueError, df.hist, column='height', by=df.category,
layout=(1, 3))
pytest.raises(ValueError, df.hist, column='height', by=df.category,
layout=(-1, -1))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(df.hist, column='height', by=df.gender,
layout=(2, 1))
self._check_axes_shape(axes, axes_num=2, layout=(2, 1))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(df.hist, column='height', by=df.gender,
layout=(2, -1))
self._check_axes_shape(axes, axes_num=2, layout=(2, 1))
axes = df.hist(column='height', by=df.category, layout=(4, 1))
self._check_axes_shape(axes, axes_num=4, layout=(4, 1))
axes = df.hist(column='height', by=df.category, layout=(-1, 1))
self._check_axes_shape(axes, axes_num=4, layout=(4, 1))
axes = df.hist(column='height', by=df.category,
layout=(4, 2), figsize=(12, 8))
self._check_axes_shape(
axes, axes_num=4, layout=(4, 2), figsize=(12, 8))
tm.close()
# GH 6769
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(
df.hist, column='height', by='classroom', layout=(2, 2))
self._check_axes_shape(axes, axes_num=3, layout=(2, 2))
# without column
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(df.hist, by='classroom')
self._check_axes_shape(axes, axes_num=3, layout=(2, 2))
axes = df.hist(by='gender', layout=(3, 5))
self._check_axes_shape(axes, axes_num=2, layout=(3, 5))
axes = df.hist(column=['height', 'weight', 'category'])
self._check_axes_shape(axes, axes_num=3, layout=(2, 2))
@slow
def test_grouped_hist_multiple_axes(self):
# GH 6970, GH 7069
df = self.hist_df
fig, axes = self.plt.subplots(2, 3)
returned = df.hist(column=['height', 'weight', 'category'], ax=axes[0])
self._check_axes_shape(returned, axes_num=3, layout=(1, 3))
tm.assert_numpy_array_equal(returned, axes[0])
assert returned[0].figure is fig
returned = df.hist(by='classroom', ax=axes[1])
self._check_axes_shape(returned, axes_num=3, layout=(1, 3))
tm.assert_numpy_array_equal(returned, axes[1])
assert returned[0].figure is fig
with pytest.raises(ValueError):
fig, axes = self.plt.subplots(2, 3)
# pass different number of axes from required
axes = df.hist(column='height', ax=axes)
@slow
def test_axis_share_x(self):
df = self.hist_df
# GH4089
ax1, ax2 = df.hist(column='height', by=df.gender, sharex=True)
# share x
assert ax1._shared_x_axes.joined(ax1, ax2)
assert ax2._shared_x_axes.joined(ax1, ax2)
# don't share y
assert not ax1._shared_y_axes.joined(ax1, ax2)
assert not ax2._shared_y_axes.joined(ax1, ax2)
@slow
def test_axis_share_y(self):
df = self.hist_df
ax1, ax2 = df.hist(column='height', by=df.gender, sharey=True)
# share y
assert ax1._shared_y_axes.joined(ax1, ax2)
assert ax2._shared_y_axes.joined(ax1, ax2)
# don't share x
assert not ax1._shared_x_axes.joined(ax1, ax2)
assert not ax2._shared_x_axes.joined(ax1, ax2)
@slow
def test_axis_share_xy(self):
df = self.hist_df
ax1, ax2 = df.hist(column='height', by=df.gender, sharex=True,
sharey=True)
# share both x and y
assert ax1._shared_x_axes.joined(ax1, ax2)
assert ax2._shared_x_axes.joined(ax1, ax2)
assert ax1._shared_y_axes.joined(ax1, ax2)
assert ax2._shared_y_axes.joined(ax1, ax2)
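    # --- Editor's sketch (standalone; not part of the suite) ----------------
    # What test_axis_share_x asserts, replayed outside the harness. The frame
    # below is hypothetical and assumes a pandas of the same vintage:
    #
    #   import numpy as np
    #   import pandas as pd
    #   df = pd.DataFrame({'height': np.random.normal(66, 4, 30),
    #                      'gender': ['Male', 'Female'] * 15})
    #   ax1, ax2 = df.hist(column='height', by=df.gender, sharex=True)
    #   assert ax1._shared_x_axes.joined(ax1, ax2)   # x limits move together
    #   assert not ax1._shared_y_axes.joined(ax1, ax2)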
|
mgit-at/ansible | refs/heads/devel | lib/ansible/modules/network/cloudengine/ce_vlan.py | 7 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ce_vlan
version_added: "2.4"
short_description: Manages VLAN resources and attributes on Huawei CloudEngine switches.
description:
- Manages VLAN configurations on Huawei CloudEngine switches.
author: QijunPan (@QijunPan)
options:
vlan_id:
description:
- Single VLAN ID, in the range from 1 to 4094.
vlan_range:
description:
- Range of VLANs such as C(2-10) or C(2,5,10-15), etc.
name:
description:
- Name of VLAN, minimum of 1 character, maximum of 31 characters.
description:
description:
- Specify VLAN description, minimum of 1 character, maximum of 80 characters.
state:
description:
- Manage the state of the resource.
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- name: vlan module test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: Ensure a range of VLANs are not present on the switch
ce_vlan:
vlan_range: "2-10,20,50,55-60,100-150"
state: absent
provider: "{{ cli }}"
- name: Ensure VLAN 50 exists with the name WEB
ce_vlan:
vlan_id: 50
name: WEB
      state: present
provider: "{{ cli }}"
- name: Ensure VLAN is NOT on the device
ce_vlan:
vlan_id: 50
state: absent
provider: "{{ cli }}"
'''
RETURN = '''
proposed_vlans_list:
description: list of VLANs being proposed
returned: always
type: list
sample: ["100"]
existing_vlans_list:
description: list of existing VLANs on the switch prior to making changes
returned: always
type: list
sample: ["1", "2", "3", "4", "5", "20"]
end_state_vlans_list:
description: list of VLANs after the module is executed
returned: always
type: list
sample: ["1", "2", "3", "4", "5", "20", "100"]
proposed:
description: k/v pairs of parameters passed into module (does not include
vlan_id or vlan_range)
returned: always
type: dict
sample: {"vlan_id":"20", "name": "VLAN_APP", "description": "vlan for app" }
existing:
description: k/v pairs of existing vlan or null when using vlan_range
returned: always
type: dict
sample: {"vlan_id":"20", "name": "VLAN_APP", "description": "" }
end_state:
description: k/v pairs of the VLAN after executing module or null
when using vlan_range
returned: always
type: dict
sample: {"vlan_id":"20", "name": "VLAN_APP", "description": "vlan for app" }
updates:
description: command string sent to the device
returned: always
type: list
sample: ["vlan 20", "name VLAN20"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import get_nc_config, set_nc_config, execute_nc_action, ce_argument_spec
CE_NC_CREATE_VLAN = """
<config>
<vlan xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<vlans>
<vlan operation="create">
<vlanId>%s</vlanId>
<vlanName>%s</vlanName>
<vlanDesc>%s</vlanDesc>
<vlanType></vlanType>
<subVlans/>
</vlan>
</vlans>
</vlan>
</config>
"""
CE_NC_DELETE_VLAN = """
<config>
<vlan xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<vlans>
<vlan operation="delete">
<vlanId>%s</vlanId>
</vlan>
</vlans>
</vlan>
</config>
"""
CE_NC_MERGE_VLAN_DES = """
<config>
<vlan xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<vlans>
<vlan operation="merge">
<vlanId>%s</vlanId>
<vlanDesc>%s</vlanDesc>
<vlanType></vlanType>
<subVlans/>
</vlan>
</vlans>
</vlan>
</config>
"""
CE_NC_MERGE_VLAN_NAME = """
<config>
<vlan xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<vlans>
<vlan operation="merge">
<vlanId>%s</vlanId>
<vlanName>%s</vlanName>
<vlanType></vlanType>
<subVlans/>
</vlan>
</vlans>
</vlan>
</config>
"""
CE_NC_MERGE_VLAN = """
<config>
<vlan xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<vlans>
<vlan operation="merge">
<vlanId>%s</vlanId>
<vlanName>%s</vlanName>
<vlanDesc>%s</vlanDesc>
<vlanType></vlanType>
<subVlans/>
</vlan>
</vlans>
</vlan>
</config>
"""
CE_NC_GET_VLAN = """
<filter type="subtree">
<vlan xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<vlans>
<vlan>
<vlanId>%s</vlanId>
<vlanDesc/>
<vlanName/>
</vlan>
</vlans>
</vlan>
</filter>
"""
CE_NC_GET_VLANS = """
<filter type="subtree">
<vlan xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<vlans>
<vlan>
<vlanId/>
<vlanName/>
</vlan>
</vlans>
</vlan>
</filter>
"""
CE_NC_CREATE_VLAN_BATCH = """
<action>
<vlan xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<shVlanBatchCrt>
<vlans>%s:%s</vlans>
</shVlanBatchCrt>
</vlan>
</action>
"""
CE_NC_DELETE_VLAN_BATCH = """
<action>
<vlan xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<shVlanBatchDel>
<vlans>%s:%s</vlans>
</shVlanBatchDel>
</vlan>
</action>
"""
class Vlan(object):
"""
Manages VLAN resources and attributes
"""
def __init__(self, argument_spec):
self.spec = argument_spec
self.module = None
self.init_module()
# vlan config info
self.vlan_id = self.module.params['vlan_id']
self.vlan_range = self.module.params['vlan_range']
self.name = self.module.params['name']
self.description = self.module.params['description']
self.state = self.module.params['state']
# state
self.changed = False
self.vlan_exist = False
self.vlan_attr_exist = None
self.vlans_list_exist = list()
self.vlans_list_change = list()
self.updates_cmd = list()
self.results = dict()
self.vlan_attr_end = dict()
def init_module(self):
"""
init ansible NetworkModule.
"""
required_one_of = [["vlan_id", "vlan_range"]]
mutually_exclusive = [["vlan_id", "vlan_range"]]
self.module = AnsibleModule(
argument_spec=self.spec,
required_one_of=required_one_of,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True)
def check_response(self, xml_str, xml_name):
"""Check if response message is already succeed."""
if "<ok/>" not in xml_str:
self.module.fail_json(msg='Error: %s failed.' % xml_name)
def config_vlan(self, vlan_id, name='', description=''):
"""Create vlan."""
if name is None:
name = ''
if description is None:
description = ''
conf_str = CE_NC_CREATE_VLAN % (vlan_id, name, description)
recv_xml = set_nc_config(self.module, conf_str)
self.check_response(recv_xml, "CREATE_VLAN")
self.changed = True
def merge_vlan(self, vlan_id, name, description):
"""Merge vlan."""
conf_str = None
if not name and description:
conf_str = CE_NC_MERGE_VLAN_DES % (vlan_id, description)
if not description and name:
conf_str = CE_NC_MERGE_VLAN_NAME % (vlan_id, name)
if description and name:
conf_str = CE_NC_MERGE_VLAN % (vlan_id, name, description)
if not conf_str:
return
recv_xml = set_nc_config(self.module, conf_str)
self.check_response(recv_xml, "MERGE_VLAN")
self.changed = True
def create_vlan_batch(self, vlan_list):
"""Create vlan batch."""
if not vlan_list:
return
vlan_bitmap = self.vlan_list_to_bitmap(vlan_list)
xmlstr = CE_NC_CREATE_VLAN_BATCH % (vlan_bitmap, vlan_bitmap)
recv_xml = execute_nc_action(self.module, xmlstr)
self.check_response(recv_xml, "CREATE_VLAN_BATCH")
self.updates_cmd.append('vlan batch %s' % (
self.vlan_range.replace(',', ' ').replace('-', ' to ')))
self.changed = True
def delete_vlan_batch(self, vlan_list):
"""Delete vlan batch."""
if not vlan_list:
return
vlan_bitmap = self.vlan_list_to_bitmap(vlan_list)
xmlstr = CE_NC_DELETE_VLAN_BATCH % (vlan_bitmap, vlan_bitmap)
recv_xml = execute_nc_action(self.module, xmlstr)
self.check_response(recv_xml, "DELETE_VLAN_BATCH")
self.updates_cmd.append('undo vlan batch %s' % (
self.vlan_range.replace(',', ' ').replace('-', ' to ')))
self.changed = True
def undo_config_vlan(self, vlanid):
"""Delete vlan."""
conf_str = CE_NC_DELETE_VLAN % vlanid
recv_xml = set_nc_config(self.module, conf_str)
self.check_response(recv_xml, "DELETE_VLAN")
self.changed = True
self.updates_cmd.append('undo vlan %s' % self.vlan_id)
def get_vlan_attr(self, vlan_id):
""" get vlan attributes."""
conf_str = CE_NC_GET_VLAN % vlan_id
xml_str = get_nc_config(self.module, conf_str)
attr = dict()
if "<data/>" in xml_str:
return attr
else:
re_find = re.findall(r'.*<vlanId>(.*)</vlanId>.*\s*'
r'<vlanName>(.*)</vlanName>.*\s*'
r'<vlanDesc>(.*)</vlanDesc>.*', xml_str)
if re_find:
attr = dict(vlan_id=re_find[0][0], name=re_find[0][1],
description=re_find[0][2])
return attr
def get_vlans_name(self):
""" get all vlan vid and its name list,
sample: [ ("20", "VLAN_NAME_20"), ("30", "VLAN_NAME_30") ]"""
conf_str = CE_NC_GET_VLANS
xml_str = get_nc_config(self.module, conf_str)
vlan_list = list()
if "<data/>" in xml_str:
return vlan_list
else:
vlan_list = re.findall(
r'.*<vlanId>(.*)</vlanId>.*\s*<vlanName>(.*)</vlanName>.*', xml_str)
return vlan_list
def get_vlans_list(self):
""" get all vlan vid list, sample: [ "20", "30", "31" ]"""
conf_str = CE_NC_GET_VLANS
xml_str = get_nc_config(self.module, conf_str)
vlan_list = list()
if "<data/>" in xml_str:
return vlan_list
else:
vlan_list = re.findall(
r'.*<vlanId>(.*)</vlanId>.*', xml_str)
return vlan_list
def vlan_series(self, vlanid_s):
""" convert vlan range to list """
vlan_list = []
peerlistlen = len(vlanid_s)
if peerlistlen != 2:
self.module.fail_json(msg='Error: Format of vlanid is invalid.')
for num in range(peerlistlen):
if not vlanid_s[num].isdigit():
self.module.fail_json(
msg='Error: Format of vlanid is invalid.')
if int(vlanid_s[0]) > int(vlanid_s[1]):
self.module.fail_json(msg='Error: Format of vlanid is invalid.')
elif int(vlanid_s[0]) == int(vlanid_s[1]):
vlan_list.append(str(vlanid_s[0]))
return vlan_list
for num in range(int(vlanid_s[0]), int(vlanid_s[1])):
vlan_list.append(str(num))
vlan_list.append(vlanid_s[1])
return vlan_list
def vlan_region(self, vlanid_list):
""" convert vlan range to vlan list """
vlan_list = []
peerlistlen = len(vlanid_list)
for num in range(peerlistlen):
if vlanid_list[num].isdigit():
vlan_list.append(vlanid_list[num])
else:
vlan_s = self.vlan_series(vlanid_list[num].split('-'))
vlan_list.extend(vlan_s)
return vlan_list
def vlan_range_to_list(self, vlan_range):
""" convert vlan range to vlan list """
vlan_list = self.vlan_region(vlan_range.split(','))
return vlan_list
def vlan_list_to_bitmap(self, vlanlist):
""" convert vlan list to vlan bitmap """
vlan_bit = ['0'] * 1024
bit_int = [0] * 1024
vlan_list_len = len(vlanlist)
for num in range(vlan_list_len):
tagged_vlans = int(vlanlist[num])
if tagged_vlans <= 0 or tagged_vlans > 4094:
self.module.fail_json(
msg='Error: Vlan id is not in the range from 1 to 4094.')
            j = tagged_vlans // 4  # integer division; '/' yields a float on Python 3
bit_int[j] |= 0x8 >> (tagged_vlans % 4)
vlan_bit[j] = hex(bit_int[j])[2]
vlan_xml = ''.join(vlan_bit)
return vlan_xml
def check_params(self):
"""Check all input params"""
        if not self.vlan_id and self.description:
            self.module.fail_json(
                msg='Error: Vlan description can only be set for a single vlan_id.')
        if not self.vlan_id and self.name:
            self.module.fail_json(
                msg='Error: Vlan name can only be set for a single vlan_id.')
# check vlan id
if self.vlan_id:
if not self.vlan_id.isdigit():
self.module.fail_json(
msg='Error: Vlan id is not digit.')
if int(self.vlan_id) <= 0 or int(self.vlan_id) > 4094:
self.module.fail_json(
msg='Error: Vlan id is not in the range from 1 to 4094.')
# check vlan description
if self.description:
            if len(self.description) > 80 or len(self.description.replace(' ', '')) < 1:
self.module.fail_json(
msg='Error: vlan description is not in the range from 1 to 80.')
# check vlan name
if self.name:
if len(self.name) > 31 or len(self.name.replace(' ', '')) < 1:
self.module.fail_json(
msg='Error: Vlan name is not in the range from 1 to 31.')
def get_proposed(self):
"""
get proposed config.
"""
if self.vlans_list_change:
if self.state == 'present':
proposed_vlans_tmp = list(self.vlans_list_change)
proposed_vlans_tmp.extend(self.vlans_list_exist)
self.results['proposed_vlans_list'] = list(
set(proposed_vlans_tmp))
else:
self.results['proposed_vlans_list'] = list(
set(self.vlans_list_exist) - set(self.vlans_list_change))
self.results['proposed_vlans_list'].sort()
else:
self.results['proposed_vlans_list'] = self.vlans_list_exist
if self.vlan_id:
if self.state == "present":
self.results['proposed'] = dict(
vlan_id=self.vlan_id,
name=self.name,
description=self.description
)
else:
self.results['proposed'] = None
else:
self.results['proposed'] = None
def get_existing(self):
"""
get existing config.
"""
self.results['existing_vlans_list'] = self.vlans_list_exist
if self.vlan_id:
if self.vlan_attr_exist:
self.results['existing'] = dict(
vlan_id=self.vlan_attr_exist['vlan_id'],
name=self.vlan_attr_exist['name'],
description=self.vlan_attr_exist['description']
)
else:
self.results['existing'] = None
else:
self.results['existing'] = None
def get_end_state(self):
"""
get end state config.
"""
self.results['end_state_vlans_list'] = self.get_vlans_list()
if self.vlan_id:
if self.vlan_attr_end:
self.results['end_state'] = dict(
vlan_id=self.vlan_attr_end['vlan_id'],
name=self.vlan_attr_end['name'],
description=self.vlan_attr_end['description']
)
else:
self.results['end_state'] = None
else:
self.results['end_state'] = None
def work(self):
"""
worker.
"""
# check param
self.check_params()
# get all vlan info
self.vlans_list_exist = self.get_vlans_list()
# get vlan attributes
if self.vlan_id:
self.vlans_list_change.append(self.vlan_id)
self.vlan_attr_exist = self.get_vlan_attr(self.vlan_id)
if self.vlan_attr_exist:
self.vlan_exist = True
if self.vlan_range:
new_vlans_tmp = self.vlan_range_to_list(self.vlan_range)
if self.state == 'present':
self.vlans_list_change = list(
set(new_vlans_tmp) - set(self.vlans_list_exist))
else:
self.vlans_list_change = [
val for val in new_vlans_tmp if val in self.vlans_list_exist]
if self.state == 'present':
if self.vlan_id:
if not self.vlan_exist:
# create a new vlan
self.config_vlan(self.vlan_id, self.name, self.description)
elif self.description and self.description != self.vlan_attr_exist['description']:
# merge vlan description
self.merge_vlan(self.vlan_id, self.name, self.description)
elif self.name and self.name != self.vlan_attr_exist['name']:
# merge vlan name
self.merge_vlan(self.vlan_id, self.name, self.description)
# update command for results
if self.changed:
self.updates_cmd.append('vlan %s' % self.vlan_id)
if self.name:
self.updates_cmd.append('name %s' % self.name)
if self.description:
self.updates_cmd.append(
'description %s' % self.description)
elif self.vlan_range and self.vlans_list_change:
self.create_vlan_batch(self.vlans_list_change)
else: # absent
if self.vlan_id:
if self.vlan_exist:
# delete the vlan
self.undo_config_vlan(self.vlan_id)
elif self.vlan_range and self.vlans_list_change:
self.delete_vlan_batch(self.vlans_list_change)
# result
if self.vlan_id:
self.vlan_attr_end = self.get_vlan_attr(self.vlan_id)
self.get_existing()
self.get_proposed()
self.get_end_state()
self.results['changed'] = self.changed
if self.changed:
self.results['updates'] = self.updates_cmd
else:
self.results['updates'] = list()
self.module.exit_json(**self.results)
def main():
""" module main """
argument_spec = dict(
vlan_id=dict(required=False),
vlan_range=dict(required=False, type='str'),
name=dict(required=False, type='str'),
description=dict(required=False, type='str'),
state=dict(choices=['absent', 'present'],
default='present', required=False),
)
argument_spec.update(ce_argument_spec)
vlancfg = Vlan(argument_spec)
vlancfg.work()
if __name__ == '__main__':
main()
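# --- Editor's sketch (not part of the upstream module) ----------------------
# A standalone re-implementation of the nibble packing performed by
# Vlan.vlan_list_to_bitmap() above, handy for inspecting the NETCONF payload
# offline. The helper name is illustrative only.
def _demo_vlan_bitmap(vlan_ids):
    """Pack integer VLAN ids into the 1024-digit hex bitmap that the
    <shVlanBatchCrt>/<shVlanBatchDel> actions expect.

    >>> _demo_vlan_bitmap([1, 2, 3])[:2]   # ids 1-3 set the low bits
    '70'
    """
    nibbles = [0] * 1024                       # one hex digit per 4 VLAN ids
    for vid in vlan_ids:
        if not 1 <= vid <= 4094:
            raise ValueError('VLAN id out of range: %d' % vid)
        nibbles[vid // 4] |= 0x8 >> (vid % 4)  # MSB of each nibble = lowest id
    return ''.join('%x' % n for n in nibbles)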
|
nicobustillos/odoo | refs/heads/8.0 | addons/hr_payroll/wizard/__init__.py | 442 | #-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_payroll_payslips_by_employees
import hr_payroll_contribution_register_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
iostackproject/Swift-Microcontroller | refs/heads/master | Engine/swift/vertigo_middleware/gateways/docker/datagram.py | 1 | import json
import os
import syslog
SBUS_FD_OUTPUT_OBJECT = 1
SBUS_CMD_NOP = 9
class Datagram(object):
'''@summary: This class aggregates data to be transferred
using SBus functionality.
'''
command_dict_key_name_ = 'command'
task_id_dict_key_name_ = 'taskId'
def __init__(self):
'''@summary: CTOR
@ivar e_command_ : A command to Storlet Daemon.
@type e_command_ : Integer. SBusStorletCommand enumerated value.
@ivar h_files_: List of open file descriptors.
@type h_files_: List of integers.
@ivar n_files_: Quantity of file descriptors.
@type n_files_: Integer.
@ivar files_metadata_: Meta-data for the file descriptors.
@type files_metadata_: List of Dictionaries String-to-String.
@ivar exec_params_: Auxiliary parameters for e_command_.
@type exec_params_: Dictionary String-to-String.
@invariant: Quantity of entries in files_metadata_ list
is the same as in h_files_, i.e. n_files_.
'''
self.e_command_ = SBUS_CMD_NOP
self.task_id_ = None
self.h_files_ = None
self.n_files_ = 0
self.files_metadata_ = None
self.exec_params_ = None
@staticmethod
def create_service_datagram(command, outfd):
'''@summary: Datagram static factory.
Create "service" datagram, i.e.
- command shall be one of
{PING, START/STOP/STATUS-DAEMON}
- single output file descriptor
@param command: Command to send
@type command: SBusStorletCommand
@param outfd: Output stream for command execution results
@type outfd: File descriptor or Integer
@return: A datagram with the required data
@rtype: SBusDatagram
'''
dtg = Datagram()
dtg.set_command(command)
meta = {}
meta[0] = {'type': SBUS_FD_OUTPUT_OBJECT}
files = []
if isinstance(outfd, file):
files.append(outfd.fileno())
else:
files.append(outfd)
dtg.set_files(files)
dtg.set_metadata(meta)
return dtg
def from_raw_data(self, h_files, str_json_metadata, str_json_params):
'''@summary: CTOR
Construct object from file list and
two JSON-encoded strings.
@param h_files: List of file descriptors.
@type h_files: List of integers.
@param str_json_metadata: JSON encoding of file descriptors meta-data.
@type str_json_metadata: String.
@param str_json_params: JSON encoding for execution parameters.
@type str_json_params: String.
@rtype: void
'''
self.set_files(h_files)
self.extract_metadata(str_json_metadata)
self.extract_params(str_json_params)
def extract_metadata(self, str_json_metadata):
'''@summary: Extract files_metadata array
                     of dictionaries from a JSON string
        @requires: n_files_ has to be set
@param str_json_metadata: JSON encoding of file descriptors meta-data.
@type str_json_metadata: String.
@rtype: void
'''
if self.get_num_files() > 0:
all_metadata = json.loads(str_json_metadata)
self.files_metadata_ = []
for i in range(self.get_num_files()):
str_curr_metadata = all_metadata[str(i)]
self.files_metadata_.append(json.loads(str_curr_metadata))
def extract_params(self, str_json_params):
'''@summary: Extract command field and exec_params
                     dictionary from a JSON string
@param str_json_params: JSON encoding for the execution parameters.
@type str_json_params: string.
@rtype: void
'''
ext_params = json.loads(str_json_params)
cmd = self.command_dict_key_name_
tid = self.task_id_dict_key_name_
if cmd in ext_params:
self.e_command_ = ext_params[cmd]
ext_params.pop(cmd, None)
elif tid in ext_params:
self.task_id_ = ext_params[tid]
ext_params.pop(tid, None)
else:
self.e_command_ = SBUS_CMD_NOP
b_exec_params_is_not_empty = len(ext_params.keys()) > 0
if b_exec_params_is_not_empty:
self.exec_params_ = ext_params.copy()
else:
self.exec_params_ = None
def get_params_and_cmd_as_json(self):
'''@summary: Convert command field and execution parameters
                     dictionary into JSON as follows:
1. Copy exec_params_. Initialize the combined dictionary.
2. Push the next pair into the combined dictionary
key - 'command'
value - e_command_
@return: JSON encoded representation of exec_params_ and command_
@rtype: string
'''
exec_params = {}
if self.exec_params_:
exec_params = self.exec_params_.copy()
cmd = self.command_dict_key_name_
exec_params[cmd] = self.e_command_
if self.task_id_:
tid = self.task_id_dict_key_name_
exec_params[tid] = self.task_id_
str_result = json.dumps(exec_params)
return str_result
def get_files_metadata_as_json(self):
        '''@summary: Encode the list of dictionaries into JSON as follows:
1. Create a combined dictionary (Integer-to-String)
Key - index in the original list
Value - JSON encoding of the certain dictionary
2. Encode the combined dictionary into JSON
@return: List of dictionaries into a JSON string.
@rtype: string
'''
all_metadata = {}
str_result = None
for i in range(self.get_num_files()):
all_metadata[str(i)] = json.dumps(self.files_metadata_[i])
if self.get_num_files() > 0:
str_result = json.dumps(all_metadata)
return str_result
def get_num_files(self):
'''@summary: Getter.
@return: The quantity of file descriptors.
@rtype: integer
'''
return self.n_files_
def get_files(self):
'''@summary: Getter.
@return: The list of file descriptors.
@rtype: List of integers
'''
return self.h_files_
def set_files(self, h_files):
'''@summary: Setter.
Assign file handlers list and update n_files_ field
@param h_files: File descriptors.
@type h_files: List of integers
@rtype: void
'''
if not h_files:
self.n_files_ = 0
else:
self.n_files_ = len(h_files)
self.h_files_ = None
if 0 < self.n_files_:
self.h_files_ = []
for i in range(self.n_files_):
if isinstance(h_files[i], file):
self.h_files_.append(h_files[i].fileno())
else:
self.h_files_.append(h_files[i])
def get_first_file_of_type(self, file_type):
'''@summary: Iterate through file list and metadata.
Find the first file with the required type
@param file_type: The file type to look for
@type file_type: Integer, SBusFileDescription enumerator
@return: File descriptor or None if not found
@rtype: File
'''
required_file = None
for i in range(self.get_num_files()):
if (self.get_metadata()[i])['type'] == file_type:
try:
required_file = os.fdopen(self.get_files()[i], 'w')
except IOError as err:
syslog.syslog(syslog.LOG_DEBUG,
'Failed to open file: %s' % err.strerror)
return required_file
def get_metadata(self):
'''@summary: Getter.
@return: The list of meta-data dictionaries.
@rtype: List of dictionaries
'''
return self.files_metadata_
def set_metadata(self, metadata):
'''@summary: Setter.
Assign file_metadata_ field
@param metadata: File descriptors meta-data dictionaries.
@type metadata: List of dictionaries
@rtype: void
'''
self.files_metadata_ = metadata
def get_exec_params(self):
'''@summary: Getter.
@return: The execution parameters dictionary.
@rtype: Dictionary
'''
return self.exec_params_
def set_exec_params(self, params):
'''@summary: Setter.
Assign execution parameters dictionary.
@param params: Execution parameters to assign
@type params: Dictionary
@rtype: void
'''
self.exec_params_ = params
def add_exec_param(self, param_name, param_value):
'''@summary: Add a single pair to the exec_params_ dictionary
Don't change if the parameter exists already
@param param_name: Execution parameter name to be added
@type param_name: string
@param param_value: Execution parameter value
@type param_value: Unknown
@return: False if param_name exists already
@rtype: boolean
'''
b_status = True
if not self.get_exec_params():
exec_params = {}
exec_params[param_name] = param_value
self.set_exec_params(exec_params)
elif param_name in self.get_exec_params():
b_status = False
else:
self.get_exec_params()[param_name] = param_value
return b_status
def get_command(self):
'''@summary: Getter.
@return: The Storlet Daemon command.
@rtype: SBusStorletCommand
'''
return self.e_command_
def set_command(self, cmd):
'''@summary: Setter.
Assign Storlet Daemon command.
@param cmd: Command to assign
@type cmd: SBusStorletCommand enumerator
@rtype: void
'''
self.e_command_ = cmd
def get_task_id(self):
'''@summary: Getter.
@return: The task id.
@rtype: string
'''
return self.task_id_
def set_task_id(self, taskId):
'''@summary: Setter.
Assign task id
@param taskId: Command to assign
@type taskId: string
@rtype: void
'''
self.task_id_ = taskId
@staticmethod
def dictionaies_equal(d1, d2):
        '''@summary: Check whether two dictionaries have the same content.
The order of the entries is not considered.
@return: The answer to the above
@rtype: boolean.
'''
diffr = set(d1.items()) ^ set(d2.items())
return (0 == len(diffr))
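# --- Editor's sketch (not part of the original middleware) ------------------
# Quick self-check of the JSON wire format produced by Datagram; run it
# directly under Python 2 (the class itself relies on the py2 `file`
# builtin). The pipe stands in for a real SBus descriptor.
if __name__ == '__main__':
    read_end, write_end = os.pipe()
    dtg = Datagram.create_service_datagram(SBUS_CMD_NOP, write_end)
    # A NOP service datagram carries one output descriptor and no params:
    print(dtg.get_params_and_cmd_as_json())   # {"command": 9}
    print(dtg.get_files_metadata_as_json())   # {"0": "{\"type\": 1}"}
    os.close(read_end)
    os.close(write_end)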
|
dhoffman34/django | refs/heads/master | django/templatetags/future.py | 45 | import warnings
from django.template import Library
from django.template import defaulttags
from django.utils.deprecation import RemovedInDjango19Warning, RemovedInDjango20Warning
register = Library()
@register.tag
def ssi(parser, token):
warnings.warn(
"Loading the `ssi` tag from the `future` library is deprecated and "
"will be removed in Django 1.9. Use the default `ssi` tag instead.",
RemovedInDjango19Warning)
return defaulttags.ssi(parser, token)
@register.tag
def url(parser, token):
warnings.warn(
"Loading the `url` tag from the `future` library is deprecated and "
"will be removed in Django 1.9. Use the default `url` tag instead.",
RemovedInDjango19Warning)
return defaulttags.url(parser, token)
@register.tag
def cycle(parser, token):
"""
This is the future version of `cycle` with auto-escaping.
The deprecation is now complete and this version is no different
from the non-future version so this is deprecated.
By default all strings are escaped.
If you want to disable auto-escaping of variables you can use::
{% autoescape off %}
{% cycle var1 var2 var3 as somecycle %}
{% autoescape %}
Or if only some variables should be escaped, you can use::
{% cycle var1 var2|safe var3|safe as somecycle %}
"""
warnings.warn(
"Loading the `cycle` tag from the `future` library is deprecated and "
"will be removed in Django 2.0. Use the default `cycle` tag instead.",
RemovedInDjango20Warning)
return defaulttags.cycle(parser, token)
@register.tag
def firstof(parser, token):
"""
    This is the future version of `firstof` with auto-escaping.
    Auto-escaping is now the default, so this version is identical to the
    non-future version and is itself deprecated.
This is equivalent to::
{% if var1 %}
{{ var1 }}
{% elif var2 %}
{{ var2 }}
{% elif var3 %}
{{ var3 }}
{% endif %}
If you want to disable auto-escaping of variables you can use::
{% autoescape off %}
{% firstof var1 var2 var3 "<strong>fallback value</strong>" %}
{% autoescape %}
Or if only some variables should be escaped, you can use::
{% firstof var1 var2|safe var3 "<strong>fallback value</strong>"|safe %}
"""
warnings.warn(
"Loading the `firstof` tag from the `future` library is deprecated and "
"will be removed in Django 2.0. Use the default `firstof` tag instead.",
RemovedInDjango20Warning)
return defaulttags.firstof(parser, token)
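# --- Editor's note (illustrative template usage) ----------------------------
# Loading the tags from the `future` library is what triggers the warnings
# above; rendering is identical to the built-in tags:
#
#   {% load cycle firstof from future %}
#   {% for row in rows %}
#     <tr class="{% cycle 'odd' 'even' %}">{{ row }}</tr>
#   {% endfor %}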
|
tsuru/tsuru-autoscale-dashboard | refs/heads/master | tsuru_autoscale/datasource/views.py | 1 | from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from tsuru_autoscale.datasource.forms import DataSourceForm
from tsuru_autoscale.datasource import client
def new(request):
form = DataSourceForm(request.POST or None)
if form.is_valid():
token = request.GET.get("TSURU_TOKEN")
response = client.new(form.cleaned_data, token)
if response.status_code > 399:
messages.error(request, response.text)
else:
messages.success(request, u"Data source saved.")
url = "{}?TSURU_TOKEN={}".format(reverse('datasource-list'), token)
return redirect(url)
context = {"form": form}
return render(request, "datasource/new.html", context)
def list(request):
token = request.GET.get("TSURU_TOKEN")
datasources = client.list(token).json()
context = {
"list": datasources,
}
return render(request, "datasource/list.html", context)
def remove(request, name):
token = request.GET.get("TSURU_TOKEN")
client.remove(name, token)
    messages.success(request, u"Data source {} removed.".format(name))
url = "{}?TSURU_TOKEN={}".format(reverse('datasource-list'), token)
return redirect(url)
def get(request, name):
token = request.GET.get("TSURU_TOKEN")
datasource = client.get(name, token).json()
context = {
"item": datasource,
}
return render(request, "datasource/get.html", context)
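# --- Editor's sketch (hypothetical; not part of this app) -------------------
# The views above only require that `client` expose new/list/remove/get and
# return requests-style responses. A minimal stand-in with those signatures
# (the endpoint is a placeholder):
#
#   import requests
#
#   API = "http://autoscale.example.com"
#
#   def list(token):
#       return requests.get("{}/datasource".format(API),
#                           headers={"Authorization": token})
#
#   def get(name, token):
#       return requests.get("{}/datasource/{}".format(API, name),
#                           headers={"Authorization": token})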
|
aviciimaxwell/odoo | refs/heads/8.0 | addons/account/wizard/account_state_open.py | 341 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
class account_state_open(osv.osv_memory):
_name = 'account.state.open'
_description = 'Account State Open'
def change_inv_state(self, cr, uid, ids, context=None):
proxy = self.pool.get('account.invoice')
if context is None:
context = {}
active_ids = context.get('active_ids')
if isinstance(active_ids, list):
invoice = proxy.browse(cr, uid, active_ids[0], context=context)
if invoice.reconciled:
raise osv.except_osv(_('Warning!'), _('Invoice is already reconciled.'))
invoice.signal_workflow('open_test')
return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
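# --- Editor's sketch (illustrative; old OpenERP 7/8 API) --------------------
# Driving this wizard from server-side code; `invoice_id`, `cr`, `uid` and
# `context` are assumed to exist in the calling scope:
#
#   wiz_obj = self.pool.get('account.state.open')
#   wiz_id = wiz_obj.create(cr, uid, {}, context=context)
#   wiz_obj.change_inv_state(cr, uid, [wiz_id],
#                            context={'active_ids': [invoice_id]})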
|
zachjanicki/osf.io | refs/heads/develop | website/addons/github/__init__.py | 21 | import os
from website.addons.github import routes, views, model
MODELS = [
model.GitHubUserSettings,
model.GitHubNodeSettings,
]
USER_SETTINGS_MODEL = model.GitHubUserSettings
NODE_SETTINGS_MODEL = model.GitHubNodeSettings
ROUTES = [routes.api_routes]
SHORT_NAME = 'github'
FULL_NAME = 'GitHub'
OWNERS = ['user', 'node']
ADDED_DEFAULT = []
ADDED_MANDATORY = []
VIEWS = []
CONFIGS = ['accounts', 'node']
CATEGORIES = ['storage']
INCLUDE_JS = {}
INCLUDE_CSS = {}
HAS_HGRID_FILES = True
GET_HGRID_DATA = views.github_hgrid_data
# Note: Even though GitHub supports file sizes over 1 MB, uploads and
# downloads through their API are capped at 1 MB.
MAX_FILE_SIZE = 100  # MB
HERE = os.path.dirname(os.path.abspath(__file__))
NODE_SETTINGS_TEMPLATE = os.path.join(HERE, 'templates', 'github_node_settings.mako')
USER_SETTINGS_TEMPLATE = None # Use default template
|
kalahbrown/HueBigSQL | refs/heads/master | desktop/core/ext-py/pysaml2-2.4.0/src/xmldsig/__init__.py | 32 | #!/usr/bin/env python
#
# Generated Mon May 2 14:23:33 2011 by parse_xsd.py version 0.4.
#
import saml2
from saml2 import SamlBase
NAMESPACE = 'http://www.w3.org/2000/09/xmldsig#'
ENCODING_BASE64 = 'http://www.w3.org/2000/09/xmldsig#base64'
# digest and signature algorithms (not implemented = commented out)
DIGEST_MD5 = 'http://www.w3.org/2001/04/xmldsig-more#md5' # test framework only!
DIGEST_SHA1 = 'http://www.w3.org/2000/09/xmldsig#sha1'
DIGEST_SHA224 = 'http://www.w3.org/2001/04/xmldsig-more#sha224'
DIGEST_SHA256 = 'http://www.w3.org/2001/04/xmlenc#sha256'
DIGEST_SHA384 = 'http://www.w3.org/2001/04/xmldsig-more#sha384'
DIGEST_SHA512 = 'http://www.w3.org/2001/04/xmlenc#sha512'
DIGEST_RIPEMD160 = 'http://www.w3.org/2001/04/xmlenc#ripemd160'
digest_default = DIGEST_SHA1
DIGEST_ALLOWED_ALG = (('DIGEST_SHA1', DIGEST_SHA1),
('DIGEST_SHA224', DIGEST_SHA224),
('DIGEST_SHA256', DIGEST_SHA256),
('DIGEST_SHA384', DIGEST_SHA384),
('DIGEST_SHA512', DIGEST_SHA512),
('DIGEST_RIPEMD160', DIGEST_RIPEMD160))
DIGEST_AVAIL_ALG = DIGEST_ALLOWED_ALG + (('DIGEST_MD5', DIGEST_MD5), )
#SIG_DSA_SHA1 = 'http://www.w3.org/2000/09/xmldsig#dsa-sha1'
#SIG_DSA_SHA256 = 'http://www.w3.org/2009/xmldsig11#dsa-sha256'
#SIG_ECDSA_SHA1 = 'http://www.w3.org/2001/04/xmldsig-more#ECDSA_sha1'
#SIG_ECDSA_SHA224 = 'http://www.w3.org/2001/04/xmldsig-more#ECDSA_sha224'
#SIG_ECDSA_SHA256 = 'http://www.w3.org/2001/04/xmldsig-more#ECDSA_sha256'
#SIG_ECDSA_SHA384 = 'http://www.w3.org/2001/04/xmldsig-more#ECDSA_sha384'
#SIG_ECDSA_SHA512 = 'http://www.w3.org/2001/04/xmldsig-more#ECDSA_sha512'
SIG_RSA_MD5 = 'http://www.w3.org/2001/04/xmldsig-more#rsa-md5' # test framework
SIG_RSA_SHA1 = 'http://www.w3.org/2000/09/xmldsig#rsa-sha1'
SIG_RSA_SHA224 = 'http://www.w3.org/2001/04/xmldsig-more#rsa-sha224'
SIG_RSA_SHA256 = 'http://www.w3.org/2001/04/xmldsig-more#rsa-sha256'
SIG_RSA_SHA384 = 'http://www.w3.org/2001/04/xmldsig-more#rsa-sha384'
SIG_RSA_SHA512 = 'http://www.w3.org/2001/04/xmldsig-more#rsa-sha512'
#SIG_RSA_RIPEMD160 = 'http://www.w3.org/2001/04/xmldsig-more#rsa-ripemd160'
sig_default = SIG_RSA_SHA1
SIG_ALLOWED_ALG = (('SIG_RSA_SHA1', SIG_RSA_SHA1),
('SIG_RSA_SHA224', SIG_RSA_SHA224),
('SIG_RSA_SHA256', SIG_RSA_SHA256),
('SIG_RSA_SHA384', SIG_RSA_SHA384),
('SIG_RSA_SHA512', SIG_RSA_SHA512))
SIG_AVAIL_ALG = SIG_ALLOWED_ALG + (('SIG_RSA_MD5', SIG_RSA_MD5), )
MAC_SHA1 = 'http://www.w3.org/2000/09/xmldsig#hmac-sha1'
C14N = 'http://www.w3.org/TR/2001/REC-xml-c14n-20010315'
C14N_WITH_C = 'http://www.w3.org/TR/2001/REC-xml-c14n-20010315#WithComments'
ALG_EXC_C14N = 'http://www.w3.org/2001/10/xml-exc-c14n#'
TRANSFORM_XSLT = 'http://www.w3.org/TR/1999/REC-xslt-19991116'
TRANSFORM_XPATH = 'http://www.w3.org/TR/1999/REC-xpath-19991116'
TRANSFORM_ENVELOPED = 'http://www.w3.org/2000/09/xmldsig#enveloped-signature'
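# --- Editor's sketch (illustrative; not part of pysaml2) --------------------
# How the allowed-algorithm tuples above are typically consumed: resolving a
# short name such as 'SIG_RSA_SHA256' to its algorithm URI.
def _demo_alg_uri(short_name, table=SIG_ALLOWED_ALG):
    """
    >>> _demo_alg_uri('SIG_RSA_SHA256')
    'http://www.w3.org/2001/04/xmldsig-more#rsa-sha256'
    """
    for name, uri in table:
        if name == short_name:
            return uri
    raise KeyError(short_name)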
class CryptoBinary_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:CryptoBinary element """
c_tag = 'CryptoBinary'
c_namespace = NAMESPACE
c_value_type = {'base': 'base64Binary'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def crypto_binary__from_string(xml_string):
return saml2.create_class_from_xml_string(CryptoBinary_, xml_string)
class SignatureValueType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:SignatureValueType element """
c_tag = 'SignatureValueType'
c_namespace = NAMESPACE
c_value_type = {'base': 'base64Binary'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_attributes['Id'] = ('id', 'ID', False)
def __init__(self,
id=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.id=id
def signature_value_type__from_string(xml_string):
return saml2.create_class_from_xml_string(SignatureValueType_, xml_string)
class CanonicalizationMethodType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:CanonicalizationMethodType element """
c_tag = 'CanonicalizationMethodType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_attributes['Algorithm'] = ('algorithm', 'anyURI', True)
def __init__(self,
algorithm=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.algorithm=algorithm
def canonicalization_method_type__from_string(xml_string):
return saml2.create_class_from_xml_string(CanonicalizationMethodType_,
xml_string)
class TransformType_XPath(SamlBase):
c_tag = 'XPath'
c_namespace = NAMESPACE
c_value_type = {'base': 'string'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def transform_type__x_path_from_string(xml_string):
return saml2.create_class_from_xml_string(TransformType_XPath, xml_string)
class TransformType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:TransformType element """
c_tag = 'TransformType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}XPath'] = ('x_path',
[TransformType_XPath])
c_cardinality['x_path'] = {"min":0}
c_attributes['Algorithm'] = ('algorithm', 'anyURI', True)
c_child_order.extend(['x_path'])
def __init__(self,
x_path=None,
algorithm=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.x_path=x_path or []
self.algorithm=algorithm
def transform_type__from_string(xml_string):
return saml2.create_class_from_xml_string(TransformType_, xml_string)
class DigestMethodType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:DigestMethodType element """
c_tag = 'DigestMethodType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_attributes['Algorithm'] = ('algorithm', 'anyURI', True)
def __init__(self,
algorithm=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.algorithm=algorithm
def digest_method_type__from_string(xml_string):
return saml2.create_class_from_xml_string(DigestMethodType_, xml_string)
class DigestValueType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:DigestValueType element """
c_tag = 'DigestValueType'
c_namespace = NAMESPACE
c_value_type = {'base': 'base64Binary'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def digest_value_type__from_string(xml_string):
return saml2.create_class_from_xml_string(DigestValueType_, xml_string)
class KeyName(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:KeyName element """
c_tag = 'KeyName'
c_namespace = NAMESPACE
c_value_type = {'base': 'string'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def key_name_from_string(xml_string):
return saml2.create_class_from_xml_string(KeyName, xml_string)
class MgmtData(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:MgmtData element """
c_tag = 'MgmtData'
c_namespace = NAMESPACE
c_value_type = {'base': 'string'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def mgmt_data_from_string(xml_string):
return saml2.create_class_from_xml_string(MgmtData, xml_string)
class X509IssuerName(SamlBase):
c_tag = 'X509IssuerName'
c_namespace = NAMESPACE
c_value_type = {'base': 'string'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def x509_issuer_name_from_string(xml_string):
return saml2.create_class_from_xml_string(X509IssuerName, xml_string)
class X509SerialNumber(SamlBase):
c_tag = 'X509SerialNumber'
c_namespace = NAMESPACE
c_value_type = {'base': 'integer'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def x509_serial_number_from_string(xml_string):
return saml2.create_class_from_xml_string(X509SerialNumber, xml_string)
class X509IssuerSerialType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:X509IssuerSerialType element """
c_tag = 'X509IssuerSerialType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}X509IssuerName'] = ('x509_issuer_name', X509IssuerName)
c_children['{http://www.w3.org/2000/09/xmldsig#}X509SerialNumber'] = ('x509_serial_number', X509SerialNumber)
c_child_order.extend(['x509_issuer_name', 'x509_serial_number'])
def __init__(self,
x509_issuer_name=None,
x509_serial_number=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.x509_issuer_name=x509_issuer_name
self.x509_serial_number=x509_serial_number
def x509_issuer_serial_type__from_string(xml_string):
return saml2.create_class_from_xml_string(X509IssuerSerialType_, xml_string)
class PGPKeyID(SamlBase):
c_tag = 'PGPKeyID'
c_namespace = NAMESPACE
c_value_type = {'base': 'base64Binary'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def pgp_key_id_from_string(xml_string):
return saml2.create_class_from_xml_string(PGPKeyID, xml_string)
class PGPKeyPacket(SamlBase):
c_tag = 'PGPKeyPacket'
c_namespace = NAMESPACE
c_value_type = {'base': 'base64Binary'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def pgp_key_packet_from_string(xml_string):
return saml2.create_class_from_xml_string(PGPKeyPacket, xml_string)
class PGPDataType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:PGPDataType element """
c_tag = 'PGPDataType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}PGPKeyID'] = ('pgp_key_id', PGPKeyID)
c_children['{http://www.w3.org/2000/09/xmldsig#}PGPKeyPacket'] = ('pgp_key_packet', PGPKeyPacket)
c_cardinality['pgp_key_packet'] = {"min":0, "max":1}
c_child_order.extend(['pgp_key_id', 'pgp_key_packet'])
def __init__(self,
pgp_key_id=None,
pgp_key_packet=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.pgp_key_id=pgp_key_id
self.pgp_key_packet=pgp_key_packet
def pgp_data_type__from_string(xml_string):
return saml2.create_class_from_xml_string(PGPDataType_, xml_string)
class SPKISexp(SamlBase):
c_tag = 'SPKISexp'
c_namespace = NAMESPACE
c_value_type = {'base': 'base64Binary'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def spki_sexp_from_string(xml_string):
return saml2.create_class_from_xml_string(SPKISexp, xml_string)
class SPKIDataType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:SPKIDataType element """
c_tag = 'SPKIDataType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}SPKISexp'] = ('spki_sexp',
[SPKISexp])
c_cardinality['spki_sexp'] = {"min":1}
c_child_order.extend(['spki_sexp'])
def __init__(self,
spki_sexp=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.spki_sexp=spki_sexp or []
def spki_data_type__from_string(xml_string):
return saml2.create_class_from_xml_string(SPKIDataType_, xml_string)
class ObjectType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:ObjectType element """
c_tag = 'ObjectType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_attributes['Id'] = ('id', 'ID', False)
c_attributes['MimeType'] = ('mime_type', 'string', False)
c_attributes['Encoding'] = ('encoding', 'anyURI', False)
def __init__(self,
id=None,
mime_type=None,
encoding=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.id=id
self.mime_type=mime_type
self.encoding=encoding
def object_type__from_string(xml_string):
return saml2.create_class_from_xml_string(ObjectType_, xml_string)
class SignaturePropertyType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:SignaturePropertyType element """
c_tag = 'SignaturePropertyType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_attributes['Target'] = ('target', 'anyURI', True)
c_attributes['Id'] = ('id', 'ID', False)
def __init__(self,
target=None,
id=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.target=target
self.id=id
def signature_property_type__from_string(xml_string):
return saml2.create_class_from_xml_string(SignaturePropertyType_, xml_string)
class HMACOutputLengthType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:HMACOutputLengthType element """
c_tag = 'HMACOutputLengthType'
c_namespace = NAMESPACE
c_value_type = {'base': 'integer'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def hmac_output_length_type__from_string(xml_string):
return saml2.create_class_from_xml_string(HMACOutputLengthType_, xml_string)
class P(CryptoBinary_):
c_tag = 'P'
c_namespace = NAMESPACE
c_children = CryptoBinary_.c_children.copy()
c_attributes = CryptoBinary_.c_attributes.copy()
c_child_order = CryptoBinary_.c_child_order[:]
c_cardinality = CryptoBinary_.c_cardinality.copy()
def p_from_string(xml_string):
return saml2.create_class_from_xml_string(P, xml_string)
class Q(CryptoBinary_):
c_tag = 'Q'
c_namespace = NAMESPACE
c_children = CryptoBinary_.c_children.copy()
c_attributes = CryptoBinary_.c_attributes.copy()
c_child_order = CryptoBinary_.c_child_order[:]
c_cardinality = CryptoBinary_.c_cardinality.copy()
def q_from_string(xml_string):
return saml2.create_class_from_xml_string(Q, xml_string)
class G(CryptoBinary_):
c_tag = 'G'
c_namespace = NAMESPACE
c_children = CryptoBinary_.c_children.copy()
c_attributes = CryptoBinary_.c_attributes.copy()
c_child_order = CryptoBinary_.c_child_order[:]
c_cardinality = CryptoBinary_.c_cardinality.copy()
def g_from_string(xml_string):
return saml2.create_class_from_xml_string(G, xml_string)
class Y(CryptoBinary_):
c_tag = 'Y'
c_namespace = NAMESPACE
c_children = CryptoBinary_.c_children.copy()
c_attributes = CryptoBinary_.c_attributes.copy()
c_child_order = CryptoBinary_.c_child_order[:]
c_cardinality = CryptoBinary_.c_cardinality.copy()
def y_from_string(xml_string):
return saml2.create_class_from_xml_string(Y, xml_string)
class J(CryptoBinary_):
c_tag = 'J'
c_namespace = NAMESPACE
c_children = CryptoBinary_.c_children.copy()
c_attributes = CryptoBinary_.c_attributes.copy()
c_child_order = CryptoBinary_.c_child_order[:]
c_cardinality = CryptoBinary_.c_cardinality.copy()
def j_from_string(xml_string):
return saml2.create_class_from_xml_string(J, xml_string)
class Seed(CryptoBinary_):
c_tag = 'Seed'
c_namespace = NAMESPACE
c_children = CryptoBinary_.c_children.copy()
c_attributes = CryptoBinary_.c_attributes.copy()
c_child_order = CryptoBinary_.c_child_order[:]
c_cardinality = CryptoBinary_.c_cardinality.copy()
def seed_from_string(xml_string):
return saml2.create_class_from_xml_string(Seed, xml_string)
class PgenCounter(CryptoBinary_):
c_tag = 'PgenCounter'
c_namespace = NAMESPACE
c_children = CryptoBinary_.c_children.copy()
c_attributes = CryptoBinary_.c_attributes.copy()
c_child_order = CryptoBinary_.c_child_order[:]
c_cardinality = CryptoBinary_.c_cardinality.copy()
def pgen_counter_from_string(xml_string):
return saml2.create_class_from_xml_string(PgenCounter, xml_string)
class DSAKeyValueType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:DSAKeyValueType element """
c_tag = 'DSAKeyValueType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}P'] = ('p', P)
c_cardinality['p'] = {"min":0, "max":1}
c_children['{http://www.w3.org/2000/09/xmldsig#}Q'] = ('q', Q)
c_cardinality['q'] = {"min":0, "max":1}
c_children['{http://www.w3.org/2000/09/xmldsig#}G'] = ('g', G)
c_cardinality['g'] = {"min":0, "max":1}
c_children['{http://www.w3.org/2000/09/xmldsig#}Y'] = ('y', Y)
c_children['{http://www.w3.org/2000/09/xmldsig#}J'] = ('j', J)
c_cardinality['j'] = {"min":0, "max":1}
c_children['{http://www.w3.org/2000/09/xmldsig#}Seed'] = ('seed', Seed)
c_cardinality['seed'] = {"min":0, "max":1}
c_children['{http://www.w3.org/2000/09/xmldsig#}PgenCounter'] = ('pgen_counter',
PgenCounter)
c_cardinality['pgen_counter'] = {"min":0, "max":1}
c_child_order.extend(['p', 'q', 'g', 'y', 'j', 'seed', 'pgen_counter'])
def __init__(self,
p=None,
q=None,
g=None,
y=None,
j=None,
seed=None,
pgen_counter=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.p=p
self.q=q
self.g=g
self.y=y
self.j=j
self.seed=seed
self.pgen_counter=pgen_counter
def dsa_key_value_type__from_string(xml_string):
return saml2.create_class_from_xml_string(DSAKeyValueType_, xml_string)
class Modulus(CryptoBinary_):
c_tag = 'Modulus'
c_namespace = NAMESPACE
c_children = CryptoBinary_.c_children.copy()
c_attributes = CryptoBinary_.c_attributes.copy()
c_child_order = CryptoBinary_.c_child_order[:]
c_cardinality = CryptoBinary_.c_cardinality.copy()
def modulus_from_string(xml_string):
return saml2.create_class_from_xml_string(Modulus, xml_string)
class Exponent(CryptoBinary_):
c_tag = 'Exponent'
c_namespace = NAMESPACE
c_children = CryptoBinary_.c_children.copy()
c_attributes = CryptoBinary_.c_attributes.copy()
c_child_order = CryptoBinary_.c_child_order[:]
c_cardinality = CryptoBinary_.c_cardinality.copy()
def exponent_from_string(xml_string):
return saml2.create_class_from_xml_string(Exponent, xml_string)
class RSAKeyValueType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:RSAKeyValueType element """
c_tag = 'RSAKeyValueType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}Modulus'] = ('modulus',
Modulus)
c_children['{http://www.w3.org/2000/09/xmldsig#}Exponent'] = ('exponent',
Exponent)
c_child_order.extend(['modulus', 'exponent'])
def __init__(self,
modulus=None,
exponent=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.modulus=modulus
self.exponent=exponent
def rsa_key_value_type__from_string(xml_string):
return saml2.create_class_from_xml_string(RSAKeyValueType_, xml_string)
class SignatureValue(SignatureValueType_):
"""The http://www.w3.org/2000/09/xmldsig#:SignatureValue element """
c_tag = 'SignatureValue'
c_namespace = NAMESPACE
c_children = SignatureValueType_.c_children.copy()
c_attributes = SignatureValueType_.c_attributes.copy()
c_child_order = SignatureValueType_.c_child_order[:]
c_cardinality = SignatureValueType_.c_cardinality.copy()
def signature_value_from_string(xml_string):
return saml2.create_class_from_xml_string(SignatureValue, xml_string)
class CanonicalizationMethod(CanonicalizationMethodType_):
"""The http://www.w3.org/2000/09/xmldsig#:CanonicalizationMethod element """
c_tag = 'CanonicalizationMethod'
c_namespace = NAMESPACE
c_children = CanonicalizationMethodType_.c_children.copy()
c_attributes = CanonicalizationMethodType_.c_attributes.copy()
c_child_order = CanonicalizationMethodType_.c_child_order[:]
c_cardinality = CanonicalizationMethodType_.c_cardinality.copy()
def canonicalization_method_from_string(xml_string):
return saml2.create_class_from_xml_string(CanonicalizationMethod,
xml_string)
class HMACOutputLength(HMACOutputLengthType_):
c_tag = 'HMACOutputLength'
c_namespace = NAMESPACE
c_children = HMACOutputLengthType_.c_children.copy()
c_attributes = HMACOutputLengthType_.c_attributes.copy()
c_child_order = HMACOutputLengthType_.c_child_order[:]
c_cardinality = HMACOutputLengthType_.c_cardinality.copy()
def hmac_output_length_from_string(xml_string):
return saml2.create_class_from_xml_string(HMACOutputLength, xml_string)
class SignatureMethodType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:SignatureMethodType element """
c_tag = 'SignatureMethodType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}HMACOutputLength'] = ('hmac_output_length', HMACOutputLength)
c_cardinality['hmac_output_length'] = {"min":0, "max":1}
c_attributes['Algorithm'] = ('algorithm', 'anyURI', True)
c_child_order.extend(['hmac_output_length'])
def __init__(self,
hmac_output_length=None,
algorithm=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.hmac_output_length=hmac_output_length
self.algorithm=algorithm
def signature_method_type__from_string(xml_string):
return saml2.create_class_from_xml_string(SignatureMethodType_, xml_string)
class Transform(TransformType_):
"""The http://www.w3.org/2000/09/xmldsig#:Transform element """
c_tag = 'Transform'
c_namespace = NAMESPACE
c_children = TransformType_.c_children.copy()
c_attributes = TransformType_.c_attributes.copy()
c_child_order = TransformType_.c_child_order[:]
c_cardinality = TransformType_.c_cardinality.copy()
def transform_from_string(xml_string):
return saml2.create_class_from_xml_string(Transform, xml_string)
class DigestMethod(DigestMethodType_):
"""The http://www.w3.org/2000/09/xmldsig#:DigestMethod element """
c_tag = 'DigestMethod'
c_namespace = NAMESPACE
c_children = DigestMethodType_.c_children.copy()
c_attributes = DigestMethodType_.c_attributes.copy()
c_child_order = DigestMethodType_.c_child_order[:]
c_cardinality = DigestMethodType_.c_cardinality.copy()
def digest_method_from_string(xml_string):
return saml2.create_class_from_xml_string(DigestMethod, xml_string)
class DigestValue(DigestValueType_):
"""The http://www.w3.org/2000/09/xmldsig#:DigestValue element """
c_tag = 'DigestValue'
c_namespace = NAMESPACE
c_children = DigestValueType_.c_children.copy()
c_attributes = DigestValueType_.c_attributes.copy()
c_child_order = DigestValueType_.c_child_order[:]
c_cardinality = DigestValueType_.c_cardinality.copy()
def digest_value_from_string(xml_string):
return saml2.create_class_from_xml_string(DigestValue, xml_string)
class X509IssuerSerial(X509IssuerSerialType_):
c_tag = 'X509IssuerSerial'
c_namespace = NAMESPACE
c_children = X509IssuerSerialType_.c_children.copy()
c_attributes = X509IssuerSerialType_.c_attributes.copy()
c_child_order = X509IssuerSerialType_.c_child_order[:]
c_cardinality = X509IssuerSerialType_.c_cardinality.copy()
def x509_issuer_serial_from_string(xml_string):
return saml2.create_class_from_xml_string(X509IssuerSerial, xml_string)
class X509SKI(SamlBase):
c_tag = 'X509SKI'
c_namespace = NAMESPACE
c_value_type = {'base': 'base64Binary'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def x509_ski_from_string(xml_string):
return saml2.create_class_from_xml_string(X509SKI, xml_string)
class X509SubjectName(SamlBase):
c_tag = 'X509SubjectName'
c_namespace = NAMESPACE
c_value_type = {'base': 'string'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def x509_subject_name_from_string(xml_string):
return saml2.create_class_from_xml_string(X509SubjectName, xml_string)
class X509Certificate(SamlBase):
c_tag = 'X509Certificate'
c_namespace = NAMESPACE
c_value_type = {'base': 'base64Binary'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def x509_certificate_from_string(xml_string):
return saml2.create_class_from_xml_string(X509Certificate, xml_string)
class X509CRL(SamlBase):
c_tag = 'X509CRL'
c_namespace = NAMESPACE
c_value_type = {'base': 'base64Binary'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def x509_crl_from_string(xml_string):
return saml2.create_class_from_xml_string(X509CRL, xml_string)
class X509DataType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:X509DataType element """
c_tag = 'X509DataType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}X509IssuerSerial'] = ('x509_issuer_serial',
X509IssuerSerial)
c_cardinality['x509_issuer_serial'] = {"min":0, "max":1}
c_children['{http://www.w3.org/2000/09/xmldsig#}X509SKI'] = ('x509_ski',
X509SKI)
c_cardinality['x509_ski'] = {"min":0, "max":1}
c_children['{http://www.w3.org/2000/09/xmldsig#}X509SubjectName'] = ('x509_subject_name',
X509SubjectName)
c_cardinality['x509_subject_name'] = {"min":0, "max":1}
c_children['{http://www.w3.org/2000/09/xmldsig#}X509Certificate'] = ('x509_certificate',
X509Certificate)
c_cardinality['x509_certificate'] = {"min":0, "max":1}
c_children['{http://www.w3.org/2000/09/xmldsig#}X509CRL'] = ('x509_crl',
X509CRL)
c_cardinality['x509_crl'] = {"min":0, "max":1}
c_child_order.extend(['x509_issuer_serial', 'x509_ski', 'x509_subject_name',
'x509_certificate', 'x509_crl'])
def __init__(self,
x509_issuer_serial=None,
x509_ski=None,
x509_subject_name=None,
x509_certificate=None,
x509_crl=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.x509_issuer_serial=x509_issuer_serial
self.x509_ski=x509_ski
self.x509_subject_name=x509_subject_name
self.x509_certificate=x509_certificate
self.x509_crl=x509_crl
def x509_data_type__from_string(xml_string):
return saml2.create_class_from_xml_string(X509DataType_, xml_string)
class PGPData(PGPDataType_):
"""The http://www.w3.org/2000/09/xmldsig#:PGPData element """
c_tag = 'PGPData'
c_namespace = NAMESPACE
c_children = PGPDataType_.c_children.copy()
c_attributes = PGPDataType_.c_attributes.copy()
c_child_order = PGPDataType_.c_child_order[:]
c_cardinality = PGPDataType_.c_cardinality.copy()
def pgp_data_from_string(xml_string):
return saml2.create_class_from_xml_string(PGPData, xml_string)
class SPKIData(SPKIDataType_):
"""The http://www.w3.org/2000/09/xmldsig#:SPKIData element """
c_tag = 'SPKIData'
c_namespace = NAMESPACE
c_children = SPKIDataType_.c_children.copy()
c_attributes = SPKIDataType_.c_attributes.copy()
c_child_order = SPKIDataType_.c_child_order[:]
c_cardinality = SPKIDataType_.c_cardinality.copy()
def spki_data_from_string(xml_string):
return saml2.create_class_from_xml_string(SPKIData, xml_string)
class Object(ObjectType_):
"""The http://www.w3.org/2000/09/xmldsig#:Object element """
c_tag = 'Object'
c_namespace = NAMESPACE
c_children = ObjectType_.c_children.copy()
c_attributes = ObjectType_.c_attributes.copy()
c_child_order = ObjectType_.c_child_order[:]
c_cardinality = ObjectType_.c_cardinality.copy()
def object_from_string(xml_string):
return saml2.create_class_from_xml_string(Object, xml_string)
class SignatureProperty(SignaturePropertyType_):
"""The http://www.w3.org/2000/09/xmldsig#:SignatureProperty element """
c_tag = 'SignatureProperty'
c_namespace = NAMESPACE
c_children = SignaturePropertyType_.c_children.copy()
c_attributes = SignaturePropertyType_.c_attributes.copy()
c_child_order = SignaturePropertyType_.c_child_order[:]
c_cardinality = SignaturePropertyType_.c_cardinality.copy()
def signature_property_from_string(xml_string):
return saml2.create_class_from_xml_string(SignatureProperty, xml_string)
class DSAKeyValue(DSAKeyValueType_):
"""The http://www.w3.org/2000/09/xmldsig#:DSAKeyValue element """
c_tag = 'DSAKeyValue'
c_namespace = NAMESPACE
c_children = DSAKeyValueType_.c_children.copy()
c_attributes = DSAKeyValueType_.c_attributes.copy()
c_child_order = DSAKeyValueType_.c_child_order[:]
c_cardinality = DSAKeyValueType_.c_cardinality.copy()
def dsa_key_value_from_string(xml_string):
return saml2.create_class_from_xml_string(DSAKeyValue, xml_string)
class RSAKeyValue(RSAKeyValueType_):
"""The http://www.w3.org/2000/09/xmldsig#:RSAKeyValue element """
c_tag = 'RSAKeyValue'
c_namespace = NAMESPACE
c_children = RSAKeyValueType_.c_children.copy()
c_attributes = RSAKeyValueType_.c_attributes.copy()
c_child_order = RSAKeyValueType_.c_child_order[:]
c_cardinality = RSAKeyValueType_.c_cardinality.copy()
def rsa_key_value_from_string(xml_string):
return saml2.create_class_from_xml_string(RSAKeyValue, xml_string)
class SignatureMethod(SignatureMethodType_):
"""The http://www.w3.org/2000/09/xmldsig#:SignatureMethod element """
c_tag = 'SignatureMethod'
c_namespace = NAMESPACE
c_children = SignatureMethodType_.c_children.copy()
c_attributes = SignatureMethodType_.c_attributes.copy()
c_child_order = SignatureMethodType_.c_child_order[:]
c_cardinality = SignatureMethodType_.c_cardinality.copy()
def signature_method_from_string(xml_string):
return saml2.create_class_from_xml_string(SignatureMethod, xml_string)
class TransformsType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:TransformsType element """
c_tag = 'TransformsType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}Transform'] = ('transform',
[Transform])
c_cardinality['transform'] = {"min":1}
c_child_order.extend(['transform'])
def __init__(self,
transform=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.transform=transform or []
def transforms_type__from_string(xml_string):
return saml2.create_class_from_xml_string(TransformsType_, xml_string)
class KeyValueType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:KeyValueType element """
c_tag = 'KeyValueType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}DSAKeyValue'] = ('dsa_key_value',
DSAKeyValue)
c_cardinality['dsa_key_value'] = {"min":0, "max":1}
c_children['{http://www.w3.org/2000/09/xmldsig#}RSAKeyValue'] = ('rsa_key_value',
RSAKeyValue)
c_cardinality['rsa_key_value'] = {"min":0, "max":1}
c_child_order.extend(['dsa_key_value', 'rsa_key_value'])
def __init__(self,
dsa_key_value=None,
rsa_key_value=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.dsa_key_value=dsa_key_value
self.rsa_key_value=rsa_key_value
def key_value_type__from_string(xml_string):
return saml2.create_class_from_xml_string(KeyValueType_, xml_string)
class X509Data(X509DataType_):
"""The http://www.w3.org/2000/09/xmldsig#:X509Data element """
c_tag = 'X509Data'
c_namespace = NAMESPACE
c_children = X509DataType_.c_children.copy()
c_attributes = X509DataType_.c_attributes.copy()
c_child_order = X509DataType_.c_child_order[:]
c_cardinality = X509DataType_.c_cardinality.copy()
def x509_data_from_string(xml_string):
return saml2.create_class_from_xml_string(X509Data, xml_string)
class SignaturePropertiesType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:SignaturePropertiesType element """
c_tag = 'SignaturePropertiesType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}SignatureProperty'] = ('signature_property', [SignatureProperty])
c_cardinality['signature_property'] = {"min":1}
c_attributes['Id'] = ('id', 'ID', False)
c_child_order.extend(['signature_property'])
def __init__(self,
signature_property=None,
id=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.signature_property=signature_property or []
self.id=id
def signature_properties_type__from_string(xml_string):
return saml2.create_class_from_xml_string(SignaturePropertiesType_, xml_string)
class Transforms(TransformsType_):
"""The http://www.w3.org/2000/09/xmldsig#:Transforms element """
c_tag = 'Transforms'
c_namespace = NAMESPACE
c_children = TransformsType_.c_children.copy()
c_attributes = TransformsType_.c_attributes.copy()
c_child_order = TransformsType_.c_child_order[:]
c_cardinality = TransformsType_.c_cardinality.copy()
def transforms_from_string(xml_string):
return saml2.create_class_from_xml_string(Transforms, xml_string)
class KeyValue(KeyValueType_):
"""The http://www.w3.org/2000/09/xmldsig#:KeyValue element """
c_tag = 'KeyValue'
c_namespace = NAMESPACE
c_children = KeyValueType_.c_children.copy()
c_attributes = KeyValueType_.c_attributes.copy()
c_child_order = KeyValueType_.c_child_order[:]
c_cardinality = KeyValueType_.c_cardinality.copy()
def key_value_from_string(xml_string):
return saml2.create_class_from_xml_string(KeyValue, xml_string)
class RetrievalMethodType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:RetrievalMethodType element """
c_tag = 'RetrievalMethodType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}Transforms'] = ('transforms',
Transforms)
c_cardinality['transforms'] = {"min":0, "max":1}
c_attributes['URI'] = ('uri', 'anyURI', False)
c_attributes['Type'] = ('type', 'anyURI', False)
c_child_order.extend(['transforms'])
def __init__(self,
transforms=None,
uri=None,
type=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.transforms=transforms
self.uri=uri
self.type=type
def retrieval_method_type__from_string(xml_string):
return saml2.create_class_from_xml_string(RetrievalMethodType_, xml_string)
class SignatureProperties(SignaturePropertiesType_):
"""The http://www.w3.org/2000/09/xmldsig#:SignatureProperties element """
c_tag = 'SignatureProperties'
c_namespace = NAMESPACE
c_children = SignaturePropertiesType_.c_children.copy()
c_attributes = SignaturePropertiesType_.c_attributes.copy()
c_child_order = SignaturePropertiesType_.c_child_order[:]
c_cardinality = SignaturePropertiesType_.c_cardinality.copy()
def signature_properties_from_string(xml_string):
return saml2.create_class_from_xml_string(SignatureProperties, xml_string)
class ReferenceType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:ReferenceType element """
c_tag = 'ReferenceType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}Transforms'] = ('transforms',
Transforms)
c_cardinality['transforms'] = {"min":0, "max":1}
c_children['{http://www.w3.org/2000/09/xmldsig#}DigestMethod'] = ('digest_method',
DigestMethod)
c_children['{http://www.w3.org/2000/09/xmldsig#}DigestValue'] = ('digest_value',
DigestValue)
c_attributes['Id'] = ('id', 'ID', False)
c_attributes['URI'] = ('uri', 'anyURI', False)
c_attributes['Type'] = ('type', 'anyURI', False)
c_child_order.extend(['transforms', 'digest_method', 'digest_value'])
def __init__(self,
transforms=None,
digest_method=None,
digest_value=None,
id=None,
uri=None,
type=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.transforms=transforms
self.digest_method=digest_method
self.digest_value=digest_value
self.id=id
self.uri=uri
self.type=type
def reference_type__from_string(xml_string):
return saml2.create_class_from_xml_string(ReferenceType_, xml_string)
class RetrievalMethod(RetrievalMethodType_):
"""The http://www.w3.org/2000/09/xmldsig#:RetrievalMethod element """
c_tag = 'RetrievalMethod'
c_namespace = NAMESPACE
c_children = RetrievalMethodType_.c_children.copy()
c_attributes = RetrievalMethodType_.c_attributes.copy()
c_child_order = RetrievalMethodType_.c_child_order[:]
c_cardinality = RetrievalMethodType_.c_cardinality.copy()
def retrieval_method_from_string(xml_string):
return saml2.create_class_from_xml_string(RetrievalMethod, xml_string)
class Reference(ReferenceType_):
"""The http://www.w3.org/2000/09/xmldsig#:Reference element """
c_tag = 'Reference'
c_namespace = NAMESPACE
c_children = ReferenceType_.c_children.copy()
c_attributes = ReferenceType_.c_attributes.copy()
c_child_order = ReferenceType_.c_child_order[:]
c_cardinality = ReferenceType_.c_cardinality.copy()
def reference_from_string(xml_string):
return saml2.create_class_from_xml_string(Reference, xml_string)
#import xmlenc as enc
class KeyInfoType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:KeyInfoType element """
c_tag = 'KeyInfoType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}KeyName'] = ('key_name',
[KeyName])
c_cardinality['key_name'] = {"min":0}
c_children['{http://www.w3.org/2000/09/xmldsig#}KeyValue'] = ('key_value',
[KeyValue])
c_cardinality['key_value'] = {"min":0}
c_children['{http://www.w3.org/2000/09/xmldsig#}RetrievalMethod'] = (
'retrieval_method', [RetrievalMethod])
c_cardinality['retrieval_method'] = {"min":0}
c_children['{http://www.w3.org/2000/09/xmldsig#}X509Data'] = ('x509_data',
[X509Data])
c_cardinality['x509_data'] = {"min":0}
c_children['{http://www.w3.org/2000/09/xmldsig#}PGPData'] = ('pgp_data',
[PGPData])
c_cardinality['pgp_data'] = {"min":0}
c_children['{http://www.w3.org/2000/09/xmldsig#}SPKIData'] = ('spki_data',
[SPKIData])
c_cardinality['spki_data'] = {"min":0}
c_children['{http://www.w3.org/2000/09/xmldsig#}MgmtData'] = ('mgmt_data',
[MgmtData])
c_cardinality['mgmt_data'] = {"min":0}
    c_children['{http://www.w3.org/2001/04/xmlenc#}EncryptedKey'] = (
        'encrypted_key', None)  # EncryptedKey lives in the XML Encryption namespace
    c_cardinality['encrypted_key'] = {"min":0, "max":1}
c_attributes['Id'] = ('id', 'ID', False)
c_child_order.extend(['key_name', 'key_value', 'retrieval_method',
'x509_data', 'pgp_data', 'spki_data', 'mgmt_data',
'encrypted_key'])
def __init__(self,
key_name=None,
key_value=None,
retrieval_method=None,
x509_data=None,
pgp_data=None,
spki_data=None,
mgmt_data=None,
encrypted_key=None,
id=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes
)
self.key_name = key_name or []
self.key_value = key_value or []
self.retrieval_method = retrieval_method or []
self.x509_data = x509_data or []
self.pgp_data = pgp_data or []
self.spki_data = spki_data or []
self.mgmt_data = mgmt_data or []
self.encrypted_key = encrypted_key
self.id = id
def key_info_type__from_string(xml_string):
return saml2.create_class_from_xml_string(KeyInfoType_, xml_string)
class ManifestType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:ManifestType element """
c_tag = 'ManifestType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}Reference'] = ('reference',
[Reference])
c_cardinality['reference'] = {"min":1}
c_attributes['Id'] = ('id', 'ID', False)
c_child_order.extend(['reference'])
def __init__(self,
reference=None,
id=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.reference=reference or []
self.id=id
def manifest_type__from_string(xml_string):
return saml2.create_class_from_xml_string(ManifestType_, xml_string)
class SignedInfoType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:SignedInfoType element """
c_tag = 'SignedInfoType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}CanonicalizationMethod'] = ('canonicalization_method', CanonicalizationMethod)
c_children['{http://www.w3.org/2000/09/xmldsig#}SignatureMethod'] = ('signature_method',
SignatureMethod)
c_children['{http://www.w3.org/2000/09/xmldsig#}Reference'] = ('reference',
[Reference])
c_cardinality['reference'] = {"min":1}
c_attributes['Id'] = ('id', 'ID', False)
c_child_order.extend(['canonicalization_method', 'signature_method',
'reference'])
def __init__(self,
canonicalization_method=None,
signature_method=None,
reference=None,
id=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.canonicalization_method=canonicalization_method
self.signature_method=signature_method
self.reference=reference or []
self.id=id
def signed_info_type__from_string(xml_string):
return saml2.create_class_from_xml_string(SignedInfoType_, xml_string)
class KeyInfo(KeyInfoType_):
"""The http://www.w3.org/2000/09/xmldsig#:KeyInfo element """
c_tag = 'KeyInfo'
c_namespace = NAMESPACE
c_children = KeyInfoType_.c_children.copy()
c_attributes = KeyInfoType_.c_attributes.copy()
c_child_order = KeyInfoType_.c_child_order[:]
c_cardinality = KeyInfoType_.c_cardinality.copy()
def key_info_from_string(xml_string):
return saml2.create_class_from_xml_string(KeyInfo, xml_string)
class Manifest(ManifestType_):
"""The http://www.w3.org/2000/09/xmldsig#:Manifest element """
c_tag = 'Manifest'
c_namespace = NAMESPACE
c_children = ManifestType_.c_children.copy()
c_attributes = ManifestType_.c_attributes.copy()
c_child_order = ManifestType_.c_child_order[:]
c_cardinality = ManifestType_.c_cardinality.copy()
def manifest_from_string(xml_string):
return saml2.create_class_from_xml_string(Manifest, xml_string)
class SignedInfo(SignedInfoType_):
"""The http://www.w3.org/2000/09/xmldsig#:SignedInfo element """
c_tag = 'SignedInfo'
c_namespace = NAMESPACE
c_children = SignedInfoType_.c_children.copy()
c_attributes = SignedInfoType_.c_attributes.copy()
c_child_order = SignedInfoType_.c_child_order[:]
c_cardinality = SignedInfoType_.c_cardinality.copy()
def signed_info_from_string(xml_string):
return saml2.create_class_from_xml_string(SignedInfo, xml_string)
class SignatureType_(SamlBase):
"""The http://www.w3.org/2000/09/xmldsig#:SignatureType element """
c_tag = 'SignatureType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://www.w3.org/2000/09/xmldsig#}SignedInfo'] = ('signed_info',
SignedInfo)
c_children['{http://www.w3.org/2000/09/xmldsig#}SignatureValue'] = ('signature_value', SignatureValue)
c_children['{http://www.w3.org/2000/09/xmldsig#}KeyInfo'] = ('key_info',
KeyInfo)
c_cardinality['key_info'] = {"min":0, "max":1}
c_children['{http://www.w3.org/2000/09/xmldsig#}Object'] = ('object',
[Object])
c_cardinality['object'] = {"min":0}
c_attributes['Id'] = ('id', 'ID', False)
c_child_order.extend(['signed_info', 'signature_value', 'key_info',
'object'])
def __init__(self,
signed_info=None,
signature_value=None,
key_info=None,
object=None,
id=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.signed_info=signed_info
self.signature_value=signature_value
self.key_info=key_info
self.object=object or []
self.id=id
def signature_type__from_string(xml_string):
return saml2.create_class_from_xml_string(SignatureType_, xml_string)
class Signature(SignatureType_):
"""The http://www.w3.org/2000/09/xmldsig#:Signature element """
c_tag = 'Signature'
c_namespace = NAMESPACE
c_children = SignatureType_.c_children.copy()
c_attributes = SignatureType_.c_attributes.copy()
c_child_order = SignatureType_.c_child_order[:]
c_cardinality = SignatureType_.c_cardinality.copy()
def signature_from_string(xml_string):
return saml2.create_class_from_xml_string(Signature, xml_string)
ELEMENT_FROM_STRING = {
CryptoBinary_.c_tag: crypto_binary__from_string,
Signature.c_tag: signature_from_string,
SignatureType_.c_tag: signature_type__from_string,
SignatureValue.c_tag: signature_value_from_string,
SignatureValueType_.c_tag: signature_value_type__from_string,
SignedInfo.c_tag: signed_info_from_string,
SignedInfoType_.c_tag: signed_info_type__from_string,
CanonicalizationMethod.c_tag: canonicalization_method_from_string,
CanonicalizationMethodType_.c_tag: canonicalization_method_type__from_string,
SignatureMethod.c_tag: signature_method_from_string,
SignatureMethodType_.c_tag: signature_method_type__from_string,
Reference.c_tag: reference_from_string,
ReferenceType_.c_tag: reference_type__from_string,
Transforms.c_tag: transforms_from_string,
TransformsType_.c_tag: transforms_type__from_string,
Transform.c_tag: transform_from_string,
TransformType_.c_tag: transform_type__from_string,
DigestMethod.c_tag: digest_method_from_string,
DigestMethodType_.c_tag: digest_method_type__from_string,
DigestValue.c_tag: digest_value_from_string,
DigestValueType_.c_tag: digest_value_type__from_string,
KeyInfo.c_tag: key_info_from_string,
KeyInfoType_.c_tag: key_info_type__from_string,
KeyName.c_tag: key_name_from_string,
MgmtData.c_tag: mgmt_data_from_string,
KeyValue.c_tag: key_value_from_string,
KeyValueType_.c_tag: key_value_type__from_string,
RetrievalMethod.c_tag: retrieval_method_from_string,
RetrievalMethodType_.c_tag: retrieval_method_type__from_string,
X509Data.c_tag: x509_data_from_string,
X509DataType_.c_tag: x509_data_type__from_string,
X509IssuerSerialType_.c_tag: x509_issuer_serial_type__from_string,
PGPData.c_tag: pgp_data_from_string,
PGPDataType_.c_tag: pgp_data_type__from_string,
SPKIData.c_tag: spki_data_from_string,
SPKIDataType_.c_tag: spki_data_type__from_string,
Object.c_tag: object_from_string,
ObjectType_.c_tag: object_type__from_string,
Manifest.c_tag: manifest_from_string,
ManifestType_.c_tag: manifest_type__from_string,
SignatureProperties.c_tag: signature_properties_from_string,
SignaturePropertiesType_.c_tag: signature_properties_type__from_string,
SignatureProperty.c_tag: signature_property_from_string,
SignaturePropertyType_.c_tag: signature_property_type__from_string,
HMACOutputLengthType_.c_tag: hmac_output_length_type__from_string,
DSAKeyValue.c_tag: dsa_key_value_from_string,
DSAKeyValueType_.c_tag: dsa_key_value_type__from_string,
RSAKeyValue.c_tag: rsa_key_value_from_string,
RSAKeyValueType_.c_tag: rsa_key_value_type__from_string,
TransformType_XPath.c_tag: transform_type__x_path_from_string,
X509IssuerName.c_tag: x509_issuer_name_from_string,
X509SerialNumber.c_tag: x509_serial_number_from_string,
PGPKeyID.c_tag: pgp_key_id_from_string,
PGPKeyPacket.c_tag: pgp_key_packet_from_string,
SPKISexp.c_tag: spki_sexp_from_string,
P.c_tag: p_from_string,
Q.c_tag: q_from_string,
G.c_tag: g_from_string,
Y.c_tag: y_from_string,
J.c_tag: j_from_string,
Seed.c_tag: seed_from_string,
PgenCounter.c_tag: pgen_counter_from_string,
Modulus.c_tag: modulus_from_string,
Exponent.c_tag: exponent_from_string,
HMACOutputLength.c_tag: hmac_output_length_from_string,
X509IssuerSerial.c_tag: x509_issuer_serial_from_string,
X509SKI.c_tag: x509_ski_from_string,
X509SubjectName.c_tag: x509_subject_name_from_string,
X509Certificate.c_tag: x509_certificate_from_string,
X509CRL.c_tag: x509_crl_from_string,
}
ELEMENT_BY_TAG = {
'CryptoBinary': CryptoBinary_,
'Signature': Signature,
'SignatureType': SignatureType_,
'SignatureValue': SignatureValue,
'SignatureValueType': SignatureValueType_,
'SignedInfo': SignedInfo,
'SignedInfoType': SignedInfoType_,
'CanonicalizationMethod': CanonicalizationMethod,
'CanonicalizationMethodType': CanonicalizationMethodType_,
'SignatureMethod': SignatureMethod,
'SignatureMethodType': SignatureMethodType_,
'Reference': Reference,
'ReferenceType': ReferenceType_,
'Transforms': Transforms,
'TransformsType': TransformsType_,
'Transform': Transform,
'TransformType': TransformType_,
'DigestMethod': DigestMethod,
'DigestMethodType': DigestMethodType_,
'DigestValue': DigestValue,
'DigestValueType': DigestValueType_,
'KeyInfo': KeyInfo,
'KeyInfoType': KeyInfoType_,
'KeyName': KeyName,
'MgmtData': MgmtData,
'KeyValue': KeyValue,
'KeyValueType': KeyValueType_,
'RetrievalMethod': RetrievalMethod,
'RetrievalMethodType': RetrievalMethodType_,
'X509Data': X509Data,
'X509DataType': X509DataType_,
'X509IssuerSerialType': X509IssuerSerialType_,
'PGPData': PGPData,
'PGPDataType': PGPDataType_,
'SPKIData': SPKIData,
'SPKIDataType': SPKIDataType_,
'Object': Object,
'ObjectType': ObjectType_,
'Manifest': Manifest,
'ManifestType': ManifestType_,
'SignatureProperties': SignatureProperties,
'SignaturePropertiesType': SignaturePropertiesType_,
'SignatureProperty': SignatureProperty,
'SignaturePropertyType': SignaturePropertyType_,
'HMACOutputLengthType': HMACOutputLengthType_,
'DSAKeyValue': DSAKeyValue,
'DSAKeyValueType': DSAKeyValueType_,
'RSAKeyValue': RSAKeyValue,
'RSAKeyValueType': RSAKeyValueType_,
'XPath': TransformType_XPath,
'X509IssuerName': X509IssuerName,
'X509SerialNumber': X509SerialNumber,
'PGPKeyID': PGPKeyID,
'PGPKeyPacket': PGPKeyPacket,
'SPKISexp': SPKISexp,
'P': P,
'Q': Q,
'G': G,
'Y': Y,
'J': J,
'Seed': Seed,
'PgenCounter': PgenCounter,
'Modulus': Modulus,
'Exponent': Exponent,
'HMACOutputLength': HMACOutputLength,
'X509IssuerSerial': X509IssuerSerial,
'X509SKI': X509SKI,
'X509SubjectName': X509SubjectName,
'X509Certificate': X509Certificate,
'X509CRL': X509CRL,
}
def factory(tag, **kwargs):
return ELEMENT_BY_TAG[tag](**kwargs)
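# Hedged usage sketch (illustrative, not part of the generated bindings): the
# factory builds any ds: element from ELEMENT_BY_TAG. The import path below
# assumes this module is saml2.xmldsig; the algorithm URI is the standard
# SHA-256 digest identifier.
#
#   from saml2 import xmldsig as ds
#   digest = ds.DigestMethod(algorithm='http://www.w3.org/2001/04/xmlenc#sha256')
#   ref = ds.factory('Reference', uri='#assertion-id', digest_method=digest)
#   signed_info = ds.factory('SignedInfo', reference=[ref])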
|
fusion809/fusion809.github.io-old | refs/heads/master | vendor/bundle/ruby/2.2.0/gems/pygments.rb-0.6.3/vendor/simplejson/simplejson/tests/test_item_sort_key.py | 60 | from unittest import TestCase
import simplejson as json
from operator import itemgetter
class TestItemSortKey(TestCase):
def test_simple_first(self):
a = {'a': 1, 'c': 5, 'jack': 'jill', 'pick': 'axe', 'array': [1, 5, 6, 9], 'tuple': (83, 12, 3), 'crate': 'dog', 'zeak': 'oh'}
        self.assertEqual(
'{"a": 1, "c": 5, "crate": "dog", "jack": "jill", "pick": "axe", "zeak": "oh", "array": [1, 5, 6, 9], "tuple": [83, 12, 3]}',
json.dumps(a, item_sort_key=json.simple_first))
def test_case(self):
a = {'a': 1, 'c': 5, 'Jack': 'jill', 'pick': 'axe', 'Array': [1, 5, 6, 9], 'tuple': (83, 12, 3), 'crate': 'dog', 'zeak': 'oh'}
        self.assertEqual(
'{"Array": [1, 5, 6, 9], "Jack": "jill", "a": 1, "c": 5, "crate": "dog", "pick": "axe", "tuple": [83, 12, 3], "zeak": "oh"}',
json.dumps(a, item_sort_key=itemgetter(0)))
        self.assertEqual(
'{"a": 1, "Array": [1, 5, 6, 9], "c": 5, "crate": "dog", "Jack": "jill", "pick": "axe", "tuple": [83, 12, 3], "zeak": "oh"}',
json.dumps(a, item_sort_key=lambda kv: kv[0].lower()))
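    def test_sort_by_value(self):
        # Illustrative addition, not part of the original suite: item_sort_key
        # accepts any callable over (key, value) pairs, e.g. sorting by value.
        a = {'b': 2, 'a': 1}
        self.assertEqual(
            '{"a": 1, "b": 2}',
            json.dumps(a, item_sort_key=lambda kv: kv[1]))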
|
bop/rango | refs/heads/master | lib/python2.7/site-packages/django/contrib/gis/db/backends/oracle/compiler.py | 148 | from django.contrib.gis.db.models.sql.compiler import GeoSQLCompiler as BaseGeoSQLCompiler
from django.db.backends.oracle import compiler
SQLCompiler = compiler.SQLCompiler
class GeoSQLCompiler(BaseGeoSQLCompiler, SQLCompiler):
pass
class SQLInsertCompiler(compiler.SQLInsertCompiler, GeoSQLCompiler):
pass
class SQLDeleteCompiler(compiler.SQLDeleteCompiler, GeoSQLCompiler):
pass
class SQLUpdateCompiler(compiler.SQLUpdateCompiler, GeoSQLCompiler):
pass
class SQLAggregateCompiler(compiler.SQLAggregateCompiler, GeoSQLCompiler):
pass
class SQLDateCompiler(compiler.SQLDateCompiler, GeoSQLCompiler):
pass
|
PalmBeachPost/panda | refs/heads/1.2.0 | panda/tests/test_solr.py | 6 | #!/usr/bin/env python
import datetime
from django.test import TestCase
from panda import solr as solrjson
class TestSolrJSONEncoder(TestCase):
def test_datetime(self):
v = { 'datetime': datetime.datetime(2012, 4, 11, 11, 3, 0) }
self.assertEqual(solrjson.dumps(v), '{"datetime": "2012-04-11T11:03:00Z"}')
def test_date(self):
v = { 'date': datetime.date(2012, 4, 11) }
self.assertEqual(solrjson.dumps(v), '{"date": "2012-04-11"}')
def test_time(self):
v = { 'time': datetime.time(11, 3, 0) }
self.assertEqual(solrjson.dumps(v), '{"time": "11:03:00"}')
def test_int(self):
v = { 'int': 123 }
self.assertEqual(solrjson.dumps(v), '{"int": 123}')
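    def test_nested(self):
        # Illustrative addition (assumes the encoder is implemented as a json
        # default hook, which is applied at any nesting depth).
        v = { 'meta': { 'when': datetime.date(2012, 4, 11) } }
        self.assertEqual(solrjson.dumps(v), '{"meta": {"when": "2012-04-11"}}')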
|
PetePriority/home-assistant | refs/heads/dev | homeassistant/components/sensor/srp_energy.py | 4 | """
Platform for retrieving energy data from SRP.
For more details about this platform, please refer to the documentation
https://home-assistant.io/components/sensor.srp_energy/
"""
from datetime import datetime, timedelta
import logging
from requests.exceptions import (
ConnectionError as ConnectError, HTTPError, Timeout)
import voluptuous as vol
from homeassistant.const import (
CONF_NAME, CONF_PASSWORD,
CONF_USERNAME, CONF_ID)
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.helpers.entity import Entity
REQUIREMENTS = ['srpenergy==1.0.5']
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Powered by SRP Energy"
DEFAULT_NAME = 'SRP Energy'
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1440)  # 1440 s = 24 min between SRP polls
ENERGY_KWH = 'kWh'
ATTR_READING_COST = "reading_cost"
ATTR_READING_TIME = 'datetime'
ATTR_READING_USAGE = 'reading_usage'
ATTR_DAILY_USAGE = 'daily_usage'
ATTR_USAGE_HISTORY = 'usage_history'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_ID): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string
})
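# Hedged example configuration.yaml entry (all values are placeholders):
#
# sensor:
#   - platform: srp_energy
#     username: YOUR_USERNAME
#     password: YOUR_PASSWORD
#     id: YOUR_ACCOUNT_ID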
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the SRP energy."""
name = config[CONF_NAME]
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
account_id = config[CONF_ID]
from srpenergy.client import SrpEnergyClient
srp_client = SrpEnergyClient(account_id, username, password)
if not srp_client.validate():
_LOGGER.error("Couldn't connect to %s. Check credentials", name)
return
add_entities([SrpEnergy(name, srp_client)], True)
class SrpEnergy(Entity):
"""Representation of an srp usage."""
def __init__(self, name, client):
"""Initialize SRP Usage."""
self._state = None
self._name = name
self._client = client
self._history = None
self._usage = None
@property
def attribution(self):
"""Return the attribution."""
return ATTRIBUTION
@property
def state(self):
"""Return the current state."""
if self._state is None:
return None
return "{0:.2f}".format(self._state)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return ENERGY_KWH
@property
def history(self):
"""Return the energy usage history of this entity, if any."""
if self._usage is None:
return None
history = [{
ATTR_READING_TIME: isodate,
ATTR_READING_USAGE: kwh,
ATTR_READING_COST: cost
} for _, _, isodate, kwh, cost in self._usage]
return history
@property
def device_state_attributes(self):
"""Return the state attributes."""
attributes = {
ATTR_USAGE_HISTORY: self.history
}
return attributes
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest usage from SRP Energy."""
start_date = datetime.now() + timedelta(days=-1)
end_date = datetime.now()
try:
usage = self._client.usage(start_date, end_date)
daily_usage = 0.0
for _, _, _, kwh, _ in usage:
daily_usage += float(kwh)
if usage:
self._state = daily_usage
self._usage = usage
else:
_LOGGER.error("Unable to fetch data from SRP. No data")
except (ConnectError, HTTPError, Timeout) as error:
_LOGGER.error("Unable to connect to SRP. %s", error)
except ValueError as error:
_LOGGER.error("Value error connecting to SRP. %s", error)
except TypeError as error:
_LOGGER.error("Type error connecting to SRP. "
"Check username and password. %s", error)
|
akretion/odoo | refs/heads/12-patch-paging-100-in-o2m | addons/account/models/reconciliation_widget.py | 6 | # -*- coding: utf-8 -*-
import copy
from odoo import api, fields, models, _
from odoo.exceptions import UserError
from odoo.osv import expression
from odoo.tools import pycompat
from odoo.tools.misc import formatLang
from odoo.tools import misc
class AccountReconciliation(models.AbstractModel):
_name = 'account.reconciliation.widget'
_description = 'Account Reconciliation widget'
####################################################
# Public
####################################################
@api.model
def process_bank_statement_line(self, st_line_ids, data):
""" Handles data sent from the bank statement reconciliation widget
(and can otherwise serve as an old-API bridge)
            :param st_line_ids: ids of the statement lines to process
            :param list of dicts data: each dict must contain the keys
                'counterpart_aml_dicts', 'payment_aml_ids' and 'new_aml_dicts',
                whose values are the same as described in process_reconciliation
                except that ids are used instead of recordsets.
"""
st_lines = self.env['account.bank.statement.line'].browse(st_line_ids)
AccountMoveLine = self.env['account.move.line']
ctx = dict(self._context, force_price_include=False)
for st_line, datum in pycompat.izip(st_lines, copy.deepcopy(data)):
payment_aml_rec = AccountMoveLine.browse(datum.get('payment_aml_ids', []))
for aml_dict in datum.get('counterpart_aml_dicts', []):
aml_dict['move_line'] = AccountMoveLine.browse(aml_dict['counterpart_aml_id'])
del aml_dict['counterpart_aml_id']
if datum.get('partner_id') is not None:
st_line.write({'partner_id': datum['partner_id']})
st_line.with_context(ctx).process_reconciliation(
datum.get('counterpart_aml_dicts', []),
payment_aml_rec,
datum.get('new_aml_dicts', []))
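    # Hedged illustrative call (ids and amounts are placeholders; the dict keys
    # mirror the docstring above and the browsing done in the loop):
    #
    #   self.env['account.reconciliation.widget'].process_bank_statement_line(
    #       [st_line.id],
    #       [{
    #           'partner_id': partner.id,
    #           'counterpart_aml_dicts': [{
    #               'counterpart_aml_id': aml.id,
    #               'name': aml.name, 'debit': 0.0, 'credit': 100.0,
    #           }],
    #           'payment_aml_ids': [],
    #           'new_aml_dicts': [],
    #       }])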
@api.model
def get_move_lines_for_bank_statement_line(self, st_line_id, partner_id=None, excluded_ids=None, search_str=False, offset=0, limit=None):
""" Returns move lines for the bank statement reconciliation widget,
formatted as a list of dicts
:param st_line_id: ids of the statement lines
:param partner_id: optional partner id to select only the moves
line corresponding to the partner
:param excluded_ids: optional move lines ids excluded from the
result
            :param search_str: optional search (can be the amount, display_name,
partner name, move line name)
:param offset: offset of the search result (to display pager)
:param limit: number of the result to search
"""
st_line = self.env['account.bank.statement.line'].browse(st_line_id)
# Blue lines = payment on bank account not assigned to a statement yet
aml_accounts = [
st_line.journal_id.default_credit_account_id.id,
st_line.journal_id.default_debit_account_id.id
]
if partner_id is None:
partner_id = st_line.partner_id.id
domain = self._domain_move_lines_for_reconciliation(st_line, aml_accounts, partner_id, excluded_ids=excluded_ids, search_str=search_str)
recs_count = self.env['account.move.line'].search_count(domain)
aml_recs = self.env['account.move.line'].search(domain, offset=offset, limit=limit, order="date_maturity desc, id desc")
target_currency = st_line.currency_id or st_line.journal_id.currency_id or st_line.journal_id.company_id.currency_id
return self._prepare_move_lines(aml_recs, target_currency=target_currency, target_date=st_line.date, recs_count=recs_count)
@api.model
def _get_bank_statement_line_partners(self, st_lines):
params = []
# Add the res.partner.ban's IR rules. In case partners are not shared between companies,
# identical bank accounts may exist in a company we don't have access to.
ir_rules_query = self.env['res.partner.bank']._where_calc([])
self.env['res.partner.bank']._apply_ir_rules(ir_rules_query, 'read')
from_clause, where_clause, where_clause_params = ir_rules_query.get_sql()
if where_clause:
where_bank = ('AND %s' % where_clause).replace('res_partner_bank', 'bank')
params += where_clause_params
else:
where_bank = ''
# Add the res.partner's IR rules. In case partners are not shared between companies,
# identical partners may exist in a company we don't have access to.
ir_rules_query = self.env['res.partner']._where_calc([])
self.env['res.partner']._apply_ir_rules(ir_rules_query, 'read')
from_clause, where_clause, where_clause_params = ir_rules_query.get_sql()
if where_clause:
where_partner = ('AND %s' % where_clause).replace('res_partner', 'p3')
params += where_clause_params
else:
where_partner = ''
query = '''
SELECT
st_line.id AS id,
COALESCE(p1.id,p2.id,p3.id) AS partner_id
FROM account_bank_statement_line st_line
'''
query += 'LEFT JOIN res_partner_bank bank ON bank.id = st_line.bank_account_id OR bank.acc_number = st_line.account_number %s\n' % (where_bank)
query += 'LEFT JOIN res_partner p1 ON st_line.partner_id=p1.id \n'
query += 'LEFT JOIN res_partner p2 ON bank.partner_id=p2.id \n'
# By definition the commercial partner_id doesn't have a parent_id set
query += 'LEFT JOIN res_partner p3 ON p3.name ILIKE st_line.partner_name %s AND p3.parent_id is NULL \n' % (where_partner)
query += 'WHERE st_line.id IN %s'
params += [tuple(st_lines.ids)]
self._cr.execute(query, params)
result = {}
for res in self._cr.dictfetchall():
result[res['id']] = res['partner_id']
return result
@api.model
def get_bank_statement_line_data(self, st_line_ids, excluded_ids=None):
""" Returns the data required to display a reconciliation widget, for
each statement line in self
            :param st_line_ids: ids of the statement lines
:param excluded_ids: optional move lines ids excluded from the
result
"""
excluded_ids = excluded_ids or []
# Make a search to preserve the table's order.
bank_statement_lines = self.env['account.bank.statement.line'].search([('id', 'in', st_line_ids)])
reconcile_model = self.env['account.reconcile.model'].search([('rule_type', '!=', 'writeoff_button')])
# Search for missing partners when opening the reconciliation widget.
if bank_statement_lines:
partner_map = self._get_bank_statement_line_partners(bank_statement_lines)
matching_amls = reconcile_model._apply_rules(bank_statement_lines, excluded_ids=excluded_ids, partner_map=partner_map)
results = {
'lines': [],
'value_min': 0,
'value_max': len(bank_statement_lines),
'reconciled_aml_ids': [],
}
# Iterate on st_lines to keep the same order in the results list.
bank_statements_left = self.env['account.bank.statement']
for line in bank_statement_lines:
if matching_amls[line.id].get('status') == 'reconciled':
reconciled_move_lines = matching_amls[line.id].get('reconciled_lines')
results['value_min'] += 1
results['reconciled_aml_ids'] += reconciled_move_lines and reconciled_move_lines.ids or []
else:
aml_ids = matching_amls[line.id]['aml_ids']
bank_statements_left += line.statement_id
target_currency = line.currency_id or line.journal_id.currency_id or line.journal_id.company_id.currency_id
amls = aml_ids and self.env['account.move.line'].browse(aml_ids)
line_vals = {
'st_line': self._get_statement_line(line),
'reconciliation_proposition': aml_ids and self._prepare_move_lines(amls, target_currency=target_currency, target_date=line.date) or [],
'model_id': matching_amls[line.id].get('model') and matching_amls[line.id]['model'].id,
'write_off': matching_amls[line.id].get('status') == 'write_off',
}
if not line.partner_id and partner_map.get(line.id):
partner = self.env['res.partner'].browse(partner_map[line.id])
line_vals.update({
'partner_id': partner.id,
'partner_name': partner.name,
})
results['lines'].append(line_vals)
return results
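    # Hedged sketch of the structure returned above (values are placeholders):
    #
    #   {'lines': [{'st_line': {...}, 'reconciliation_proposition': [...],
    #               'model_id': 42, 'write_off': False,
    #               'partner_id': 7, 'partner_name': 'Azure'},  # partner keys only when inferred
    #              ...],
    #    'value_min': <nb of lines auto-reconciled>,
    #    'value_max': <total nb of lines>,
    #    'reconciled_aml_ids': [...]}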
@api.model
def get_bank_statement_data(self, bank_statement_ids):
""" Get statement lines of the specified statements or all unreconciled
statement lines and try to automatically reconcile them / find them
a partner.
Return ids of statement lines left to reconcile and other data for
the reconciliation widget.
            :param bank_statement_ids: ids of the bank statements
"""
bank_statements = self.env['account.bank.statement'].browse(bank_statement_ids)
query = '''
SELECT line.id
FROM account_bank_statement_line line
WHERE account_id IS NULL
AND line.amount != 0.0
AND line.statement_id IN %s
AND NOT EXISTS (SELECT 1 from account_move_line aml WHERE aml.statement_line_id = line.id)
'''
self.env.cr.execute(query, [tuple(bank_statements.ids)])
bank_statement_lines = self.env['account.bank.statement.line'].browse([line.get('id') for line in self.env.cr.dictfetchall()])
results = self.get_bank_statement_line_data(bank_statement_lines.ids)
bank_statement_lines_left = self.env['account.bank.statement.line'].browse([line['st_line']['id'] for line in results['lines']])
bank_statements_left = bank_statement_lines_left.mapped('statement_id')
results.update({
'statement_name': len(bank_statements_left) == 1 and bank_statements_left.name or False,
'journal_id': bank_statements and bank_statements[0].journal_id.id or False,
'notifications': []
})
if len(results['lines']) < len(bank_statement_lines):
results['notifications'].append({
'type': 'info',
'template': 'reconciliation.notification.reconciled',
'reconciled_aml_ids': results['reconciled_aml_ids'],
'nb_reconciled_lines': results['value_min'],
'details': {
'name': _('Journal Items'),
'model': 'account.move.line',
'ids': results['reconciled_aml_ids'],
}
})
return results
@api.model
def get_move_lines_for_manual_reconciliation(self, account_id, partner_id=False, excluded_ids=None, search_str=False, offset=0, limit=None, target_currency_id=False):
""" Returns unreconciled move lines for an account or a partner+account, formatted for the manual reconciliation widget """
Account_move_line = self.env['account.move.line']
Account = self.env['account.account']
Currency = self.env['res.currency']
domain = self._domain_move_lines_for_manual_reconciliation(account_id, partner_id, excluded_ids, search_str)
recs_count = Account_move_line.search_count(domain)
lines = Account_move_line.search(domain, offset=offset, limit=limit, order="date_maturity desc, id desc")
if target_currency_id:
target_currency = Currency.browse(target_currency_id)
else:
account = Account.browse(account_id)
target_currency = account.currency_id or account.company_id.currency_id
        return self._prepare_move_lines(lines, target_currency=target_currency, recs_count=recs_count)
@api.model
def get_all_data_for_manual_reconciliation(self, partner_ids, account_ids):
""" Returns the data required for the invoices & payments matching of partners/accounts.
If an argument is None, fetch all related reconciliations. Use [] to fetch nothing.
"""
MoveLine = self.env['account.move.line']
aml_ids = self._context.get('active_ids') and self._context.get('active_model') == 'account.move.line' and tuple(self._context.get('active_ids'))
if aml_ids:
aml = MoveLine.browse(aml_ids)
aml._check_reconcile_validity()
account = aml[0].account_id
currency = account.currency_id or account.company_id.currency_id
return {
'accounts': [{
'reconciliation_proposition': self._prepare_move_lines(aml, target_currency=currency),
'company_id': account.company_id.id,
'currency_id': currency.id,
'mode': 'accounts',
'account_id': account.id,
'account_name': account.name,
'account_code': account.code,
}],
'customers': [],
'suppliers': [],
}
return {
'customers': self.get_data_for_manual_reconciliation('partner', partner_ids, 'receivable'),
'suppliers': self.get_data_for_manual_reconciliation('partner', partner_ids, 'payable'),
'accounts': self.get_data_for_manual_reconciliation('account', account_ids),
}
@api.model
def get_data_for_manual_reconciliation(self, res_type, res_ids=None, account_type=None):
""" Returns the data required for the invoices & payments matching of partners/accounts (list of dicts).
If no res_ids is passed, returns data for all partners/accounts that can be reconciled.
:param res_type: either 'partner' or 'account'
        :param res_ids: ids of the partners/accounts to reconcile; use None to fetch
            data regardless of id, use [] to prevent fetching any data at all.
:param account_type: if a partner is both customer and vendor, you can use 'payable' to reconcile
the vendor-related journal entries and 'receivable' for the customer-related entries.
"""
Account = self.env['account.account']
Partner = self.env['res.partner']
if res_ids is not None and len(res_ids) == 0:
        # Note: this short-circuit is better for performance, but also required
        # since PostgreSQL doesn't accept an empty IN list (so 'AND id in ()' is invalid)
return []
res_ids = res_ids and tuple(res_ids)
assert res_type in ('partner', 'account')
assert account_type in ('payable', 'receivable', None)
is_partner = res_type == 'partner'
res_alias = is_partner and 'p' or 'a'
aml_ids = self._context.get('active_ids') and self._context.get('active_model') == 'account.move.line' and tuple(self._context.get('active_ids'))
query = ("""
SELECT {0} account_id, account_name, account_code, max_date,
to_char(last_time_entries_checked, 'YYYY-MM-DD') AS last_time_entries_checked
FROM (
SELECT {1}
{res_alias}.last_time_entries_checked AS last_time_entries_checked,
a.id AS account_id,
a.name AS account_name,
a.code AS account_code,
MAX(l.write_date) AS max_date
FROM
account_move_line l
RIGHT JOIN account_account a ON (a.id = l.account_id)
RIGHT JOIN account_account_type at ON (at.id = a.user_type_id)
{2}
WHERE
a.reconcile IS TRUE
AND l.full_reconcile_id is NULL
{3}
{4}
{5}
AND l.company_id = {6}
AND EXISTS (
SELECT NULL
FROM account_move_line l
JOIN account_move move ON move.id = l.move_id
JOIN account_journal journal ON journal.id = move.journal_id
WHERE l.account_id = a.id
{7}
AND (move.state = 'posted' OR (journal.post_at_bank_rec AND move.state = 'draft'))
AND l.amount_residual > 0
)
AND EXISTS (
SELECT NULL
FROM account_move_line l
JOIN account_move move ON move.id = l.move_id
JOIN account_journal journal ON journal.id = move.journal_id
WHERE l.account_id = a.id
{7}
AND (move.state = 'posted' OR (journal.post_at_bank_rec AND move.state = 'draft'))
AND l.amount_residual < 0
)
{8}
GROUP BY {9} a.id, a.name, a.code, {res_alias}.last_time_entries_checked
ORDER BY {res_alias}.last_time_entries_checked
) as s
WHERE (last_time_entries_checked IS NULL OR max_date > last_time_entries_checked)
""".format(
is_partner and 'partner_id, partner_name,' or ' ',
is_partner and 'p.id AS partner_id, p.name AS partner_name,' or ' ',
is_partner and 'RIGHT JOIN res_partner p ON (l.partner_id = p.id)' or ' ',
is_partner and ' ' or "AND at.type <> 'payable' AND at.type <> 'receivable'",
account_type and "AND at.type = %(account_type)s" or '',
res_ids and 'AND ' + res_alias + '.id in %(res_ids)s' or '',
self.env.user.company_id.id,
is_partner and 'AND l.partner_id = p.id' or ' ',
aml_ids and 'AND l.id IN %(aml_ids)s' or '',
is_partner and 'l.partner_id, p.id,' or ' ',
res_alias=res_alias
))
self.env.cr.execute(query, locals())
        # Apply ir_rules by filtering out rows the current user is not allowed to read
rows = self.env.cr.dictfetchall()
ids = [x['account_id'] for x in rows]
allowed_ids = set(Account.browse(ids).ids)
rows = [row for row in rows if row['account_id'] in allowed_ids]
if is_partner:
ids = [x['partner_id'] for x in rows]
allowed_ids = set(Partner.browse(ids).ids)
rows = [row for row in rows if row['partner_id'] in allowed_ids]
# Keep mode for future use in JS
if res_type == 'account':
mode = 'accounts'
else:
mode = 'customers' if account_type == 'receivable' else 'suppliers'
# Fetch other data
for row in rows:
account = Account.browse(row['account_id'])
currency = account.currency_id or account.company_id.currency_id
row['currency_id'] = currency.id
partner_id = is_partner and row['partner_id'] or None
rec_prop = aml_ids and self.env['account.move.line'].browse(aml_ids) or self._get_move_line_reconciliation_proposition(account.id, partner_id)
row['reconciliation_proposition'] = self._prepare_move_lines(rec_prop, target_currency=currency)
row['mode'] = mode
row['company_id'] = account.company_id.id
# Return the partners with a reconciliation proposition first, since they are most likely to
# be reconciled.
return [r for r in rows if r['reconciliation_proposition']] + [r for r in rows if not r['reconciliation_proposition']]
@api.model
def process_move_lines(self, data):
""" Used to validate a batch of reconciliations in a single call
:param data: list of dicts containing:
- 'type': either 'partner' or 'account'
- 'id': id of the affected res.partner or account.account
- 'mv_line_ids': ids of existing account.move.line to reconcile
- 'new_mv_line_dicts': list of dicts containing values suitable for account_move_line.create()
"""
Partner = self.env['res.partner']
Account = self.env['account.account']
for datum in data:
if len(datum['mv_line_ids']) >= 1 or len(datum['mv_line_ids']) + len(datum['new_mv_line_dicts']) >= 2:
self._process_move_lines(datum['mv_line_ids'], datum['new_mv_line_dicts'])
if datum['type'] == 'partner':
partners = Partner.browse(datum['id'])
partners.mark_as_reconciled()
if datum['type'] == 'account':
accounts = Account.browse(datum['id'])
accounts.mark_as_reconciled()
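    # Hedged example payload (ids are placeholders): reconcile two existing
    # journal items for a partner without creating a write-off line.
    #
    #   self.env['account.reconciliation.widget'].process_move_lines([{
    #       'type': 'partner',
    #       'id': partner.id,
    #       'mv_line_ids': [debit_aml.id, credit_aml.id],
    #       'new_mv_line_dicts': [],
    #   }])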
####################################################
# Private
####################################################
@api.model
def _domain_move_lines(self, search_str):
""" Returns the domain from the search_str search
:param search_str: search string
"""
if not search_str:
return []
str_domain = [
'|', ('move_id.name', 'ilike', search_str),
'|', ('move_id.ref', 'ilike', search_str),
'|', ('date_maturity', 'like', search_str),
'&', ('name', '!=', '/'), ('name', 'ilike', search_str)
]
if search_str[0] in ['-', '+']:
try:
amounts_str = search_str.split('|')
for amount_str in amounts_str:
amount = amount_str[0] == '-' and float(amount_str) or float(amount_str[1:])
amount_domain = [
'|', ('amount_residual', '=', amount),
'|', ('amount_residual_currency', '=', amount),
'|', (amount_str[0] == '-' and 'credit' or 'debit', '=', float(amount_str[1:])),
('amount_currency', '=', amount),
]
str_domain = expression.OR([str_domain, amount_domain])
            except (IndexError, ValueError):
                # search_str is not a parsable signed amount; keep the textual domain.
                pass
else:
try:
amount = float(search_str)
amount_domain = [
'|', ('amount_residual', '=', amount),
'|', ('amount_residual_currency', '=', amount),
'|', ('amount_residual', '=', -amount),
'|', ('amount_residual_currency', '=', -amount),
'&', ('account_id.internal_type', '=', 'liquidity'),
'|', '|', '|', ('debit', '=', amount), ('credit', '=', amount), ('amount_currency', '=', amount), ('amount_currency', '=', -amount),
]
str_domain = expression.OR([str_domain, amount_domain])
            except ValueError:
                # search_str is not a number; keep the textual domain only.
                pass
return str_domain
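    # Hedged illustration: a search string such as '-150.0' extends the textual
    # domain (move name/ref, maturity date, line name) with an OR-ed amount
    # domain matching amount_residual == -150.0, amount_residual_currency ==
    # -150.0, credit == 150.0 or amount_currency == -150.0.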
@api.model
def _domain_move_lines_for_reconciliation(self, st_line, aml_accounts, partner_id, excluded_ids=None, search_str=False):
""" Return the domain for account.move.line records which can be used for bank statement reconciliation.
            :param aml_accounts: ids of the journal's default debit/credit liquidity accounts
            :param partner_id: optional partner id restricting the candidate move lines
            :param excluded_ids: optional move line ids excluded from the result
            :param search_str: optional search string (amount, move name/ref, partner name)
"""
domain_reconciliation = [
'&', '&', '&',
('statement_line_id', '=', False),
('account_id', 'in', aml_accounts),
('payment_id', '<>', False),
('balance', '!=', 0.0),
]
# default domain matching
domain_matching = [
'&', '&',
('reconciled', '=', False),
('account_id.reconcile', '=', True),
('balance', '!=', 0.0),
]
domain = expression.OR([domain_reconciliation, domain_matching])
if partner_id:
domain = expression.AND([domain, [('partner_id', '=', partner_id)]])
# Domain factorized for all reconciliation use cases
if search_str:
str_domain = self._domain_move_lines(search_str=search_str)
str_domain = expression.OR([
str_domain,
[('partner_id.name', 'ilike', search_str)]
])
domain = expression.AND([
domain,
str_domain
])
if excluded_ids:
domain = expression.AND([
[('id', 'not in', excluded_ids)],
domain
])
# filter on account.move.line having the same company as the statement line
domain = expression.AND([domain, [('company_id', '=', st_line.company_id.id)]])
if st_line.company_id.account_bank_reconciliation_start:
domain = expression.AND([domain, [('date', '>=', st_line.company_id.account_bank_reconciliation_start)]])
return domain
@api.model
def _domain_move_lines_for_manual_reconciliation(self, account_id, partner_id=False, excluded_ids=None, search_str=False):
""" Create domain criteria that are relevant to manual reconciliation. """
domain = [
'&',
'&',
('reconciled', '=', False),
('account_id', '=', account_id),
'|',
('move_id.state', '=', 'posted'),
'&',
('move_id.state', '=', 'draft'),
('move_id.journal_id.post_at_bank_rec', '=', True),
]
domain = expression.AND([domain, [('balance', '!=', 0.0)]])
if partner_id:
domain = expression.AND([domain, [('partner_id', '=', partner_id)]])
if excluded_ids:
domain = expression.AND([[('id', 'not in', excluded_ids)], domain])
if search_str:
str_domain = self._domain_move_lines(search_str=search_str)
str_domain = expression.OR([
str_domain,
[('partner_id.name', 'ilike', search_str)]
])
domain = expression.AND([domain, str_domain])
# filter on account.move.line having the same company as the given account
account = self.env['account.account'].browse(account_id)
domain = expression.AND([domain, [('company_id', '=', account.company_id.id)]])
return domain
@api.model
def _prepare_move_lines(self, move_lines, target_currency=False, target_date=False, recs_count=0):
""" Returns move lines formatted for the manual/bank reconciliation widget
:param move_line_ids:
:param target_currency: currency (browse) you want the move line debit/credit converted into
:param target_date: date to use for the monetary conversion
"""
context = dict(self._context or {})
ret = []
for line in move_lines:
company_currency = line.company_id.currency_id
line_currency = (line.currency_id and line.amount_currency) and line.currency_id or company_currency
date_maturity = misc.format_date(self.env, line.date_maturity, lang_code=self.env.user.lang)
ret_line = {
'id': line.id,
'name': line.name and line.name != '/' and line.move_id.name + ': ' + line.name or line.move_id.name,
'ref': line.move_id.ref or '',
# For reconciliation between statement transactions and already registered payments (eg. checks)
# NB : we don't use the 'reconciled' field because the line we're selecting is not the one that gets reconciled
'account_id': [line.account_id.id, line.account_id.display_name],
'already_paid': line.account_id.internal_type == 'liquidity',
'account_code': line.account_id.code,
'account_name': line.account_id.name,
'account_type': line.account_id.internal_type,
'date_maturity': date_maturity,
'date': line.date,
'journal_id': [line.journal_id.id, line.journal_id.display_name],
'partner_id': line.partner_id.id,
'partner_name': line.partner_id.name,
'currency_id': line_currency.id,
}
debit = line.debit
credit = line.credit
amount = line.amount_residual
amount_currency = line.amount_residual_currency
# For already reconciled lines, don't use amount_residual(_currency)
if line.account_id.internal_type == 'liquidity':
amount = debit - credit
amount_currency = line.amount_currency
target_currency = target_currency or company_currency
# Use case:
# Let's assume that company currency is in USD and that we have the 3 following move lines
# Debit Credit Amount currency Currency
# 1) 25 0 0 NULL
# 2) 17 0 25 EUR
# 3) 33 0 25 YEN
#
# If we ask to see the information in the reconciliation widget in company currency, we want to see
# The following information
# 1) 25 USD (no currency information)
# 2) 17 USD [25 EUR] (show 25 euro in currency information, in the little bill)
# 3) 33 USD [25 YEN] (show 25 yen in currency information)
#
# If we ask to see the information in another currency than the company let's say EUR
# 1) 35 EUR [25 USD]
# 2) 25 EUR (no currency information)
# 3) 50 EUR [25 YEN]
# In that case, we have to convert the debit-credit to the currency we want and we show next to it
# the value of the amount_currency or the debit-credit if no amount currency
            if target_currency == company_currency:
                if line_currency == target_currency:
                    amount_currency = ""
                    total_amount = debit - credit
                    total_amount_currency = ""
                else:
                    total_amount = debit - credit
                    total_amount_currency = line.amount_currency
            else:
if line_currency == target_currency:
amount = amount_currency
amount_currency = ""
total_amount = line.amount_currency
total_amount_currency = ""
else:
amount_currency = line.currency_id and amount_currency or amount
company = line.account_id.company_id
date = target_date or line.date
amount = company_currency._convert(amount, target_currency, company, date)
total_amount = company_currency._convert((line.debit - line.credit), target_currency, company, date)
total_amount_currency = line.currency_id and line.amount_currency or (line.debit - line.credit)
ret_line['recs_count'] = recs_count
ret_line['debit'] = amount > 0 and amount or 0
ret_line['credit'] = amount < 0 and -amount or 0
ret_line['amount_currency'] = amount_currency
ret_line['amount_str'] = formatLang(self.env, abs(amount), currency_obj=target_currency)
ret_line['total_amount_str'] = formatLang(self.env, abs(total_amount), currency_obj=target_currency)
ret_line['amount_currency_str'] = amount_currency and formatLang(self.env, abs(amount_currency), currency_obj=line_currency) or ""
ret_line['total_amount_currency_str'] = total_amount_currency and formatLang(self.env, abs(total_amount_currency), currency_obj=line_currency) or ""
ret.append(ret_line)
return ret
@api.model
def _get_statement_line(self, st_line):
""" Returns the data required by the bank statement reconciliation widget to display a statement line """
statement_currency = st_line.journal_id.currency_id or st_line.journal_id.company_id.currency_id
if st_line.amount_currency and st_line.currency_id:
amount = st_line.amount_currency
amount_currency = st_line.amount
amount_currency_str = formatLang(self.env, abs(amount_currency), currency_obj=statement_currency)
else:
amount = st_line.amount
amount_currency = amount
amount_currency_str = ""
amount_str = formatLang(self.env, abs(amount), currency_obj=st_line.currency_id or statement_currency)
date = misc.format_date(self.env, st_line.date, lang_code=self.env.user.lang)
data = {
'id': st_line.id,
'ref': st_line.ref,
'note': st_line.note or "",
'name': st_line.name,
'date': date,
'amount': amount,
'amount_str': amount_str, # Amount in the statement line currency
'currency_id': st_line.currency_id.id or statement_currency.id,
'partner_id': st_line.partner_id.id,
'journal_id': st_line.journal_id.id,
'statement_id': st_line.statement_id.id,
'account_id': [st_line.journal_id.default_debit_account_id.id, st_line.journal_id.default_debit_account_id.display_name],
'account_code': st_line.journal_id.default_debit_account_id.code,
'account_name': st_line.journal_id.default_debit_account_id.name,
'partner_name': st_line.partner_id.name,
'communication_partner_name': st_line.partner_name,
'amount_currency_str': amount_currency_str, # Amount in the statement currency
'amount_currency': amount_currency, # Amount in the statement currency
'has_no_partner': not st_line.partner_id.id,
'company_id': st_line.company_id.id,
}
if st_line.partner_id:
if amount > 0:
data['open_balance_account_id'] = st_line.partner_id.property_account_receivable_id.id
else:
data['open_balance_account_id'] = st_line.partner_id.property_account_payable_id.id
return data
@api.model
def _get_move_line_reconciliation_proposition(self, account_id, partner_id=None):
""" Returns two lines whose amount are opposite """
Account_move_line = self.env['account.move.line']
ir_rules_query = Account_move_line._where_calc([])
Account_move_line._apply_ir_rules(ir_rules_query, 'read')
from_clause, where_clause, where_clause_params = ir_rules_query.get_sql()
where_str = where_clause and (" WHERE %s" % where_clause) or ''
# Get pairs
query = """
SELECT a.id, b.id
FROM account_move_line a, account_move_line b,
account_move move_a, account_move move_b,
account_journal journal_a, account_journal journal_b
WHERE a.id != b.id
AND move_a.id = a.move_id
AND (move_a.state = 'posted' OR (move_a.state = 'draft' AND journal_a.post_at_bank_rec))
AND move_a.journal_id = journal_a.id
AND move_b.id = b.move_id
AND move_b.journal_id = journal_b.id
AND (move_b.state = 'posted' OR (move_b.state = 'draft' AND journal_b.post_at_bank_rec))
AND a.amount_residual = -b.amount_residual
AND a.balance != 0.0
AND b.balance != 0.0
AND NOT a.reconciled
AND a.account_id = %s
AND (%s IS NULL AND b.account_id = %s)
AND (%s IS NULL AND NOT b.reconciled OR b.id = %s)
AND (%s is NULL OR (a.partner_id = %s AND b.partner_id = %s))
AND a.id IN (SELECT "account_move_line".id FROM {0})
AND b.id IN (SELECT "account_move_line".id FROM {0})
ORDER BY a.date desc
LIMIT 1
""".format(from_clause + where_str)
move_line_id = self.env.context.get('move_line_id') or None
params = [
account_id,
move_line_id, account_id,
move_line_id, move_line_id,
partner_id, partner_id, partner_id,
] + where_clause_params + where_clause_params
self.env.cr.execute(query, params)
pairs = self.env.cr.fetchall()
if pairs:
return Account_move_line.browse(pairs[0])
return Account_move_line
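    # Reading of the query above (comment added for clarity): it returns at
    # most one pair (a, b) of move lines on the requested account whose
    # residual amounts cancel exactly, keeping only lines from posted moves
    # (or drafts on post-at-bank-reconciliation journals) and restricting both
    # sides through the caller's ir.rules via the {0} subqueries.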
@api.model
def _process_move_lines(self, move_line_ids, new_mv_line_dicts):
""" Create new move lines from new_mv_line_dicts (if not empty) then call reconcile_partial on self and new move lines
:param new_mv_line_dicts: list of dicts containing values suitable for account_move_line.create()
"""
if len(move_line_ids) < 1 or len(move_line_ids) + len(new_mv_line_dicts) < 2:
raise UserError(_('A reconciliation must involve at least 2 move lines.'))
account_move_line = self.env['account.move.line'].browse(move_line_ids)
writeoff_lines = self.env['account.move.line']
# Create writeoff move lines
if len(new_mv_line_dicts) > 0:
company_currency = account_move_line[0].account_id.company_id.currency_id
same_currency = False
currencies = list(set([aml.currency_id or company_currency for aml in account_move_line]))
if len(currencies) == 1 and currencies[0] != company_currency:
same_currency = True
# We don't have to convert debit/credit to currency as all values in the reconciliation widget are displayed in company currency
# If all the lines are in the same currency, create writeoff entry with same currency also
for mv_line_dict in new_mv_line_dicts:
if not same_currency:
mv_line_dict['amount_currency'] = False
writeoff_lines += account_move_line._create_writeoff([mv_line_dict])
(account_move_line + writeoff_lines).reconcile()
else:
account_move_line.reconcile()
|
phy0/namebench | refs/heads/master | nb_third_party/dns/ttl.py | 248 | # Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS TTL conversion."""
import dns.exception
class BadTTL(dns.exception.SyntaxError):
pass
def from_text(text):
"""Convert the text form of a TTL to an integer.
The BIND 8 units syntax for TTLs (e.g. '1w6d4h3m10s') is supported.
@param text: the textual TTL
@type text: string
@raises dns.ttl.BadTTL: the TTL is not well-formed
@rtype: int
"""
if text.isdigit():
total = long(text)
else:
if not text[0].isdigit():
raise BadTTL
total = 0L
current = 0L
for c in text:
if c.isdigit():
current *= 10
current += long(c)
else:
c = c.lower()
if c == 'w':
total += current * 604800L
elif c == 'd':
total += current * 86400L
elif c == 'h':
total += current * 3600L
elif c == 'm':
total += current * 60L
elif c == 's':
total += current
else:
raise BadTTL("unknown unit '%s'" % c)
current = 0
if not current == 0:
raise BadTTL("trailing integer")
if total < 0L or total > 2147483647L:
raise BadTTL("TTL should be between 0 and 2^31 - 1 (inclusive)")
return total
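# Worked example (not part of the original module): the BIND units form
# accumulates each unit into seconds, so
#
#     from_text('1w6d4h3m10s')
#
# returns 604800 + 6*86400 + 4*3600 + 3*60 + 10 = 1137790, while a plain
# digit string such as '3600' is converted directly.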
|
2ndQuadrant/ansible | refs/heads/master | test/units/modules/network/f5/test_bigip_profile_http2.py | 16 | # -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_profile_http2 import ApiParameters
from library.modules.bigip_profile_http2 import ModuleParameters
from library.modules.bigip_profile_http2 import ModuleManager
from library.modules.bigip_profile_http2 import ArgumentSpec
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
from ansible.modules.network.f5.bigip_profile_http2 import ApiParameters
from ansible.modules.network.f5.bigip_profile_http2 import ModuleParameters
from ansible.modules.network.f5.bigip_profile_http2 import ModuleManager
from ansible.modules.network.f5.bigip_profile_http2 import ArgumentSpec
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
name='foo',
parent='bar',
description='This is a Test',
streams=20,
enforce_tls_requirements=True,
frame_size=1024,
activation_modes=['always'],
insert_header=True,
insert_header_name='FOO'
)
p = ModuleParameters(params=args)
assert p.name == 'foo'
assert p.parent == '/Common/bar'
assert p.description == 'This is a Test'
assert p.streams == 20
assert p.enforce_tls_requirements == 'enabled'
assert p.frame_size == 1024
assert p.activation_modes == ['always']
assert p.insert_header == 'enabled'
assert p.insert_header_name == 'FOO'
def test_api_parameters(self):
args = load_fixture('load_ltm_http2_profile.json')
p = ApiParameters(params=args)
assert p.name == 'test'
assert p.streams == 10
assert p.enforce_tls_requirements == 'enabled'
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
def test_create(self, *args):
# Configure the arguments that would be sent to the Ansible module
set_module_args(dict(
name='foo',
enforce_tls_requirements='yes',
parent='bar',
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.exists = Mock(return_value=False)
mm.create_on_device = Mock(return_value=True)
results = mm.exec_module()
assert results['changed'] is True
assert results['enforce_tls_requirements'] == 'yes'
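# A typical invocation of this test module (assuming pytest is available):
#
#     pytest test/units/modules/network/f5/test_bigip_profile_http2.py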
|
denys-duchier/sorl-thumbnail-py3 | refs/heads/master | sorl/thumbnail/fields.py | 1 |
from django.db import models
from django.db.models import Q
from django import forms
from django.utils.translation import ugettext_lazy as _
from sorl.thumbnail import default
__all__ = ('ImageField', 'ImageFormField')
class ImageField(models.FileField):
def delete_file(self, instance, sender, **kwargs):
"""
        Adds deletion of thumbnails and key value store references to the
parent class implementation. Only called in Django < 1.2.5
"""
file_ = getattr(instance, self.attname)
# If no other object of this type references the file, and it's not the
# default value for future objects, delete it from the backend.
query = Q(**{self.name: file_.name}) & ~Q(pk=instance.pk)
qs = sender._default_manager.filter(query)
if (file_ and file_.name != self.default and not qs):
default.backend.delete(file_)
elif file_:
# Otherwise, just close the file, so it doesn't tie up resources.
file_.close()
def formfield(self, **kwargs):
defaults = {'form_class': ImageFormField}
defaults.update(kwargs)
return super(ImageField, self).formfield(**defaults)
def save_form_data(self, instance, data):
if data is not None:
setattr(instance, self.name, data or '')
def south_field_triple(self):
from south.modelsinspector import introspector
        cls_name = '%s.%s' % (self.__class__.__module__, self.__class__.__name__)
args, kwargs = introspector(self)
return (cls_name, args, kwargs)
class ImageFormField(forms.FileField):
default_error_messages = {
'invalid_image': _("Upload a valid image. The file you uploaded was "
"either not an image or a corrupted image."),
}
def to_python(self, data):
"""
Checks that the file-upload field data contains a valid image (GIF,
JPG, PNG, possibly others -- whatever the engine supports).
"""
f = super(ImageFormField, self).to_python(data)
if f is None:
return None
# We need to get a file raw data to validate it.
if hasattr(data, 'temporary_file_path'):
with open(data.temporary_file_path(), 'rb') as fp:
raw_data = fp.read()
elif hasattr(data, 'read'):
raw_data = data.read()
else:
raw_data = data['content']
if not default.engine.is_valid_image(raw_data):
raise forms.ValidationError(self.error_messages['invalid_image'])
        if hasattr(f, 'seek') and callable(f.seek):
f.seek(0)
return f
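# Minimal usage sketch (hypothetical model, not part of this file):
#
#     from django.db import models
#     from sorl.thumbnail.fields import ImageField
#
#     class Profile(models.Model):
#         avatar = ImageField(upload_to='avatars')
#
# The field behaves like Django's FileField but validates uploads through the
# configured thumbnail engine in ImageFormField.to_python().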
|
jamezpolley/pip | refs/heads/develop | tests/data/packages/LocalExtras/setup.py | 46 | import os
from setuptools import setup, find_packages
def path_to_url(path):
"""
    Convert a path to a URI. The path will be made absolute and
will not have quoted path parts.
"""
path = os.path.normpath(os.path.abspath(path))
drive, path = os.path.splitdrive(path)
filepath = path.split(os.path.sep)
url = '/'.join(filepath)
if drive:
return 'file:///' + drive + url
    return 'file://' + url
HERE = os.path.dirname(__file__)
DEP_PATH = os.path.join(HERE, '..', '..', 'indexes', 'simple', 'simple')
DEP_URL = path_to_url(DEP_PATH)
setup(
name='LocalExtras',
version='0.0.1',
packages=find_packages(),
extras_require={ 'bar': ['simple'] },
dependency_links=[DEP_URL]
)
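# For illustration (not part of the original file): path_to_url('/tmp/x')
# returns 'file:///tmp/x' on POSIX systems, and r'C:\tmp\x' becomes
# 'file:///C:/tmp/x' on Windows.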
|
AngelkPetkov/titanium_mobile | refs/heads/master | support/common/css/serialize.py | 75 | # -*- coding: utf-8 -*-
'''
A serializer for CSS.
'''
import css
# This module comprises all serialization code for the
# syntax object of CSS, kept here so that the serialization
# strategy for the whole system can be modified easily
# without the need to touch a dozen classes.
#
# Adding a
# new type of data requires another conditional in
# serialize(), and possibly a new serialize_<type>()
# method. (The data types of CSS are finite and the number
# relatively small, so this should be a rare occasion.)
#
# Each serializer method takes a `printer` argument,
# which should be a function that returns a serialized
# value for objects of builtin types.
def serialize(obj, printer=str):
if isinstance(obj, css.Hexcolor):
return serialize_Hexcolor(obj, printer)
elif isinstance(obj, css.Function):
return serialize_Function(obj, printer)
elif isinstance(obj, css.Uri):
return serialize_Uri(obj, printer)
elif isinstance(obj, css.String):
return serialize_String(obj, printer)
elif isinstance(obj, css.Ident):
return serialize_Ident(obj, printer)
elif isinstance(obj, css.Term):
return serialize_Term(obj, printer)
elif isinstance(obj, css.Declaration):
return serialize_Declaration(obj, printer)
elif isinstance(obj, css.Ruleset):
return serialize_Ruleset(obj, printer)
elif isinstance(obj, css.Charset):
return serialize_Charset(obj, printer)
elif isinstance(obj, css.Page):
return serialize_Page(obj, printer)
elif isinstance(obj, css.Media):
return serialize_Media(obj, printer)
elif isinstance(obj, css.Import):
return serialize_Import(obj, printer)
elif isinstance(obj, css.Stylesheet):
return serialize_Stylesheet(obj, printer)
else:
return printer(obj)
def serialize_Hexcolor(obj, printer):
return printer('#') + printer(obj.value)
def serialize_Function(obj, printer):
return printer(obj.name) + printer('(') + printer(obj.parameters) + printer(')')
def serialize_Uri(obj, printer):
return printer('url(') + printer(obj.url) + printer(')')
def serialize_String(obj, printer):
s = printer(obj.value.replace(u'"', u'\\"'))
return printer('"') + s + printer('"')
def serialize_Ident(obj, printer):
return printer(obj.name)
def serialize_Term(obj, printer):
s = printer(obj.value)
if obj.unary_operator:
s = printer(obj.unary_operator) + s
return s
def serialize_Declaration(obj, printer):
s = serialize_Ident(obj.property, printer)
s += printer(':') + printer(obj.value)
if obj.important:
s += printer(' !important')
return s
def serialize_Ruleset(obj, printer):
s = serialize_Selector_group(obj.selectors, printer)
s += serialize_Declaration_block(obj.declarations, printer)
return s
def serialize_Charset(obj, printer):
return printer('@charset ') + printer(obj.encoding) + printer(';')
def serialize_Page(obj, printer):
s = printer('@page')
if obj.pseudo_page:
s += serialize_Pseudo(obj.pseudo_page, printer)
s += serialize_Declaration_block(obj.declarations, printer)
return s
def serialize_Media(obj, printer):
s = printer('@media ')
s += printer(',').join((printer(x) for x in obj.media_types))
s += printer('{') + printer('\n').join([serialize_Ruleset(x, printer) for x in obj.rulesets]) + printer('}')
return s
def serialize_Import(obj, printer):
s = printer('@import ') + serialize(obj.source, printer)
if obj.media_types:
s += printer(' ') + printer(',').join((printer(x) for x in obj.media_types))
s += printer(';')
return s
def serialize_Stylesheet(obj, printer):
s = printer('')
if obj.charset:
s += serialize_Charset(obj.charset, printer) + printer('\n')
if obj.imports:
s += printer('\n').join((serialize_Import(x, printer) for x in obj.imports)) + printer('\n')
s += printer('\n').join((serialize(x, printer) for x in obj.statements))
return s
def serialize_Pseudo(obj, printer):
return printer(':') + serialize_Ident(obj, printer)
def serialize_Selector_group(selectors, printer):
return printer(',').join((printer(x) for x in selectors))
def serialize_Declaration_block(declarations, printer):
return printer('{') + printer(';').join((serialize_Declaration(x, printer) for x in declarations)) + printer('}')
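# Usage sketch (hypothetical: assumes the css.Hexcolor and css.Uri
# constructors take the raw value directly and store it on the .value/.url
# attributes used above):
#
#     serialize(css.Hexcolor('fff'))   # -> '#fff'
#     serialize(css.Uri('x.png'))      # -> 'url(x.png)'
#
# Supplying a different `printer` changes only how builtin values are
# rendered, not the structure of the output.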
|
bayespy/bayespy | refs/heads/develop | bayespy/inference/vmp/nodes/gaussian.py | 2 | ################################################################################
# Copyright (C) 2011-2014 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
"""
Module for the Gaussian distribution and similar distributions.
"""
import numpy as np
from scipy import special
from bayespy.utils import (random,
misc,
linalg)
from bayespy.utils.linalg import dot, mvdot
from .expfamily import (ExponentialFamily,
ExponentialFamilyDistribution,
useconstructor)
from .wishart import (WishartMoments,
WishartPriorMoments)
from .gamma import (GammaMoments,
GammaDistribution,
GammaPriorMoments)
from .deterministic import Deterministic
from .node import (Moments,
ensureparents)
#
# MOMENTS
#
class GaussianMoments(Moments):
r"""
Class for the moments of Gaussian variables.
"""
def __init__(self, shape):
self.shape = shape
self.ndim = len(shape)
self.dims = (shape, 2*shape)
super().__init__()
def compute_fixed_moments(self, x):
r"""
Compute the moments for a fixed value
"""
x = np.asanyarray(x)
x = misc.atleast_nd(x, self.ndim)
return [x, linalg.outer(x, x, ndim=self.ndim)]
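    # For example (illustrative only): with shape (2,) a fixed value
    # x = [1., 2.] gives u[0] = [1., 2.] and u[1] = [[1., 2.], [2., 4.]],
    # i.e. the first and second moments of a point mass.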
@classmethod
def from_values(cls, x, ndim):
r"""
Return the shape of the moments for a fixed value.
"""
if ndim == 0:
return cls(())
else:
return cls(np.shape(x)[-ndim:])
def get_instance_conversion_kwargs(self):
return dict(ndim=self.ndim)
def get_instance_converter(self, ndim):
if ndim == self.ndim or ndim is None:
return None
return GaussianToGaussian(self, ndim)
class GaussianToGaussian():
def __init__(self, moments_from, ndim_to):
if not isinstance(moments_from, GaussianMoments):
raise ValueError()
if ndim_to < 0:
            raise ValueError("ndim_to must be non-negative")
self.shape_from = moments_from.shape
self.ndim_from = moments_from.ndim
self.ndim_to = ndim_to
if self.ndim_to > self.ndim_from:
raise ValueError()
if self.ndim_to == 0:
self.moments = GaussianMoments(())
else:
self.moments = GaussianMoments(self.shape_from[-self.ndim_to:])
return
def compute_moments(self, u):
if self.ndim_to == self.ndim_from:
return u
u0 = u[0]
u1 = misc.get_diag(u[1], ndim=self.ndim_from, ndim_to=self.ndim_to)
return [u0, u1]
def compute_message_to_parent(self, m, u_parent):
# Handle broadcasting in m_child
m0 = m[0] * np.ones(self.shape_from)
m1 = (
misc.make_diag(m[1], ndim=self.ndim_from, ndim_from=self.ndim_to)
* misc.identity(*self.shape_from)
)
return [m0, m1]
def compute_weights_to_parent(self, weights):
diff = self.ndim_from - self.ndim_to
if diff == 0:
return weights
return np.sum(
weights * np.ones(self.shape_from[:diff]),
#misc.atleast_nd(weights, diff),
axis=tuple(range(-diff, 0))
)
def plates_multiplier_from_parent(self, plates_multiplier):
diff = self.ndim_from - self.ndim_to
return plates_multiplier + diff * (1,)
def plates_from_parent(self, plates):
diff = self.ndim_from - self.ndim_to
if diff == 0:
return plates
return plates + self.shape_from[:diff]
def plates_to_parent(self, plates):
diff = self.ndim_from - self.ndim_to
if diff == 0:
return plates
return plates[:-diff]
class GaussianGammaMoments(Moments):
r"""
Class for the moments of Gaussian-gamma-ISO variables.
"""
def __init__(self, shape):
r"""
Create moments object for Gaussian-gamma isotropic variables
ndim=0: scalar
ndim=1: vector
ndim=2: matrix
...
"""
self.shape = shape
self.ndim = len(shape)
self.dims = (shape, 2*shape, (), ())
super().__init__()
def compute_fixed_moments(self, x_alpha):
r"""
Compute the moments for a fixed value
`x` is a mean vector.
`alpha` is a precision scale
"""
(x, alpha) = x_alpha
x = np.asanyarray(x)
alpha = np.asanyarray(alpha)
u0 = x * misc.add_trailing_axes(alpha, self.ndim)
u1 = (linalg.outer(x, x, ndim=self.ndim)
* misc.add_trailing_axes(alpha, 2*self.ndim))
u2 = np.copy(alpha)
u3 = np.log(alpha)
u = [u0, u1, u2, u3]
return u
@classmethod
def from_values(cls, x_alpha, ndim):
r"""
Return the shape of the moments for a fixed value.
"""
(x, alpha) = x_alpha
if ndim == 0:
            shape = ()
else:
shape = np.shape(x)[-ndim:]
return cls(shape)
def get_instance_conversion_kwargs(self):
return dict(ndim=self.ndim)
def get_instance_converter(self, ndim):
# FIXME/TODO: IMPLEMENT THIS CORRECTLY!
if ndim != self.ndim:
raise NotImplementedError(
"Conversion to different ndim in GaussianMoments not yet "
"implemented."
)
return None
class GaussianWishartMoments(Moments):
r"""
Class for the moments of Gaussian-Wishart variables.
"""
def __init__(self, shape):
self.shape = shape
self.ndim = len(shape)
self.dims = ( shape, (), 2*shape, () )
super().__init__()
def compute_fixed_moments(self, x, Lambda):
r"""
Compute the moments for a fixed value
`x` is a vector.
`Lambda` is a precision matrix
"""
x = np.asanyarray(x)
Lambda = np.asanyarray(Lambda)
u0 = linalg.mvdot(Lambda, x, ndim=self.ndim)
u1 = np.einsum(
'...i,...ij,...j->...',
misc.flatten_axes(x, self.ndim),
misc.flatten_axes(Lambda, self.ndim, self.ndim),
misc.flatten_axes(x, self.ndim)
)
u2 = np.copy(Lambda)
u3 = linalg.logdet_cov(Lambda, ndim=self.ndim)
return [u0, u1, u2, u3]
@classmethod
    def from_values(cls, x, Lambda, ndim):
r"""
Return the shape of the moments for a fixed value.
"""
if ndim == 0:
return cls(())
else:
if np.ndim(x) < ndim:
raise ValueError("Mean must be a vector")
shape = np.shape(x)[-ndim:]
if np.shape(Lambda)[-2*ndim:] != shape + shape:
raise ValueError("Shapes inconsistent")
return cls(shape)
#
# DISTRIBUTIONS
#
class GaussianDistribution(ExponentialFamilyDistribution):
r"""
Class for the VMP formulas of Gaussian variables.
Currently, supports only vector variables.
Notes
-----
Message passing equations:
.. math::
\mathbf{x} &\sim \mathcal{N}(\boldsymbol{\mu}, \mathbf{\Lambda}),
.. math::
\mathbf{x},\boldsymbol{\mu} \in \mathbb{R}^{D},
\quad \mathbf{\Lambda} \in \mathbb{R}^{D \times D},
\quad \mathbf{\Lambda} \text{ symmetric positive definite}
.. math::
\log\mathcal{N}( \mathbf{x} | \boldsymbol{\mu}, \mathbf{\Lambda} )
&=
- \frac{1}{2} \mathbf{x}^{\mathrm{T}} \mathbf{\Lambda} \mathbf{x}
+ \mathbf{x}^{\mathrm{T}} \mathbf{\Lambda} \boldsymbol{\mu}
- \frac{1}{2} \boldsymbol{\mu}^{\mathrm{T}} \mathbf{\Lambda}
\boldsymbol{\mu}
+ \frac{1}{2} \log |\mathbf{\Lambda}|
- \frac{D}{2} \log (2\pi)
"""
def __init__(self, shape):
self.shape = shape
self.ndim = len(shape)
super().__init__()
def compute_message_to_parent(self, parent, index, u, u_mu_Lambda):
r"""
Compute the message to a parent node.
.. math::
\boldsymbol{\phi}_{\boldsymbol{\mu}} (\mathbf{x}, \mathbf{\Lambda})
&=
\left[ \begin{matrix}
\mathbf{\Lambda} \mathbf{x}
\\
- \frac{1}{2} \mathbf{\Lambda}
\end{matrix} \right]
\\
\boldsymbol{\phi}_{\mathbf{\Lambda}} (\mathbf{x}, \boldsymbol{\mu})
&=
\left[ \begin{matrix}
- \frac{1}{2} \mathbf{xx}^{\mathrm{T}}
+ \frac{1}{2} \mathbf{x}\boldsymbol{\mu}^{\mathrm{T}}
+ \frac{1}{2} \boldsymbol{\mu}\mathbf{x}^{\mathrm{T}}
- \frac{1}{2} \boldsymbol{\mu\mu}^{\mathrm{T}}
\\
\frac{1}{2}
\end{matrix} \right]
"""
if index == 0:
x = u[0]
xx = u[1]
m0 = x
m1 = -0.5
m2 = -0.5*xx
m3 = 0.5
return [m0, m1, m2, m3]
else:
raise ValueError("Index out of bounds")
def compute_phi_from_parents(self, u_mu_Lambda, mask=True):
r"""
Compute the natural parameter vector given parent moments.
.. math::
\boldsymbol{\phi} (\boldsymbol{\mu}, \mathbf{\Lambda})
&=
\left[ \begin{matrix}
\mathbf{\Lambda} \boldsymbol{\mu}
\\
- \frac{1}{2} \mathbf{\Lambda}
\end{matrix} \right]
"""
Lambda_mu = u_mu_Lambda[0]
Lambda = u_mu_Lambda[2]
return [Lambda_mu,
-0.5 * Lambda]
def compute_moments_and_cgf(self, phi, mask=True):
r"""
Compute the moments and :math:`g(\phi)`.
.. math::
\overline{\mathbf{u}} (\boldsymbol{\phi})
&=
\left[ \begin{matrix}
- \frac{1}{2} \boldsymbol{\phi}^{-1}_2 \boldsymbol{\phi}_1
\\
\frac{1}{4} \boldsymbol{\phi}^{-1}_2 \boldsymbol{\phi}_1
\boldsymbol{\phi}^{\mathrm{T}}_1 \boldsymbol{\phi}^{-1}_2
- \frac{1}{2} \boldsymbol{\phi}^{-1}_2
\end{matrix} \right]
\\
g_{\boldsymbol{\phi}} (\boldsymbol{\phi})
&=
\frac{1}{4} \boldsymbol{\phi}^{\mathrm{T}}_1 \boldsymbol{\phi}^{-1}_2
\boldsymbol{\phi}_1
+ \frac{1}{2} \log | -2 \boldsymbol{\phi}_2 |
"""
# TODO: Compute -2*phi[1] and simplify the formulas
L = linalg.chol(-2*phi[1], ndim=self.ndim)
k = np.shape(phi[0])[-1]
# Moments
u0 = linalg.chol_solve(L, phi[0], ndim=self.ndim)
u1 = (linalg.outer(u0, u0, ndim=self.ndim)
+ linalg.chol_inv(L, ndim=self.ndim))
u = [u0, u1]
# G
g = (-0.5 * linalg.inner(u[0], phi[0], ndim=self.ndim)
+ 0.5 * linalg.chol_logdet(L, ndim=self.ndim))
return (u, g)
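    # Sanity check (sketch): in the scalar case phi = [Lambda*mu, -Lambda/2],
    # so chol_solve recovers u[0] = mu and chol_inv adds the covariance
    # 1/Lambda to u[0]**2 in u[1].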
def compute_cgf_from_parents(self, u_mu_Lambda):
r"""
Compute :math:`\mathrm{E}_{q(p)}[g(p)]`
.. math::
g (\boldsymbol{\mu}, \mathbf{\Lambda})
&=
- \frac{1}{2} \operatorname{tr}(\boldsymbol{\mu\mu}^{\mathrm{T}}
\mathbf{\Lambda} )
+ \frac{1}{2} \log |\mathbf{\Lambda}|
"""
mu_Lambda_mu = u_mu_Lambda[1]
logdet_Lambda = u_mu_Lambda[3]
g = -0.5*mu_Lambda_mu + 0.5*logdet_Lambda
return g
def compute_fixed_moments_and_f(self, x, mask=True):
r"""
Compute the moments and :math:`f(x)` for a fixed value.
.. math::
\mathbf{u} (\mathbf{x})
&=
\left[ \begin{matrix}
\mathbf{x}
\\
\mathbf{xx}^{\mathrm{T}}
\end{matrix} \right]
\\
f(\mathbf{x})
&= - \frac{D}{2} \log(2\pi)
"""
k = np.shape(x)[-1]
u = [x, linalg.outer(x, x, ndim=self.ndim)]
f = -k/2*np.log(2*np.pi)
return (u, f)
def compute_gradient(self, g, u, phi):
r"""
Compute the standard gradient with respect to the natural parameters.
Gradient of the moments:
.. math::
\mathrm{d}\overline{\mathbf{u}} &=
\begin{bmatrix}
\frac{1}{2} \phi_2^{-1} \mathrm{d}\phi_2 \phi_2^{-1} \phi_1
- \frac{1}{2} \phi_2^{-1} \mathrm{d}\phi_1
\\
- \frac{1}{4} \phi_2^{-1} \mathrm{d}\phi_2 \phi_2^{-1} \phi_1 \phi_1^{\mathrm{T}} \phi_2^{-1}
- \frac{1}{4} \phi_2^{-1} \phi_1 \phi_1^{\mathrm{T}} \phi_2^{-1} \mathrm{d}\phi_2 \phi_2^{-1}
+ \frac{1}{2} \phi_2^{-1} \mathrm{d}\phi_2 \phi_2^{-1}
+ \frac{1}{4} \phi_2^{-1} \mathrm{d}\phi_1 \phi_1^{\mathrm{T}} \phi_2^{-1}
+ \frac{1}{4} \phi_2^{-1} \phi_1 \mathrm{d}\phi_1^{\mathrm{T}} \phi_2^{-1}
\end{bmatrix}
\\
&=
\begin{bmatrix}
2 (\overline{u}_2 - \overline{u}_1 \overline{u}_1^{\mathrm{T}}) \mathrm{d}\phi_2 \overline{u}_1
+ (\overline{u}_2 - \overline{u}_1 \overline{u}_1^{\mathrm{T}}) \mathrm{d}\phi_1
\\
u_2 d\phi_2 u_2 - 2 u_1 u_1^T d\phi_2 u_1 u_1^T
+ 2 (u_2 - u_1 u_1^T) d\phi_1 u_1^T
\end{bmatrix}
Standard gradient given the gradient with respect to the moments, that
is, given the Riemannian gradient :math:`\tilde{\nabla}`:
.. math::
\nabla =
\begin{bmatrix}
(\overline{u}_2 - \overline{u}_1 \overline{u}_1^{\mathrm{T}}) \tilde{\nabla}_1
+ 2 (u_2 - u_1 u_1^T) \tilde{\nabla}_2 u_1
\\
(u_2 - u_1 u_1^T) \tilde{\nabla}_1 u_1^T
+ u_1 \tilde{\nabla}_1^T (u_2 - u_1 u_1^T)
+ 2 u_2 \tilde{\nabla}_2 u_2
- 2 u_1 u_1^T \tilde{\nabla}_2 u_1 u_1^T
\end{bmatrix}
"""
x = u[0]
xx = u[1]
# Some helpful variables
x_x = linalg.outer(x, x, ndim=self.ndim)
Cov = xx - x_x
cov_g0 = linalg.mvdot(Cov, g[0], ndim=self.ndim)
cov_g0_x = linalg.outer(cov_g0, x, ndim=self.ndim)
g1_x = linalg.mvdot(g[1], x, ndim=self.ndim)
# Compute gradient terms
d0 = cov_g0 + 2 * linalg.mvdot(Cov, g1_x, ndim=self.ndim)
d1 = (cov_g0_x + linalg.transpose(cov_g0_x, ndim=self.ndim)
+ 2 * linalg.mmdot(xx,
linalg.mmdot(g[1], xx, ndim=self.ndim),
ndim=self.ndim)
- 2 * x_x * misc.add_trailing_axes(linalg.inner(g1_x,
x,
ndim=self.ndim),
2*self.ndim))
return [d0, d1]
def random(self, *phi, plates=None):
r"""
Draw a random sample from the distribution.
"""
# TODO/FIXME: You shouldn't draw random values for
# observed/fixed elements!
# Note that phi[1] is -0.5*inv(Cov)
U = linalg.chol(-2*phi[1], ndim=self.ndim)
mu = linalg.chol_solve(U, phi[0], ndim=self.ndim)
shape = plates + self.shape
z = np.random.randn(*shape)
# Denote Lambda = -2*phi[1]
# Then, Cov = inv(Lambda) = inv(U'*U) = inv(U) * inv(U')
# Thus, compute mu + U\z
z = linalg.solve_triangular(U, z, trans='N', lower=False, ndim=self.ndim)
return mu + z
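    # Why this works (sketch): with Lambda = -2*phi[1] = U'U, the sample
    # mu + U\z has covariance inv(U) inv(U') = inv(Lambda), i.e. the
    # covariance implied by the natural parameters.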
class GaussianARDDistribution(ExponentialFamilyDistribution):
r"""
...
Log probability density function:
.. math::
\log p(x|\mu, \alpha) = -\frac{1}{2} x^T \mathrm{diag}(\alpha) x + x^T
\mathrm{diag}(\alpha) \mu - \frac{1}{2} \mu^T \mathrm{diag}(\alpha) \mu
+ \frac{1}{2} \sum_i \log \alpha_i - \frac{D}{2} \log(2\pi)
Parent has moments:
.. math::
\begin{bmatrix}
\alpha \circ \mu
\\
\alpha \circ \mu \circ \mu
\\
\alpha
\\
\log(\alpha)
\end{bmatrix}
"""
def __init__(self, shape):
self.shape = shape
self.ndim = len(shape)
super().__init__()
def compute_message_to_parent(self, parent, index, u, u_mu_alpha):
r"""
...
.. math::
m =
\begin{bmatrix}
x
\\
[-\frac{1}{2}, \ldots, -\frac{1}{2}]
\\
-\frac{1}{2} \mathrm{diag}(xx^T)
\\
[\frac{1}{2}, \ldots, \frac{1}{2}]
\end{bmatrix}
"""
if index == 0:
x = u[0]
x2 = misc.get_diag(u[1], ndim=self.ndim)
m0 = x
m1 = -0.5 * np.ones(self.shape)
m2 = -0.5 * x2
m3 = 0.5 * np.ones(self.shape)
return [m0, m1, m2, m3]
else:
raise ValueError("Invalid parent index")
def compute_weights_to_parent(self, index, weights):
r"""
Maps the mask to the plates of a parent.
"""
if index != 0:
raise IndexError()
return misc.add_trailing_axes(weights, self.ndim)
def compute_phi_from_parents(self, u_mu_alpha, mask=True):
alpha_mu = u_mu_alpha[0]
alpha = u_mu_alpha[2]
#mu = u_mu[0]
#alpha = u_alpha[0]
## if np.ndim(mu) < self.ndim_mu:
## raise ValueError("Moment of mu does not have enough dimensions")
## mu = misc.add_axes(mu,
## axis=np.ndim(mu)-self.ndim_mu,
## num=self.ndim-self.ndim_mu)
phi0 = alpha_mu
phi1 = -0.5 * alpha
if self.ndim > 0:
# Ensure that phi is not using broadcasting for variable
# dimension axes
ones = np.ones(self.shape)
phi0 = ones * phi0
phi1 = ones * phi1
# Make a diagonal matrix
phi1 = misc.diag(phi1, ndim=self.ndim)
return [phi0, phi1]
def compute_moments_and_cgf(self, phi, mask=True):
if self.ndim == 0:
# Use scalar equations
u0 = -phi[0] / (2*phi[1])
u1 = u0**2 - 1 / (2*phi[1])
u = [u0, u1]
g = (-0.5 * u[0] * phi[0] + 0.5 * np.log(-2*phi[1]))
# TODO/FIXME: You could use these equations if phi is a scalar
# in practice although ndim>0 (because the shape can be, e.g.,
# (1,1,1,1) for ndim=4).
else:
# Reshape to standard vector and matrix
D = np.prod(self.shape)
phi0 = np.reshape(phi[0], phi[0].shape[:-self.ndim] + (D,))
phi1 = np.reshape(phi[1], phi[1].shape[:-2*self.ndim] + (D,D))
# Compute the moments
L = linalg.chol(-2*phi1)
Cov = linalg.chol_inv(L)
u0 = linalg.chol_solve(L, phi0)
u1 = linalg.outer(u0, u0) + Cov
# Compute CGF
g = (- 0.5 * np.einsum('...i,...i', u0, phi0)
+ 0.5 * linalg.chol_logdet(L))
# Reshape to arrays
u0 = np.reshape(u0, u0.shape[:-1] + self.shape)
u1 = np.reshape(u1, u1.shape[:-2] + self.shape + self.shape)
u = [u0, u1]
return (u, g)
def compute_cgf_from_parents(self, u_mu_alpha):
r"""
Compute the value of the cumulant generating function.
"""
# Compute sum(mu^2 * alpha) correctly for broadcasted shapes
alpha_mu2 = u_mu_alpha[1]
logdet_alpha = u_mu_alpha[3]
axes = tuple(range(-self.ndim, 0))
# TODO/FIXME: You could use plate multiplier type of correction instead
# of explicitly broadcasting with ones.
if self.ndim > 0:
alpha_mu2 = misc.sum_multiply(alpha_mu2, np.ones(self.shape),
axis=axes)
if self.ndim > 0:
logdet_alpha = misc.sum_multiply(logdet_alpha, np.ones(self.shape),
axis=axes)
# Compute g
g = -0.5*alpha_mu2 + 0.5*logdet_alpha
return g
def compute_fixed_moments_and_f(self, x, mask=True):
r""" Compute u(x) and f(x) for given x. """
if self.ndim > 0 and np.shape(x)[-self.ndim:] != self.shape:
raise ValueError("Invalid shape")
k = np.prod(self.shape)
u = [x, linalg.outer(x, x, ndim=self.ndim)]
f = -k/2*np.log(2*np.pi)
return (u, f)
def plates_to_parent(self, index, plates):
r"""
Resolves the plate mapping to a parent.
Given the plates of the node's moments, this method returns the plates
that the message to a parent has for the parent's distribution.
"""
if index != 0:
raise IndexError()
return plates + self.shape
def plates_from_parent(self, index, plates):
r"""
Resolve the plate mapping from a parent.
Given the plates of a parent's moments, this method returns the plates
that the moments has for this distribution.
"""
if index != 0:
raise IndexError()
if self.ndim == 0:
return plates
else:
return plates[:-self.ndim]
def random(self, *phi, plates=None):
r"""
Draw a random sample from the Gaussian distribution.
"""
# TODO/FIXME: You shouldn't draw random values for
# observed/fixed elements!
D = self.ndim
if D == 0:
dims = ()
else:
dims = np.shape(phi[0])[-D:]
if np.prod(dims) == 1.0:
# Scalar Gaussian
phi1 = phi[1]
if D > 0:
# Because the covariance matrix has shape (1,1,...,1,1),
# that is 2*D number of ones, remove the extra half of the
# shape
phi1 = np.reshape(phi1, np.shape(phi1)[:-2*D] + D*(1,))
var = -0.5 / phi1
std = np.sqrt(var)
mu = var * phi[0]
shape = plates + dims
z = np.random.randn(*shape)
x = mu + std * z
else:
N = np.prod(dims)
dims_cov = dims + dims
# Reshape precision matrix
plates_cov = np.shape(phi[1])[:-2*D]
V = -2 * np.reshape(phi[1], plates_cov + (N,N))
# Compute Cholesky
U = linalg.chol(V)
# Reshape mean vector
plates_phi0 = np.shape(phi[0])[:-D]
phi0 = np.reshape(phi[0], plates_phi0 + (N,))
mu = linalg.chol_solve(U, phi0)
# Compute mu + U\z
shape = plates + (N,)
z = np.random.randn(*shape)
# Denote Lambda = -2*phi[1]
# Then, Cov = inv(Lambda) = inv(U'*U) = inv(U) * inv(U')
# Thus, compute mu + U\z
x = mu + linalg.solve_triangular(U, z,
trans='N',
lower=False)
x = np.reshape(x, plates + dims)
return x
def compute_gradient(self, g, u, phi):
r"""
Compute the standard gradient with respect to the natural parameters.
Gradient of the moments:
.. math::
\mathrm{d}\overline{\mathbf{u}} &=
\begin{bmatrix}
\frac{1}{2} \phi_2^{-1} \mathrm{d}\phi_2 \phi_2^{-1} \phi_1
- \frac{1}{2} \phi_2^{-1} \mathrm{d}\phi_1
\\
- \frac{1}{4} \phi_2^{-1} \mathrm{d}\phi_2 \phi_2^{-1} \phi_1 \phi_1^{\mathrm{T}} \phi_2^{-1}
- \frac{1}{4} \phi_2^{-1} \phi_1 \phi_1^{\mathrm{T}} \phi_2^{-1} \mathrm{d}\phi_2 \phi_2^{-1}
+ \frac{1}{2} \phi_2^{-1} \mathrm{d}\phi_2 \phi_2^{-1}
+ \frac{1}{4} \phi_2^{-1} \mathrm{d}\phi_1 \phi_1^{\mathrm{T}} \phi_2^{-1}
+ \frac{1}{4} \phi_2^{-1} \phi_1 \mathrm{d}\phi_1^{\mathrm{T}} \phi_2^{-1}
\end{bmatrix}
\\
&=
\begin{bmatrix}
2 (\overline{u}_2 - \overline{u}_1 \overline{u}_1^{\mathrm{T}}) \mathrm{d}\phi_2 \overline{u}_1
+ (\overline{u}_2 - \overline{u}_1 \overline{u}_1^{\mathrm{T}}) \mathrm{d}\phi_1
\\
u_2 d\phi_2 u_2 - 2 u_1 u_1^T d\phi_2 u_1 u_1^T
+ 2 (u_2 - u_1 u_1^T) d\phi_1 u_1^T
\end{bmatrix}
Standard gradient given the gradient with respect to the moments, that
is, given the Riemannian gradient :math:`\tilde{\nabla}`:
.. math::
\nabla =
\begin{bmatrix}
(\overline{u}_2 - \overline{u}_1 \overline{u}_1^{\mathrm{T}}) \tilde{\nabla}_1
+ 2 (u_2 - u_1 u_1^T) \tilde{\nabla}_2 u_1
\\
(u_2 - u_1 u_1^T) \tilde{\nabla}_1 u_1^T
+ u_1 \tilde{\nabla}_1^T (u_2 - u_1 u_1^T)
+ 2 u_2 \tilde{\nabla}_2 u_2
- 2 u_1 u_1^T \tilde{\nabla}_2 u_1 u_1^T
\end{bmatrix}
"""
ndim = self.ndim
x = u[0]
xx = u[1]
# Some helpful variables
x_x = linalg.outer(x, x, ndim=ndim)
Cov = xx - x_x
cov_g0 = linalg.mvdot(Cov, g[0], ndim=ndim)
cov_g0_x = linalg.outer(cov_g0, x, ndim=ndim)
g1_x = linalg.mvdot(g[1], x, ndim=ndim)
# Compute gradient terms
d0 = cov_g0 + 2 * linalg.mvdot(Cov, g1_x, ndim=ndim)
d1 = (cov_g0_x + linalg.transpose(cov_g0_x, ndim=ndim)
+ 2 * linalg.mmdot(xx,
linalg.mmdot(g[1], xx, ndim=ndim),
ndim=ndim)
- 2 * x_x * misc.add_trailing_axes(linalg.inner(g1_x,
x,
ndim=ndim),
2*ndim))
return [d0, d1]
class GaussianGammaDistribution(ExponentialFamilyDistribution):
r"""
Class for the VMP formulas of Gaussian-Gamma-ISO variables.
Currently, supports only vector variables.
Log pdf of the prior:
.. math::
\log p(\mathbf{x}, \tau | \boldsymbol{\mu}, \mathbf{\Lambda}, a, b) =&
- \frac{1}{2} \tau \mathbf{x}^T \mathbf{\Lambda} \mathbf{x}
+ \frac{1}{2} \tau \mathbf{x}^T \mathbf{\Lambda} \boldsymbol{\mu}
+ \frac{1}{2} \tau \boldsymbol{\mu}^T \mathbf{\Lambda} \mathbf{x}
- \frac{1}{2} \tau \boldsymbol{\mu}^T \mathbf{\Lambda} \boldsymbol{\mu}
+ \frac{1}{2} \log|\mathbf{\Lambda}|
+ \frac{D}{2} \log\tau
- \frac{D}{2} \log(2\pi)
\\ &
- b \tau
+ a \log\tau
- \log\tau
+ a \log b
- \log \Gamma(a)
Log pdf of the posterior approximation:
.. math::
\log q(\mathbf{x}, \tau) =&
\tau \mathbf{x}^T \boldsymbol{\phi}_1
+ \tau \mathbf{x}^T \mathbf{\Phi}_2 \mathbf{x}
+ \tau \phi_3
+ \log\tau \phi_4
+ g(\boldsymbol{\phi}_1, \mathbf{\Phi}_2, \phi_3, \phi_4)
+ f(x, \tau)
"""
def __init__(self, shape):
self.shape = shape
self.ndim = len(shape)
super().__init__()
def compute_message_to_parent(self, parent, index, u, u_mu_Lambda, u_a, u_b):
r"""
Compute the message to a parent node.
- Parent :math:`(\boldsymbol{\mu}, \mathbf{\Lambda})`
Moments:
.. math::
\begin{bmatrix}
\mathbf{\Lambda}\boldsymbol{\mu}
\\
\boldsymbol{\mu}^T\mathbf{\Lambda}\boldsymbol{\mu}
\\
\mathbf{\Lambda}
\\
\log|\mathbf{\Lambda}|
\end{bmatrix}
Message:
.. math::
\begin{bmatrix}
\langle \tau \mathbf{x} \rangle
\\
- \frac{1}{2} \langle \tau \rangle
\\
- \frac{1}{2} \langle \tau \mathbf{xx}^T \rangle
\\
\frac{1}{2}
\end{bmatrix}
- Parent :math:`a`:
Moments:
.. math::
\begin{bmatrix}
a
\\
\log \Gamma(a)
\end{bmatrix}
Message:
.. math::
\begin{bmatrix}
\langle \log\tau \rangle + \langle \log b \rangle
\\
-1
\end{bmatrix}
- Parent :math:`b`:
Moments:
.. math::
\begin{bmatrix}
b
\\
\log b
\end{bmatrix}
Message:
.. math::
\begin{bmatrix}
- \langle \tau \rangle
\\
\langle a \rangle
\end{bmatrix}
"""
x_tau = u[0]
xx_tau = u[1]
tau = u[2]
logtau = u[3]
if index == 0:
m0 = x_tau
m1 = -0.5 * tau
m2 = -0.5 * xx_tau
m3 = 0.5
return [m0, m1, m2, m3]
elif index == 1:
logb = u_b[1]
m0 = logtau + logb
m1 = -1
return [m0, m1]
elif index == 2:
a = u_a[0]
m0 = -tau
m1 = a
return [m0, m1]
else:
raise ValueError("Index out of bounds")
def compute_phi_from_parents(self, u_mu_Lambda, u_a, u_b, mask=True):
r"""
Compute the natural parameter vector given parent moments.
"""
Lambda_mu = u_mu_Lambda[0]
mu_Lambda_mu = u_mu_Lambda[1]
Lambda = u_mu_Lambda[2]
a = u_a[0]
b = u_b[0]
phi = [Lambda_mu,
-0.5*Lambda,
-0.5*mu_Lambda_mu - b,
a]
return phi
def compute_moments_and_cgf(self, phi, mask=True):
r"""
Compute the moments and :math:`g(\phi)`.
"""
# Compute helpful variables
V = -2*phi[1]
L_V = linalg.chol(V, ndim=self.ndim)
logdet_V = linalg.chol_logdet(L_V, ndim=self.ndim)
mu = linalg.chol_solve(L_V, phi[0], ndim=self.ndim)
Cov = linalg.chol_inv(L_V, ndim=self.ndim)
a = phi[3]
b = -phi[2] - 0.5 * linalg.inner(mu, phi[0], ndim=self.ndim)
log_b = np.log(b)
# Compute moments
u2 = a / b
u3 = -log_b + special.psi(a)
u0 = mu * misc.add_trailing_axes(u2, self.ndim)
u1 = Cov + (
linalg.outer(mu, mu, ndim=self.ndim)
* misc.add_trailing_axes(u2, 2 * self.ndim)
)
u = [u0, u1, u2, u3]
# Compute g
g = 0.5*logdet_V + a*log_b - special.gammaln(a)
return (u, g)
def compute_cgf_from_parents(self, u_mu_Lambda, u_a, u_b):
r"""
Compute :math:`\mathrm{E}_{q(p)}[g(p)]`
"""
logdet_Lambda = u_mu_Lambda[3]
a = u_a[0]
gammaln_a = u_a[1]
log_b = u_b[1]
g = 0.5*logdet_Lambda + a*log_b - gammaln_a
return g
def compute_fixed_moments_and_f(self, x_alpha, mask=True):
r"""
Compute the moments and :math:`f(x)` for a fixed value.
"""
(x, alpha) = x_alpha
logalpha = np.log(alpha)
u0 = x * misc.add_trailing_axes(alpha, self.ndim)
u1 = linalg.outer(x, x, ndim=self.ndim) * misc.add_trailing_axes(alpha, 2*self.ndim)
u2 = alpha
u3 = logalpha
u = [u0, u1, u2, u3]
if self.ndim > 0:
D = np.prod(np.shape(x)[-self.ndim:])
else:
D = 1
f = (D/2 - 1) * logalpha - D/2 * np.log(2*np.pi)
return (u, f)
def random(self, *phi, plates=None):
r"""
Draw a random sample from the distribution.
"""
# TODO/FIXME: This is incorrect, I think. Gamma distribution parameters
# aren't directly those, because phi has some parts from the Gaussian
# distribution.
alpha = GammaDistribution().random(
phi[2],
phi[3],
plates=plates
)
mu = GaussianARDDistribution(self.shape).random(
misc.add_trailing_axes(alpha, self.ndim) * phi[0],
misc.add_trailing_axes(alpha, 2*self.ndim) * phi[1],
plates=plates
)
return (mu, alpha)
class GaussianWishartDistribution(ExponentialFamilyDistribution):
r"""
Class for the VMP formulas of Gaussian-Wishart variables.
Currently, supports only vector variables.
.. math::
\log p(\mathbf{x}, \mathbf{\Lambda} | \boldsymbol{\mu},
\alpha, n, \mathbf{V})
=&
- \frac{1}{2} \alpha \mathbf{x}^T \mathbf{\Lambda} \mathbf{x}
+ \frac{1}{2} \alpha \mathbf{x}^T \mathbf{\Lambda} \boldsymbol{\mu}
+ \frac{1}{2} \alpha \boldsymbol{\mu}^T \mathbf{\Lambda} \mathbf{x}
- \frac{1}{2} \alpha \boldsymbol{\mu}^T \mathbf{\Lambda} \boldsymbol{\mu}
+ \frac{1}{2} \log|\mathbf{\Lambda}|
+ \frac{D}{2} \log\alpha
- \frac{D}{2} \log(2\pi)
\\ &
- \frac{1}{2} \mathrm{tr}(\mathbf{V}\mathbf{\Lambda})
+ \frac{n-d-1}{2} \log|\mathbf{\Lambda}|
- \frac{nd}{2}\log 2
- \frac{n}{2} \log|\mathbf{V}|
- \log\Gamma_d(\frac{n}{2})
Posterior approximation:
.. math::
\log q(\mathbf{x}, \mathbf{\Lambda})
=&
\mathbf{x}^T \mathbf{\Lambda} \boldsymbol{\phi}_1
+ \phi_2 \mathbf{x}^T \mathbf{\Lambda} \mathbf{x}
+ \mathrm{tr}(\mathbf{\Lambda} \mathbf{\Phi}_3)
+ \phi_4 \log|\mathbf{\Lambda}|
+ g(\boldsymbol{\phi}_1, \phi_2, \mathbf{\Phi}_3, \phi_4)
+ f(\mathbf{x}, \mathbf{\Lambda})
"""
def compute_message_to_parent(self, parent, index, u, u_mu_alpha, u_n, u_V):
r"""
Compute the message to a parent node.
For parent :math:`q(\boldsymbol{\mu}, \alpha)`:
.. math::
\alpha \boldsymbol{\mu}^T \mathbf{m}_1
\Rightarrow &
\mathbf{m}_1 = \langle \mathbf{\Lambda x} \rangle
\\
\alpha \boldsymbol{\mu}^T \mathbf{M}_2 \boldsymbol{\mu}
\Rightarrow &
\mathbf{M}_2 = - \frac{1}{2} \langle \mathbf{\Lambda} \rangle
\\
\alpha m_3
\Rightarrow &
m_3 = - \frac{1}{2} \langle \mathbf{x}^T \mathbf{\Lambda} \mathbf{x} \rangle
\\
m_4 \log \alpha
\Rightarrow &
m_4 = \frac{d}{2}
For parent :math:`q(\mathbf{V})`:
.. math::
\mathbf{M}_1 &= \frac{\partial \langle \log p \rangle}{\partial
\langle \mathbf{V} \rangle} = -\frac{1}{2} \langle \mathbf{\Lambda} \rangle
\\
\mathbf{M}_2 &= \frac{\partial \langle \log p \rangle}{\partial \langle \log|\mathbf{V}| \rangle}
= ...
"""
if index == 0:
            # TODO: compute the messages m0, m1, m2, m3 derived in the
            # docstring above; not yet implemented.
            raise NotImplementedError()
elif index == 1:
raise NotImplementedError()
elif index == 2:
raise NotImplementedError()
else:
raise ValueError("Index out of bounds")
def compute_phi_from_parents(self, u_mu_alpha, u_n, u_V, mask=True):
r"""
Compute the natural parameter vector given parent moments.
"""
alpha_mu = u_mu_alpha[0]
alpha_mumu = u_mu_alpha[1]
alpha = u_mu_alpha[2]
V = u_V[0]
n = u_n[0]
phi0 = alpha_mu
phi1 = -0.5 * alpha
phi2 = -0.5 * (V + alpha_mumu)
phi3 = 0.5 * n
return [phi0, phi1, phi2, phi3]
def compute_moments_and_cgf(self, phi, mask=True):
r"""
Compute the moments and :math:`g(\phi)`.
"""
        # TODO/FIXME: This probably isn't correct: phi[2:] has terms that are
        # related to the Gaussian too, not only the Wishart. Note also that
        # the dimensionality D is undefined in this scope, so the line below
        # could not run as written:
        # u_Lambda = WishartDistribution((D,)).compute_moments_and_cgf(phi[2:])
        raise NotImplementedError()
def compute_cgf_from_parents(self, u_mu_alpha, u_n, u_V):
r"""
Compute :math:`\mathrm{E}_{q(p)}[g(p)]`
"""
        raise NotImplementedError()
def compute_fixed_moments_and_f(self, x, Lambda, mask=True):
r"""
Compute the moments and :math:`f(x)` for a fixed value.
"""
        raise NotImplementedError()
def random(self, *params, plates=None):
r"""
Draw a random sample from the distribution.
"""
raise NotImplementedError()
#
# NODES
#
class _GaussianTemplate(ExponentialFamily):
def translate(self, b, debug=False):
"""
Transforms the current posterior by adding a bias to the mean
Parameters
----------
b : array
Constant to add
"""
ndim = len(self.dims[0])
if ndim > 0 and np.shape(b)[-ndim:] != self.dims[0]:
raise ValueError("Bias has incorrect shape")
x = self.u[0]
xb = linalg.outer(x, b, ndim=ndim)
bx = linalg.transpose(xb, ndim=ndim)
bb = linalg.outer(b, b, ndim=ndim)
uh = [
self.u[0] + b,
self.u[1] + xb + bx + bb
]
Lambda = -2 * self.phi[1]
Lambda_b = linalg.mvdot(Lambda, b, ndim=ndim)
dg = -0.5 * (
linalg.inner(b, Lambda_b, ndim=ndim)
+ 2 * linalg.inner(x, Lambda_b, ndim=ndim)
)
phih = [
self.phi[0] + Lambda_b,
self.phi[1]
]
self._check_shape(uh)
self._check_shape(phih)
self.u = uh
self.phi = phih
self.g = self.g + dg
# TODO: This is all just debugging stuff and can be removed
if debug:
uh = [ui.copy() for ui in uh]
gh = self.g.copy()
self._update_moments_and_cgf()
if any(not np.allclose(uih, ui, atol=1e-6) for (uih, ui) in zip(uh, self.u)):
raise RuntimeError("BUG")
if not np.allclose(self.g, gh, atol=1e-6):
raise RuntimeError("BUG")
return
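    # In distribution terms (sketch): translate(b) maps the posterior
    # N(mu, Cov) to N(mu + b, Cov); only phi[0], the moments and g change,
    # while the precision part phi[1] stays fixed.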
class Gaussian(_GaussianTemplate):
r"""
Node for Gaussian variables.
The node represents a :math:`D`-dimensional vector from the Gaussian
distribution:
.. math::
\mathbf{x} &\sim \mathcal{N}(\boldsymbol{\mu}, \mathbf{\Lambda}),
where :math:`\boldsymbol{\mu}` is the mean vector and
:math:`\mathbf{\Lambda}` is the precision matrix (i.e., inverse of the
covariance matrix).
.. math::
\mathbf{x},\boldsymbol{\mu} \in \mathbb{R}^{D},
\quad \mathbf{\Lambda} \in \mathbb{R}^{D \times D},
\quad \mathbf{\Lambda} \text{ symmetric positive definite}
Parameters
----------
mu : Gaussian-like node or GaussianGamma-like node or GaussianWishart-like node or array
Mean vector
Lambda : Wishart-like node or array
Precision matrix
See also
--------
Wishart, GaussianARD, GaussianWishart, GaussianGamma
"""
def __init__(self, mu, Lambda, **kwargs):
r"""
Create Gaussian node
"""
super().__init__(mu, Lambda, **kwargs)
@classmethod
def _constructor(cls, mu, Lambda, ndim=1, **kwargs):
r"""
Constructs distribution and moments objects.
"""
mu_Lambda = WrapToGaussianWishart(mu, Lambda, ndim=ndim)
shape = mu_Lambda._moments.shape
moments = GaussianMoments(shape)
parent_moments = (mu_Lambda._moments,)
if mu_Lambda.dims != ( shape, (), shape+shape, () ):
raise Exception("Parents have wrong dimensionality")
distribution = GaussianDistribution(shape)
parents = [mu_Lambda]
return (parents,
kwargs,
moments.dims,
cls._total_plates(kwargs.get('plates'),
distribution.plates_from_parent(0, mu_Lambda.plates)),
distribution,
moments,
parent_moments)
def initialize_from_parameters(self, mu, Lambda):
u = self._parent_moments[0].compute_fixed_moments(mu, Lambda)
self._initialize_from_parent_moments(u)
def __str__(self):
ndim = len(self.dims[0])
mu = self.u[0]
Cov = self.u[1] - linalg.outer(mu, mu, ndim=ndim)
return ("%s ~ Gaussian(mu, Cov)\n"
" mu = \n"
"%s\n"
" Cov = \n"
"%s\n"
% (self.name, mu, Cov))
def rotate(self, R, inv=None, logdet=None, Q=None):
# TODO/FIXME: Combine and refactor all these rotation transformations
# into _GaussianTemplate
if self._moments.ndim != 1:
raise NotImplementedError("Not implemented for ndim!=1 yet")
if inv is not None:
invR = inv
else:
invR = np.linalg.inv(R)
if logdet is not None:
logdetR = logdet
else:
logdetR = np.linalg.slogdet(R)[1]
# It would be more efficient and simpler, if you just rotated the
# moments and didn't touch phi. However, then you would need to call
# update() before lower_bound_contribution. This is more error-safe.
# Rotate plates, if plate rotation matrix is given. Assume that there's
# only one plate-axis
if Q is not None:
# Rotate moments using Q
self.u[0] = np.einsum('ik,kj->ij', Q, self.u[0])
sumQ = np.sum(Q, axis=0)
# Rotate natural parameters using Q
self.phi[1] = np.einsum('d,dij->dij', sumQ**(-2), self.phi[1])
self.phi[0] = np.einsum('dij,dj->di', -2*self.phi[1], self.u[0])
# Transform parameters using R
self.phi[0] = mvdot(invR.T, self.phi[0])
self.phi[1] = dot(invR.T, self.phi[1], invR)
if Q is not None:
self._update_moments_and_cgf()
else:
# Transform moments and g using R
self.u[0] = mvdot(R, self.u[0])
self.u[1] = dot(R, self.u[1], R.T)
self.g -= logdetR
def rotate_matrix(self, R1, R2, inv1=None, logdet1=None, inv2=None, logdet2=None, Q=None):
r"""
The vector is reshaped into a matrix by stacking the row vectors.
Computes R1*X*R2', which is identical to kron(R1,R2)*x (??)
Note that this is slightly different from the standard Kronecker product
definition because Numpy stacks row vectors instead of column vectors.
Parameters
----------
R1 : ndarray
A matrix from the left
R2 : ndarray
A matrix from the right
"""
if self._moments.ndim != 1:
raise NotImplementedError("Not implemented for ndim!=1 yet")
if Q is not None:
# Rotate moments using Q
self.u[0] = np.einsum('ik,kj->ij', Q, self.u[0])
sumQ = np.sum(Q, axis=0)
# Rotate natural parameters using Q
self.phi[1] = np.einsum('d,dij->dij', sumQ**(-2), self.phi[1])
self.phi[0] = np.einsum('dij,dj->di', -2*self.phi[1], self.u[0])
if inv1 is None:
inv1 = np.linalg.inv(R1)
if logdet1 is None:
logdet1 = np.linalg.slogdet(R1)[1]
if inv2 is None:
inv2 = np.linalg.inv(R2)
if logdet2 is None:
logdet2 = np.linalg.slogdet(R2)[1]
D1 = np.shape(R1)[0]
D2 = np.shape(R2)[0]
# Reshape into matrices
sh0 = np.shape(self.phi[0])[:-1] + (D1,D2)
sh1 = np.shape(self.phi[1])[:-2] + (D1,D2,D1,D2)
phi0 = np.reshape(self.phi[0], sh0)
phi1 = np.reshape(self.phi[1], sh1)
# Apply rotations to phi
#phi0 = dot(inv1, phi0, inv2.T)
phi0 = dot(inv1.T, phi0, inv2)
phi1 = np.einsum('...ia,...abcd->...ibcd', inv1.T, phi1)
phi1 = np.einsum('...ic,...abcd->...abid', inv1.T, phi1)
phi1 = np.einsum('...ib,...abcd->...aicd', inv2.T, phi1)
phi1 = np.einsum('...id,...abcd->...abci', inv2.T, phi1)
# Reshape back into vectors
self.phi[0] = np.reshape(phi0, self.phi[0].shape)
self.phi[1] = np.reshape(phi1, self.phi[1].shape)
# It'd be better to rotate the moments too..
self._update_moments_and_cgf()
class GaussianARD(_GaussianTemplate):
r"""
Node for Gaussian variables with ARD prior.
The node represents a :math:`D`-dimensional vector from the Gaussian
distribution:
.. math::
\mathbf{x} &\sim \mathcal{N}(\boldsymbol{\mu}, \mathrm{diag}(\boldsymbol{\alpha})),
where :math:`\boldsymbol{\mu}` is the mean vector and
:math:`\mathrm{diag}(\boldsymbol{\alpha})` is the diagonal precision matrix
(i.e., inverse of the covariance matrix).
.. math::
\mathbf{x},\boldsymbol{\mu} \in \mathbb{R}^{D}, \quad \alpha_d > 0 \text{
for } d=0,\ldots,D-1
*Note:* The form of the posterior approximation is a Gaussian distribution with full
covariance matrix instead of a diagonal matrix.
Parameters
----------
    mu : Gaussian-like node or GaussianGamma-like node or array
        Mean vector
alpha : gamma-like node or array
Diagonal elements of the precision matrix
See also
--------
Gamma, Gaussian, GaussianGamma, GaussianWishart
"""
def __init__(self, mu, alpha, ndim=None, shape=None, **kwargs):
r"""
Create GaussianARD node.
"""
super().__init__(mu, alpha, ndim=ndim, shape=shape, **kwargs)
@classmethod
def _constructor(cls, mu, alpha, ndim=None, shape=None, **kwargs):
r"""
Constructs distribution and moments objects.
If __init__ uses useconstructor decorator, this method is called to
construct distribution and moments objects.
        The method is given the same inputs as __init__. For some nodes, some
        of these can't be "static" class attributes, so the node class must
        override this method to construct the objects manually.
        The point of the distribution class is to hold code that is general
        to the distribution but not specific to the node. The point of the
        moments class is to define the messaging protocols.
"""
mu_alpha = WrapToGaussianGamma(mu, alpha, ndim=0)
if ndim is None:
if shape is not None:
ndim = len(shape)
else:
shape = ()
ndim = 0
else:
if shape is not None:
if ndim != len(shape):
raise ValueError("Given shape and ndim inconsistent")
else:
if ndim == 0:
shape = ()
else:
if ndim > len(mu_alpha.plates):
raise ValueError(
"Cannot determine shape for ndim={0} because parent "
"full shape has ndim={1}."
.format(ndim, len(mu_alpha.plates))
)
shape = mu_alpha.plates[-ndim:]
moments = GaussianMoments(shape)
parent_moments = [GaussianGammaMoments(())]
distribution = GaussianARDDistribution(shape)
plates = cls._total_plates(kwargs.get('plates'),
distribution.plates_from_parent(0, mu_alpha.plates))
parents = [mu_alpha]
return (parents,
kwargs,
moments.dims,
plates,
distribution,
moments,
parent_moments)
def initialize_from_parameters(self, mu, alpha):
# Explicit broadcasting so the shapes match
mu = mu * np.ones(np.shape(alpha))
alpha = alpha * np.ones(np.shape(mu))
# Compute parent moments
u = self._parent_moments[0].compute_fixed_moments([mu, alpha])
# Initialize distribution
self._initialize_from_parent_moments(u)
def initialize_from_mean_and_covariance(self, mu, Cov):
ndim = len(self._distribution.shape)
u = [mu, Cov + linalg.outer(mu, mu, ndim=ndim)]
mask = np.logical_not(self.observed)
# TODO: You could compute the CGF but it requires Cholesky of
# Cov. Do it later.
self._set_moments_and_cgf(u, np.nan, mask=mask)
return
def __str__(self):
mu = self.u[0]
Cov = self.u[1] - linalg.outer(mu, mu)
return ("%s ~ Gaussian(mu, Cov)\n"
" mu = \n"
"%s\n"
" Cov = \n"
"%s\n"
% (self.name, mu, Cov))
def rotate(self, R, inv=None, logdet=None, axis=-1, Q=None, subset=None, debug=False):
if Q is not None:
raise NotImplementedError()
if subset is not None:
raise NotImplementedError()
# TODO/FIXME: Combine and refactor all these rotation transformations
# into _GaussianTemplate
ndim = len(self._distribution.shape)
if inv is not None:
invR = inv
else:
invR = np.linalg.inv(R)
if logdet is not None:
logdetR = logdet
else:
logdetR = np.linalg.slogdet(R)[1]
self.phi[0] = rotate_mean(self.phi[0], invR.T,
axis=axis,
ndim=ndim)
self.phi[1] = rotate_covariance(self.phi[1], invR.T,
axis=axis,
ndim=ndim)
self.u[0] = rotate_mean(self.u[0], R,
axis=axis,
ndim=ndim)
self.u[1] = rotate_covariance(self.u[1], R,
axis=axis,
ndim=ndim)
s = list(self.dims[0])
s.pop(axis)
self.g -= logdetR * np.prod(s)
# TODO: This is all just debugging stuff and can be removed
if debug:
uh = [ui.copy() for ui in self.u]
gh = self.g.copy()
self._update_moments_and_cgf()
if any(not np.allclose(uih, ui, atol=1e-6) for (uih, ui) in zip(uh, self.u)):
raise RuntimeError("BUG")
if not np.allclose(self.g, gh, atol=1e-6):
raise RuntimeError("BUG")
return
def rotate_plates(self, Q, plate_axis=-1):
r"""
Approximate rotation of a plate axis.
Mean is rotated exactly but covariance/precision matrix is rotated
approximately.
"""
ndim = len(self._distribution.shape)
# Rotate moments using Q
if not isinstance(plate_axis, int):
raise ValueError("Plate axis must be integer")
if plate_axis >= 0:
plate_axis -= len(self.plates)
if plate_axis < -len(self.plates) or plate_axis >= 0:
raise ValueError("Axis out of bounds")
u0 = rotate_mean(self.u[0], Q,
ndim=ndim+(-plate_axis),
axis=0)
sumQ = misc.add_trailing_axes(np.sum(Q, axis=0),
2*ndim-plate_axis-1)
phi1 = sumQ**(-2) * self.phi[1]
phi0 = -2 * matrix_dot_vector(phi1, u0, ndim=ndim)
self.phi[0] = phi0
self.phi[1] = phi1
self._update_moments_and_cgf()
return
class GaussianGamma(ExponentialFamily):
r"""
Node for Gaussian-gamma (isotropic) random variables.
The prior:
.. math::
        p(x, \alpha| \mu, \Lambda, a, b) = p(x|\alpha, \mu, \Lambda) \, p(\alpha|a, b)
        p(x|\alpha, \mu, \Lambda) = \mathcal{N}(x | \mu, \alpha \Lambda)
p(\alpha|a, b) = \mathcal{G}(\alpha | a, b)
The posterior approximation :math:`q(x, \alpha)` has the same Gaussian-gamma
form.
Currently, supports only vector variables.
"""
@classmethod
def _constructor(cls, mu, Lambda, a, b, ndim=1, **kwargs):
r"""
Constructs distribution and moments objects.
This method is called if useconstructor decorator is used for __init__.
        `mu` is the mean/location vector
        `Lambda` is the precision matrix
        `a` is the shape parameter of the gamma distribution
        `b` is the rate parameter of the gamma distribution
"""
# Convert parent nodes
mu_Lambda = WrapToGaussianWishart(mu, Lambda, ndim=ndim)
a = cls._ensure_moments(a, GammaPriorMoments)
b = cls._ensure_moments(b, GammaMoments)
shape = mu_Lambda.dims[0]
distribution = GaussianGammaDistribution(shape)
moments = GaussianGammaMoments(shape)
parent_moments = (
mu_Lambda._moments,
a._moments,
b._moments,
)
# Check shapes
if mu_Lambda.dims != ( shape, (), 2*shape, () ):
raise ValueError("mu and Lambda have wrong shape")
if a.dims != ( (), () ):
raise ValueError("a has wrong shape")
if b.dims != ( (), () ):
raise ValueError("b has wrong shape")
# List of parent nodes
parents = [mu_Lambda, a, b]
return (parents,
kwargs,
moments.dims,
cls._total_plates(kwargs.get('plates'),
distribution.plates_from_parent(0, mu_Lambda.plates),
distribution.plates_from_parent(1, a.plates),
distribution.plates_from_parent(2, b.plates)),
distribution,
moments,
parent_moments)
def translate(self, b, debug=False):
if self._moments.ndim != 1:
raise NotImplementedError("Only ndim=1 supported at the moment")
tau = self.u[2]
x = self.u[0] / tau[...,None]
xb = linalg.outer(x, b, ndim=1)
bx = linalg.transpose(xb, ndim=1)
bb = linalg.outer(b, b, ndim=1)
uh = [
self.u[0] + tau[...,None] * b,
self.u[1] + tau[...,None,None] * (xb + bx + bb),
self.u[2],
self.u[3]
]
Lambda = -2 * self.phi[1]
dtau = -0.5 * (
np.einsum('...ij,...i,...j->...', Lambda, b, b)
+ 2 * np.einsum('...ij,...i,...j->...', Lambda, b, x)
)
phih = [
self.phi[0] + np.einsum('...ij,...j->...i', Lambda, b),
self.phi[1],
self.phi[2] + dtau,
self.phi[3]
]
self._check_shape(uh)
self._check_shape(phih)
self.phi = phih
self.u = uh
# TODO: This is all just debugging stuff and can be removed
if debug:
uh = [ui.copy() for ui in uh]
gh = self.g.copy()
self._update_moments_and_cgf()
if any(not np.allclose(uih, ui, atol=1e-6) for (uih, ui) in zip(uh, self.u)):
raise RuntimeError("BUG")
if not np.allclose(self.g, gh, atol=1e-6):
raise RuntimeError("BUG")
return
def rotate(self, R, inv=None, logdet=None, debug=False):
if self._moments.ndim != 1:
raise NotImplementedError("Only ndim=1 supported at the moment")
if inv is None:
inv = np.linalg.inv(R)
if logdet is None:
logdet = np.linalg.slogdet(R)[1]
uh = [
rotate_mean(self.u[0], R),
rotate_covariance(self.u[1], R),
self.u[2],
self.u[3]
]
phih = [
rotate_mean(self.phi[0], inv.T),
rotate_covariance(self.phi[1], inv.T),
self.phi[2],
self.phi[3]
]
self._check_shape(uh)
self._check_shape(phih)
self.phi = phih
self.u = uh
self.g = self.g - logdet
# TODO: This is all just debugging stuff and can be removed
if debug:
uh = [ui.copy() for ui in uh]
gh = self.g.copy()
self._update_moments_and_cgf()
if any(not np.allclose(uih, ui, atol=1e-6) for (uih, ui) in zip(uh, self.u)):
raise RuntimeError("BUG")
if not np.allclose(self.g, gh, atol=1e-6):
raise RuntimeError("BUG")
return
def plotmatrix(self):
r"""
Creates a matrix of marginal plots.
        The diagonal shows the marginal density of each variable, and
        off-diagonal plot (i,j) shows the joint marginal density of x_i and x_j.
"""
import bayespy.plot as bpplt
if self.ndim != 1:
raise NotImplementedError("Only ndim=1 supported at the moment")
if np.prod(self.plates) != 1:
raise ValueError("Currently, does not support plates in the node.")
if len(self.dims[0]) != 1:
raise ValueError("Currently, supports only vector variables")
# Dimensionality of the Gaussian
D = self.dims[0][0]
# Compute standard parameters
tau = self.u[2]
mu = self.u[0]
mu = mu / misc.add_trailing_axes(tau, 1)
Cov = self.u[1] - linalg.outer(self.u[0], mu, ndim=1)
Cov = Cov / misc.add_trailing_axes(tau, 2)
a = self.phi[3]
b = -self.phi[2] - 0.5*linalg.inner(self.phi[0], mu, ndim=1)
# Create subplots
(fig, axes) = bpplt.pyplot.subplots(D+1, D+1)
# Plot marginal Student t distributions
for i in range(D):
for j in range(i+1):
if i == j:
bpplt._pdf_t(*(random.gaussian_gamma_to_t(mu[i],
Cov[i,i],
a,
b,
ndim=0)),
axes=axes[i,i])
else:
S = Cov[np.ix_([i,j],[i,j])]
(m, S, nu) = random.gaussian_gamma_to_t(mu[[i,j]],
S,
a,
b)
bpplt._contour_t(m, S, nu, axes=axes[i,j])
bpplt._contour_t(m, S, nu, axes=axes[j,i], transpose=True)
# Plot Gaussian-gamma marginal distributions
for k in range(D):
bpplt._contour_gaussian_gamma(mu[k], Cov[k,k], a, b,
axes=axes[D,k])
bpplt._contour_gaussian_gamma(mu[k], Cov[k,k], a, b,
axes=axes[k,D],
transpose=True)
# Plot gamma marginal distribution
bpplt._pdf_gamma(a, b, axes=axes[D,D])
return axes
def get_gaussian_location(self):
r"""
        Return the Gaussian location (mean) of the distribution
"""
if self._moments.ndim != 1:
raise NotImplementedError("Only ndim=1 supported at the moment")
tau = self.u[2]
tau_mu = self.u[0]
return tau_mu / tau[...,None]
def get_gaussian_mean_and_variance(self):
r"""
Return the mean and variance of the distribution
"""
if self.ndim != 1:
raise NotImplementedError("Only ndim=1 supported at the moment")
a = self.phi[3]
nu = 2*a
if np.any(nu <= 1):
raise ValueError("Mean not defined for degrees of freedom <= 1")
if np.any(nu <= 2):
raise ValueError("Variance not defined if degrees of freedom <= 2")
tau = self.u[2]
tau_mu = self.u[0]
mu = tau_mu / misc.add_trailing_axes(tau, 1)
var = misc.get_diag(self.u[1], ndim=1) - tau_mu*mu
var = var / misc.add_trailing_axes(tau, 1)
var = nu / (nu-2) * var
return (mu, var)
def get_marginal_logpdf(self, gaussian=None, gamma=None):
r"""
Get the (marginal) log pdf of a subset of the variables
Parameters
----------
gaussian : list or None
Indices of the Gaussian variables to keep or None
gamma : bool or None
True if keep the gamma variable, otherwise False or None
Returns
-------
function
A function which computes log-pdf
"""
if self.ndim != 1:
raise NotImplementedError("Only ndim=1 supported at the moment")
if gaussian is None and not gamma:
raise ValueError("Must give some variables")
# Compute standard parameters
tau = self.u[2]
mu = self.u[0]
mu = mu / misc.add_trailing_axes(tau, 1)
Cov = np.linalg.inv(-2*self.phi[1])
if not np.allclose(Cov,
self.u[1] - linalg.outer(self.u[0], mu, ndim=1)):
raise Exception("WAAAT")
#Cov = Cov / misc.add_trailing_axes(tau, 2)
a = self.phi[3]
b = -self.phi[2] - 0.5*linalg.inner(self.phi[0], mu, ndim=1)
if not gamma:
# Student t distributions
inds = list(gaussian)
mu = mu[inds]
Cov = Cov[np.ix_(inds, inds)]
(mu, Cov, nu) = random.gaussian_gamma_to_t(mu,
Cov,
a,
b,
ndim=1)
L = linalg.chol(Cov)
logdet_Cov = linalg.chol_logdet(L)
D = len(inds)
def logpdf(x):
y = x - mu
v = linalg.chol_solve(L, y)
z2 = linalg.inner(y, v, ndim=1)
return random.t_logpdf(z2, logdet_Cov, nu, D)
return logpdf
elif gaussian is None:
# Gamma distribution
def logpdf(x):
logx = np.log(x)
return random.gamma_logpdf(b*x,
logx,
a*logx,
a*np.log(b),
special.gammaln(a))
return logpdf
else:
# Gaussian-gamma distribution
inds = list(gaussian)
mu = mu[inds]
Cov = Cov[np.ix_(inds, inds)]
D = len(inds)
L = linalg.chol(Cov)
logdet_Cov = linalg.chol_logdet(L)
def logpdf(x):
tau = x[...,-1]
logtau = np.log(tau)
x = x[...,:-1]
y = x - mu
v = linalg.chol_solve(L, y) * tau[...,None]
z2 = linalg.inner(y, v, ndim=1)
return (random.gaussian_logpdf(z2,
0,
0,
logdet_Cov + D*logtau,
D) +
random.gamma_logpdf(b*tau,
logtau,
a*logtau,
a*np.log(b),
special.gammaln(a)))
return logpdf
class GaussianWishart(ExponentialFamily):
r"""
Node for Gaussian-Wishart random variables.
The prior:
.. math::
        p(x, \Lambda| \mu, \alpha, V, n) = p(x|\Lambda, \mu, \alpha) \, p(\Lambda|V, n)
        p(x|\Lambda, \mu, \alpha) = \mathcal{N}(x | \mu, \alpha^{-1} \Lambda^{-1})
        p(\Lambda|V, n) = \mathcal{W}(\Lambda | n, V)
The posterior approximation :math:`q(x, \Lambda)` has the same Gaussian-Wishart form.
Currently, supports only vector variables.
"""
_distribution = GaussianWishartDistribution()
@classmethod
def _constructor(cls, mu, alpha, n, V, **kwargs):
r"""
Constructs distribution and moments objects.
This method is called if useconstructor decorator is used for __init__.
`mu` is the mean/location vector
`alpha` is the scale
`n` is the degrees of freedom
`V` is the scale matrix
"""
# Convert parent nodes
mu_alpha = WrapToGaussianGamma(mu, alpha, ndim=1)
D = mu_alpha.dims[0][0]
shape = mu_alpha._moments.shape
moments = GaussianWishartMoments(shape)
n = cls._ensure_moments(n, WishartPriorMoments, d=D)
V = cls._ensure_moments(V, WishartMoments, ndim=1)
parent_moments = (
mu_alpha._moments,
n._moments,
V._moments
)
# Check shapes
if mu_alpha.dims != ( (D,), (D,D), (), () ):
raise ValueError("mu and alpha have wrong shape")
if V.dims != ( (D,D), () ):
raise ValueError("Precision matrix has wrong shape")
if n.dims != ( (), () ):
raise ValueError("Degrees of freedom has wrong shape")
parents = [mu_alpha, n, V]
return (parents,
kwargs,
moments.dims,
cls._total_plates(kwargs.get('plates'),
cls._distribution.plates_from_parent(0, mu_alpha.plates),
cls._distribution.plates_from_parent(1, n.plates),
cls._distribution.plates_from_parent(2, V.plates)),
cls._distribution,
moments,
parent_moments)
#
# CONVERTERS
#
class GaussianToGaussianGamma(Deterministic):
r"""
Converter for Gaussian moments to Gaussian-gamma isotropic moments
Combines the Gaussian moments with gamma moments for a fixed value 1.
"""
def __init__(self, X, **kwargs):
r"""
"""
if not isinstance(X._moments, GaussianMoments):
raise ValueError("Wrong moments, should be Gaussian")
shape = X._moments.shape
self.ndim = X._moments.ndim
self._moments = GaussianGammaMoments(shape)
self._parent_moments = [GaussianMoments(shape)]
shape = X.dims[0]
dims = ( shape, 2*shape, (), () )
super().__init__(X, dims=dims, **kwargs)
def _compute_moments(self, u_X):
r"""
"""
x = u_X[0]
xx = u_X[1]
u = [x, xx, 1, 0]
return u
def _compute_message_to_parent(self, index, m_child, u_X):
r"""
"""
if index == 0:
m = m_child[:2]
return m
else:
raise ValueError("Invalid parent index")
def _compute_function(self, x):
return (x, 1)
GaussianMoments.add_converter(GaussianGammaMoments,
GaussianToGaussianGamma)
class GaussianGammaToGaussianWishart(Deterministic):
r"""
"""
def __init__(self, X_alpha, **kwargs):
raise NotImplementedError()
GaussianGammaMoments.add_converter(GaussianWishartMoments,
GaussianGammaToGaussianWishart)
#
# WRAPPERS
#
# These wrappers form a single node from two nodes for messaging purposes.
#
class WrapToGaussianGamma(Deterministic):
r"""
"""
def __init__(self, X, alpha, ndim=None, **kwargs):
r"""
"""
# In case X is a numerical array, convert it to Gaussian first
try:
X = self._ensure_moments(X, GaussianMoments, ndim=ndim)
except Moments.NoConverterError:
pass
try:
ndim = X._moments.ndim
except AttributeError as err:
raise TypeError("ndim needs to be given explicitly") from err
X = self._ensure_moments(X, GaussianGammaMoments, ndim=ndim)
if len(X.dims[0]) != ndim:
raise RuntimeError("Conversion failed ndim.")
shape = X.dims[0]
dims = ( shape, 2 * shape, (), () )
self.shape = shape
self.ndim = len(shape)
self._moments = GaussianGammaMoments(shape)
self._parent_moments = [
GaussianGammaMoments(shape),
GammaMoments()
]
super().__init__(X, alpha, dims=dims, **kwargs)
def _compute_moments(self, u_X, u_alpha):
r"""
"""
(tau_x, tau_xx, tau, logtau) = u_X
(alpha, logalpha) = u_alpha
u0 = tau_x * misc.add_trailing_axes(alpha, self.ndim)
u1 = tau_xx * misc.add_trailing_axes(alpha, 2 * self.ndim)
u2 = tau * alpha
u3 = logtau + logalpha
return [u0, u1, u2, u3]
def _compute_message_to_parent(self, index, m_child, u_X, u_alpha):
r"""
"""
if index == 0:
alpha = u_alpha[0]
m0 = m_child[0] * misc.add_trailing_axes(alpha, self.ndim)
m1 = m_child[1] * misc.add_trailing_axes(alpha, 2 * self.ndim)
m2 = m_child[2] * alpha
m3 = m_child[3]
return [m0, m1, m2, m3]
elif index == 1:
(tau_x, tau_xx, tau, logtau) = u_X
m0 = (
linalg.inner(m_child[0], tau_x, ndim=self.ndim)
+ linalg.inner(m_child[1], tau_xx, ndim=2*self.ndim)
+ m_child[2] * tau
)
m1 = m_child[3]
return [m0, m1]
else:
raise ValueError("Invalid parent index")
class WrapToGaussianWishart(Deterministic):
r"""
Wraps Gaussian and Wishart nodes into a Gaussian-Wishart node.
The following node combinations can be wrapped:
* Gaussian and Wishart
* Gaussian-gamma and Wishart
* Gaussian-Wishart and gamma
"""
def __init__(self, X, Lambda, ndim=1, **kwargs):
r"""
"""
# Just in case X is an array, convert it to a Gaussian node first.
try:
X = self._ensure_moments(X, GaussianMoments, ndim=ndim)
except Moments.NoConverterError:
pass
try:
# Try combo Gaussian-Gamma and Wishart
X = self._ensure_moments(X, GaussianGammaMoments, ndim=ndim)
except Moments.NoConverterError:
# Have to use Gaussian-Wishart and Gamma
X = self._ensure_moments(X, GaussianWishartMoments, ndim=ndim)
Lambda = self._ensure_moments(Lambda, GammaMoments, ndim=ndim)
shape = X.dims[0]
if Lambda.dims != ((), ()):
raise ValueError(
"Mean and precision have inconsistent shapes: {0} and {1}"
.format(
X.dims,
Lambda.dims
)
)
self.wishart = False
else:
# Gaussian-Gamma and Wishart
shape = X.dims[0]
Lambda = self._ensure_moments(Lambda, WishartMoments, ndim=ndim)
if Lambda.dims != (2 * shape, ()):
raise ValueError(
"Mean and precision have inconsistent shapes: {0} and {1}"
.format(
X.dims,
Lambda.dims
)
)
self.wishart = True
self.ndim = len(shape)
self._parent_moments = (
X._moments,
Lambda._moments,
)
self._moments = GaussianWishartMoments(shape)
super().__init__(X, Lambda, dims=self._moments.dims, **kwargs)
def _compute_moments(self, u_X_alpha, u_Lambda):
r"""
"""
if self.wishart:
alpha_x = u_X_alpha[0]
alpha_xx = u_X_alpha[1]
alpha = u_X_alpha[2]
log_alpha = u_X_alpha[3]
Lambda = u_Lambda[0]
logdet_Lambda = u_Lambda[1]
D = np.prod(self.dims[0])
u0 = linalg.mvdot(Lambda, alpha_x, ndim=self.ndim)
u1 = linalg.inner(Lambda, alpha_xx, ndim=2*self.ndim)
u2 = Lambda * misc.add_trailing_axes(alpha, 2*self.ndim)
u3 = logdet_Lambda + D * log_alpha
u = [u0, u1, u2, u3]
return u
else:
raise NotImplementedError()
def _compute_message_to_parent(self, index, m_child, u_X_alpha, u_Lambda):
r"""
...
Message from the child is :math:`[m_0, m_1, m_2, m_3]`:
.. math::
\alpha m_0^T \Lambda x + m_1 \alpha x^T \Lambda x
+ \mathrm{tr}(\alpha m_2 \Lambda) + m_3 (\log | \alpha \Lambda |)
In case of Gaussian-gamma and Wishart parents:
Message to the first parent (x, alpha):
.. math::
\tilde{m_0} &= \Lambda m_0
\\
\tilde{m_1} &= m_1 \Lambda
\\
\tilde{m_2} &= \mathrm{tr}(m_2 \Lambda)
\\
\tilde{m_3} &= m_3 \cdot D
Message to the second parent (Lambda):
.. math::
\tilde{m_0} &= \alpha (\frac{1}{2} m_0 x^T + \frac{1}{2} x m_0^T +
m_1 xx^T + m_2)
\\
\tilde{m_1} &= m_3
"""
if index == 0:
if self.wishart:
# Message to Gaussian-gamma (isotropic)
Lambda = u_Lambda[0]
D = np.prod(self.dims[0])
m0 = linalg.mvdot(Lambda, m_child[0], ndim=self.ndim)
m1 = Lambda * misc.add_trailing_axes(m_child[1], 2*self.ndim)
m2 = linalg.inner(Lambda, m_child[2], ndim=2*self.ndim)
m3 = D * m_child[3]
m = [m0, m1, m2, m3]
return m
else:
# Message to Gaussian-Wishart
raise NotImplementedError()
elif index == 1:
if self.wishart:
# Message to Wishart
alpha_x = u_X_alpha[0]
alpha_xx = u_X_alpha[1]
alpha = u_X_alpha[2]
m0 = (0.5*linalg.outer(alpha_x, m_child[0], ndim=self.ndim) +
0.5*linalg.outer(m_child[0], alpha_x, ndim=self.ndim) +
alpha_xx * misc.add_trailing_axes(m_child[1], 2*self.ndim) +
misc.add_trailing_axes(alpha, 2*self.ndim) * m_child[2])
m1 = m_child[3]
m = [m0, m1]
return m
else:
# Message to gamma (isotropic)
raise NotImplementedError()
else:
raise ValueError("Invalid parent index")
def reshape_gaussian_array(dims_from, dims_to, x0, x1):
r"""
Reshape the moments Gaussian array variable.
The plates remain unaffected.
"""
num_dims_from = len(dims_from)
num_dims_to = len(dims_to)
# Reshape the first moment / mean
num_plates_from = np.ndim(x0) - num_dims_from
plates_from = np.shape(x0)[:num_plates_from]
shape = (
plates_from
+ (1,)*(num_dims_to-num_dims_from) + dims_from
)
x0 = np.ones(dims_to) * np.reshape(x0, shape)
# Reshape the second moment / covariance / precision
num_plates_from = np.ndim(x1) - 2*num_dims_from
plates_from = np.shape(x1)[:num_plates_from]
shape = (
plates_from
+ (1,)*(num_dims_to-num_dims_from) + dims_from
+ (1,)*(num_dims_to-num_dims_from) + dims_from
)
x1 = np.ones(dims_to+dims_to) * np.reshape(x1, shape)
return (x0, x1)
def transpose_covariance(Cov, ndim=1):
r"""
Transpose the covariance array of Gaussian array variable.
That is, swap the last ndim axes with the ndim axes before them. This makes
transposing easy for array variables when the covariance is not a matrix but
a multidimensional array.
"""
axes_in = [Ellipsis] + list(range(2*ndim,0,-1))
axes_out = [Ellipsis] + list(range(ndim,0,-1)) + list(range(2*ndim,ndim,-1))
return np.einsum(Cov, axes_in, axes_out)
def left_rotate_covariance(Cov, R, axis=-1, ndim=1):
r"""
Rotate the covariance array of Gaussian array variable.
ndim is the number of axes for the Gaussian variable.
For vector variable, ndim=1 and covariance is a matrix.
"""
if not isinstance(axis, int):
raise ValueError("Axis must be an integer")
if axis < -ndim or axis >= ndim:
raise ValueError("Axis out of range")
# Force negative axis
if axis >= 0:
axis -= ndim
# Rotation from left
axes_R = [Ellipsis, ndim+abs(axis)+1, ndim+abs(axis)]
axes_Cov = [Ellipsis] + list(range(ndim+abs(axis),
0,
-1))
axes_out = [Ellipsis, ndim+abs(axis)+1] + list(range(ndim+abs(axis)-1,
0,
-1))
Cov = np.einsum(R, axes_R, Cov, axes_Cov, axes_out)
return Cov
def right_rotate_covariance(Cov, R, axis=-1, ndim=1):
r"""
Rotate the covariance array of Gaussian array variable.
ndim is the number of axes for the Gaussian variable.
For vector variable, ndim=1 and covariance is a matrix.
"""
if not isinstance(axis, int):
raise ValueError("Axis must be an integer")
if axis < -ndim or axis >= ndim:
raise ValueError("Axis out of range")
# Force negative axis
if axis >= 0:
axis -= ndim
# Rotation from right
axes_R = [Ellipsis, abs(axis)+1, abs(axis)]
axes_Cov = [Ellipsis] + list(range(abs(axis),
0,
-1))
axes_out = [Ellipsis, abs(axis)+1] + list(range(abs(axis)-1,
0,
-1))
Cov = np.einsum(R, axes_R, Cov, axes_Cov, axes_out)
return Cov
def rotate_covariance(Cov, R, axis=-1, ndim=1):
r"""
Rotate the covariance array of Gaussian array variable.
ndim is the number of axes for the Gaussian variable.
For vector variable, ndim=1 and covariance is a matrix.
"""
# Rotate from left and right
Cov = left_rotate_covariance(Cov, R, ndim=ndim, axis=axis)
Cov = right_rotate_covariance(Cov, R, ndim=ndim, axis=axis)
return Cov
def rotate_mean(mu, R, axis=-1, ndim=1):
r"""
Rotate the mean array of Gaussian array variable.
ndim is the number of axes for the Gaussian variable.
For vector variable, ndim=1 and mu is a vector.
"""
if not isinstance(axis, int):
raise ValueError("Axis must be an integer")
if axis < -ndim or axis >= ndim:
raise ValueError("Axis out of range")
# Force negative axis
if axis >= 0:
axis -= ndim
# Rotation from right
axes_R = [Ellipsis, abs(axis)+1, abs(axis)]
axes_mu = [Ellipsis] + list(range(abs(axis),
0,
-1))
axes_out = [Ellipsis, abs(axis)+1] + list(range(abs(axis)-1,
0,
-1))
mu = np.einsum(R, axes_R, mu, axes_mu, axes_out)
return mu
def array_to_vector(x, ndim=1):
if ndim == 0:
return x
shape_x = np.shape(x)
D = np.prod(shape_x[-ndim:])
return np.reshape(x, shape_x[:-ndim] + (D,))
def array_to_matrix(A, ndim=1):
if ndim == 0:
return A
shape_A = np.shape(A)
D = np.prod(shape_A[-ndim:])
return np.reshape(A, shape_A[:-2*ndim] + (D,D))
def vector_to_array(x, shape):
shape_x = np.shape(x)
return np.reshape(x, np.shape(x)[:-1] + tuple(shape))
def matrix_dot_vector(A, x, ndim=1):
if ndim < 0:
raise ValueError("ndim must be non-negative integer")
if ndim == 0:
return A*x
dims_x = np.shape(x)[-ndim:]
A = array_to_matrix(A, ndim=ndim)
x = array_to_vector(x, ndim=ndim)
y = np.einsum('...ik,...k->...i', A, x)
return vector_to_array(y, dims_x)
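# Shape summary for the helpers above (illustrative, not executed): with
# ndim=2, matrix_dot_vector takes A of shape (..., D1, D2, D1, D2) and x of
# shape (..., D1, D2); both are flattened to a (D, D) matrix and a D-vector
# with D = D1*D2, multiplied with einsum, and the result is reshaped back to
# (..., D1, D2).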
|
emanuelschuetze/OpenSlides | refs/heads/master | openslides/users/migrations/0009_auto_20190119_0941.py | 8 | # Generated by Django 2.1.5 on 2019-01-19 08:41
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("users", "0008_user_gender")]
operations = [
migrations.AlterModelOptions(
name="user",
options={
"default_permissions": (),
"ordering": ("last_name", "first_name", "username"),
"permissions": (
("can_see_name", "Can see names of users"),
(
"can_see_extra_data",
"Can see extra data of users (e.g. present and comment)",
),
("can_change_password", "Can change its own password"),
("can_manage", "Can manage users"),
),
},
)
]
|
Kaushikpatnaik/Active-Learning-and-Best-Response-Dynamics | refs/heads/master | passive_learners.py | 1 | from numpy import *
from numpy.linalg import svd
from scipy.stats import norm as normal
from scipy import linalg as lin
import time
import itertools
import random
from learners import *
from cvxopt import matrix, solvers, spdiag
solvers.options['show_progress'] = False
solvers.options['maxiters'] = 2000
class HingeLossSGD(LinearLearner, PassiveSupervisedLearner):
def __init__(self, d, tau, tolerance = 10e-6):
LinearLearner.__init__(self, d, w = None)
self.tau = tau
self.tolerance = tolerance
gamma_0 = self.tau
self.rate = lambda t: gamma_0 * (1.0 + 0.1 * t)**(-1.0)
def batch_train(self, X, Y):
'''Given unlabeled training examples (one per row) in matrix X and their
associated (-1, +1) labels (one per row) in vector Y, returns a weight
vector w that determines a separating hyperplane, if one exists, using
a support vector machine with standard linear kernel.'''
# Infer the sample size from the data
m = len(Y)
def hinge_loss(w, X, Y):
total = 0.0
for i in xrange(m):
total += max(0.0, 1.0 - Y[i] * dot(w, X[i]) / self.tau)
return total / m
w_star = ones(self.d)
w_star /= lin.norm(w_star, 2)
#print
#print 'w_star:', hinge_loss(w_star, X, Y)
t = 0
delta = -1
index = range(m)
# Pick starting weight vector randomly
self.w = normal.rvs(size = self.d)
#self.w = ones(self.d)
self.w /= lin.norm(self.w, 2)
end_loss = hinge_loss(self.w, X, Y)
# Proceed until the change in loss is small
while delta > self.tolerance or delta < 0.0:
start_loss = end_loss
# Randomize the order
random.shuffle(index)
# Iterate through data once (a single epoch)
for i in xrange(m):
t += 1
# If the margin is violated, make perceptron-like update
if Y[index[i]] * dot(self.w, X[index[i]]) < self.tau:
self.w += self.rate(t) * Y[index[i]] * X[index[i]] / self.tau
# If norm constraint is violated, normalize w
norm_w = lin.norm(self.w, 2)
if norm_w > 1.0:
self.w /= norm_w
# Check the change in loss over the epoch
end_loss = hinge_loss(self.w, X, Y)
delta = start_loss - end_loss
#print end_loss, delta, self.rate(t)
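# A minimal usage sketch for HingeLossSGD (hypothetical data; labels must be
# -1/+1, one per row, as batch_train assumes):
#
#   learner = HingeLossSGD(2, 0.1)
#   X = normal.rvs(size = (100, 2))
#   Y = sign(X[:, :1] + X[:, 1:])        # shape (100, 1)
#   learner.batch_train(X, Y)
#   prediction = sign(dot(learner.w, X[0]))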
class HingeLossSGD2(LinearLearner, PassiveSupervisedLearner):
def __init__(self, d, tau, v, r, tolerance = 0.0001):
LinearLearner.__init__(self, d, w = None)
self.tau = tau
self.v = v
self.r = r
self.tolerance = tolerance
gamma_0 = self.tau
self.rate = lambda t: gamma_0 * (1.0 + 0.1 * t)**(-1.0)
def batch_train(self, X, Y):
'''Given unlabeled training examples (one per row) in matrix X and their
associated (-1, +1) labels (one per row) in vector Y, returns a weight
vector w that determines a separating hyperplane, if one exists, using
a support vector machine with standard linear kernel.'''
# Infer the sample size from the data
m = len(Y)
def hinge_loss(w, X, Y):
total = 0.0
for i in xrange(m):
total += max(0.0, 1.0 - Y[i] * dot(w, X[i]) / self.tau)
return total / m
w_star = ones(self.d)
w_star /= lin.norm(w_star, 2)
#print
#print 'w_star:', hinge_loss(w_star, X, Y)
t = 0
delta = -1
index = range(m)
# Pick starting weight vector randomly
self.w = normal.rvs(size = self.d)
#self.w = ones(self.d)
self.w /= lin.norm(self.w, 2)
end_loss = hinge_loss(self.w, X, Y)
# Proceed until the change in loss is small
while delta > self.tolerance or delta < 0.0:
start_loss = end_loss
# Randomize the order
random.shuffle(index)
# Iterate through data once (a single epoch)
for i in xrange(m):
t += 1
# If the margin is violated, make perceptron-like update
if Y[index[i]] * dot(self.w, X[index[i]]) < self.tau:
self.w += self.rate(t) * Y[index[i]] * X[index[i]] / self.tau
# If norm constraint is violated, normalize w
norm_w = lin.norm(self.w, 2)
if norm_w > 1.0:
self.w /= norm_w
# If other constraint is violated, project w
vw = self.w - self.v
norm_vw = lin.norm(vw, 2)
if norm_vw > self.r:
self.w = self.v + self.r * vw / norm_vw
# Check the change in loss over the epoch
end_loss = hinge_loss(self.w, X, Y)
delta = start_loss - end_loss
#print end_loss, delta, self.rate(t)
class SVM(LinearLearner, PassiveSupervisedLearner):
def __init__(self, d):
LinearLearner.__init__(self, d, w = None)
def batch_train(self, X, Y):
'''Given unlabeled training examples (one per row) in matrix X and their
associated (-1, +1) labels (one per row) in vector Y, returns a weight
vector w that determines a separating hyperplane, if one exists, using
a support vector machine with standard linear kernel.'''
# Infer the sample size from the data
m = len(Y)
# Set up the appropriate matrices and call CVXOPT's quadratic programming
P = matrix(dot(X, X.T) * dot(Y, Y.T))
q = matrix(-ones(m))
G = matrix(-identity(m))
h = matrix(zeros(m))
alpha = solvers.qp(P, q, G, h)['x']
# Find the weight vector of the hyperplane from the Lagrange multipliers
self.w = dot(X.T, alpha * Y)
self.w = self.w.reshape((self.d,))
class soft_SVM(LinearLearner, PassiveSupervisedLearner):
def __init__(self, d, C):
LinearLearner.__init__(self, d, w = None)
self.C = C
def batch_train(self, X, Y):
'''Given unlabeled training examples (one per row) in matrix X and their
associated (-1, +1) labels (one per row) in vector Y, returns a weight
vector w that determines a separating hyperplane, if one exists, using
a support vector machine with standard linear kernel.'''
# Infer the sample size from the data
m = len(Y)
# Set up the appropriate matrices and call CVXOPT's quadratic programming
P = matrix(dot(X, X.T) * dot(Y, Y.T))
q = matrix(-ones(m))
G = matrix(vstack((-identity(m), identity(m))))
h = matrix(hstack((zeros(m), self.C * ones(m))))
alpha = solvers.qp(P, q, G, h)['x']
# Find the weight vector of the hyperplane from the Lagrange multipliers
self.w = dot(X.T, alpha * Y)
self.w = self.w.reshape((self.d,))
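# A minimal usage sketch for soft_SVM (hypothetical data; smaller C weights
# the margin more heavily, larger C penalizes hinge-loss violations more):
#
#   svm = soft_SVM(2, 1.0)
#   svm.batch_train(X, Y)                # X is (m, d), Y is (m, 1) in {-1, +1}
#   labels = sign(dot(X, svm.w))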
class LinearProgram(LinearLearner, PassiveSupervisedLearner):
def __init__(self, d):
LinearLearner.__init__(self, d, w = None)
def batch_train(self, X, Y):
'''Given unlabeled training examples (one per row) in matrix X and their
associated (-1, +1) labels (one per row) in vector Y, returns a weight
vector w that determines a separating hyperplane, if one exists, using
a linear program.'''
# Infer the dimension and sample size from the data
m = len(Y)
# Set up the appropriate matrices and call CVXOPT's linear programming
c = matrix(sign(normal.rvs(loc = 0, scale = 1.0, size = self.d)))
G = matrix(vstack([-Y * X, identity(self.d)]))
h = matrix(vstack([zeros((m, 1)), m**2*ones((self.d, 1))]))
self.w = solvers.lp(c, G, h)['x']
self.w = array(self.w).reshape((self.d,))
class Average(LinearLearner, PassiveSupervisedLearner):
def __init__(self, d):
LinearLearner.__init__(self, d, w = None)
def batch_train(self, X, Y):
self.w = (Y * X).sum(axis = 0)
self.w /= lin.norm(self.w, 2)
class BandSelection(LinearLearner, PassiveSupervisedLearner):
def __init__(self, d, num_iters):
self.num_iters = num_iters
self.bandparam = 0
self.radiusparam = 0
LinearLearner.__init__(self, d, w = None)
def param_calc(self, X, k, typeof):
sorteddistance = sorted(X)
#print sorteddistance
length = len(sorteddistance)
print "Range of distances in kernel space is"
print sorteddistance[0]
print sorteddistance[length-1]
ratio = 1
if typeof == "exp":
frac = pow(2, 1 - k) * ratio
elif typeof == "inv":
frac = pow(k, -1) * ratio
elif typeof == "lin":
frac = (1 - (k - 1) / (self.num_iters - 1.0)) * ratio
else:
            raise ValueError("Unknown band type: %s" % typeof)
print
print frac
self.radiusparam = 2 * frac
print 'radius:', self.radiusparam
num_points = int(ceil(length * frac))
print 'points within band:', num_points
self.bandparam = sorteddistance[num_points - 1]
# print 'band:', self.bandparam
class PCA(LinearLearner, PassiveSupervisedLearner):
def __init__(self, d):
self.var = None
LinearLearner.__init__(self, d, w = None)
def pca_run(self, X):
n = mean(X, axis=0)
X -= n
Cov = cov(X.T)
eigenval, eigenvec = lin.eig(Cov)
idx = argsort(eigenval)[::-1]
eigenvec = eigenvec[:,idx]
eigenval = eigenval[idx]
        # Columns of eigenvec are the eigenvectors, so take the first column
        self.w = eigenvec[:,0].real
        self.w = self.w.reshape((self.d,))
        return eigenval[0].real, eigenvec[:,0].real
def variance_calc(self, X):
X -= mean(X)
row = len(X)
total = 0
for i in range(row):
total += pow(X[i],2)
self.var = total/row
def train(self, X, Y):
        # Getting initial values from PCA (pca_run only takes X)
        val, vec = self.pca_run(X)
        row,col = X.shape
        comp = 10*row*log(row)/col
        # Compare the first eigenvalue against (10*number of examples*log(number of examples)/dimensions), iterating until it drops below
        while val >= comp:
            # Collect the indices of examples whose squared projection onto
            # the top component exceeds the eligible variance, then delete
            # them all at once so the indices stay valid
            outliers = []
            for p in range(row):
                rem = pow(dot(vec,X[p]),2)
                if rem >= (comp/row):
                    outliers.append(p)   # removal of outliers
            X = delete(X, outliers, axis=0)
            Y = delete(Y, outliers, axis=0)
            # Recalculate the PCA with the new arrays of X and Y
            val, vec = self.pca_run(X)
            row,col = X.shape
            comp = 10*row*log(row)/col
# Calculate w if first eigenvalue is less than the eligible variance
self.w = 1.0/row * dot(X.T,Y)
self.w = self.w.reshape((self.d,))
self.var = val
print self.w, self.var
class soft_SVM_q():
def __init__(self, d, q, C):
self.d = d
self.q = q
self.C = C # Smaller C makes margin more important
# Larger C makes hinge loss more important
self.w = None
def batch_train(self, X, Y):
'''
Given unlabeled training examples (one per row) in matrix X and their
associated (-1, +1) labels (one per row) in vector Y, returns a weight
vector w that determines a separating hyperplane, if one exists, using
a q-norm support vector machine with standard linear kernel.
'''
m = len(Y)
# First find a feasible solution and create the objective function
lp = soft_SVM(self.d, self.C)
lp.batch_train(X, Y)
s = 1.0 - dot(Y * X, lp.w)
s[s < 0.0] = 0.0
x_0 = hstack((lp.w, s))
F = make_soft_q_svm_primal_objective(self.d, m, self.q, self.C, x_0)
# Set up the appropriate matrices and call CVXOPT's convex programming
G_top = -hstack((Y * X, identity(m)))
G_bottom = -hstack((zeros((m, self.d)), identity(m)))
G_fix1 = hstack((identity(self.d), zeros((self.d, m))))
G_fix2 = -hstack((identity(self.d), zeros((self.d, m))))
G = matrix(vstack((G_top, G_bottom, G_fix1, G_fix2)))
h = matrix(hstack((-ones(m), zeros(m), 1e3 * ones(self.d), 1e3 * ones(self.d) )))
# Change solver options
solvers.options['maxiters'] = 100
solvers.options['abstol'] = 1e-3
solvers.options['reltol'] = 1e-2
result = solvers.cp(F, G, h)
# Reset solver options to defaults
solvers.options['maxiters'] = 2000
solvers.options['abstol'] = 1e-7
solvers.options['reltol'] = 1e-6
z = result['x']
self.w = array(z[:self.d]).reshape((self.d,))
def classify(self, x):
return sign(dot(self.w, x))
def margin(self, x):
return dot(self.w, x)
def make_soft_q_svm_primal_objective(n, m, q, C, x_0 = None):
if x_0 is None:
        x_0 = normal.rvs(loc = 0, scale = 0.1, size = n + m)
# Choose normalization constant so objective function values starts at 10.0
w_0 = x_0[:n]
s_0 = x_0[n:]
scale = 1.0 / (sum(abs(w_0)**q) / q + C * sum(s_0))
x_0 = matrix(x_0.reshape((n + m, 1)))
def F(x = None, z = None):
# Case 1
if x is None and z is None:
return (0, x_0)
# Case 2 and 3
else:
w = x[:n]
s = x[n:]
abs_w = abs(w)
f = scale * (sum(abs_w**q) / q + C * sum(s))
Df_w = sign(w) * abs_w**(q - 1.0)
Df_s = C * ones((m, 1))
Df = scale * vstack((Df_w, Df_s))
Df = matrix(Df.reshape((1, n + m)))
# Case 2 only
if z is None:
return (f, Df)
# Case 3 only
else:
try:
H_w = scale * z * (q - 1.0) * abs_w**(q - 2.0)
except (ValueError, RuntimeWarning):
#print 'abs_w:', abs_w
#print 'power:', (q - 2.0)
H_w = scale * z * (q - 1.0) * (abs_w + 1e-20)**(q - 2.0)
H_s = zeros((m, 1))
diag_H = matrix(vstack((H_w, H_s)))
H = spdiag(diag_H)
return (f, Df, H)
return F
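# The nested F above follows the cvxopt.solvers.cp convention: F() returns
# (number of nonlinear constraints, starting point) -- here (0, x_0) since
# only the objective is nonlinear -- F(x) returns the objective value and
# gradient (f, Df), and F(x, z) additionally returns the weighted Hessian
# z[0] * (second derivative of f), assembled as a sparse diagonal via spdiag.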
class QP(LinearLearner, PassiveSupervisedLearner):
def __init__(self, d):
LinearLearner.__init__(self, d, w = None)
def train(self, X, Y, radius, normfac, prevw):
solvers.options['show_progress'] = False
# Reduce maxiters and tolerance to reasonable levels
solvers.options['maxiters'] = 200
solvers.options['abstol'] = 1e-2
solvers.options['feastol'] = 1e-2
row, col = X.shape
n = row + self.d
prevw = prevw.reshape((self.d, 1))
x_0 = matrix(0.0, (n, 1))
x_0[:row] = 1.0 - Y * dot(X, prevw) / normfac
x_0[row:] = prevw
        # x represents all the variables in an array, the first ones Ei and then each dimension of w, updated to 1/row
c = matrix(row*[1.0] + self.d*[0.0]) # the objective function represented as a sum of Ei
scale_factor = float(dot(c.T, x_0))
if scale_factor > 0.0:
c /= scale_factor
helper = matrix(array(row*[0.0] + self.d*[1.0]).reshape((n, 1)))
r2 = radius**2
def F(x = None, z = None):
if x is None:
return (2, x_0) # Ei starts from 1 and w starts from 1
w = x[row:]
diff = w - prevw
f = matrix(0.0, (2, 1))
f[0] = dot(diff.T, diff)[0] - r2 # the first non-linear constraint ||w-w[k-1]||^2 < r[k]^2
f[1] = dot(w.T, w)[0] - 1.0 # the second non-linear constraint ||w||^2 < 1
Df = matrix(0.0, (2, n)) # creating the Df martrix, one row for each non-linear equation with variables as columns
Df[0, row:] = 2.0 * diff.T # derivative of first non-linear equation, populates a sparse matrix
Df[1, row:] = 2.0 * w.T # derivative of second non-linear equation, populates a sparse matrix
if z is None:
return f, Df
diag_H = 2.0 * z[0] + 2.0 * z[1] * helper # Each nonlinear constraint has second derivative 2I w.r.t. w and 0 w.r.t. eps
H = spdiag(diag_H)
return f, Df, H
# for linear inequalities
        G = matrix(0.0, (row*2, n)) # there are two linear constraints for each Ei, and for each Ei the entire w
h = matrix(0.0, (row*2, 1))
for i in range(row):
G[i,i] = -1.0 # -Ei <= 0
G[row+i, i] = -1.0
h[row+i] = -1.0
for j in range(self.d):
G[row+i, row+j] = (-Y[i][0]/normfac)*X[i,j] # -Ei - yi/Tk(w.xi) <= -1
# solve and return w
sol = solvers.cpl(c, F, G, h)
self.w = sol['x'][row:]
self.w = array(self.w).reshape((self.d,))
#print
#print sol['status']
'''
print 'Radius wanted'
print radius
print 'Output of quadratic solver'
print self.w
print ' Norm of output of quadratic solver pre-normalization'
print sqrt(dot(self.w.T, self.w))
print ' Distance to the previous weight vector pre-normalization'
print sqrt(dot((self.w-prevw).T, (self.w-prevw)))
'''
self.w = self.w/sqrt(dot(self.w.T,self.w)) # Normalizing the vector output
'''
print 'Output of quadratic solver post -norm'
print self.w
print ' Norm of output of quadratic solver post-normalization'
print sqrt(dot(self.w.T, self.w))
print ' Distance to the previous weight vector post-normalization'
print sqrt(dot((self.w-prevw).T, (self.w-prevw)))
'''
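# The program above minimizes the total slack sum_i E_i subject to the margin
# constraints y_i (w . x_i) / normfac >= 1 - E_i, a ball constraint
# ||w - prevw|| <= radius and a norm constraint ||w|| <= 1; that is, a
# hinge-loss minimization over a ball around the previous weight vector, as
# used in margin-based (band-based) active learning.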
class OutlierRemoval(LinearLearner, PassiveSupervisedLearner):
def __init__(self, d):
LinearLearner.__init__(self, d, w = None)
self.weightdist = None
def train(self, X, band, radius, normfac, prevw, bound):
# Set max iterations to 5000
max_iterations = 2000
out_itercount = 1
row, col = X.shape
# Calculate the variance limit in the data
# sigma = pow(radius,2) + lin.norm(prevw,2)
# Set q(x) to 1 for start
q = ones(row)
# Objective function for q(x)
def objectiveq(q, sep, X):
return sum(q * pow(dot(sep, X.T), 2)) / row
# Constraint on q(x)
def constraintq(q, bound):
# Repeat the following until convergence
while True:
# Threshold at 0 and 1
q[q < 0.0] = 0.0
q[q > 1.0] = 1.0
# Check the total weight
if sum(q) >= (1.0 - bound) * row - 0.01:
break
# Scale up the weights, but only increase those less than 1
else:
q[q < 1.0] *= 1.0 / sum(q[q < 1.0]) * ((1.0 - bound) * row - sum(q[q == 1.0]))
return q
# Starting the outer gradient descent loop for q(x)
end_obj = inf
diff = 1
# print
# print end_obj
start_outer = time.time()
while (diff > pow(10,-4) or diff < 0) and out_itercount < max_iterations:
start_obj = end_obj
# Use SVD to maximize over w
linsep, new_obj = constrained_variance_maximization(X, q, prevw, radius)
# update q
outer_rate = 0.1
w_dot_x_2 = pow(dot(linsep, X.T), 2)
q -= outer_rate * w_dot_x_2 / lin.norm(w_dot_x_2, 2)
# check constraints
q = constraintq(q, bound)
#print "the distribution weights"
# print q
# print min(q)
end_obj = objectiveq(q, linsep , X)
# print end_obj
diff = start_obj - end_obj
#print 'Start Obj and End Obj w.r.t to q ' + str(start_obj) + " " + str(end_obj)
#print('\n')
out_itercount = out_itercount + 1
# print out_itercount
end_outer = time.time()
#print " Total time for outer loop run " + str(end_outer - start_outer)
#print 'Optimal q satisfying all conditions is '
#print q
self.weightdist = q
def constrained_variance_maximization(X, q, u, r):
# X is n x d
# q is n x 1
# u is d x 1
# r is scalar
# Returns (w, val) where w maximizes sum_{i=1}^n q[i] * dot(w, x[i])^2
# subject to ||w|| = 1 and ||w - u|| <= r,
# and where val is the value of that maximum.
n, d = X.shape
q = q.reshape((n, 1))
u = u.reshape((d, 1))
Xq = sqrt(q) * X
XqT_Xq = dot(Xq.T, Xq)
    # First check if the first principal component satisfies the constraints
left, diagonal, right = svd(XqT_Xq)
w1 = right[0].reshape((d, 1))
val1 = diagonal[0]
if lin.norm(u - w1, 2) <= r or lin.norm(u + w1, 2) <= r:
return w1.reshape((d,)), val1
# Now project the data
Xq_proj = Xq - dot(Xq, u) * tile(u.T, (n, 1))
    # Find the first principal component of the projected data
left, diagonal, right = svd(dot(Xq_proj.T, Xq_proj))
v = right[0].reshape((d, 1))
# This should be close to zero
# assert abs(dot(u.T, v)) <= 0.01
# Construct the vector and the value in the original space
c1 = (1.0 + dot(u.T, u) - r**2) / 2.0
c2 = sqrt(1.0 - c1**2)
w = c1 * u + c2 * v
val = dot(dot(w.T, XqT_Xq), w)[0, 0]
# Check the result
# print
# print dot(dot(u.T, XqT_Xq), u)[0, 0]
# print val
# print val1
# print lin.norm(w, 2)
# print lin.norm(u - w, 2), r
# assert dot(dot(u.T, XqT_Xq), u) <= val <= val1
# assert 0.99 <= lin.norm(w, 2) <= 1.01
# assert lin.norm(u - w, 2) <= r + 0.01
return w.reshape((d,)), val
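# Geometry note for the boundary case above: when the top principal component
# violates ||w - u|| <= r, the optimum lies on that boundary. Writing
# w = c1*u + c2*v with v a unit vector orthogonal to u and ||u|| = 1, the
# constraints ||w|| = 1 and ||w - u|| = r give c1 = (1 + u'u - r^2)/2 and
# c2 = sqrt(1 - c1^2), which is exactly how w is assembled above.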
'''
class QPwithoutBandConstraint(LinearLearner, PassiveSupervisedLearner):
def __init__(self, d):
LinearLearner.__init__(self, d, w = None)
def train(self, X, Y, radius, normfac, prevw):
#Have commented out all the equations relating to the band constraint from this solver
solvers.options['show_progress'] = True
solvers.options['maxiters'] = 10000
row, col = X.shape
        # x represents all the variables in an array, the first ones Ei and then each dimension of w, updated to 1/row
c = matrix(row*[1.0] + self.d*[0.0]) # the objective function represented as a sum of Ei
def F(x=None, z=None):
if x is None: return 1, matrix(row*[1.0] + self.d*[1.0]) # Ei starts from 1 and w starts from 1
f = matrix(0.0, (1,1))
#f[0] = sqrt(dot((x[row:].T-prevw),(x[row:].T-prevw).T))-radius # the first non-linear constraint ||w-w[k-1]||^2 < r[k]
f[0] = sqrt(dot(x[row:].T,x[row:])) -1 # the second non-linear constraint ||w||^2 <1
Df = matrix(0.0, (1,row+self.d)) # creating the Df martrix, one row for each non-linear equation with variables as columns
#Df[0,row:] = 2.0 * (x[row:].T-prevw[:]) # derivative of first non-linear equation, populates a sparse matrix
Df[0,row:] = 2.0 * x[row:].T # derivative of second non-linear equation, populates a sparse matrix
if z is None: return f, Df
secder = matrix(row*[0.0] + self.d*[2.0])
H = matrix(0.0, (row+self.d, row+self.d))
for i in range(self.d):
H[row+i,row+i] = z[0]*secder[row+i] # returns the second derivative, a sparse matrix
return f, Df, H
# for linear inequalities
        G = matrix(0.0,(row*2, row+self.d)) # there are two linear constraints for each Ei, and for each Ei the entire w
h = matrix(0.0, (row*2, 1))
for i in range(row):
G[i,i] = -1.0 # -Ei <= 0
G[row+i, i] = -1.0
h[row+i] = -1.0
for j in range(self.d):
G[row+i, row+j] = (-Y[i][0]/normfac)*X[i,j] # -Ei - yi/Tk(w.xi) <= -1
# solve and return w
sol = solvers.cpl(c, F, G, h)
self.w = sol['x'][row:]
self.w = array(self.w).reshape((self.d,))
print sol
print 'Radius wanted'
print radius
print 'Output of quadratic solver'
print self.w
print ' Norm of output of quadratic solver pre-normalization'
print sqrt(dot(self.w.T, self.w))
print ' Distance to the previous weight vector pre-normalization'
print sqrt(dot((self.w-prevw).T, (self.w-prevw)))
self.w = self.w/sqrt(dot(self.w.T,self.w)) # Normalizing the vector output
print 'Output of quadratic solver post -norm'
print self.w
print ' Norm of output of quadratic solver post-normalization'
print sqrt(dot(self.w.T, self.w))
print ' Distance to the previous weight vector post-normalization'
print sqrt(dot((self.w-prevw).T, (self.w-prevw)))
class QPwithoutNormConstraint(LinearLearner, PassiveSupervisedLearner):
def __init__(self, d):
LinearLearner.__init__(self, d, w = None)
def train(self, X, Y, radius, normfac, prevw):
#Have commented out all the equations relating to the norm constraint on W from this solver
#solvers.options['show_progress'] = True
#solvers.options['maxiters'] = 10000
row, col = X.shape
        # x represents all the variables in an array, the first ones Ei and then each dimension of w, updated to 1/row
c = matrix(row*[1.0] + self.d*[0.0]) # the objective function represented as a sum of Ei
def F(x=None, z=None):
if x is None: return 1, matrix(row*[1.0] + self.d*[1.0]) # Ei starts from 1 and w starts from 1
f = matrix(0.0, (1,1))
f[0] = sqrt(dot((x[row:].T-prevw),(x[row:].T-prevw).T))-radius # the first non-linear constraint ||w-w[k-1]||^2 < r[k]
#f[0] = sqrt(dot(x[row:].T,x[row:])) -1 # the second non-linear constraint ||w||^2 <1
Df = matrix(0.0, (1,row+self.d)) # creating the Df martrix, one row for each non-linear equation with variables as columns
Df[0,row:] = 2.0 * (x[row:].T-prevw[:]) # derivative of first non-linear equation, populates a sparse matrix
#Df[0,row:] = 2.0 * x[row:].T # derivative of second non-linear equation, populates a sparse matrix
if z is None: return f, Df
secder = matrix(row*[0.0] + self.d*[2.0])
H = matrix(0.0, (row+self.d, row+self.d))
for i in range(self.d):
H[row+i,row+i] = z[0]*secder[row+i] # returns the second derivative, a sparse matrix
return f, Df, H
# for linear inequalities
        G = matrix(0.0,(row*2, row+self.d)) # there are two linear constraints for each Ei, and for each Ei the entire w
h = matrix(0.0, (row*2, 1))
for i in range(row):
G[i,i] = -1.0 # -Ei <= 0
G[row+i, i] = -1.0
h[row+i] = -1.0
for j in range(self.d):
G[row+i, row+j] = (-Y[i][0]/normfac)*X[i,j] # -Ei - yi/Tk(w.xi) <= -1
# solve and return w
sol = solvers.cpl(c, F, G, h)
self.w = sol['x'][row:]
#print self.w
self.w = array(self.w).reshape((self.d,))
self.w = self.w/sqrt(dot(self.w.T,self.w)) # Normalizing the vector output
#print sol
'''
#####################################################################################################################################
class KernelSVM(KernelLearner):
def __init__(self, d, kernel):
KernelLearner.__init__(self, d, kernel)
def batch_train(self, X, Y):
'''Given unlabeled training examples (one per row) in matrix X and their
associated (-1, +1) labels (one per row) in vector Y, returns a weight
vector w that determines a separating hyperplane, if one exists, using
a support vector machine with standard linear kernel.'''
# Infer the sample size from the data
m = len(Y)
K = zeros((m,m))
for i in range(m):
for j in range(m):
K[i,j] = self.kernel(X[i],X[j])
# Set up the appropriate matrices and call CVXOPT's quadratic programming
P = matrix(K * dot(Y, Y.T))
q = matrix(-ones(m))
G = matrix(-identity(m))
h = matrix(zeros(m))
alpha = solvers.qp(P, q, G, h)['x']
#storing the required values in the KernelLearner.support variable
for i in range(m):
temp = alpha[i] * Y[i]
self.support.append([temp, X[i]])
class Kernel_soft_SVM(KernelLearner):
def __init__(self, d, C, kernel):
KernelLearner.__init__(self, d, kernel)
self.C = C
def batch_train(self, X, Y):
'''Given unlabeled training examples (one per row) in matrix X and their
associated (-1, +1) labels (one per row) in vector Y, returns a weight
vector w that determines a separating hyperplane, if one exists, using
a support vector machine with standard linear kernel.'''
# Infer the sample size from the data
m = len(Y)
K = zeros((m,m))
for i in range(m):
for j in range(m):
K[i,j] = self.kernel(X[i],X[j])
# Set up the appropriate matrices and call CVXOPT's quadratic programming
P = matrix(K * dot(Y, Y.T))
q = matrix(-ones(m))
G = matrix(vstack((-identity(m), identity(m))))
h = matrix(hstack((zeros(m), self.C * ones(m))))
alpha = solvers.qp(P, q, G, h)['x']
#storing the required values in the KernelLearner.support variable
for i in range(m):
temp = alpha[i] * Y[i]
self.support.append([temp, X[i]])
class KernelQP(KernelLearner, PassiveSupervisedLearner):
def __init__(self, d, kernel):
KernelLearner.__init__(self, d, kernel)
def train(self, X, Y, normfac):
solvers.options['show_progress'] = False
# Reduce maxiters and tolerance to reasonable levels
solvers.options['maxiters'] = 200
solvers.options['abstol'] = 1e-2
solvers.options['feastol'] = 1e-2
row, col = X.shape
P = matrix(0.0, (row,row))
# Calculating the Kernel Matrix
for i in range(row):
for j in range(row):
                P[i,j] = Y[i] * self.kernel(X[i],X[j]) * Y[j] # It's a PSD matrix, so it's okay!
# A point in the solution space for objective
x_0 = matrix(0.5, (row, 1))
normarr = matrix(normfac, (1,row))
def F(x = None, z = None):
if x is None:
return (0, x_0) # Alpha's start from 0.5, first value is zero as there are zero non-linear objectives
term = matrix(sqrt(x.T * P * x))
f = matrix(term - normfac * sum(x)) # return the objective function
# first derivative
Df = (x.T * P)/term - normarr # since for each alpha, normfac will be subtracted, norm arr is an array
if z is None:
return f, Df
term2 = matrix((P*x) * (P*x).T)
            H = z[0] * (P/term - term2/pow(term,3)) # Second derivative of the objective function; it is a symmetric dense matrix, so spdiag is not needed
return f, Df, H
# for linear inequalities
G = matrix(0.0, (row*2, row)) # there are two linear constaints for Alpha
h = matrix(0.0, (row*2, 1))
for i in range(row):
G[i,i] = -1.0 # -Alpha <= 0
G[row+i, i] = 1.0 # Alpha <= 1
h[row+i] = 1.0
# solve and return w
sol = solvers.cp(F, G, h)
alpha = sol['x']
for i in range(row):
self.support.append([alpha[i] * Y[i], X[i]])
#print
#print sol
class KernelQPwithLinearBand(KernelLearner, PassiveSupervisedLearner):
def __init__(self, d, kernel):
KernelLearner.__init__(self, d, kernel)
def train(self, X, Y, normfac, radius, prevw):
# the weight vector w is kept as a tuple - alpha_i * y_i and x_i, send only the required number of rows
solvers.options['show_progress'] = False
# Reduce maxiters and tolerance to reasonable levels
solvers.options['maxiters'] = 2000
solvers.options['abstol'] = 1e-2
solvers.options['feastol'] = 1e-2
row, col = X.shape
P = matrix(0.0, (row+1,row+1))
# Calculating the Kernel Matrix
# Kernel matrix will now include multiple kernel matrices
for i in range(row):
for j in range(row):
                P[i,j] = Y[i] * self.kernel(X[i],X[j]) * Y[j] # It's a PSD matrix, so it's okay!
# Summing over the kernel values between current set of points and prevw
for i in range(row):
P[i,row] = normfac * Y[i] * sum( prevw[k][0] * self.kernel(prevw[k][1], X[i]) for k in range(len(prevw)) )
P[row,i] = P[i,row]
# summing over the kernels value of the entire prevw matrix
P[row, row] = pow(normfac,2) * sum( prevw[k][0] * self.kernel(prevw[k][1], prevw[r][1]) * prevw[r][0] for k,r in itertools.product(range(len(prevw)), range(len(prevw))) )
# A point in the solution space for objective
x_0 = matrix(0.5, (row+1, 1))
normarr = matrix([normfac]*row + [normfac*(1-pow(radius,2)/2)]).T
def F(x = None, z = None):
if x is None:
return (0, x_0) # Alpha's start from 0.5, first value is zero as there are zero non-linear objectives
term = matrix(sqrt(x.T * P * x))
f = matrix(term - normfac * sum(x[0:row]) - x[row] * normfac * (1-pow(radius,2)/2)) # return the objective function
# first derivative
Df = (x.T * P)/term - normarr # since for each alpha, normfac will be subtracted, norm arr is an array
#print "Rank of Df"
#print linalg.matrix_rank(Df)
#print Df.size
#print "Rank of f"
#print linalg.matrix_rank(f)
if z is None:
return f, Df
term2 = matrix((P*x) * (P*x).T)
            H = z[0] * (P/term - term2/pow(term,3)) # Second derivative of the objective function; it is a symmetric dense matrix, so spdiag is not needed
#print "Rank of hessian"
#print linalg.matrix_rank((P/term - term2/pow(term,3)))
#print "Size of hessian"
#print H.size
return f, Df, H
# for linear inequalities
G = matrix(0.0, (row*2 + 1, row +1)) # there are two linear constaints for Alpha, one for Beta
h = matrix(0.0, (row*2 +1, 1))
for i in range(row):
G[i,i] = -1.0 # -Alpha <= 0
G[row+i, i] = 1.0 # Alpha <= 1
h[row+i] = 1.0
G[row*2, row] = -1.0 # -Beta <= 0
#print "Rank of G"
#print linalg.matrix_rank(G)
#print "Rank of hessian"
#print linalg.matrix_rank(h)
# solve and return w
sol = solvers.cp(F, G, h)
#print sol
alpha = sol['x'][0:row]
beta = sol['x'][row]
row_prev = len(prevw)
templist = []
for i in range(row):
templist.append([alpha[i] * Y[i], X[i]])
# Add Beta * Tau_k to the previous support vectors and store in current support vectors
for i in range(row_prev):
templist.append([prevw[i][0] * beta * normfac, prevw[i][1]])
self.support = templist
#print
#print sol['x']
class StochasticDualCoordinateAscent(KernelLearner, ActiveBatchLearner):
def __init__(self, d, C, kernel):
KernelLearner.__init__(self, d, kernel)
self.C = C
def train(self, X, Y):
row, col = X.shape
alpha = zeros((row,1))
w = sum( Y[i]*X[i]*alpha[i] for i in range(row))
iter_local = 200
for k in range(iter_local):
i = random.randint(0, row-1)
G = Y[i] * sum( alpha[j] * Y[j] * self.kernel(X[j], X[i]) for j in range(row)) -1
if alpha[i] == 0:
PG = min(0, G)
elif alpha[i] == self.C:
PG = max(0, G)
else:
PG = G
kernel_temp = self.kernel(X[i], X[i])
if PG != 0:
alpha_temp = alpha[i]
alpha[i] = min(max(alpha[i] - G/kernel_temp, 0), self.C)
w = w + (alpha[i] - alpha_temp) * Y[i] * X[i]
for i in range(row):
self.support.append([Y[i]*alpha[i], X[i]])
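# The update above resembles dual coordinate descent for the L1-loss SVM:
# the projected gradient PG clips steps at the box constraints
# 0 <= alpha_i <= C, and coordinates with PG == 0 are already optimal and
# therefore skipped.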
|
jelugbo/ddi | refs/heads/master | lms/lib/comment_client/user.py | 5 | from .utils import merge_dict, perform_request, CommentClientRequestError
import models
import settings
class User(models.Model):
accessible_fields = ['username', 'follower_ids', 'upvoted_ids', 'downvoted_ids',
'id', 'external_id', 'subscribed_user_ids', 'children', 'course_id',
'group_id', 'subscribed_thread_ids', 'subscribed_commentable_ids',
'subscribed_course_ids', 'threads_count', 'comments_count',
'default_sort_key'
]
updatable_fields = ['username', 'external_id', 'default_sort_key']
initializable_fields = updatable_fields
metric_tag_fields = ['course_id']
base_url = "{prefix}/users".format(prefix=settings.PREFIX)
default_retrieve_params = {'complete': True}
type = 'user'
@classmethod
def from_django_user(cls, user):
return cls(id=str(user.id),
external_id=str(user.id),
username=user.username)
def follow(self, source):
params = {'source_type': source.type, 'source_id': source.id}
response = perform_request(
'post',
_url_for_subscription(self.id),
params,
metric_action='user.follow',
metric_tags=self._metric_tags + ['target.type:{}'.format(source.type)],
)
def unfollow(self, source):
params = {'source_type': source.type, 'source_id': source.id}
response = perform_request(
'delete',
_url_for_subscription(self.id),
params,
metric_action='user.unfollow',
metric_tags=self._metric_tags + ['target.type:{}'.format(source.type)],
)
def vote(self, voteable, value):
if voteable.type == 'thread':
url = _url_for_vote_thread(voteable.id)
elif voteable.type == 'comment':
url = _url_for_vote_comment(voteable.id)
else:
raise CommentClientRequestError("Can only vote / unvote for threads or comments")
params = {'user_id': self.id, 'value': value}
response = perform_request(
'put',
url,
params,
metric_action='user.vote',
metric_tags=self._metric_tags + ['target.type:{}'.format(voteable.type)],
)
voteable._update_from_response(response)
def unvote(self, voteable):
if voteable.type == 'thread':
url = _url_for_vote_thread(voteable.id)
elif voteable.type == 'comment':
url = _url_for_vote_comment(voteable.id)
else:
raise CommentClientRequestError("Can only vote / unvote for threads or comments")
params = {'user_id': self.id}
response = perform_request(
'delete',
url,
params,
metric_action='user.unvote',
metric_tags=self._metric_tags + ['target.type:{}'.format(voteable.type)],
)
voteable._update_from_response(response)
def active_threads(self, query_params={}):
if not self.course_id:
raise CommentClientRequestError("Must provide course_id when retrieving active threads for the user")
url = _url_for_user_active_threads(self.id)
params = {'course_id': self.course_id.to_deprecated_string()}
params = merge_dict(params, query_params)
response = perform_request(
'get',
url,
params,
metric_action='user.active_threads',
metric_tags=self._metric_tags,
paged_results=True,
)
return response.get('collection', []), response.get('page', 1), response.get('num_pages', 1)
def subscribed_threads(self, query_params={}):
if not self.course_id:
raise CommentClientRequestError("Must provide course_id when retrieving subscribed threads for the user")
url = _url_for_user_subscribed_threads(self.id)
params = {'course_id': self.course_id.to_deprecated_string()}
params = merge_dict(params, query_params)
response = perform_request(
'get',
url,
params,
metric_action='user.subscribed_threads',
metric_tags=self._metric_tags,
paged_results=True
)
return response.get('collection', []), response.get('page', 1), response.get('num_pages', 1)
def _retrieve(self, *args, **kwargs):
url = self.url(action='get', params=self.attributes)
retrieve_params = self.default_retrieve_params.copy()
retrieve_params.update(kwargs)
if self.attributes.get('course_id'):
retrieve_params['course_id'] = self.course_id.to_deprecated_string()
if self.attributes.get('group_id'):
retrieve_params['group_id'] = self.group_id
try:
response = perform_request(
'get',
url,
retrieve_params,
metric_action='model.retrieve',
metric_tags=self._metric_tags,
)
except CommentClientRequestError as e:
if e.status_code == 404:
# attempt to gracefully recover from a previous failure
# to sync this user to the comments service.
self.save()
response = perform_request(
'get',
url,
retrieve_params,
metric_action='model.retrieve',
metric_tags=self._metric_tags,
)
else:
raise
self._update_from_response(response)
def _url_for_vote_comment(comment_id):
return "{prefix}/comments/{comment_id}/votes".format(prefix=settings.PREFIX, comment_id=comment_id)
def _url_for_vote_thread(thread_id):
return "{prefix}/threads/{thread_id}/votes".format(prefix=settings.PREFIX, thread_id=thread_id)
def _url_for_subscription(user_id):
return "{prefix}/users/{user_id}/subscriptions".format(prefix=settings.PREFIX, user_id=user_id)
def _url_for_user_active_threads(user_id):
return "{prefix}/users/{user_id}/active_threads".format(prefix=settings.PREFIX, user_id=user_id)
def _url_for_user_subscribed_threads(user_id):
return "{prefix}/users/{user_id}/subscribed_threads".format(prefix=settings.PREFIX, user_id=user_id)
|
enriclluelles/ansible-modules-extras | refs/heads/devel | database/vertica/vertica_configuration.py | 148 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
---
module: vertica_configuration
version_added: '2.0'
short_description: Updates Vertica configuration parameters.
description:
- Updates Vertica configuration parameters.
options:
name:
description:
- Name of the parameter to update.
required: true
value:
description:
- Value of the parameter to be set.
required: true
db:
description:
- Name of the Vertica database.
required: false
default: null
cluster:
description:
- Name of the Vertica cluster.
required: false
default: localhost
port:
description:
- Vertica cluster port to connect to.
required: false
default: 5433
login_user:
description:
- The username used to authenticate with.
required: false
default: dbadmin
login_password:
description:
- The password used to authenticate with.
required: false
default: null
notes:
- The default authentication assumes that you are either logging in as or sudo'ing
to the C(dbadmin) account on the host.
- This module uses C(pyodbc), a Python ODBC database adapter. You must ensure
      that C(unixODBC) and C(pyodbc) are installed on the host and properly configured.
- Configuring C(unixODBC) for Vertica requires C(Driver = /opt/vertica/lib64/libverticaodbc.so)
to be added to the C(Vertica) section of either C(/etc/odbcinst.ini) or C($HOME/.odbcinst.ini)
and both C(ErrorMessagesPath = /opt/vertica/lib64) and C(DriverManagerEncoding = UTF-16)
to be added to the C(Driver) section of either C(/etc/vertica.ini) or C($HOME/.vertica.ini).
requirements: [ 'unixODBC', 'pyodbc' ]
author: "Dariusz Owczarek (@dareko)"
"""
EXAMPLES = """
- name: updating failovertostandbyafter
vertica_configuration: name=failovertostandbyafter value='8 hours'
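# an illustrative second task (a sketch; the cluster, db, and password values
# below are placeholders)
- name: updating MaxClientSessions on a remote cluster
  vertica_configuration: name=MaxClientSessions value=50 cluster=vertica01 db=mydb login_password=secret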
"""
try:
import pyodbc
except ImportError:
pyodbc_found = False
else:
pyodbc_found = True
class NotSupportedError(Exception):
pass
class CannotDropError(Exception):
pass
# module specific functions
def get_configuration_facts(cursor, parameter_name=''):
facts = {}
cursor.execute("""
select c.parameter_name, c.current_value, c.default_value
from configuration_parameters c
where c.node_name = 'ALL'
and (? = '' or c.parameter_name ilike ?)
""", parameter_name, parameter_name)
while True:
rows = cursor.fetchmany(100)
if not rows:
break
for row in rows:
facts[row.parameter_name.lower()] = {
'parameter_name': row.parameter_name,
'current_value': row.current_value,
'default_value': row.default_value}
return facts
def check(configuration_facts, parameter_name, current_value):
parameter_key = parameter_name.lower()
if current_value and current_value.lower() != configuration_facts[parameter_key]['current_value'].lower():
return False
return True
def present(configuration_facts, cursor, parameter_name, current_value):
parameter_key = parameter_name.lower()
changed = False
if current_value and current_value.lower() != configuration_facts[parameter_key]['current_value'].lower():
cursor.execute("select set_config_parameter('{0}', '{1}')".format(parameter_name, current_value))
changed = True
if changed:
configuration_facts.update(get_configuration_facts(cursor, parameter_name))
return changed
# module logic
def main():
module = AnsibleModule(
argument_spec=dict(
parameter=dict(required=True, aliases=['name']),
value=dict(default=None),
db=dict(default=None),
cluster=dict(default='localhost'),
port=dict(default='5433'),
login_user=dict(default='dbadmin'),
login_password=dict(default=None),
), supports_check_mode = True)
if not pyodbc_found:
module.fail_json(msg="The python pyodbc module is required.")
parameter_name = module.params['parameter']
current_value = module.params['value']
db = ''
if module.params['db']:
db = module.params['db']
changed = False
try:
dsn = (
"Driver=Vertica;"
"Server={0};"
"Port={1};"
"Database={2};"
"User={3};"
"Password={4};"
"ConnectionLoadBalance={5}"
).format(module.params['cluster'], module.params['port'], db,
module.params['login_user'], module.params['login_password'], 'true')
db_conn = pyodbc.connect(dsn, autocommit=True)
cursor = db_conn.cursor()
except Exception, e:
module.fail_json(msg="Unable to connect to database: {0}.".format(e))
try:
configuration_facts = get_configuration_facts(cursor)
if module.check_mode:
changed = not check(configuration_facts, parameter_name, current_value)
else:
try:
changed = present(configuration_facts, cursor, parameter_name, current_value)
except pyodbc.Error, e:
module.fail_json(msg=str(e))
except NotSupportedError, e:
module.fail_json(msg=str(e), ansible_facts={'vertica_configuration': configuration_facts})
except CannotDropError, e:
module.fail_json(msg=str(e), ansible_facts={'vertica_configuration': configuration_facts})
except SystemExit:
# avoid catching this on python 2.4
raise
except Exception, e:
module.fail_json(msg=e)
module.exit_json(changed=changed, parameter=parameter_name, ansible_facts={'vertica_configuration': configuration_facts})
# import ansible utilities
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
drewswu/moviemap | refs/heads/master | webapp/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
npe9/depot_tools | refs/heads/master | third_party/logilab/astroid/brain/py2mechanize.py | 76 | from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder
def mechanize_transform():
return AstroidBuilder(MANAGER).string_build('''
class Browser(object):
def open(self, url, data=None, timeout=None):
return None
def open_novisit(self, url, data=None, timeout=None):
return None
def open_local_file(self, filename):
return None
''')
register_module_extender(MANAGER, 'mechanize', mechanize_transform)
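# Note: with the extender registered above, astroid injects the stub Browser
# class into the `mechanize` module's AST whenever analyzed code imports it,
# so linters can infer open()/open_novisit()/open_local_file() without the
# real library being installed.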
|
minhpqn/chainer | refs/heads/master | cupy/cudnn.py | 11 | import atexit
import ctypes
import numpy
import six
from cupy import cuda
from cupy.cuda import cudnn
_handles = {}
def get_handle():
global _handles
device = cuda.Device()
handle = _handles.get(device.id, None)
if handle is None:
handle = cudnn.create()
_handles[device.id] = handle
return handle
@atexit.register
def reset_handles():
global _handles
handles = _handles
_handles = {}
for handle in six.itervalues(handles):
cudnn.destroy(handle)
class Descriptor(object):
def __init__(self, descriptor, destroyer):
self.value = descriptor
self.destroy = destroyer
def __del__(self):
if self.value:
self.destroy(self.value)
self.value = None
def get_data_type(dtype):
if dtype.type == numpy.float32:
return cudnn.CUDNN_DATA_FLOAT
elif dtype.type == numpy.float64:
return cudnn.CUDNN_DATA_DOUBLE
else:
raise TypeError('Dtype {} is not supported in CuDNN v2'.format(dtype))
def _get_strides(arr):
return tuple(map(lambda s: s // arr.itemsize, arr.strides))
def _to_ctypes_array(tup, typ=ctypes.c_int):
array_type = typ * len(tup)
return array_type(*tup)
def create_tensor_descriptor(arr, format=cudnn.CUDNN_TENSOR_NCHW):
desc = Descriptor(cudnn.createTensorDescriptor(),
cudnn.destroyTensorDescriptor)
if arr.ndim != 4:
raise ValueError('Supports 4-dimensional array only')
if not arr.flags.c_contiguous:
        raise ValueError('Supports c-contiguous array only')
data_type = get_data_type(arr.dtype)
cudnn.setTensor4dDescriptor(desc.value, format, data_type,
*arr.shape)
return desc
def create_filter_descriptor(arr, mode=cudnn.CUDNN_CROSS_CORRELATION):
desc = Descriptor(cudnn.createFilterDescriptor(),
cudnn.destroyFilterDescriptor)
data_type = get_data_type(arr.dtype)
if arr.ndim == 4:
cudnn.setFilter4dDescriptor(desc.value, data_type, *arr.shape)
else:
cudnn.setFilterNdDescriptor(desc.value, data_type, arr.ndim,
_to_ctypes_array(arr.shape))
return desc
def create_convolution_descriptor(pad, stride,
mode=cudnn.CUDNN_CROSS_CORRELATION):
desc = Descriptor(cudnn.createConvolutionDescriptor(),
cudnn.destroyConvolutionDescriptor)
ndim = len(pad)
if ndim != len(stride):
raise ValueError('pad and stride must be of same length')
if ndim == 2:
cudnn.setConvolution2dDescriptor(
desc.value, pad[0], pad[1], stride[0], stride[1], 1, 1, mode)
else:
upscale = (1,) * ndim
cudnn.setConvolutionNdDescriptor(
desc.value, ndim, _to_ctypes_array(pad), _to_ctypes_array(stride),
_to_ctypes_array(upscale), mode)
return desc
def create_pooling_descriptor(ksize, stride, pad, mode):
desc = Descriptor(cudnn.createPoolingDescriptor(),
cudnn.destroyPoolingDescriptor)
ndim = len(ksize)
if ndim != len(stride) or ndim != len(pad):
raise ValueError('ksize, stride, and pad must be of same length')
if ndim == 2:
cudnn.setPooling2dDescriptor(
desc.value, mode, ksize[0], ksize[1], pad[0], pad[1],
stride[0], stride[1])
else:
cudnn.setPoolingNdDescriptor(
desc.value, mode, ndim, _to_ctypes_array(ksize),
_to_ctypes_array(pad), _to_ctypes_array(stride))
return desc
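# Minimal usage sketch (illustrative only; assumes a CUDA device with cuDNN v2
# available -- the array shape below is made up):
#
#   import cupy
#   x = cupy.zeros((1, 3, 224, 224), dtype=numpy.float32)
#   handle = get_handle()
#   x_desc = create_tensor_descriptor(x)
#   pool_desc = create_pooling_descriptor((2, 2), (2, 2), (0, 0),
#                                         cudnn.CUDNN_POOLING_MAX)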
|
kurkop/server-tools | refs/heads/master | __unported__/import_odbc/import_odbc.py | 6 | # -*- coding: utf-8 -*-
##############################################################################
#
# Daniel Reis
# 2011
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sys
from datetime import datetime
from openerp.osv import orm, fields
import logging
_logger = logging.getLogger(__name__)
_loglvl = _logger.getEffectiveLevel()
SEP = '|'
class import_odbc_dbtable(orm.Model):
_name = "import.odbc.dbtable"
_description = 'Import Table Data'
_order = 'exec_order'
_columns = {
'name': fields.char('Datasource name', required=True, size=64),
'enabled': fields.boolean('Execution enabled'),
'dbsource_id': fields.many2one('base.external.dbsource', 'Database source', required=True),
'sql_source': fields.text('SQL', required=True, help='Column names must be valid "import_data" columns.'),
'model_target': fields.many2one('ir.model', 'Target object'),
'noupdate': fields.boolean('No updates', help="Only create new records; disable updates to existing records."),
'exec_order': fields.integer('Execution order', help="Defines the order to perform the import"),
'last_sync': fields.datetime('Last sync date',
help="Datetime for the last succesfull sync."
"\nLater changes on the source may not be replicated on the destination"),
'start_run': fields.datetime('Time started', readonly=True),
'last_run': fields.datetime('Time ended', readonly=True),
'last_record_count': fields.integer('Last record count', readonly=True),
'last_error_count': fields.integer('Last error count', readonly=True),
'last_warn_count': fields.integer('Last warning count', readonly=True),
'last_log': fields.text('Last run log', readonly=True),
'ignore_rel_errors': fields.boolean('Ignore relationship errors',
help="On error try to reimport rows ignoring relationships."),
'raise_import_errors': fields.boolean('Raise import errors',
help="Import errors not handled, intended for debugging purposes."
"\nAlso forces debug messages to be written to the server log."),
}
_defaults = {
'enabled': True,
'exec_order': 10,
}
def _import_data(self, cr, uid, flds, data, model_obj, table_obj, log):
"""Import data and returns error msg or empty string"""
def find_m2o(field_list):
""""Find index of first column with a one2many field"""
for i, x in enumerate(field_list):
                if len(x) > 3 and (x[-3:] == ':id' or x[-3:] == '/id'):
return i
return -1
def append_to_log(log, level, obj_id='', msg='', rel_id=''):
if '_id_' in obj_id:
obj_id = '.'.join(obj_id.split('_')[:-2]) + ': ' + obj_id.split('_')[-1]
if ': .' in msg and not rel_id:
rel_id = msg[msg.find(': .')+3:]
if '_id_' in rel_id:
rel_id = '.'.join(rel_id.split('_')[:-2]) + ': ' + rel_id.split('_')[-1]
msg = msg[:msg.find(': .')]
log['last_log'].append('%s|%s\t|%s\t|%s' % (level.ljust(5), obj_id, rel_id, msg))
_logger.debug(data)
cols = list(flds) # copy to avoid side effects
errmsg = str()
if table_obj.raise_import_errors:
model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
else:
try:
model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
except:
errmsg = str(sys.exc_info()[1])
if errmsg and not table_obj.ignore_rel_errors:
#Fail
append_to_log(log, 'ERROR', data, errmsg)
log['last_error_count'] += 1
return False
if errmsg and table_obj.ignore_rel_errors:
#Warn and retry ignoring many2one fields...
append_to_log(log, 'WARN', data, errmsg)
log['last_warn_count'] += 1
#Try ignoring each many2one (tip: in the SQL sentence select more problematic FKs first)
i = find_m2o(cols)
if i >= 0:
#Try again without the [i] column
del cols[i]
del data[i]
self._import_data(cr, uid, cols, data, model_obj, table_obj, log)
else:
#Fail
append_to_log(log, 'ERROR', data, 'Removed all m2o keys and still fails.')
log['last_error_count'] += 1
return False
return True
def import_run(self, cr, uid, ids=None, context=None):
db_model = self.pool.get('base.external.dbsource')
actions = self.read(cr, uid, ids, ['id', 'exec_order'])
actions.sort(key=lambda x: (x['exec_order'], x['id']))
#Consider each dbtable:
for action_ref in actions:
obj = self.browse(cr, uid, action_ref['id'])
if not obj.enabled:
continue # skip
_logger.setLevel(obj.raise_import_errors and logging.DEBUG or _loglvl)
_logger.debug('Importing %s...' % obj.name)
#now() microseconds are stripped to avoid problem with SQL smalldate
#TODO: convert UTC Now to local timezone
#http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime
model_name = obj.model_target.model
model_obj = self.pool.get(model_name)
xml_prefix = model_name.replace('.', '_') + "_id_"
log = {'start_run': datetime.now().replace(microsecond=0),
'last_run': None,
'last_record_count': 0,
'last_error_count': 0,
'last_warn_count': 0,
'last_log': list()}
self.write(cr, uid, [obj.id], log)
            #Prepare SQL sentence; pass the last_sync date as the 'sync' query parameter
if obj.last_sync:
sync = datetime.strptime(obj.last_sync, "%Y-%m-%d %H:%M:%S")
else:
                sync = datetime(1900, 1, 1, 0, 0, 0)
params = {'sync': sync}
res = db_model.execute(cr, uid, [obj.dbsource_id.id],
obj.sql_source, params, metadata=True)
#Exclude columns titled "None"; add (xml_)"id" column
cidx = [i for i, x in enumerate(res['cols']) if x.upper() != 'NONE']
cols = [x for i, x in enumerate(res['cols']) if x.upper() != 'NONE'] + ['id']
#Import each row:
for row in res['rows']:
#Build data row; import only columns present in the "cols" list
data = list()
for i in cidx:
#TODO: Handle imported datetimes properly - convert from localtime to UTC!
v = row[i]
if isinstance(v, str):
v = v.strip()
data.append(v)
data.append(xml_prefix + str(row[0]).strip())
#Import the row; on error, write line to the log
log['last_record_count'] += 1
self._import_data(cr, uid, cols, data, model_obj, obj, log)
if log['last_record_count'] % 500 == 0:
_logger.info('...%s rows processed...' % (log['last_record_count']))
#Finished importing all rows
#If no errors, write new sync date
if not (log['last_error_count'] or log['last_warn_count']):
log['last_sync'] = log['start_run']
level = logging.DEBUG
if log['last_warn_count']:
level = logging.WARN
if log['last_error_count']:
level = logging.ERROR
_logger.log(level, 'Imported %s , %d rows, %d errors, %d warnings.' % (
model_name, log['last_record_count'], log['last_error_count'],
log['last_warn_count']))
#Write run log, either if the table import is active or inactive
if log['last_log']:
log['last_log'].insert(0, 'LEVEL|== Line == |== Relationship ==|== Message ==')
log.update({'last_log': '\n'.join(log['last_log'])})
log.update({'last_run': datetime.now().replace(microsecond=0)})
self.write(cr, uid, [obj.id], log)
#Finished
_logger.debug('Import job FINISHED.')
return True
def import_schedule(self, cr, uid, ids, context=None):
cron_obj = self.pool.get('ir.cron')
new_create_id = cron_obj.create(cr, uid, {
'name': 'Import ODBC tables',
'interval_type': 'hours',
'interval_number': 1,
'numbercall': -1,
'model': 'import.odbc.dbtable',
'function': 'import_run',
'doall': False,
'active': True
})
return {
'name': 'Import ODBC tables',
'view_type': 'form',
'view_mode': 'form,tree',
'res_model': 'ir.cron',
'res_id': new_create_id,
'type': 'ir.actions.act_window',
}
#EOF
|
openiitbombayx/edx-platform | refs/heads/master | cms/djangoapps/xblock_config/models.py | 172 | """
Models used by Studio XBlock infrastructure.
Includes:
StudioConfig: A ConfigurationModel for managing Studio.
"""
from django.db.models import TextField
from config_models.models import ConfigurationModel
class StudioConfig(ConfigurationModel):
"""
Configuration for XBlockAsides.
"""
disabled_blocks = TextField(
default="about course_info static_tab",
help_text="Space-separated list of XBlocks on which XBlockAsides should never render in studio",
)
@classmethod
def asides_enabled(cls, block_type):
"""
Return True if asides are enabled for this type of block in studio
"""
studio_config = cls.current()
return studio_config.enabled and block_type not in studio_config.disabled_blocks.split()
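    # Illustrative check (a sketch; the block type string is arbitrary):
    #
    #   if StudioConfig.asides_enabled('problem'):
    #       ...  # render asides for problem blocks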
|
antoviaque/edx-platform | refs/heads/master | cms/djangoapps/contentstore/views/tests/utils.py | 198 | """
Utilities for view tests.
"""
import json
from contentstore.tests.utils import CourseTestCase
from contentstore.views.helpers import xblock_studio_url
from xmodule.modulestore.tests.factories import ItemFactory
class StudioPageTestCase(CourseTestCase):
"""
Base class for all tests of Studio pages.
"""
def setUp(self):
super(StudioPageTestCase, self).setUp()
self.chapter = ItemFactory.create(parent_location=self.course.location,
category='chapter', display_name="Week 1")
self.sequential = ItemFactory.create(parent_location=self.chapter.location,
category='sequential', display_name="Lesson 1")
def get_page_html(self, xblock):
"""
Returns the HTML for the page representing the xblock.
"""
url = xblock_studio_url(xblock)
self.assertIsNotNone(url)
resp = self.client.get_html(url)
self.assertEqual(resp.status_code, 200)
return resp.content
def get_preview_html(self, xblock, view_name):
"""
Returns the HTML for the xblock when shown within a unit or container page.
"""
preview_url = '/xblock/{usage_key}/{view_name}'.format(usage_key=xblock.location, view_name=view_name)
resp = self.client.get_json(preview_url)
self.assertEqual(resp.status_code, 200)
resp_content = json.loads(resp.content)
return resp_content['html']
def validate_preview_html(self, xblock, view_name, can_add=True):
"""
Verify that the specified xblock's preview has the expected HTML elements.
"""
html = self.get_preview_html(xblock, view_name)
self.validate_html_for_add_buttons(html, can_add)
# Verify drag handles always appear.
drag_handle_html = '<span data-tooltip="Drag to reorder" class="drag-handle action"></span>'
self.assertIn(drag_handle_html, html)
        # Verify that the edit, delete, and duplicate action buttons are present.
expected_button_html = [
'<a href="#" class="edit-button action-button">',
'<a href="#" data-tooltip="Delete" class="delete-button action-button">',
'<a href="#" data-tooltip="Duplicate" class="duplicate-button action-button">'
]
for button_html in expected_button_html:
self.assertIn(button_html, html)
def validate_html_for_add_buttons(self, html, can_add=True):
"""
Validate that the specified HTML has the appropriate add actions for the current publish state.
"""
        # Verify that the add button appears only when adding is allowed.
add_button_html = '<div class="add-xblock-component new-component-item adding"></div>'
if can_add:
self.assertIn(add_button_html, html)
else:
self.assertNotIn(add_button_html, html)
|
quillford/redeem | refs/heads/master | redeem/Pipe.py | 1 | #!/usr/bin/env python
"""
Pipe - This uses a virtual TTY for communicating with
Toggler or similar front end.
Author: Elias Bakken
email: elias(dot)bakken(at)gmail(dot)com
Website: http://www.thing-printer.com
License: GNU GPL v3: http://www.gnu.org/copyleft/gpl.html
Redeem is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Redeem is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Redeem. If not, see <http://www.gnu.org/licenses/>.
"""
from threading import Thread
from distutils.spawn import find_executable
import select
import logging
import subprocess
import time
import os
from Gcode import Gcode
class Pipe:
@staticmethod
def check_tty0tty():
return (find_executable("tty0tty") is not None)
@staticmethod
def check_socat():
return (find_executable("socat") is not None)
def __init__(self, printer, prot):
self.printer = printer
self.prot = prot
pipe_0 = "/dev/" + prot + "_0"
pipe_1 = "/dev/" + prot + "_1"
# Ensure tty0tty is installed and available in the PATH
if not Pipe.check_tty0tty() and not Pipe.check_socat():
logging.error("Neither tty0tty nor socat found! tty0tty or socat must be installed")
raise EnvironmentError("tty0tty and socat not found")
if Pipe.check_tty0tty():
p = subprocess.Popen(["tty0tty", pipe_0, pipe_1],
stderr=subprocess.PIPE)
p.stderr.readline()
elif Pipe.check_socat():
p = subprocess.Popen([
"socat", "-d", "-d", "-lf", "/var/log/redeem2"+self.prot,
"pty,mode=777,raw,echo=0,link="+pipe_0,
"pty,mode=777,raw,echo=0,link="+pipe_1],
stderr=subprocess.PIPE)
while not os.path.exists(pipe_0):
time.sleep(0.1)
self.rd = open(pipe_0, "r")
self.wr = os.open(pipe_0, os.O_WRONLY)
logging.info("Pipe " + self.prot + " open. Use '" + pipe_1 + "' to "
"communicate with it")
self.running = True
self.t = Thread(target=self.get_message)
self.send_response = True
self.t.start()
def get_message(self):
""" Loop that gets messages and pushes them on the queue """
while self.running:
r, w, x = select.select([self.rd], [], [], 1.0)
if r:
message = self.rd.readline().rstrip()
if len(message) > 0:
g = Gcode({"message": message, "prot": self.prot})
self.printer.processor.enqueue(g)
def send_message(self, message):
if self.send_response:
if message[-1] != "\n":
message += "\n"
try:
os.write(self.wr, message)
except OSError:
logging.warning("Unable to write to file. Closing down?")
def close(self):
self.running = False
self.t.join()
self.rd.close()
os.close(self.wr)
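# Illustrative usage (a sketch; assumes a `printer` object whose
# `processor.enqueue()` accepts Gcode instances, as get_message() expects):
#
#   pipe = Pipe(printer, "octoprint")   # creates /dev/octoprint_0 and _1
#   pipe.send_message("ok")
#   pipe.close()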
|
maurerpe/FreeCAD | refs/heads/master | src/Mod/Import/App/SCL/essa_par.py | 30 | def process_nested_parent_str(attr_str):
    '''
    Split a parenthesized parameter string on its top-level commas.
    input string: "(1,4,(5,6),7)"
    output: list of top-level parameters ['1', '4', '(5,6)', '7']
    '''
    params = []
    agg_scope_level = 0
    current_param = ''
    for ch in attr_str:
        if ch == ',' and agg_scope_level <= 1:
            # a comma at the top level separates parameters
            params.append(current_param)
            current_param = ''
        elif ch == '(':
            agg_scope_level += 1
            if agg_scope_level > 1:
                current_param += ch
        elif ch == ')':
            agg_scope_level -= 1
            if agg_scope_level >= 1:
                current_param += ch
        else:
            current_param += ch
    if current_param:
        params.append(current_param)
    return params
def process_nested_parent_str2(attr_str,idx=0):
'''
The first letter should be a parenthesis
input string: "(1,4,(5,6),7)"
output: ['1','4',['5','6'],'7']
'''
#print 'Entering function with string %s'%(attr_str)
params = []
current_param = ''
k = 0
while (k<len(attr_str)):
#print 'k in this function:%i'%k
ch = attr_str[k]
k += 1
if ch==',':
#print "Add param:",current_param
params.append(current_param)
current_param = ''
elif ch=='(':
nv = attr_str[k:]
#print "Up one level parenthesis:%s"%(nv)
current_param, progress = process_nested_parent_str2(nv)
#print "Adding the list returned from nested",current_param
params.append(current_param)
current_param = ''
k += progress+1
elif ch==')':
#print "Down one level parenthesis: %i caracters parsed"%k
params.append(current_param)
#print "Current params:",params#k -= acc-2
return params,k
else:
current_param += ch
#print "Ch:",ch
#print "k:",k
#raw_input("")
#idx += 1
params.append(current_param)
return params,k
#print process_nested_parent_str2('1,2,3,4,5,6')
#idx=0
#print process_nested_parent_str2("'A','B','C'")
print process_nested_parent_str2("'A'")[0]
print process_nested_parent_str2("30.0,0.0,5.0")[0]
print process_nested_parent_str2("(Thomas)")[0]
print process_nested_parent_str2("Thomas, Paviot, ouais")[0]
print process_nested_parent_str2("1,2,(3,4,5),6,7,8")[0]
print process_nested_parent_str2("(#9149,#9166),#9142,.T.")[0]
|
pastaread/TanksBattle | refs/heads/master | cocos2d/plugin/tools/pluginx-bindings-generator/genbindings-lua.py | 130 | #!/usr/bin/python
# This script is used to generate luabinding glue code.
# Android ndk version must be ndk-r9b.
import sys
import os, os.path
import shutil
import ConfigParser
import subprocess
import re
from contextlib import contextmanager
import shutil
import yaml
import tempfile
def _check_ndk_root_env():
''' Checking the environment NDK_ROOT, which will be used for building
'''
try:
NDK_ROOT = os.environ['NDK_ROOT']
except Exception:
print "NDK_ROOT not defined. Please define NDK_ROOT in your environment."
sys.exit(1)
return NDK_ROOT
def _check_python_bin_env():
''' Checking the environment PYTHON_BIN, which will be used for building
'''
try:
PYTHON_BIN = os.environ['PYTHON_BIN']
except Exception:
print "PYTHON_BIN not defined, use current python."
PYTHON_BIN = sys.executable
return PYTHON_BIN
class CmdError(Exception):
pass
@contextmanager
def _pushd(newDir):
previousDir = os.getcwd()
os.chdir(newDir)
yield
os.chdir(previousDir)
def _run_cmd(command):
ret = subprocess.call(command, shell=True)
if ret != 0:
        message = "Error running command: %s" % command
raise CmdError(message)
def _edit_yaml(filePath):
f = open(filePath, 'r')
data = yaml.load(f)
f.close()
data['conversions']['ns_map']['cocos2d::plugin::'] = 'plugin.'
data['conversions']['to_native']['TIAPDeveloperInfo'] = 'ok &= pluginx::luaval_to_TIAPDeveloperInfo(tolua_S, ${arg_idx}, &${out_value})'
data['conversions']['to_native']['TAdsDeveloperInfo'] = 'ok &= pluginx::luaval_to_TAdsDeveloperInfo(tolua_S, ${arg_idx}, &${out_value})'
data['conversions']['to_native']['TAdsInfo'] = 'ok &= pluginx::luaval_to_TAdsInfo(tolua_S, ${arg_idx}, &${out_value})'
data['conversions']['to_native']['TShareDeveloperInfo'] = 'ok &= pluginx::luaval_to_TShareDeveloperInfo(tolua_S, ${arg_idx}, &${out_value})'
data['conversions']['to_native']['TSocialDeveloperInfo'] = 'ok &= pluginx::luaval_to_TSocialDeveloperInfo(tolua_S, ${arg_idx}, &${out_value})'
data['conversions']['to_native']['TUserDeveloperInfo'] = 'ok &= pluginx::luaval_to_TUserDeveloperInfo(tolua_S, ${arg_idx}, &${out_value})'
f = open(filePath, 'w')
f.write(yaml.dump(data))
f.close()
def main():
cur_platform= '??'
llvm_path = '??'
ndk_root = _check_ndk_root_env()
# del the " in the path
ndk_root = re.sub(r"\"", "", ndk_root)
python_bin = _check_python_bin_env()
platform = sys.platform
if platform == 'win32':
cur_platform = 'windows'
elif platform == 'darwin':
cur_platform = platform
elif 'linux' in platform:
cur_platform = 'linux'
else:
print 'Your platform is not supported!'
sys.exit(1)
if platform == 'win32':
x86_llvm_path = os.path.abspath(os.path.join(ndk_root, 'toolchains/llvm-3.3/prebuilt', '%s' % cur_platform))
if not os.path.exists(x86_llvm_path):
x86_llvm_path = os.path.abspath(os.path.join(ndk_root, 'toolchains/llvm-3.4/prebuilt', '%s' % cur_platform))
else:
x86_llvm_path = os.path.abspath(os.path.join(ndk_root, 'toolchains/llvm-3.3/prebuilt', '%s-%s' % (cur_platform, 'x86')))
if not os.path.exists(x86_llvm_path):
x86_llvm_path = os.path.abspath(os.path.join(ndk_root, 'toolchains/llvm-3.4/prebuilt', '%s-%s' % (cur_platform, 'x86')))
x64_llvm_path = os.path.abspath(os.path.join(ndk_root, 'toolchains/llvm-3.3/prebuilt', '%s-%s' % (cur_platform, 'x86_64')))
if not os.path.exists(x64_llvm_path):
x64_llvm_path = os.path.abspath(os.path.join(ndk_root, 'toolchains/llvm-3.4/prebuilt', '%s-%s' % (cur_platform, 'x86_64')))
if os.path.isdir(x86_llvm_path):
llvm_path = x86_llvm_path
elif os.path.isdir(x64_llvm_path):
llvm_path = x64_llvm_path
else:
print 'llvm toolchain not found!'
print 'path: %s or path: %s are not valid! ' % (x86_llvm_path, x64_llvm_path)
sys.exit(1)
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
cocos_root = os.path.abspath(os.path.join(project_root, ''))
cxx_generator_root = os.path.abspath(os.path.join(project_root, 'tools/bindings-generator'))
pluginx_root = os.path.abspath(os.path.join(project_root, 'plugin'))
# save config to file
config = ConfigParser.ConfigParser()
config.set('DEFAULT', 'androidndkdir', ndk_root)
config.set('DEFAULT', 'clangllvmdir', llvm_path)
config.set('DEFAULT', 'cocosdir', cocos_root)
config.set('DEFAULT', 'cxxgeneratordir', cxx_generator_root)
config.set('DEFAULT', 'extra_flags', '')
config.set('DEFAULT', 'pluginxdir', pluginx_root)
    # To fix the parse error on Windows, we must define __WCHAR_MAX__ and undefine __MINGW32__.
if platform == 'win32':
config.set('DEFAULT', 'extra_flags', '-D__WCHAR_MAX__=0x7fffffff -U__MINGW32__')
conf_ini_file = os.path.abspath(os.path.join(os.path.dirname(__file__), 'userconf.ini'))
print 'generating userconf.ini...'
with open(conf_ini_file, 'w') as configfile:
config.write(configfile)
# set proper environment variables
if 'linux' in platform or platform == 'darwin':
os.putenv('LD_LIBRARY_PATH', '%s/libclang' % cxx_generator_root)
if platform == 'win32':
path_env = os.environ['PATH']
os.putenv('PATH', r'%s;%s\libclang;%s\tools\win32;' % (path_env, cxx_generator_root, cxx_generator_root))
# edit conversions config for pluginx
conversions_yaml = '%s/targets/lua/conversions.yaml' % cxx_generator_root
conversions_backup = '%s.backup' % conversions_yaml
shutil.copy(conversions_yaml, conversions_backup)
_edit_yaml(conversions_yaml)
try:
tolua_root = '%s/plugin/tools/pluginx-bindings-generator/tolua' % project_root
output_dir = '%s/plugin/luabindings/auto' % project_root
cmd_args = {'cocos2dx_pluginx.ini' : ('cocos2dx_pluginx', 'lua_cocos2dx_pluginx_auto')}
target = 'lua'
generator_py = '%s/generator.py' % cxx_generator_root
for key in cmd_args.keys():
args = cmd_args[key]
cfg = '%s/%s' % (tolua_root, key)
print 'Generating bindings for %s...' % (key[:-4])
command = '%s %s %s -s %s -t %s -o %s -n %s' % (python_bin, generator_py, cfg, args[0], target, output_dir, args[1])
_run_cmd(command)
if platform == 'win32':
with _pushd(output_dir):
_run_cmd('dos2unix *')
        # insert the pluginx conversions header into the generated binding file
tmpfd,tmpname = tempfile.mkstemp(dir='.')
input_file_name = '%s/%s.cpp' % (output_dir, args[1])
try:
output_file = os.fdopen(tmpfd, 'w')
input_file = open(input_file_name)
for line in input_file:
output_file.write(line.replace('#include "LuaBasicConversions.h"', '#include "LuaBasicConversions.h"\n#include "lua_pluginx_basic_conversions.h"'))
finally:
output_file.close()
input_file.close()
shutil.move(tmpname, input_file_name)
print '---------------------------------'
print 'Generating lua bindings succeeds.'
print '---------------------------------'
except Exception as e:
if e.__class__.__name__ == 'CmdError':
print '---------------------------------'
print 'Generating lua bindings fails.'
print '---------------------------------'
sys.exit(1)
else:
raise
finally:
shutil.move(conversions_backup, conversions_yaml)
# -------------- main --------------
if __name__ == '__main__':
main()
|
pasiegel/SickGear | refs/heads/master | lib/subliminal/services/podnapisiweb.py | 23 | # -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <[email protected]>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
from . import ServiceBase
from ..exceptions import DownloadFailedError
from ..language import Language, language_set
from ..subtitles import ResultSubtitle
from ..utils import get_keywords
from ..videos import Episode, Movie
from bs4 import BeautifulSoup
import guessit
import logging
import re
from subliminal.subtitles import get_subtitle_path
logger = logging.getLogger("subliminal")
class PodnapisiWeb(ServiceBase):
server_url = 'http://simple.podnapisi.net'
site_url = 'http://www.podnapisi.net'
api_based = True
user_agent = 'Subliminal/0.6'
videos = [Episode, Movie]
require_video = False
required_features = ['xml']
languages = language_set(['Albanian', 'Arabic', 'Spanish (Argentina)', 'Belarusian', 'Bosnian', 'Portuguese (Brazil)', 'Bulgarian', 'Catalan',
'Chinese', 'Croatian', 'Czech', 'Danish', 'Dutch', 'English', 'Estonian', 'Persian',
'Finnish', 'French', 'German', 'gre', 'Kalaallisut', 'Hebrew', 'Hindi', 'Hungarian',
'Icelandic', 'Indonesian', 'Irish', 'Italian', 'Japanese', 'Kazakh', 'Korean', 'Latvian',
'Lithuanian', 'Macedonian', 'Malay', 'Norwegian', 'Polish', 'Portuguese', 'Romanian',
'Russian', 'Serbian', 'Sinhala', 'Slovak', 'Slovenian', 'Spanish', 'Swedish', 'Thai',
'Turkish', 'Ukrainian', 'Vietnamese'])
language_map = {Language('Albanian'): 29, Language('Arabic'): 12, Language('Spanish (Argentina)'): 14, Language('Belarusian'): 50,
Language('Bosnian'): 10, Language('Portuguese (Brazil)'): 48, Language('Bulgarian'): 33, Language('Catalan'): 53,
Language('Chinese'): 17, Language('Croatian'): 38, Language('Czech'): 7, Language('Danish'): 24,
Language('Dutch'): 23, Language('English'): 2, Language('Estonian'): 20, Language('Persian'): 52,
Language('Finnish'): 31, Language('French'): 8, Language('German'): 5, Language('gre'): 16,
Language('Kalaallisut'): 57, Language('Hebrew'): 22, Language('Hindi'): 42, Language('Hungarian'): 15,
Language('Icelandic'): 6, Language('Indonesian'): 54, Language('Irish'): 49, Language('Italian'): 9,
Language('Japanese'): 11, Language('Kazakh'): 58, Language('Korean'): 4, Language('Latvian'): 21,
Language('Lithuanian'): 19, Language('Macedonian'): 35, Language('Malay'): 55,
Language('Norwegian'): 3, Language('Polish'): 26, Language('Portuguese'): 32, Language('Romanian'): 13,
Language('Russian'): 27, Language('Serbian'): 36, Language('Sinhala'): 56, Language('Slovak'): 37,
Language('Slovenian'): 1, Language('Spanish'): 28, Language('Swedish'): 25, Language('Thai'): 44,
Language('Turkish'): 30, Language('Ukrainian'): 46, Language('Vietnamese'): 51,
29: Language('Albanian'), 12: Language('Arabic'), 14: Language('Spanish (Argentina)'), 50: Language('Belarusian'),
10: Language('Bosnian'), 48: Language('Portuguese (Brazil)'), 33: Language('Bulgarian'), 53: Language('Catalan'),
17: Language('Chinese'), 38: Language('Croatian'), 7: Language('Czech'), 24: Language('Danish'),
23: Language('Dutch'), 2: Language('English'), 20: Language('Estonian'), 52: Language('Persian'),
31: Language('Finnish'), 8: Language('French'), 5: Language('German'), 16: Language('gre'),
57: Language('Kalaallisut'), 22: Language('Hebrew'), 42: Language('Hindi'), 15: Language('Hungarian'),
6: Language('Icelandic'), 54: Language('Indonesian'), 49: Language('Irish'), 9: Language('Italian'),
11: Language('Japanese'), 58: Language('Kazakh'), 4: Language('Korean'), 21: Language('Latvian'),
19: Language('Lithuanian'), 35: Language('Macedonian'), 55: Language('Malay'), 40: Language('Chinese'),
3: Language('Norwegian'), 26: Language('Polish'), 32: Language('Portuguese'), 13: Language('Romanian'),
27: Language('Russian'), 36: Language('Serbian'), 47: Language('Serbian'), 56: Language('Sinhala'),
37: Language('Slovak'), 1: Language('Slovenian'), 28: Language('Spanish'), 25: Language('Swedish'),
                    44: Language('Thai'), 30: Language('Turkish'), 46: Language('Ukrainian'), 51: Language('Vietnamese')}
def list_checked(self, video, languages):
if isinstance(video, Movie):
return self.query(video.path or video.release, languages, video.title, year=video.year,
keywords=get_keywords(video.guess))
if isinstance(video, Episode):
return self.query(video.path or video.release, languages, video.series, season=video.season,
episode=video.episode, keywords=get_keywords(video.guess))
def query(self, filepath, languages, title, season=None, episode=None, year=None, keywords=None):
params = {'sXML': 1, 'sK': title, 'sJ': ','.join([str(self.get_code(l)) for l in languages])}
if season is not None:
params['sTS'] = season
if episode is not None:
params['sTE'] = episode
if year is not None:
params['sY'] = year
if keywords is not None:
params['sR'] = keywords
r = self.session.get(self.server_url + '/ppodnapisi/search', params=params)
if r.status_code != 200:
logger.error(u'Request %s returned status code %d' % (r.url, r.status_code))
return []
subtitles = []
soup = BeautifulSoup(r.content, self.required_features)
for sub in soup('subtitle'):
if 'n' in sub.flags:
logger.debug(u'Skipping hearing impaired')
continue
language = self.get_language(sub.languageId.text)
confidence = float(sub.rating.text) / 5.0
sub_keywords = set()
for release in sub.release.text.split():
sub_keywords |= get_keywords(guessit.guess_file_info(release + '.srt', 'autodetect'))
sub_path = get_subtitle_path(filepath, language, self.config.multi)
subtitle = ResultSubtitle(sub_path, language, self.__class__.__name__.lower(),
sub.url.text, confidence=confidence, keywords=sub_keywords)
subtitles.append(subtitle)
return subtitles
def download(self, subtitle):
r = self.session.get(subtitle.link)
if r.status_code != 200:
raise DownloadFailedError()
soup = BeautifulSoup(r.content)
self.download_zip_file(self.server_url + soup.find('a', href=re.compile('download'))['href'], subtitle.path)
return subtitle
Service = PodnapisiWeb
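# subliminal discovers this plugin through the module-level `Service` alias
# above; list_checked() routes Movie and Episode queries to query(), and
# download() fetches and unpacks the selected subtitle archive.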
|
cinayc/crawler | refs/heads/master | crawler/spiders/test_spider.py | 1 | # -*- coding: utf-8 -*-
import scrapy
from scrapy.exceptions import IgnoreRequest
from scrapy.linkextractors import LinkExtractor
from scrapy.spidermiddlewares.httperror import HttpError
from scrapy.spiders import Rule, CrawlSpider
from service_identity.exceptions import DNSMismatch
from twisted.internet.error import DNSLookupError
from crawler.items import CrawlerItem
import pymysql
from bs4 import BeautifulSoup
from time import sleep
from crawler.spiders.common_spider import CommonSpider
class TestSpider(CommonSpider):
name = "test"
start_urls = [
"http://www.clien.net",
"http://shopping.naver.com/search/all.nhn?frm=NVSCTAB&query=%EC%B8%B5%EA%B3%BC+%EC%82%AC%EC%9D%B4&where=all", # robot rule test
"https://www.sgic.co.kr/chp/fileDownload/download.mvc;jsessionid=vvVNjS05IjEVHy11OoAT3vje8KzvFySWceewEgDSb61DodNC9hDtAfGcWOdLaFI0.egisap2_servlet_engine13?fileId=014D8DBD1EFE5CD6629A629A", #AttributeError test
"http://150090289679516/robots.txt", # DNS lookup test
"http://www.yonhapnews.co.kr/international/2007/08/13/0604000000AKR20070813217600043.HTML", # 404 not found test
]
def __init__(self, *a, **kw):
print("Init Test spider...")
super(TestSpider, self).__init__(*a, **kw)
def start_requests(self):
db_host = self.settings.get('DB_HOST')
db_port = self.settings.get('DB_PORT')
db_user = self.settings.get('DB_USER')
db_pass = self.settings.get('DB_PASS')
db_db = self.settings.get('DB_DB')
db_charset = self.settings.get('DB_CHARSET')
        self.conn = pymysql.connect(host=db_host, port=db_port, user=db_user,
                                    passwd=db_pass, database=db_db, charset=db_charset)
self.cursor = self.conn.cursor(pymysql.cursors.DictCursor)
url = self.start_urls[4]
yield scrapy.Request(url,
callback=self.parse,
dont_filter=True,
errback=lambda x: self.download_errback(x, url))
def __del__(self):
self.cursor.close()
self.conn.close()
def parse(self, response):
try:
raw = response.text
except AttributeError as e:
self.logger.error(e)
#self.parse_text(raw)
pass
|
meetmangukiya/coala | refs/heads/master | tests/results/LineDiffTest.py | 35 | import unittest
from coalib.results.LineDiff import LineDiff, ConflictError
class LineDiffTest(unittest.TestCase):
def test_everything(self):
self.assertRaises(TypeError, LineDiff, delete=5)
self.assertRaises(TypeError, LineDiff, change=5)
self.assertRaises(TypeError, LineDiff, add_after=5)
self.assertRaises(TypeError, LineDiff, change=True)
self.assertRaises(TypeError, LineDiff, add_after=True)
self.assertRaises(ConflictError,
LineDiff,
change=('1', '2'),
delete=True)
self.assertEqual(LineDiff(change=('1', '2')).change, ('1', '2'))
self.assertEqual(LineDiff(delete=True).delete, True)
self.assertEqual(LineDiff(add_after=[]).add_after, False)
self.assertEqual(LineDiff(add_after=['t']).add_after, ['t'])
self.assertEqual(LineDiff(add_after=('t',)).add_after, ['t'])
uut = LineDiff()
uut.delete = True
self.assertRaises(ConflictError, setattr, uut, 'change', ('1', '2'))
uut.delete = False
uut.change = ('1', '2')
self.assertRaises(ConflictError, setattr, uut, 'delete', True)
def test_equality(self):
self.assertEqual(LineDiff(), LineDiff())
self.assertNotEqual(LineDiff(), LineDiff(delete=True))
self.assertNotEqual(LineDiff(add_after=['']), LineDiff())
self.assertNotEqual(LineDiff(add_after=['']), LineDiff(delete=True))
self.assertNotEqual(LineDiff(change=('', 'a')), LineDiff())
|
tph-thuering/vnetsource | refs/heads/master | ts_emod2/utils/launch.py | 2 | from django.core.exceptions import PermissionDenied
from django.shortcuts import redirect
from vecnet.simulation import sim_model, sim_status
from data_services.data_api import EMODBaseline
from lib.templatetags.base_extras import set_notification
from sim_services import dispatcher
from data_services.models import DimUser, SimulationGroup, Simulation
from data_services.views.misc_functions import create_and_return_input_files
from ts_emod2.models import Scenario
import json
def launch_scenario(request, scenario_id):
dim_user = DimUser.objects.get(username=request.user.username)
scenario = Scenario.objects.get(id=scenario_id)
config_json = json.loads(scenario.config_file.get_contents())
print config_json['parameters']['Land_Temperature_Filename']
print config_json['parameters']['Rainfall_Filename']
print config_json['parameters']['Relative_Humidity_Filename']
print config_json['parameters']['Air_Temperature_Filename']
print config_json['parameters']['Campaign_Filename']
print config_json['parameters']['Demographics_Filename']
# config_json['parameters']['Land_Temperature_Filename'] = 'temperature.bin'
# config_json['parameters']['Rainfall_Filename'] = 'rainfall.bin'
# config_json['parameters']['Relative_Humidity_Filename'] = 'humidity.bin'
# config_json['parameters']['Air_Temperature_Filename'] = 'temperature.bin'
# config_json['parameters']['Campaign_Filename'] = 'campaign.json'
# config_json['parameters']['Demographics_Filename'] = 'demographics.compiled.json'
config_json['parameters']['Simulation_Duration'] = 100
try:
scenario.set_file_by_type('config', json.dumps(config_json))
except RuntimeError as error:
set_notification('alert-error', '<strong>Error!</strong> ' + str(error), request.session)
return redirect("ts_emod2.details", scenario_id=scenario_id)
submit(request, sim_model.EMOD, scenario_id)
set_notification('alert-success', '<strong>Success!</strong> Job submitted.', request.session)
return redirect("ts_emod2.details", scenario_id=scenario_id)
def submit(request, model, scenario_id):
dim_user = DimUser.objects.get(username=request.user.username)
scenario = Scenario.objects.get(id=scenario_id)
simulation = scenario.simulation
simulation_group = simulation.group
# Check if this is the right user for this scenario
if scenario.user != dim_user:
raise PermissionDenied
dispatcher.submit(simulation_group)
def add_simulation(dim_user, model, version, simulation_group, baseline_id, input_file_metadata=None):
assert isinstance(simulation_group, SimulationGroup)
emod_scenario = EMODBaseline.from_dw(id=baseline_id)
# Check if this is the right user for this scenario. All three should be the same. If the dim_scenario user
# and the simulation_group user are the same, and if user coming in is the same dis_scenario user, then all
# three are the same user.
if emod_scenario.user != simulation_group.submitted_by or dim_user != emod_scenario.user:
raise PermissionDenied
# Create simulation
simulation = Simulation.objects.create(
group=simulation_group,
model=model,
version=version,
status=sim_status.READY_TO_RUN
)
# Create input files and put them into a list
simulation_input_files = create_and_return_input_files(dim_user, emod_scenario)
# Add simulation input files to simulation
for i in range(len(simulation_input_files)):
simulation.input_files.add(simulation_input_files[i])
simulation.save()
    return simulation
|
sixninetynine/pex | refs/heads/master | pex/resolver.py | 1 | # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import print_function
import itertools
import os
import shutil
import time
from collections import namedtuple
from pkg_resources import safe_name
from .common import safe_mkdir
from .fetcher import Fetcher
from .interpreter import PythonInterpreter
from .iterator import Iterator, IteratorInterface
from .orderedset import OrderedSet
from .package import Package, distribution_compatible
from .platforms import Platform
from .resolvable import ResolvableRequirement, resolvables_from_iterable
from .resolver_options import ResolverOptionsBuilder
from .tracer import TRACER
from .util import DistributionHelper
class Untranslateable(Exception):
pass
class Unsatisfiable(Exception):
pass
class StaticIterator(IteratorInterface):
"""An iterator that iterates over a static list of packages."""
def __init__(self, packages, allow_prereleases=None):
self._packages = packages
self._allow_prereleases = allow_prereleases
def iter(self, req):
for package in self._packages:
if package.satisfies(req, allow_prereleases=self._allow_prereleases):
yield package
class _ResolvedPackages(namedtuple('_ResolvedPackages',
'resolvable packages parent constraint_only')):
@classmethod
def empty(cls):
return cls(None, OrderedSet(), None, False)
def merge(self, other):
if other.resolvable is None:
return _ResolvedPackages(self.resolvable, self.packages, self.parent, self.constraint_only)
return _ResolvedPackages(
self.resolvable,
self.packages & other.packages,
self.parent,
self.constraint_only and other.constraint_only)
class _ResolvableSet(object):
@classmethod
def normalize(cls, name):
return safe_name(name).lower()
def __init__(self, tuples=None):
# A list of _ResolvedPackages
self.__tuples = tuples or []
def _collapse(self):
# Collapse all resolvables by name along with the intersection of all compatible packages.
# If the set of compatible packages is the empty set, then we cannot satisfy all the
# specifications for a particular name (e.g. "setuptools==2.2 setuptools>4".)
#
# We need to return the resolvable since it carries its own network context and configuration
# regarding package precedence. This is arbitrary -- we could just as easily say "last
# resolvable wins" but it seems highly unlikely this will materially affect anybody
# adversely but could be the source of subtle resolution quirks.
resolvables = {}
for resolved_packages in self.__tuples:
key = self.normalize(resolved_packages.resolvable.name)
previous = resolvables.get(key, _ResolvedPackages.empty())
if previous.resolvable is None:
resolvables[key] = resolved_packages
else:
resolvables[key] = previous.merge(resolved_packages)
return resolvables
def _synthesize_parents(self, name):
def render_resolvable(resolved_packages):
return '%s%s' % (
str(resolved_packages.resolvable),
'(from: %s)' % resolved_packages.parent if resolved_packages.parent else '')
return ', '.join(
render_resolvable(resolved_packages) for resolved_packages in self.__tuples
if self.normalize(resolved_packages.resolvable.name) == self.normalize(name))
def _check(self):
# Check whether or not the resolvables in this set are satisfiable, raise an exception if not.
for name, resolved_packages in self._collapse().items():
if not resolved_packages.packages:
raise Unsatisfiable('Could not satisfy all requirements for %s:\n %s' % (
resolved_packages.resolvable, self._synthesize_parents(name)))
def merge(self, resolvable, packages, parent=None):
"""Add a resolvable and its resolved packages."""
self.__tuples.append(_ResolvedPackages(resolvable, OrderedSet(packages),
parent, resolvable.is_constraint))
self._check()
def get(self, name):
"""Get the set of compatible packages given a resolvable name."""
resolvable, packages, parent, constraint_only = self._collapse().get(
self.normalize(name), _ResolvedPackages.empty())
return packages
def packages(self):
"""Return a snapshot of resolvable => compatible packages set from the resolvable set."""
return list(self._collapse().values())
def extras(self, name):
return set.union(
*[set(tup.resolvable.extras()) for tup in self.__tuples
if self.normalize(tup.resolvable.name) == self.normalize(name)])
def replace_built(self, built_packages):
"""Return a copy of this resolvable set but with built packages.
:param dict built_packages: A mapping from a resolved package to its locally built package.
:returns: A new resolvable set with built package replacements made.
"""
def map_packages(resolved_packages):
packages = OrderedSet(built_packages.get(p, p) for p in resolved_packages.packages)
return _ResolvedPackages(resolved_packages.resolvable, packages,
resolved_packages.parent, resolved_packages.constraint_only)
return _ResolvableSet([map_packages(rp) for rp in self.__tuples])
class Resolver(object):
"""Interface for resolving resolvable entities into python packages."""
class Error(Exception): pass
@classmethod
def filter_packages_by_interpreter(cls, packages, interpreter, platform):
return [package for package in packages
if package.compatible(interpreter.identity, platform)]
def __init__(self, allow_prereleases=None, interpreter=None, platform=None):
self._interpreter = interpreter or PythonInterpreter.get()
self._platform = platform or Platform.current()
self._allow_prereleases = allow_prereleases
def package_iterator(self, resolvable, existing=None):
if existing:
existing = resolvable.compatible(
StaticIterator(existing, allow_prereleases=self._allow_prereleases))
else:
existing = resolvable.packages()
return self.filter_packages_by_interpreter(existing, self._interpreter, self._platform)
def build(self, package, options):
context = options.get_context()
translator = options.get_translator(self._interpreter, self._platform)
with TRACER.timed('Fetching %s' % package.url, V=2):
local_package = Package.from_href(context.fetch(package))
if local_package is None:
raise Untranslateable('Could not fetch package %s' % package)
with TRACER.timed('Translating %s into distribution' % local_package.local_path, V=2):
dist = translator.translate(local_package)
if dist is None:
raise Untranslateable('Package %s is not translateable by %s' % (package, translator))
if not distribution_compatible(dist, self._interpreter, self._platform):
raise Untranslateable(
'Could not get distribution for %s on platform %s.' % (package, self._platform))
return dist
def resolve(self, resolvables, resolvable_set=None):
resolvables = [(resolvable, None) for resolvable in resolvables]
resolvable_set = resolvable_set or _ResolvableSet()
processed_resolvables = set()
processed_packages = {}
distributions = {}
while resolvables:
while resolvables:
resolvable, parent = resolvables.pop(0)
if resolvable in processed_resolvables:
continue
packages = self.package_iterator(resolvable, existing=resolvable_set.get(resolvable.name))
resolvable_set.merge(resolvable, packages, parent)
processed_resolvables.add(resolvable)
built_packages = {}
for resolvable, packages, parent, constraint_only in resolvable_set.packages():
if constraint_only:
continue
assert len(packages) > 0, 'ResolvableSet.packages(%s) should not be empty' % resolvable
package = next(iter(packages))
if resolvable.name in processed_packages:
if package == processed_packages[resolvable.name]:
continue
if package not in distributions:
dist = self.build(package, resolvable.options)
built_package = Package.from_href(dist.location)
built_packages[package] = built_package
distributions[built_package] = dist
package = built_package
distribution = distributions[package]
processed_packages[resolvable.name] = package
new_parent = '%s->%s' % (parent, resolvable) if parent else str(resolvable)
resolvables.extend(
(ResolvableRequirement(req, resolvable.options), new_parent) for req in
distribution.requires(extras=resolvable_set.extras(resolvable.name)))
resolvable_set = resolvable_set.replace_built(built_packages)
# We may have built multiple distributions depending upon if we found transitive dependencies
# for the same package. But ultimately, resolvable_set.packages() contains the correct version
# for all packages. So loop through it and only return the package version in
# resolvable_set.packages() that is found in distributions.
dists = []
# No point in proceeding if distributions is empty
if not distributions:
return dists
for resolvable, packages, parent, constraint_only in resolvable_set.packages():
if constraint_only:
continue
assert len(packages) > 0, 'ResolvableSet.packages(%s) should not be empty' % resolvable
package = next(iter(packages))
dists.append(distributions[package])
return dists
class CachingResolver(Resolver):
"""A package resolver implementing a package cache."""
@classmethod
def filter_packages_by_ttl(cls, packages, ttl, now=None):
now = now if now is not None else time.time()
return [package for package in packages
if package.remote or package.local and (now - os.path.getmtime(package.local_path)) < ttl]
def __init__(self, cache, cache_ttl, *args, **kw):
self.__cache = cache
self.__cache_ttl = cache_ttl
safe_mkdir(self.__cache)
super(CachingResolver, self).__init__(*args, **kw)
# Short-circuiting package iterator.
def package_iterator(self, resolvable, existing=None):
iterator = Iterator(fetchers=[Fetcher([self.__cache])],
allow_prereleases=self._allow_prereleases)
packages = self.filter_packages_by_interpreter(
resolvable.compatible(iterator),
self._interpreter,
self._platform
)
if packages and self.__cache_ttl:
packages = self.filter_packages_by_ttl(packages, self.__cache_ttl)
return itertools.chain(
packages,
super(CachingResolver, self).package_iterator(resolvable, existing=existing)
)
# Caching sandwich.
def build(self, package, options):
# cache package locally
if package.remote:
package = Package.from_href(options.get_context().fetch(package, into=self.__cache))
os.utime(package.local_path, None)
# build into distribution
dist = super(CachingResolver, self).build(package, options)
# if distribution is not in cache, copy
target = os.path.join(self.__cache, os.path.basename(dist.location))
if not os.path.exists(target):
shutil.copyfile(dist.location, target + '~')
os.rename(target + '~', target)
os.utime(target, None)
return DistributionHelper.distribution_from_path(target)
def resolve(requirements,
fetchers=None,
interpreter=None,
platform=None,
context=None,
precedence=None,
cache=None,
cache_ttl=None,
allow_prereleases=None):
"""Produce all distributions needed to (recursively) meet `requirements`
:param requirements: An iterator of Requirement-like things, either
:class:`pkg_resources.Requirement` objects or requirement strings.
:keyword fetchers: (optional) A list of :class:`Fetcher` objects for locating packages. If
unspecified, the default is to look for packages on PyPI.
:keyword interpreter: (optional) A :class:`PythonInterpreter` object to use for building
distributions and for testing distribution compatibility.
:keyword platform: (optional) A PEP425-compatible platform string to use for filtering
compatible distributions. If unspecified, the current platform is used, as determined by
`Platform.current()`.
:keyword context: (optional) A :class:`Context` object to use for network access. If
unspecified, the resolver will attempt to use the best available network context.
:keyword precedence: (optional) An ordered list of allowable :class:`Package` classes
to be used for producing distributions. For example, if precedence is supplied as
``(WheelPackage, SourcePackage)``, wheels will be preferred over building from source, and
    eggs will not be used at all. If ``(WheelPackage, EggPackage)`` is supplied, both wheels and
eggs will be used, but the resolver will not resort to building anything from source.
:keyword cache: (optional) A directory to use to cache distributions locally.
:keyword cache_ttl: (optional integer in seconds) If specified, consider non-exact matches when
resolving requirements. For example, if ``setuptools==2.2`` is specified and setuptools 2.2 is
available in the cache, it will always be used. However, if a non-exact requirement such as
    ``setuptools>=2,<3`` is specified and there exists a setuptools distribution newer than
    cache_ttl seconds old that satisfies the requirement, then it will be used. If the distribution
is older than cache_ttl seconds, it will be ignored. If ``cache_ttl`` is not specified,
resolving inexact requirements will always result in making network calls through the
``context``.
  :keyword allow_prereleases: (optional) Include pre-release and development versions. If
    unspecified, only stable versions will be resolved, unless explicitly included.
:returns: List of :class:`pkg_resources.Distribution` instances meeting ``requirements``.
:raises Unsatisfiable: If ``requirements`` is not transitively satisfiable.
:raises Untranslateable: If no compatible distributions could be acquired for
a particular requirement.
This method improves upon the setuptools dependency resolution algorithm by maintaining sets of
all compatible distributions encountered for each requirement rather than the single best
distribution encountered for each requirement. This prevents situations where ``tornado`` and
``tornado==2.0`` could be treated as incompatible with each other because the "best
distribution" when encountering ``tornado`` was tornado 3.0. Instead, ``resolve`` maintains the
set of compatible distributions for each requirement as it is encountered, and iteratively filters
the set. If the set of distributions ever becomes empty, then ``Unsatisfiable`` is raised.
.. versionchanged:: 0.8
A number of keywords were added to make requirement resolution slightly easier to configure.
    The optional ``obtainer`` keyword was replaced by ``fetchers``, ``translator``, ``context``,
    ``threads``, ``precedence``, ``cache`` and ``cache_ttl``, all of which are also optional.
.. versionchanged:: 1.0
The ``translator`` and ``threads`` keywords have been removed. The choice of threading
policy is now implicit. The choice of translation policy is dictated by ``precedence``
directly.
.. versionchanged:: 1.0
``resolver`` is now just a wrapper around the :class:`Resolver` and :class:`CachingResolver`
classes.
"""
builder = ResolverOptionsBuilder(fetchers=fetchers,
allow_prereleases=allow_prereleases,
precedence=precedence,
context=context)
if cache:
resolver = CachingResolver(cache,
cache_ttl,
allow_prereleases=allow_prereleases,
interpreter=interpreter,
platform=platform)
else:
resolver = Resolver(allow_prereleases=allow_prereleases,
interpreter=interpreter,
platform=platform)
return resolver.resolve(resolvables_from_iterable(requirements, builder))
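# A minimal usage sketch of ``resolve`` as documented above; the requirement
# strings, cache directory, and TTL are hypothetical examples, not part of the
# original module:
#
#   dists = resolve(['requests>=2.0,<3', 'six'],
#                   cache='/tmp/pex-cache',  # hypothetical cache location
#                   cache_ttl=3600,          # reuse inexact cached matches for 1h
#                   allow_prereleases=False)
#   for dist in dists:
#     print('%s %s -> %s' % (dist.project_name, dist.version, dist.location))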
def resolve_multi(requirements,
fetchers=None,
interpreters=None,
platforms=None,
context=None,
precedence=None,
cache=None,
cache_ttl=None,
allow_prereleases=None):
"""A generator function that produces all distributions needed to meet `requirements`
for multiple interpreters and/or platforms.
:param requirements: An iterator of Requirement-like things, either
:class:`pkg_resources.Requirement` objects or requirement strings.
:keyword fetchers: (optional) A list of :class:`Fetcher` objects for locating packages. If
unspecified, the default is to look for packages on PyPI.
:keyword interpreters: (optional) An iterable of :class:`PythonInterpreter` objects to use
for building distributions and for testing distribution compatibility.
:keyword platforms: (optional) An iterable of PEP425-compatible platform strings to use for
filtering compatible distributions. If unspecified, the current platform is used, as
determined by `Platform.current()`.
:keyword context: (optional) A :class:`Context` object to use for network access. If
unspecified, the resolver will attempt to use the best available network context.
:keyword precedence: (optional) An ordered list of allowable :class:`Package` classes
to be used for producing distributions. For example, if precedence is supplied as
``(WheelPackage, SourcePackage)``, wheels will be preferred over building from source, and
    eggs will not be used at all. If ``(WheelPackage, EggPackage)`` is supplied, both wheels and
eggs will be used, but the resolver will not resort to building anything from source.
:keyword cache: (optional) A directory to use to cache distributions locally.
:keyword cache_ttl: (optional integer in seconds) If specified, consider non-exact matches when
resolving requirements. For example, if ``setuptools==2.2`` is specified and setuptools 2.2 is
available in the cache, it will always be used. However, if a non-exact requirement such as
    ``setuptools>=2,<3`` is specified and there exists a setuptools distribution newer than
    cache_ttl seconds old that satisfies the requirement, then it will be used. If the distribution
is older than cache_ttl seconds, it will be ignored. If ``cache_ttl`` is not specified,
resolving inexact requirements will always result in making network calls through the
``context``.
  :keyword allow_prereleases: (optional) Include pre-release and development versions. If
    unspecified, only stable versions will be resolved, unless explicitly included.
:yields: All :class:`pkg_resources.Distribution` instances meeting ``requirements``.
:raises Unsatisfiable: If ``requirements`` is not transitively satisfiable.
:raises Untranslateable: If no compatible distributions could be acquired for
a particular requirement.
"""
interpreters = interpreters or [PythonInterpreter.get()]
platforms = platforms or [Platform.current()]
seen = set()
for interpreter in interpreters:
for platform in platforms:
for resolvable in resolve(requirements,
fetchers,
interpreter,
platform,
context,
precedence,
cache,
cache_ttl,
allow_prereleases):
if resolvable not in seen:
seen.add(resolvable)
yield resolvable
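# A companion sketch for ``resolve_multi`` under the same assumptions; the
# platform strings are hypothetical PEP425-compatible examples:
#
#   for dist in resolve_multi(['setuptools>=2,<3'],
#                             platforms=['linux-x86_64', 'macosx-10.12-x86_64']):
#     print(dist.location)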
|
seniorivn/python_contact_book | refs/heads/master | contact.py | 1 | import sqlite3
from datetime import date
from time import strptime
class Contact(object):
"""class of contact with fields id,fname,lname,mname,phone,bday"""
_cid = ""
_fname = ""
_lname = ""
_mname = ""
_phone = ""
_bday = ""
bday_types=["%d/%m/%Y","%d/%m/%y"]
def __init__(self, *tupl):
if len(tupl)==5:
            if tupl[0]:
                self.fname = tupl[0]
            if tupl[1]:
                self.mname = tupl[1]
            if tupl[2]:
                self.lname = tupl[2]
            if tupl[3]:
                self.phone = tupl[3]
            if tupl[4]:
                self.bday = tupl[4]
else:
self.fname = ""
self.lname = ""
self.mname = ""
self.phone = ""
self.bday = ""
@property
def cid(self):
return self._cid
@property
def fname(self):
return self._fname
@property
def lname(self):
return self._lname
@property
def mname(self):
return self._mname
@property
def phone(self):
return self._phone
@property
def bday(self):
return self._bday
@cid.setter
def cid(self, integer):
if integer:
try:
self._cid=int(integer)
except Exception as e:
raise TypeError("Error: cid should be integer")
@fname.setter
def fname(self, string):
if string:
self._fname=string
@lname.setter
def lname(self, string):
if string:
self._lname=string
@mname.setter
def mname(self, string):
if string:
self._mname=string
@phone.setter
def phone(self, string):
if string:
self._phone=string
@bday.setter
def bday(self, string):
if string:
self.set_bday(string)
def set_cid(self, integer):
self.cid=integer
def set_fname(self, string):
self.fname=string
def set_lname(self, string):
self.lname=string
def set_mname(self, string):
self.mname=string
def set_phone(self, string):
self.phone=string
def set_bday(self, string):
if string == "":
return
for i in " .-_":
string = string.replace(i,'/')
types = self.bday_types
for t in types:
try:
struct=strptime(string, t)
self._bday=str(struct.tm_mday) + "/" + str(struct.tm_mon) + "/" +str(struct.tm_year)
return
except ValueError as e:
ex=e
# return False
raise Exception("incorrect date format"+str(ex))
def get_tuple(self):
return (self.cid, self.fname, self.lname, self.mname, self.phone, self.bday)
def __str__(self):
fname = " first name="+self.fname if self.fname else ""
lname = " last name="+self.lname if self.lname else ""
mname = " middle name="+self.mname if self.mname else ""
phone = " phone="+self.phone if self.phone else ""
bday = " birthday date="+self.bday if self.bday else ""
return fname+mname+lname+phone+bday
def __repr__(self):
return self.__str__()
def __iter__(self):
return contactIter(self)
@staticmethod
def setcontact(contact, c):
"""set contact by id"""
if contact.cid:
if contact.fname:
c.execute("UPDATE `contacts` SET `fname`=? WHERE `_rowid_`=?;",(contact.fname,contact.cid))
if contact.lname:
c.execute("UPDATE `contacts` SET `lname`=? WHERE `_rowid_`=?;",(contact.lname,contact.cid))
if contact.mname:
c.execute("UPDATE `contacts` SET `mname`=? WHERE `_rowid_`=?;",(contact.mname,contact.cid))
if contact.phone:
c.execute("UPDATE `contacts` SET `phone`=? WHERE `_rowid_`=?;",(contact.phone,contact.cid))
if contact.bday:
c.execute("UPDATE `contacts` SET `bday`=? WHERE `_rowid_`=?;",(contact.bday,contact.cid))
return True
else:
return False
@staticmethod
def add(contact, c, args):
"""add contact method"""
        # ensure `replace` is always bound, even when args is missing
        replace = args["--replace"] if args else False
string = ""
msk = ""
tup = []
if contact.fname:
tup.append(contact.fname)
string += "fname,"
msk += "?,"
if contact.lname:
tup.append(contact.lname)
string += "lname,"
msk += "?,"
if contact.mname:
tup.append(contact.mname)
string += "mname,"
msk += "?,"
if contact.phone:
tup.append(contact.phone)
string += "phone,"
msk += "?,"
if contact.bday:
tup.append(contact.bday)
string += "bday,"
msk += "?,"
string = string[:-1]
msk = msk[:-1]
if string:
            found = contact.find(contact, c)
            if not found:
if contact.phone:
cnt = Contact()
cnt.phone = contact.phone
                    phone_found = contact.find(cnt, c)
                    if phone_found:
                        if replace:
                            phone_contact = phone_found[0]
contact.cid=phone_contact[0]
contact.setcontact(contact, c)
return True, True, "Contact with this phone="+contact.phone+" replaced"
else:
return False, True, "Contact with this phone="+contact.phone+" already exist"
c.execute('insert into contacts('+string+') VALUES ('+msk+')', tuple(tup))
return True, False, "Contact was added"
else:
return False, True, "This contact already exist"
else:
return False, False, "there is empty contact"
@staticmethod
def find( contact, c):
"""find contact method"""
string1 = "select id, fname, lname, mname, phone, bday from contacts "
string = ""
if contact.cid:
string+=" id='" + str(contact.cid) + "' and "
if contact.fname:
string+=" fname='" + contact.fname + "' and "
if contact.lname:
string+=" lname='" + contact.lname + "' and "
if contact.mname:
string+=" mname='" + contact.mname + "' and "
if contact.phone:
string+=" phone='" + contact.phone + "' and "
if contact.bday:
string+=" bday='" + contact.bday + "' and "
string = string[:-4]
if string != "":
string = string1 + " where " + string
else:
string = string1
# print(string)
rows = []
for row in c.execute(string):
            # rows are already plain tuples; no need for eval(str(...))
            rows.append(tuple(row))
return tuple(rows)
@staticmethod
def lst( args, c):
"""list all contacts method"""
if args and args["--sort"]:
ex='select id, fname, lname, mname, phone, bday from contacts order by ' + args["--sort"]
else:
ex='select id, fname, lname, mname, phone, bday from contacts '
if args and args["--reverse"]:
ex+=" desc"
try:
result = []
for row in c.execute(ex):
                result.append(tuple(row))
return tuple(result)
except sqlite3.Error as e:
print("there is no column:" + args["--sort"])
raise
@staticmethod
def delete(contact, c):
"""delete contacts"""
string1 = "select id, fname, lname, mname, phone, bday from contacts where"
string = ""
if contact.cid:
string+=" id='" + str(contact.cid) + "' and "
if contact.fname:
string+=" fname='" + contact.fname + "' and "
if contact.lname:
string+=" lname='" + contact.lname + "' and "
if contact.mname:
string+=" mname='" + contact.mname + "' and "
if contact.phone:
string+=" phone='" + contact.phone + "' and "
if contact.bday:
string+=" bday='" + contact.bday + "' and "
string = string[:-4]
if string == "":
return False, "empty contact can't be deleted"
try:
lst=c.execute(string1 + string).fetchall()
if lst:
c.execute("delete from contacts where" + string)
return lst, "contact(s) deleted"
else:
return False, "there is no contact"
except sqlite3.Error as e:
return False, "there is no contact=" + contact + "in the database"
@staticmethod
def reminder(c):
"""remind about birthdays in this or next month"""
today = date.today()
today = str(today.day)+"/"+str(today.month)+"/"+str(today.year)
contacts=[]
for row in c.execute("select id, fname, lname, mname, phone, bday from contacts"):
contact=Contact()
contact.cid=row[0]
contact.fname=row[1]
contact.lname=row[2]
contact.mname=row[3]
contact.phone=row[4]
contact.bday=row[5]
if contact.bday and contact.monthdelta(today,contact.bday):
contacts.append(contact)
return contacts
@staticmethod
def monthdelta(date1,date2):
"""let birthdays delta"""
day1, month1, year1 = date1.split("/")
day2, month2, year2 = date2.split("/")
mdelta=int(month2) - int(month1)
ddelta=int(day2) - int(day1)
if mdelta == 0 and ddelta >= 0:
return True
elif 0 < mdelta < 2:
return True
return False
class contactIter(object):
"""Contact Iterator"""
def __init__(self, contact):
self.lst = contact.get_tuple()
self.i = -1
def __iter__(self):
return self
def __next__(self):
if self.i<len(self.lst)-1:
self.i += 1
return self.lst[self.i]
else:
raise StopIteration
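# A brief usage sketch for the classes above, assuming an sqlite3 database
# that already has a `contacts` table and a docopt-style args dict with
# "--replace"/"--sort"/"--reverse" keys (both assumptions about the caller,
# not something this module sets up itself):
#
#   conn = sqlite3.connect('contacts.db')
#   c = conn.cursor()
#   person = Contact('John', 'M', 'Doe', '555-0100', '1/2/1990')
#   ok, existed, msg = Contact.add(person, c, {"--replace": False})
#   print(msg)
#   for row in Contact.lst({"--sort": "fname", "--reverse": False}, c):
#       print(row)
#   conn.commit()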
|
bear/circleci-nginx-proxy | refs/heads/master | foob/settings.py | 1 | # -*- coding: utf-8 -*-
"""
:copyright: (c) 2016 by Mike Taylor
:license: CC0 1.0 Universal, see LICENSE for more details.
"""
import os
_cwd = os.path.dirname(os.path.abspath(__file__))
class Config(object):
SECRET_KEY = "foob"
TEMPLATES = os.path.join(_cwd, 'templates')
class ProdConfig(Config):
ENV = 'prod'
DEBUG = True
DEBUG_TB_INTERCEPT_REDIRECTS = False
class DevConfig(Config):
ENV = 'dev'
DEBUG = True
DEBUG_TB_INTERCEPT_REDIRECTS = False
class TestConfig(Config):
ENV = 'test'
DEBUG = True
DEBUG_TB_INTERCEPT_REDIRECTS = False
|
neavouli/yournextrepresentative | refs/heads/release-neavouli | candidates/tests/test_feeds.py | 1 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django_webtest import WebTest
from popolo.models import Person
from .auth import TestUserMixin
from ..models import LoggedAction
class TestFeeds(TestUserMixin, WebTest):
def setUp(self):
self.person1 = Person.objects.create(
name='Test Person1'
)
self.person2 = Person.objects.create(
name='Test Person2'
)
self.action1 = LoggedAction.objects.create(
user=self.user,
action_type='person-create',
ip_address='127.0.0.1',
person=self.person1,
popit_person_new_version='1234567890abcdef',
source='Just for tests...',
)
self.action2 = LoggedAction.objects.create(
user=self.user,
action_type='candidacy-delete',
ip_address='127.0.0.1',
person=self.person2,
popit_person_new_version='987654321',
source='Something with unicode in it…',
)
def test_unicode(self):
response = self.app.get('/feeds/changes.xml')
self.assertTrue("Just for tests..." in response)
self.assertTrue("Something with unicode in it…" in response)
def tearDown(self):
self.action2.delete()
self.action1.delete()
self.person2.delete()
self.person1.delete()
|
BCriswell/crud-fusion | refs/heads/master | config/settings/production.py | 1 | # -*- coding: utf-8 -*-
'''
Production Configurations
- Use djangosecure
- Use Amazon's S3 for storing static files and uploaded media
- Use mailgun to send emails
- Use Redis on Heroku
'''
from __future__ import absolute_import, unicode_literals
from boto.s3.connection import OrdinaryCallingFormat
from django.utils import six
from .common import * # noqa
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Raises ImproperlyConfigured exception if DJANGO_SECRET_KEY not in os.environ
SECRET_KEY = env("DJANGO_SECRET_KEY")
# This ensures that Django will be able to detect a secure connection
# properly on Heroku.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# django-secure
# ------------------------------------------------------------------------------
INSTALLED_APPS += ("djangosecure", )
SECURITY_MIDDLEWARE = (
'djangosecure.middleware.SecurityMiddleware',
)
# Make sure djangosecure.middleware.SecurityMiddleware is listed first
MIDDLEWARE_CLASSES = SECURITY_MIDDLEWARE + MIDDLEWARE_CLASSES
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
"DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True)
SECURE_FRAME_DENY = env.bool("DJANGO_SECURE_FRAME_DENY", default=True)
SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
"DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True)
SECURE_BROWSER_XSS_FILTER = True
SESSION_COOKIE_SECURE = False
SESSION_COOKIE_HTTPONLY = True
SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
# SITE CONFIGURATION
# ------------------------------------------------------------------------------
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['example.com'])
# END SITE CONFIGURATION
INSTALLED_APPS += ("gunicorn", )
# STORAGE CONFIGURATION
# ------------------------------------------------------------------------------
# Uploaded Media Files
# ------------------------
# See: http://django-storages.readthedocs.org/en/latest/index.html
INSTALLED_APPS += (
'storages',
)
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_ACCESS_KEY_ID = env('DJANGO_AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = env('DJANGO_AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = env('DJANGO_AWS_STORAGE_BUCKET_NAME')
AWS_AUTO_CREATE_BUCKET = True
AWS_QUERYSTRING_AUTH = False
AWS_S3_CALLING_FORMAT = OrdinaryCallingFormat()
# AWS cache settings, don't change unless you know what you're doing:
AWS_EXPIRY = 60 * 60 * 24 * 7
# TODO See: https://github.com/jschneier/django-storages/issues/47
# Revert the following and use str after the above-mentioned bug is fixed in
# either django-storage-redux or boto
AWS_HEADERS = {
'Cache-Control': six.b('max-age=%d, s-maxage=%d, must-revalidate' % (
AWS_EXPIRY, AWS_EXPIRY))
}
# URL that handles the media served from MEDIA_ROOT, used for managing
# stored files.
MEDIA_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
# Static Assets
# ------------------------
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# EMAIL
# ------------------------------------------------------------------------------
DEFAULT_FROM_EMAIL = env('DJANGO_DEFAULT_FROM_EMAIL',
default='crud_fusion <[email protected]>')
EMAIL_BACKEND = 'django_mailgun.MailgunBackend'
MAILGUN_ACCESS_KEY = env('DJANGO_MAILGUN_API_KEY')
MAILGUN_SERVER_NAME = env('DJANGO_MAILGUN_SERVER_NAME')
EMAIL_SUBJECT_PREFIX = env("DJANGO_EMAIL_SUBJECT_PREFIX", default='[crud_fusion] ')
SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL)
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See:
# https://docs.djangoproject.com/en/dev/ref/templates/api/#django.template.loaders.cached.Loader
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ]),
]
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
DATABASES['default'] = env.db("DATABASE_URL")
# CACHING
# ------------------------------------------------------------------------------
# Heroku URL does not pass the DB number, so we parse it in
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "{0}/{1}".format(env.cache_url('REDIS_URL', default="redis://127.0.0.1:6379"), 0),
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
"IGNORE_EXCEPTIONS": True, # mimics memcache behavior.
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
}
}
}
# LOGGING CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s '
'%(process)d %(thread)d %(message)s'
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True
},
'django.security.DisallowedHost': {
'level': 'ERROR',
'handlers': ['console', 'mail_admins'],
'propagate': True
}
}
}
# Custom Admin URL, use {% url 'admin:index' %}
ADMIN_URL = env('DJANGO_ADMIN_URL')
# Your production stuff: Below this line define 3rd party library settings
|
ibelem/crosswalk-test-suite | refs/heads/master | webapi/tct-csp-w3c-tests/csp-py/csp_object-src_none_blocked_int-manual.py | 30 | def main(request, response):
import simplejson as json
f = file('config.json')
source = f.read()
s = json.JSONDecoder().decode(source)
url1 = "http://" + s['host'] + ":" + str(s['ports']['http'][1])
url2 = "http://" + s['host'] + ":" + str(s['ports']['http'][0])
_CSP = "object-src 'none'"
response.headers.set("Content-Security-Policy", _CSP)
response.headers.set("X-Content-Security-Policy", _CSP)
response.headers.set("X-WebKit-CSP", _CSP)
return """<!DOCTYPE html>
<!--
Copyright (c) 2013 Intel Corporation.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of works must retain the original copyright notice, this list
of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the original copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors
may be used to endorse or promote products derived from this work without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Authors:
Hao, Yunfei <[email protected]>
-->
<html>
<head>
<title>CSP Test: csp_object-src_none_blocked_int</title>
<link rel="author" title="Intel" href="http://www.intel.com"/>
<link rel="help" href="http://www.w3.org/TR/2012/CR-CSP-20121115/#object-src"/>
<meta name="flags" content=""/>
<meta name="assert" content="object-src 'none'"/>
<meta charset="utf-8"/>
</head>
<body>
<p>Test passes if there is <strong>no red</strong>.</p>
<object data="support/red-100x100.png"/>
</body>
</html> """
|
CSC301H-Fall2013/JuakStore | refs/heads/master | site-packages/django/conf/locale/pt_BR/formats.py | 107 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'j \d\e F \d\e Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = r'j \d\e F \d\e Y à\s H:i'
YEAR_MONTH_FORMAT = r'F \d\e Y'
MONTH_DAY_FORMAT = r'j \d\e F'
SHORT_DATE_FORMAT = 'd/m/Y'
SHORT_DATETIME_FORMAT = 'd/m/Y H:i'
FIRST_DAY_OF_WEEK = 0 # Sunday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06'
# '%d de %b de %Y', '%d de %b, %Y', # '25 de Out de 2006', '25 Out, 2006'
# '%d de %B de %Y', '%d de %B, %Y', # '25 de Outubro de 2006', '25 de Outubro, 2006'
)
DATETIME_INPUT_FORMATS = (
'%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
'%d/%m/%Y %H:%M', # '25/10/2006 14:30'
'%d/%m/%Y', # '25/10/2006'
'%d/%m/%y %H:%M:%S', # '25/10/06 14:30:59'
'%d/%m/%y %H:%M', # '25/10/06 14:30'
'%d/%m/%y', # '25/10/06'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
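# For illustration (not part of the original file): with the formats above,
# datetime.date(2006, 10, 25) renders as "25 de Outubro de 2006" under
# DATE_FORMAT and as "25/10/2006" under SHORT_DATE_FORMAT.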
|
consulo/consulo-python | refs/heads/master | plugin/src/test/resources/refactoring/inlinelocal/multiple.after.py | 4 | aoo = 10 + 10
boo = 10 + 10
coo = 10 + 10
doo = 10 + 10
eoo = 10 + 10
goo = 10 + 10
hoo = 10 + 10
ioo = 10 + 10
joo = 10 + 10
koo = 10 + 10
loo = 10 + 10
moo = 10 + 10
noo = 10 + 10
ooo = 10 + 10
poo = 10 + 10
qoo = 10 + 10
roo = 10 + 10
soo = 10 + 10
too = 10 + 10
uoo = 10 + 10
voo = 10 + 10
woo = 10 + 10
xoo = 10 + 10
yoo = 10 + 10
zoo = 10 + 10 |
technologiescollege/s2a_fr | refs/heads/portable | s2a/Python/Lib/test/test_grp.py | 39 | """Test script for the grp module."""
import unittest
from test import test_support
grp = test_support.import_module('grp')
class GroupDatabaseTestCase(unittest.TestCase):
def check_value(self, value):
# check that a grp tuple has the entries and
# attributes promised by the docs
self.assertEqual(len(value), 4)
self.assertEqual(value[0], value.gr_name)
self.assertIsInstance(value.gr_name, basestring)
self.assertEqual(value[1], value.gr_passwd)
self.assertIsInstance(value.gr_passwd, basestring)
self.assertEqual(value[2], value.gr_gid)
self.assertIsInstance(value.gr_gid, (long, int))
self.assertEqual(value[3], value.gr_mem)
self.assertIsInstance(value.gr_mem, list)
def test_values(self):
entries = grp.getgrall()
for e in entries:
self.check_value(e)
if len(entries) > 1000: # Huge group file (NIS?) -- skip the rest
return
for e in entries:
e2 = grp.getgrgid(e.gr_gid)
self.check_value(e2)
self.assertEqual(e2.gr_gid, e.gr_gid)
name = e.gr_name
if name.startswith('+') or name.startswith('-'):
# NIS-related entry
continue
e2 = grp.getgrnam(name)
self.check_value(e2)
# There are instances where getgrall() returns group names in
# lowercase while getgrgid() returns proper casing.
# Discovered on Ubuntu 5.04 (custom).
self.assertEqual(e2.gr_name.lower(), name.lower())
def test_errors(self):
self.assertRaises(TypeError, grp.getgrgid)
self.assertRaises(TypeError, grp.getgrnam)
self.assertRaises(TypeError, grp.getgrall, 42)
# try to get some errors
bynames = {}
bygids = {}
for (n, p, g, mem) in grp.getgrall():
if not n or n == '+':
continue # skip NIS entries etc.
bynames[n] = g
bygids[g] = n
allnames = bynames.keys()
namei = 0
fakename = allnames[namei]
while fakename in bynames:
chars = list(fakename)
for i in xrange(len(chars)):
if chars[i] == 'z':
chars[i] = 'A'
break
elif chars[i] == 'Z':
continue
else:
chars[i] = chr(ord(chars[i]) + 1)
break
else:
namei = namei + 1
try:
fakename = allnames[namei]
except IndexError:
# should never happen... if so, just forget it
break
fakename = ''.join(chars)
self.assertRaises(KeyError, grp.getgrnam, fakename)
# Choose a non-existent gid.
fakegid = 4127
while fakegid in bygids:
fakegid = (fakegid * 3) % 0x10000
self.assertRaises(KeyError, grp.getgrgid, fakegid)
def test_main():
test_support.run_unittest(GroupDatabaseTestCase)
if __name__ == "__main__":
test_main()
|
lthall/Leonard_ardupilot | refs/heads/master | Tools/autotest/examples.py | 14 | #!/usr/bin/env python
"""
Contains functions used to test the ArduPilot examples
AP_FLAKE8_CLEAN
"""
from __future__ import print_function
import os
import pexpect
import signal
import subprocess
import time
from pysim import util
def run_example(filepath, valgrind=False, gdb=False):
cmd = []
if valgrind:
cmd.append("valgrind")
if gdb:
cmd.append("gdb")
cmd.append(filepath)
print("Running: (%s)" % str(cmd))
bob = subprocess.Popen(cmd, stdin=None, close_fds=True)
retcode = bob.poll()
time.sleep(10)
print("pre-kill retcode: %s" % str(retcode))
if retcode is not None:
raise ValueError("Process exited before I could kill it (%s)" % str(retcode))
bob.send_signal(signal.SIGTERM)
time.sleep(1)
retcode = bob.poll()
print("retcode: %s" % str(retcode))
if retcode is None:
# if we get this far then we're not going to get a gcda file
# out of this process for coverage analysis; it has to exit
# normally, and it hasn't responded to a TERM.
bob.kill()
retcode2 = bob.wait()
print("retcode2: %s" % str(retcode2))
elif retcode == -15:
print("process exited with -15, indicating it didn't catch the TERM signal and exit properly")
elif retcode != 0:
# note that process could exit with code 0 and we couldn't tell...
raise ValueError("Process exitted with non-zero exitcode %s" % str(retcode))
print("Ran: (%s)" % str(cmd))
def run_examples(debug=False, valgrind=False, gdb=False):
dirpath = util.reltopdir(os.path.join('build', 'linux', 'examples'))
print("Running Hello")
# explicitly run helloworld and check for output
hello_path = os.path.join(dirpath, "Hello")
p = pexpect.spawn(hello_path, ["Hello"])
ex = None
try:
p.expect("hello world", timeout=5)
except pexpect.TIMEOUT as e:
ex = e
print("ran Hello")
p.close()
if ex is not None:
raise ex
skip = {
"BARO_generic": "Most linux computers don't have baros...",
"RCProtocolDecoder": "This assumes specific hardware is connected",
"FlashTest": "https://github.com/ArduPilot/ardupilot/issues/14168",
"UART_chargen": "This nuke the term",
}
for afile in os.listdir(dirpath):
if afile in skip:
print("Skipping %s: %s" % (afile, skip[afile]))
continue
filepath = os.path.join(dirpath, afile)
if not os.path.isfile(filepath):
continue
run_example(filepath, valgrind=valgrind, gdb=gdb)
return True
|
anhstudios/swganh | refs/heads/develop | data/scripts/templates/object/mobile/shared_dressed_criminal_organized_human_female_01.py | 2 | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_dressed_criminal_organized_human_female_01.iff"
result.attribute_template_id = 9
result.stfName("npc_name","human_base_female")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result |
WillWeatherford/mars-rover | refs/heads/master | api/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
SmartcitySantiagoChile/onlineGPS | refs/heads/master | gpsmap/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
JSchwerberg/review | refs/heads/master | review/review/settings/test.py | 1 | from __future__ import absolute_import
from .base import *
########## TEST SETTINGS
TEST_RUNNER = 'discover_runner.DiscoverRunner'
TEST_DISCOVER_TOP_LEVEL = SITE_ROOT
TEST_DISCOVER_ROOT = SITE_ROOT
TEST_DISCOVER_PATTERN = "test_*.py"
########## IN-MEMORY TEST DATABASE
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"USER": "",
"PASSWORD": "",
"HOST": "",
"PORT": "",
},
}
|
odahoda/noisicaa | refs/heads/master | noisicaa/builtin_nodes/step_sequencer/model_test.py | 1 | #!/usr/bin/python3
# @begin:license
#
# Copyright (c) 2015-2019, Benjamin Niemann <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @end:license
from typing import cast
from noisidev import unittest
from noisidev import unittest_mixins
from . import model
class StepSequencerTest(unittest_mixins.ProjectMixin, unittest.AsyncTestCase):
async def _add_node(self) -> model.StepSequencer:
with self.project.apply_mutations('test'):
return cast(
model.StepSequencer,
self.project.create_node('builtin://step-sequencer'))
async def test_add_node(self):
node = await self._add_node()
self.assertIsInstance(node, model.StepSequencer)
self.assertFalse(node.time_synched)
self.assertEqual(len(node.channels), 1)
self.assertEqual(len(node.channels[0].steps), node.num_steps)
async def test_set_num_steps_increase(self):
node = await self._add_node()
with self.project.apply_mutations('test'):
node.set_num_steps(13)
self.assertEqual(node.num_steps, 13)
self.assertEqual(len(node.channels[0].steps), 13)
async def test_set_num_steps_decrease(self):
node = await self._add_node()
with self.project.apply_mutations('test'):
node.set_num_steps(5)
self.assertEqual(node.num_steps, 5)
self.assertEqual(len(node.channels[0].steps), 5)
async def test_add_channel(self):
node = await self._add_node()
old_channel = node.channels[0]
with self.project.apply_mutations('test'):
node.create_channel(0)
self.assertIs(node.channels[1], old_channel)
self.assertEqual(len(node.channels), 2)
self.assertEqual(len(node.channels[0].steps), node.num_steps)
async def test_delete_channel(self):
node = await self._add_node()
old_channel = node.channels[0]
with self.project.apply_mutations('test'):
channel = node.create_channel(0)
with self.project.apply_mutations('test'):
node.delete_channel(channel)
self.assertEqual(len(node.channels), 1)
self.assertIs(node.channels[0], old_channel)
|
insequent/libcalico | refs/heads/master | calico_containers/tests/unit/test_handle.py | 3 | # Copyright (c) 2015-2016 Tigera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from netaddr import IPNetwork
from nose.tools import *
from mock import Mock
import unittest
import json
from pycalico.handle import (AllocationHandle, AddressCountTooLow)
from etcd import EtcdResult
class TestAllocationHandle(unittest.TestCase):
def test_to_json(self):
handle = AllocationHandle("test_id")
expected_json = {AllocationHandle.HANDLE_ID: "test_id",
AllocationHandle.BLOCK: {}}
json_str = handle.to_json()
json_result = json.loads(json_str)
assert_dict_equal(expected_json, json_result)
block_cidr = IPNetwork("10.11.12.0/24")
handle.increment_block(block_cidr, 5)
expected_json[AllocationHandle.BLOCK]["10.11.12.0/24"] = 5
json_str = handle.to_json()
json_result = json.loads(json_str)
assert_dict_equal(expected_json, json_result)
block_cidr2 = IPNetwork("10.11.45.0/24")
handle.increment_block(block_cidr2, 20)
expected_json[AllocationHandle.BLOCK]["10.11.45.0/24"] = 20
json_str = handle.to_json()
json_result = json.loads(json_str)
assert_dict_equal(expected_json, json_result)
def test_from_etcd_result(self):
block_dict = {
"10.23.24.0/24": 50,
"10.23.35.0/24": 60
}
json_dict = {
AllocationHandle.HANDLE_ID: "test_id2",
AllocationHandle.BLOCK: block_dict
}
m_result = Mock(spec=EtcdResult)
m_result.value = json.dumps(json_dict)
handle = AllocationHandle.from_etcd_result(m_result)
assert_dict_equal(block_dict, handle.block)
assert_equal(m_result, handle.db_result)
# Convert to JSON and back
m_result.value = handle.to_json()
handle2 = AllocationHandle.from_etcd_result(m_result)
assert_equal(block_dict, handle2.block)
def test_update_result(self):
block_dict = {
"10.23.24.0/24": 50,
"10.23.35.0/24": 60
}
json_dict = {
AllocationHandle.HANDLE_ID: "test_id2",
AllocationHandle.BLOCK: block_dict
}
m_result = Mock(spec=EtcdResult)
m_result.value = json.dumps(json_dict)
handle = AllocationHandle.from_etcd_result(m_result)
handle.decrement_block(IPNetwork("10.23.35.0/24"), 15)
result = handle.update_result()
assert_equal(result, m_result)
result_json = json.loads(result.value)
assert_equal(result_json[AllocationHandle.BLOCK]["10.23.35.0/24"],
45)
def test_inc_dec_block(self):
block = [IPNetwork("10.11.12.0/24"),
IPNetwork("2001:abcd:def0::/120"),
IPNetwork("192.168.1.0")]
handle = AllocationHandle("tst_id1")
result = handle.increment_block(block[0], 20)
assert_equal(result, 20)
result = handle.decrement_block(block[0], 15)
assert_equal(result, 5)
assert_raises(AddressCountTooLow,
handle.decrement_block, block[1], 1)
result = handle.increment_block(block[1], 1)
assert_equal(result, 1)
result = handle.increment_block(block[2], 10)
assert_equal(result, 10)
result = handle.decrement_block(block[1], 1)
assert_equal(result, 0)
assert_false(str(block[1]) in handle.block)
assert_raises(AddressCountTooLow,
handle.decrement_block, block[2], 11)
result = handle.decrement_block(block[2], 10)
assert_equal(result, 0)
assert_false(str(block[2]) in handle.block)
result = handle.decrement_block(block[0], 5)
assert_equal(result, 0)
assert_false(str(block[0]) in handle.block)
|
adomasalcore3/android_kernel_Vodafone_VDF600 | refs/heads/master | tools/perf/tests/attr.py | 3174 | #! /usr/bin/python
import os
import sys
import glob
import optparse
import tempfile
import logging
import shutil
import ConfigParser
class Fail(Exception):
def __init__(self, test, msg):
self.msg = msg
self.test = test
def getMsg(self):
return '\'%s\' - %s' % (self.test.path, self.msg)
class Unsup(Exception):
def __init__(self, test):
self.test = test
def getMsg(self):
return '\'%s\'' % self.test.path
class Event(dict):
terms = [
'cpu',
'flags',
'type',
'size',
'config',
'sample_period',
'sample_type',
'read_format',
'disabled',
'inherit',
'pinned',
'exclusive',
'exclude_user',
'exclude_kernel',
'exclude_hv',
'exclude_idle',
'mmap',
'comm',
'freq',
'inherit_stat',
'enable_on_exec',
'task',
'watermark',
'precise_ip',
'mmap_data',
'sample_id_all',
'exclude_host',
'exclude_guest',
'exclude_callchain_kernel',
'exclude_callchain_user',
'wakeup_events',
'bp_type',
'config1',
'config2',
'branch_sample_type',
'sample_regs_user',
'sample_stack_user',
]
def add(self, data):
for key, val in data:
log.debug(" %s = %s" % (key, val))
self[key] = val
def __init__(self, name, data, base):
log.debug(" Event %s" % name);
self.name = name;
self.group = ''
self.add(base)
self.add(data)
def compare_data(self, a, b):
# Allow multiple values in assignment separated by '|'
a_list = a.split('|')
b_list = b.split('|')
for a_item in a_list:
for b_item in b_list:
if (a_item == b_item):
return True
elif (a_item == '*') or (b_item == '*'):
return True
return False
def equal(self, other):
for t in Event.terms:
log.debug(" [%s] %s %s" % (t, self[t], other[t]));
if not self.has_key(t) or not other.has_key(t):
return False
if not self.compare_data(self[t], other[t]):
return False
return True
def diff(self, other):
for t in Event.terms:
if not self.has_key(t) or not other.has_key(t):
continue
if not self.compare_data(self[t], other[t]):
log.warning("expected %s=%s, got %s" % (t, self[t], other[t]))
# Test file description needs to have the following sections:
# [config]
# - just single instance in file
# - needs to specify:
# 'command' - perf command name
# 'args' - special command arguments
# 'ret' - expected command return value (0 by default)
#
# [eventX:base]
# - one or multiple instances in file
# - expected values assignments
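#
# For illustration, a hypothetical description file following this format
# might look like the following (all values are made up for the example):
#
#   [config]
#   command = record
#   args    = kill >/dev/null 2>&1
#   ret     = 1
#
#   [event:base-record]
#   fd      = 1
#   group_fd = -1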
class Test(object):
def __init__(self, path, options):
parser = ConfigParser.SafeConfigParser()
parser.read(path)
log.warning("running '%s'" % path)
self.path = path
self.test_dir = options.test_dir
self.perf = options.perf
self.command = parser.get('config', 'command')
self.args = parser.get('config', 'args')
try:
self.ret = parser.get('config', 'ret')
except:
self.ret = 0
self.expect = {}
self.result = {}
log.debug(" loading expected events");
self.load_events(path, self.expect)
def is_event(self, name):
if name.find("event") == -1:
return False
else:
return True
def load_events(self, path, events):
parser_event = ConfigParser.SafeConfigParser()
parser_event.read(path)
        # The event record section header contains the word 'event',
        # optionally followed by ':' allowing to load the 'parent
        # event' first as a base
for section in filter(self.is_event, parser_event.sections()):
parser_items = parser_event.items(section);
base_items = {}
# Read parent event if there's any
if (':' in section):
base = section[section.index(':') + 1:]
parser_base = ConfigParser.SafeConfigParser()
parser_base.read(self.test_dir + '/' + base)
base_items = parser_base.items('event')
e = Event(section, parser_items, base_items)
events[section] = e
def run_cmd(self, tempdir):
cmd = "PERF_TEST_ATTR=%s %s %s -o %s/perf.data %s" % (tempdir,
self.perf, self.command, tempdir, self.args)
ret = os.WEXITSTATUS(os.system(cmd))
log.info(" '%s' ret %d " % (cmd, ret))
if ret != int(self.ret):
raise Unsup(self)
def compare(self, expect, result):
match = {}
log.debug(" compare");
# For each expected event find all matching
# events in result. Fail if there's not any.
for exp_name, exp_event in expect.items():
exp_list = []
log.debug(" matching [%s]" % exp_name)
for res_name, res_event in result.items():
log.debug(" to [%s]" % res_name)
if (exp_event.equal(res_event)):
exp_list.append(res_name)
log.debug(" ->OK")
else:
log.debug(" ->FAIL");
log.debug(" match: [%s] matches %s" % (exp_name, str(exp_list)))
            # we did not find any matching event - fail
if (not exp_list):
exp_event.diff(res_event)
raise Fail(self, 'match failure');
match[exp_name] = exp_list
# For each defined group in the expected events
# check we match the same group in the result.
for exp_name, exp_event in expect.items():
group = exp_event.group
if (group == ''):
continue
for res_name in match[exp_name]:
res_group = result[res_name].group
if res_group not in match[group]:
raise Fail(self, 'group failure')
log.debug(" group: [%s] matches group leader %s" %
(exp_name, str(match[group])))
log.debug(" matched")
def resolve_groups(self, events):
for name, event in events.items():
group_fd = event['group_fd'];
if group_fd == '-1':
continue;
for iname, ievent in events.items():
if (ievent['fd'] == group_fd):
event.group = iname
log.debug('[%s] has group leader [%s]' % (name, iname))
break;
def run(self):
tempdir = tempfile.mkdtemp();
try:
# run the test script
self.run_cmd(tempdir);
# load events expectation for the test
log.debug(" loading result events");
for f in glob.glob(tempdir + '/event*'):
self.load_events(f, self.result);
# resolve group_fd to event names
self.resolve_groups(self.expect);
self.resolve_groups(self.result);
# do the expectation - results matching - both ways
self.compare(self.expect, self.result)
self.compare(self.result, self.expect)
finally:
# cleanup
shutil.rmtree(tempdir)
def run_tests(options):
for f in glob.glob(options.test_dir + '/' + options.test):
try:
Test(f, options).run()
except Unsup, obj:
log.warning("unsupp %s" % obj.getMsg())
def setup_log(verbose):
global log
level = logging.CRITICAL
if verbose == 1:
level = logging.WARNING
if verbose == 2:
level = logging.INFO
if verbose >= 3:
level = logging.DEBUG
log = logging.getLogger('test')
log.setLevel(level)
ch = logging.StreamHandler()
ch.setLevel(level)
formatter = logging.Formatter('%(message)s')
ch.setFormatter(formatter)
log.addHandler(ch)
USAGE = '''%s [OPTIONS]
-d dir # tests dir
-p path # perf binary
-t test # single test
-v # verbose level
''' % sys.argv[0]
def main():
parser = optparse.OptionParser(usage=USAGE)
parser.add_option("-t", "--test",
action="store", type="string", dest="test")
parser.add_option("-d", "--test-dir",
action="store", type="string", dest="test_dir")
parser.add_option("-p", "--perf",
action="store", type="string", dest="perf")
parser.add_option("-v", "--verbose",
action="count", dest="verbose")
options, args = parser.parse_args()
if args:
parser.error('FAILED wrong arguments %s' % ' '.join(args))
return -1
setup_log(options.verbose)
if not options.test_dir:
print 'FAILED no -d option specified'
sys.exit(-1)
if not options.test:
options.test = 'test*'
try:
run_tests(options)
except Fail, obj:
print "FAILED %s" % obj.getMsg();
sys.exit(-1)
sys.exit(0)
if __name__ == '__main__':
main()
|
saguziel/incubator-airflow | refs/heads/master | tests/executors/__init__.py | 44 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .dask_executor import *
|
cosmiclattes/TPBviz | refs/heads/master | torrent/lib/python2.7/posixpath.py | 4 | /usr/lib/python2.7/posixpath.py |
imsparsh/python-for-android | refs/heads/master | python3-alpha/python3-src/Lib/encodings/cp775.py | 272 | """ Python Character Mapping Codec cp775 generated from 'VENDORS/MICSFT/PC/CP775.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp775',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x0083: 0x0101, # LATIN SMALL LETTER A WITH MACRON
0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x0085: 0x0123, # LATIN SMALL LETTER G WITH CEDILLA
0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
0x0087: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
0x0088: 0x0142, # LATIN SMALL LETTER L WITH STROKE
0x0089: 0x0113, # LATIN SMALL LETTER E WITH MACRON
0x008a: 0x0156, # LATIN CAPITAL LETTER R WITH CEDILLA
0x008b: 0x0157, # LATIN SMALL LETTER R WITH CEDILLA
0x008c: 0x012b, # LATIN SMALL LETTER I WITH MACRON
0x008d: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
0x0093: 0x014d, # LATIN SMALL LETTER O WITH MACRON
0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x0095: 0x0122, # LATIN CAPITAL LETTER G WITH CEDILLA
0x0096: 0x00a2, # CENT SIGN
0x0097: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
0x0098: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x009b: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
0x009c: 0x00a3, # POUND SIGN
0x009d: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
0x009e: 0x00d7, # MULTIPLICATION SIGN
0x009f: 0x00a4, # CURRENCY SIGN
0x00a0: 0x0100, # LATIN CAPITAL LETTER A WITH MACRON
0x00a1: 0x012a, # LATIN CAPITAL LETTER I WITH MACRON
0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x00a3: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
0x00a4: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
0x00a5: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
0x00a6: 0x201d, # RIGHT DOUBLE QUOTATION MARK
0x00a7: 0x00a6, # BROKEN BAR
0x00a8: 0x00a9, # COPYRIGHT SIGN
0x00a9: 0x00ae, # REGISTERED SIGN
0x00aa: 0x00ac, # NOT SIGN
0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
0x00ad: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
0x00b6: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
0x00b7: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
0x00b8: 0x0116, # LATIN CAPITAL LETTER E WITH DOT ABOVE
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x012e, # LATIN CAPITAL LETTER I WITH OGONEK
0x00be: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x0172, # LATIN CAPITAL LETTER U WITH OGONEK
0x00c7: 0x016a, # LATIN CAPITAL LETTER U WITH MACRON
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
0x00d0: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
0x00d1: 0x010d, # LATIN SMALL LETTER C WITH CARON
0x00d2: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
0x00d3: 0x0117, # LATIN SMALL LETTER E WITH DOT ABOVE
0x00d4: 0x012f, # LATIN SMALL LETTER I WITH OGONEK
0x00d5: 0x0161, # LATIN SMALL LETTER S WITH CARON
0x00d6: 0x0173, # LATIN SMALL LETTER U WITH OGONEK
0x00d7: 0x016b, # LATIN SMALL LETTER U WITH MACRON
0x00d8: 0x017e, # LATIN SMALL LETTER Z WITH CARON
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x258c, # LEFT HALF BLOCK
0x00de: 0x2590, # RIGHT HALF BLOCK
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S (GERMAN)
0x00e2: 0x014c, # LATIN CAPITAL LETTER O WITH MACRON
0x00e3: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
0x00e4: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
0x00e5: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
0x00e6: 0x00b5, # MICRO SIGN
0x00e7: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
0x00e8: 0x0136, # LATIN CAPITAL LETTER K WITH CEDILLA
0x00e9: 0x0137, # LATIN SMALL LETTER K WITH CEDILLA
0x00ea: 0x013b, # LATIN CAPITAL LETTER L WITH CEDILLA
0x00eb: 0x013c, # LATIN SMALL LETTER L WITH CEDILLA
0x00ec: 0x0146, # LATIN SMALL LETTER N WITH CEDILLA
0x00ed: 0x0112, # LATIN CAPITAL LETTER E WITH MACRON
0x00ee: 0x0145, # LATIN CAPITAL LETTER N WITH CEDILLA
0x00ef: 0x2019, # RIGHT SINGLE QUOTATION MARK
0x00f0: 0x00ad, # SOFT HYPHEN
0x00f1: 0x00b1, # PLUS-MINUS SIGN
0x00f2: 0x201c, # LEFT DOUBLE QUOTATION MARK
0x00f3: 0x00be, # VULGAR FRACTION THREE QUARTERS
0x00f4: 0x00b6, # PILCROW SIGN
0x00f5: 0x00a7, # SECTION SIGN
0x00f6: 0x00f7, # DIVISION SIGN
0x00f7: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
0x00f8: 0x00b0, # DEGREE SIGN
0x00f9: 0x2219, # BULLET OPERATOR
0x00fa: 0x00b7, # MIDDLE DOT
0x00fb: 0x00b9, # SUPERSCRIPT ONE
0x00fc: 0x00b3, # SUPERSCRIPT THREE
0x00fd: 0x00b2, # SUPERSCRIPT TWO
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
'\x00' # 0x0000 -> NULL
'\x01' # 0x0001 -> START OF HEADING
'\x02' # 0x0002 -> START OF TEXT
'\x03' # 0x0003 -> END OF TEXT
'\x04' # 0x0004 -> END OF TRANSMISSION
'\x05' # 0x0005 -> ENQUIRY
'\x06' # 0x0006 -> ACKNOWLEDGE
'\x07' # 0x0007 -> BELL
'\x08' # 0x0008 -> BACKSPACE
'\t' # 0x0009 -> HORIZONTAL TABULATION
'\n' # 0x000a -> LINE FEED
'\x0b' # 0x000b -> VERTICAL TABULATION
'\x0c' # 0x000c -> FORM FEED
'\r' # 0x000d -> CARRIAGE RETURN
'\x0e' # 0x000e -> SHIFT OUT
'\x0f' # 0x000f -> SHIFT IN
'\x10' # 0x0010 -> DATA LINK ESCAPE
'\x11' # 0x0011 -> DEVICE CONTROL ONE
'\x12' # 0x0012 -> DEVICE CONTROL TWO
'\x13' # 0x0013 -> DEVICE CONTROL THREE
'\x14' # 0x0014 -> DEVICE CONTROL FOUR
'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x0016 -> SYNCHRONOUS IDLE
'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
'\x18' # 0x0018 -> CANCEL
'\x19' # 0x0019 -> END OF MEDIUM
'\x1a' # 0x001a -> SUBSTITUTE
'\x1b' # 0x001b -> ESCAPE
'\x1c' # 0x001c -> FILE SEPARATOR
'\x1d' # 0x001d -> GROUP SEPARATOR
'\x1e' # 0x001e -> RECORD SEPARATOR
'\x1f' # 0x001f -> UNIT SEPARATOR
' ' # 0x0020 -> SPACE
'!' # 0x0021 -> EXCLAMATION MARK
'"' # 0x0022 -> QUOTATION MARK
'#' # 0x0023 -> NUMBER SIGN
'$' # 0x0024 -> DOLLAR SIGN
'%' # 0x0025 -> PERCENT SIGN
'&' # 0x0026 -> AMPERSAND
"'" # 0x0027 -> APOSTROPHE
'(' # 0x0028 -> LEFT PARENTHESIS
')' # 0x0029 -> RIGHT PARENTHESIS
'*' # 0x002a -> ASTERISK
'+' # 0x002b -> PLUS SIGN
',' # 0x002c -> COMMA
'-' # 0x002d -> HYPHEN-MINUS
'.' # 0x002e -> FULL STOP
'/' # 0x002f -> SOLIDUS
'0' # 0x0030 -> DIGIT ZERO
'1' # 0x0031 -> DIGIT ONE
'2' # 0x0032 -> DIGIT TWO
'3' # 0x0033 -> DIGIT THREE
'4' # 0x0034 -> DIGIT FOUR
'5' # 0x0035 -> DIGIT FIVE
'6' # 0x0036 -> DIGIT SIX
'7' # 0x0037 -> DIGIT SEVEN
'8' # 0x0038 -> DIGIT EIGHT
'9' # 0x0039 -> DIGIT NINE
':' # 0x003a -> COLON
';' # 0x003b -> SEMICOLON
'<' # 0x003c -> LESS-THAN SIGN
'=' # 0x003d -> EQUALS SIGN
'>' # 0x003e -> GREATER-THAN SIGN
'?' # 0x003f -> QUESTION MARK
'@' # 0x0040 -> COMMERCIAL AT
'A' # 0x0041 -> LATIN CAPITAL LETTER A
'B' # 0x0042 -> LATIN CAPITAL LETTER B
'C' # 0x0043 -> LATIN CAPITAL LETTER C
'D' # 0x0044 -> LATIN CAPITAL LETTER D
'E' # 0x0045 -> LATIN CAPITAL LETTER E
'F' # 0x0046 -> LATIN CAPITAL LETTER F
'G' # 0x0047 -> LATIN CAPITAL LETTER G
'H' # 0x0048 -> LATIN CAPITAL LETTER H
'I' # 0x0049 -> LATIN CAPITAL LETTER I
'J' # 0x004a -> LATIN CAPITAL LETTER J
'K' # 0x004b -> LATIN CAPITAL LETTER K
'L' # 0x004c -> LATIN CAPITAL LETTER L
'M' # 0x004d -> LATIN CAPITAL LETTER M
'N' # 0x004e -> LATIN CAPITAL LETTER N
'O' # 0x004f -> LATIN CAPITAL LETTER O
'P' # 0x0050 -> LATIN CAPITAL LETTER P
'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
'R' # 0x0052 -> LATIN CAPITAL LETTER R
'S' # 0x0053 -> LATIN CAPITAL LETTER S
'T' # 0x0054 -> LATIN CAPITAL LETTER T
'U' # 0x0055 -> LATIN CAPITAL LETTER U
'V' # 0x0056 -> LATIN CAPITAL LETTER V
'W' # 0x0057 -> LATIN CAPITAL LETTER W
'X' # 0x0058 -> LATIN CAPITAL LETTER X
'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
'Z' # 0x005a -> LATIN CAPITAL LETTER Z
'[' # 0x005b -> LEFT SQUARE BRACKET
'\\' # 0x005c -> REVERSE SOLIDUS
']' # 0x005d -> RIGHT SQUARE BRACKET
'^' # 0x005e -> CIRCUMFLEX ACCENT
'_' # 0x005f -> LOW LINE
'`' # 0x0060 -> GRAVE ACCENT
'a' # 0x0061 -> LATIN SMALL LETTER A
'b' # 0x0062 -> LATIN SMALL LETTER B
'c' # 0x0063 -> LATIN SMALL LETTER C
'd' # 0x0064 -> LATIN SMALL LETTER D
'e' # 0x0065 -> LATIN SMALL LETTER E
'f' # 0x0066 -> LATIN SMALL LETTER F
'g' # 0x0067 -> LATIN SMALL LETTER G
'h' # 0x0068 -> LATIN SMALL LETTER H
'i' # 0x0069 -> LATIN SMALL LETTER I
'j' # 0x006a -> LATIN SMALL LETTER J
'k' # 0x006b -> LATIN SMALL LETTER K
'l' # 0x006c -> LATIN SMALL LETTER L
'm' # 0x006d -> LATIN SMALL LETTER M
'n' # 0x006e -> LATIN SMALL LETTER N
'o' # 0x006f -> LATIN SMALL LETTER O
'p' # 0x0070 -> LATIN SMALL LETTER P
'q' # 0x0071 -> LATIN SMALL LETTER Q
'r' # 0x0072 -> LATIN SMALL LETTER R
's' # 0x0073 -> LATIN SMALL LETTER S
't' # 0x0074 -> LATIN SMALL LETTER T
'u' # 0x0075 -> LATIN SMALL LETTER U
'v' # 0x0076 -> LATIN SMALL LETTER V
'w' # 0x0077 -> LATIN SMALL LETTER W
'x' # 0x0078 -> LATIN SMALL LETTER X
'y' # 0x0079 -> LATIN SMALL LETTER Y
'z' # 0x007a -> LATIN SMALL LETTER Z
'{' # 0x007b -> LEFT CURLY BRACKET
'|' # 0x007c -> VERTICAL LINE
'}' # 0x007d -> RIGHT CURLY BRACKET
'~' # 0x007e -> TILDE
'\x7f' # 0x007f -> DELETE
'\u0106' # 0x0080 -> LATIN CAPITAL LETTER C WITH ACUTE
'\xfc' # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
'\xe9' # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
'\u0101' # 0x0083 -> LATIN SMALL LETTER A WITH MACRON
'\xe4' # 0x0084 -> LATIN SMALL LETTER A WITH DIAERESIS
'\u0123' # 0x0085 -> LATIN SMALL LETTER G WITH CEDILLA
'\xe5' # 0x0086 -> LATIN SMALL LETTER A WITH RING ABOVE
'\u0107' # 0x0087 -> LATIN SMALL LETTER C WITH ACUTE
'\u0142' # 0x0088 -> LATIN SMALL LETTER L WITH STROKE
'\u0113' # 0x0089 -> LATIN SMALL LETTER E WITH MACRON
'\u0156' # 0x008a -> LATIN CAPITAL LETTER R WITH CEDILLA
'\u0157' # 0x008b -> LATIN SMALL LETTER R WITH CEDILLA
'\u012b' # 0x008c -> LATIN SMALL LETTER I WITH MACRON
'\u0179' # 0x008d -> LATIN CAPITAL LETTER Z WITH ACUTE
'\xc4' # 0x008e -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0x008f -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc9' # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xe6' # 0x0091 -> LATIN SMALL LIGATURE AE
'\xc6' # 0x0092 -> LATIN CAPITAL LIGATURE AE
'\u014d' # 0x0093 -> LATIN SMALL LETTER O WITH MACRON
'\xf6' # 0x0094 -> LATIN SMALL LETTER O WITH DIAERESIS
'\u0122' # 0x0095 -> LATIN CAPITAL LETTER G WITH CEDILLA
'\xa2' # 0x0096 -> CENT SIGN
'\u015a' # 0x0097 -> LATIN CAPITAL LETTER S WITH ACUTE
'\u015b' # 0x0098 -> LATIN SMALL LETTER S WITH ACUTE
'\xd6' # 0x0099 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xdc' # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xf8' # 0x009b -> LATIN SMALL LETTER O WITH STROKE
'\xa3' # 0x009c -> POUND SIGN
'\xd8' # 0x009d -> LATIN CAPITAL LETTER O WITH STROKE
'\xd7' # 0x009e -> MULTIPLICATION SIGN
'\xa4' # 0x009f -> CURRENCY SIGN
'\u0100' # 0x00a0 -> LATIN CAPITAL LETTER A WITH MACRON
'\u012a' # 0x00a1 -> LATIN CAPITAL LETTER I WITH MACRON
'\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
'\u017b' # 0x00a3 -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
'\u017c' # 0x00a4 -> LATIN SMALL LETTER Z WITH DOT ABOVE
'\u017a' # 0x00a5 -> LATIN SMALL LETTER Z WITH ACUTE
'\u201d' # 0x00a6 -> RIGHT DOUBLE QUOTATION MARK
'\xa6' # 0x00a7 -> BROKEN BAR
'\xa9' # 0x00a8 -> COPYRIGHT SIGN
'\xae' # 0x00a9 -> REGISTERED SIGN
'\xac' # 0x00aa -> NOT SIGN
'\xbd' # 0x00ab -> VULGAR FRACTION ONE HALF
'\xbc' # 0x00ac -> VULGAR FRACTION ONE QUARTER
'\u0141' # 0x00ad -> LATIN CAPITAL LETTER L WITH STROKE
'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\u2591' # 0x00b0 -> LIGHT SHADE
'\u2592' # 0x00b1 -> MEDIUM SHADE
'\u2593' # 0x00b2 -> DARK SHADE
'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
'\u0104' # 0x00b5 -> LATIN CAPITAL LETTER A WITH OGONEK
'\u010c' # 0x00b6 -> LATIN CAPITAL LETTER C WITH CARON
'\u0118' # 0x00b7 -> LATIN CAPITAL LETTER E WITH OGONEK
'\u0116' # 0x00b8 -> LATIN CAPITAL LETTER E WITH DOT ABOVE
'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
'\u012e' # 0x00bd -> LATIN CAPITAL LETTER I WITH OGONEK
'\u0160' # 0x00be -> LATIN CAPITAL LETTER S WITH CARON
'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
'\u0172' # 0x00c6 -> LATIN CAPITAL LETTER U WITH OGONEK
'\u016a' # 0x00c7 -> LATIN CAPITAL LETTER U WITH MACRON
'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
'\u017d' # 0x00cf -> LATIN CAPITAL LETTER Z WITH CARON
'\u0105' # 0x00d0 -> LATIN SMALL LETTER A WITH OGONEK
'\u010d' # 0x00d1 -> LATIN SMALL LETTER C WITH CARON
'\u0119' # 0x00d2 -> LATIN SMALL LETTER E WITH OGONEK
'\u0117' # 0x00d3 -> LATIN SMALL LETTER E WITH DOT ABOVE
'\u012f' # 0x00d4 -> LATIN SMALL LETTER I WITH OGONEK
'\u0161' # 0x00d5 -> LATIN SMALL LETTER S WITH CARON
'\u0173' # 0x00d6 -> LATIN SMALL LETTER U WITH OGONEK
'\u016b' # 0x00d7 -> LATIN SMALL LETTER U WITH MACRON
'\u017e' # 0x00d8 -> LATIN SMALL LETTER Z WITH CARON
'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
'\u2588' # 0x00db -> FULL BLOCK
'\u2584' # 0x00dc -> LOWER HALF BLOCK
'\u258c' # 0x00dd -> LEFT HALF BLOCK
'\u2590' # 0x00de -> RIGHT HALF BLOCK
'\u2580' # 0x00df -> UPPER HALF BLOCK
'\xd3' # 0x00e0 -> LATIN CAPITAL LETTER O WITH ACUTE
'\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S (GERMAN)
'\u014c' # 0x00e2 -> LATIN CAPITAL LETTER O WITH MACRON
'\u0143' # 0x00e3 -> LATIN CAPITAL LETTER N WITH ACUTE
'\xf5' # 0x00e4 -> LATIN SMALL LETTER O WITH TILDE
'\xd5' # 0x00e5 -> LATIN CAPITAL LETTER O WITH TILDE
'\xb5' # 0x00e6 -> MICRO SIGN
'\u0144' # 0x00e7 -> LATIN SMALL LETTER N WITH ACUTE
'\u0136' # 0x00e8 -> LATIN CAPITAL LETTER K WITH CEDILLA
'\u0137' # 0x00e9 -> LATIN SMALL LETTER K WITH CEDILLA
'\u013b' # 0x00ea -> LATIN CAPITAL LETTER L WITH CEDILLA
'\u013c' # 0x00eb -> LATIN SMALL LETTER L WITH CEDILLA
'\u0146' # 0x00ec -> LATIN SMALL LETTER N WITH CEDILLA
'\u0112' # 0x00ed -> LATIN CAPITAL LETTER E WITH MACRON
'\u0145' # 0x00ee -> LATIN CAPITAL LETTER N WITH CEDILLA
'\u2019' # 0x00ef -> RIGHT SINGLE QUOTATION MARK
'\xad' # 0x00f0 -> SOFT HYPHEN
'\xb1' # 0x00f1 -> PLUS-MINUS SIGN
'\u201c' # 0x00f2 -> LEFT DOUBLE QUOTATION MARK
'\xbe' # 0x00f3 -> VULGAR FRACTION THREE QUARTERS
'\xb6' # 0x00f4 -> PILCROW SIGN
'\xa7' # 0x00f5 -> SECTION SIGN
'\xf7' # 0x00f6 -> DIVISION SIGN
'\u201e' # 0x00f7 -> DOUBLE LOW-9 QUOTATION MARK
'\xb0' # 0x00f8 -> DEGREE SIGN
'\u2219' # 0x00f9 -> BULLET OPERATOR
'\xb7' # 0x00fa -> MIDDLE DOT
'\xb9' # 0x00fb -> SUPERSCRIPT ONE
'\xb3' # 0x00fc -> SUPERSCRIPT THREE
'\xb2' # 0x00fd -> SUPERSCRIPT TWO
'\u25a0' # 0x00fe -> BLACK SQUARE
'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
encoding_map = {
0x0000: 0x0000, # NULL
0x0001: 0x0001, # START OF HEADING
0x0002: 0x0002, # START OF TEXT
0x0003: 0x0003, # END OF TEXT
0x0004: 0x0004, # END OF TRANSMISSION
0x0005: 0x0005, # ENQUIRY
0x0006: 0x0006, # ACKNOWLEDGE
0x0007: 0x0007, # BELL
0x0008: 0x0008, # BACKSPACE
0x0009: 0x0009, # HORIZONTAL TABULATION
0x000a: 0x000a, # LINE FEED
0x000b: 0x000b, # VERTICAL TABULATION
0x000c: 0x000c, # FORM FEED
0x000d: 0x000d, # CARRIAGE RETURN
0x000e: 0x000e, # SHIFT OUT
0x000f: 0x000f, # SHIFT IN
0x0010: 0x0010, # DATA LINK ESCAPE
0x0011: 0x0011, # DEVICE CONTROL ONE
0x0012: 0x0012, # DEVICE CONTROL TWO
0x0013: 0x0013, # DEVICE CONTROL THREE
0x0014: 0x0014, # DEVICE CONTROL FOUR
0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
0x0016: 0x0016, # SYNCHRONOUS IDLE
0x0017: 0x0017, # END OF TRANSMISSION BLOCK
0x0018: 0x0018, # CANCEL
0x0019: 0x0019, # END OF MEDIUM
0x001a: 0x001a, # SUBSTITUTE
0x001b: 0x001b, # ESCAPE
0x001c: 0x001c, # FILE SEPARATOR
0x001d: 0x001d, # GROUP SEPARATOR
0x001e: 0x001e, # RECORD SEPARATOR
0x001f: 0x001f, # UNIT SEPARATOR
0x0020: 0x0020, # SPACE
0x0021: 0x0021, # EXCLAMATION MARK
0x0022: 0x0022, # QUOTATION MARK
0x0023: 0x0023, # NUMBER SIGN
0x0024: 0x0024, # DOLLAR SIGN
0x0025: 0x0025, # PERCENT SIGN
0x0026: 0x0026, # AMPERSAND
0x0027: 0x0027, # APOSTROPHE
0x0028: 0x0028, # LEFT PARENTHESIS
0x0029: 0x0029, # RIGHT PARENTHESIS
0x002a: 0x002a, # ASTERISK
0x002b: 0x002b, # PLUS SIGN
0x002c: 0x002c, # COMMA
0x002d: 0x002d, # HYPHEN-MINUS
0x002e: 0x002e, # FULL STOP
0x002f: 0x002f, # SOLIDUS
0x0030: 0x0030, # DIGIT ZERO
0x0031: 0x0031, # DIGIT ONE
0x0032: 0x0032, # DIGIT TWO
0x0033: 0x0033, # DIGIT THREE
0x0034: 0x0034, # DIGIT FOUR
0x0035: 0x0035, # DIGIT FIVE
0x0036: 0x0036, # DIGIT SIX
0x0037: 0x0037, # DIGIT SEVEN
0x0038: 0x0038, # DIGIT EIGHT
0x0039: 0x0039, # DIGIT NINE
0x003a: 0x003a, # COLON
0x003b: 0x003b, # SEMICOLON
0x003c: 0x003c, # LESS-THAN SIGN
0x003d: 0x003d, # EQUALS SIGN
0x003e: 0x003e, # GREATER-THAN SIGN
0x003f: 0x003f, # QUESTION MARK
0x0040: 0x0040, # COMMERCIAL AT
0x0041: 0x0041, # LATIN CAPITAL LETTER A
0x0042: 0x0042, # LATIN CAPITAL LETTER B
0x0043: 0x0043, # LATIN CAPITAL LETTER C
0x0044: 0x0044, # LATIN CAPITAL LETTER D
0x0045: 0x0045, # LATIN CAPITAL LETTER E
0x0046: 0x0046, # LATIN CAPITAL LETTER F
0x0047: 0x0047, # LATIN CAPITAL LETTER G
0x0048: 0x0048, # LATIN CAPITAL LETTER H
0x0049: 0x0049, # LATIN CAPITAL LETTER I
0x004a: 0x004a, # LATIN CAPITAL LETTER J
0x004b: 0x004b, # LATIN CAPITAL LETTER K
0x004c: 0x004c, # LATIN CAPITAL LETTER L
0x004d: 0x004d, # LATIN CAPITAL LETTER M
0x004e: 0x004e, # LATIN CAPITAL LETTER N
0x004f: 0x004f, # LATIN CAPITAL LETTER O
0x0050: 0x0050, # LATIN CAPITAL LETTER P
0x0051: 0x0051, # LATIN CAPITAL LETTER Q
0x0052: 0x0052, # LATIN CAPITAL LETTER R
0x0053: 0x0053, # LATIN CAPITAL LETTER S
0x0054: 0x0054, # LATIN CAPITAL LETTER T
0x0055: 0x0055, # LATIN CAPITAL LETTER U
0x0056: 0x0056, # LATIN CAPITAL LETTER V
0x0057: 0x0057, # LATIN CAPITAL LETTER W
0x0058: 0x0058, # LATIN CAPITAL LETTER X
0x0059: 0x0059, # LATIN CAPITAL LETTER Y
0x005a: 0x005a, # LATIN CAPITAL LETTER Z
0x005b: 0x005b, # LEFT SQUARE BRACKET
0x005c: 0x005c, # REVERSE SOLIDUS
0x005d: 0x005d, # RIGHT SQUARE BRACKET
0x005e: 0x005e, # CIRCUMFLEX ACCENT
0x005f: 0x005f, # LOW LINE
0x0060: 0x0060, # GRAVE ACCENT
0x0061: 0x0061, # LATIN SMALL LETTER A
0x0062: 0x0062, # LATIN SMALL LETTER B
0x0063: 0x0063, # LATIN SMALL LETTER C
0x0064: 0x0064, # LATIN SMALL LETTER D
0x0065: 0x0065, # LATIN SMALL LETTER E
0x0066: 0x0066, # LATIN SMALL LETTER F
0x0067: 0x0067, # LATIN SMALL LETTER G
0x0068: 0x0068, # LATIN SMALL LETTER H
0x0069: 0x0069, # LATIN SMALL LETTER I
0x006a: 0x006a, # LATIN SMALL LETTER J
0x006b: 0x006b, # LATIN SMALL LETTER K
0x006c: 0x006c, # LATIN SMALL LETTER L
0x006d: 0x006d, # LATIN SMALL LETTER M
0x006e: 0x006e, # LATIN SMALL LETTER N
0x006f: 0x006f, # LATIN SMALL LETTER O
0x0070: 0x0070, # LATIN SMALL LETTER P
0x0071: 0x0071, # LATIN SMALL LETTER Q
0x0072: 0x0072, # LATIN SMALL LETTER R
0x0073: 0x0073, # LATIN SMALL LETTER S
0x0074: 0x0074, # LATIN SMALL LETTER T
0x0075: 0x0075, # LATIN SMALL LETTER U
0x0076: 0x0076, # LATIN SMALL LETTER V
0x0077: 0x0077, # LATIN SMALL LETTER W
0x0078: 0x0078, # LATIN SMALL LETTER X
0x0079: 0x0079, # LATIN SMALL LETTER Y
0x007a: 0x007a, # LATIN SMALL LETTER Z
0x007b: 0x007b, # LEFT CURLY BRACKET
0x007c: 0x007c, # VERTICAL LINE
0x007d: 0x007d, # RIGHT CURLY BRACKET
0x007e: 0x007e, # TILDE
0x007f: 0x007f, # DELETE
0x00a0: 0x00ff, # NO-BREAK SPACE
0x00a2: 0x0096, # CENT SIGN
0x00a3: 0x009c, # POUND SIGN
0x00a4: 0x009f, # CURRENCY SIGN
0x00a6: 0x00a7, # BROKEN BAR
0x00a7: 0x00f5, # SECTION SIGN
0x00a9: 0x00a8, # COPYRIGHT SIGN
0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00ac: 0x00aa, # NOT SIGN
0x00ad: 0x00f0, # SOFT HYPHEN
0x00ae: 0x00a9, # REGISTERED SIGN
0x00b0: 0x00f8, # DEGREE SIGN
0x00b1: 0x00f1, # PLUS-MINUS SIGN
0x00b2: 0x00fd, # SUPERSCRIPT TWO
0x00b3: 0x00fc, # SUPERSCRIPT THREE
0x00b5: 0x00e6, # MICRO SIGN
0x00b6: 0x00f4, # PILCROW SIGN
0x00b7: 0x00fa, # MIDDLE DOT
0x00b9: 0x00fb, # SUPERSCRIPT ONE
0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00bc: 0x00ac, # VULGAR FRACTION ONE QUARTER
0x00bd: 0x00ab, # VULGAR FRACTION ONE HALF
0x00be: 0x00f3, # VULGAR FRACTION THREE QUARTERS
0x00c4: 0x008e, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x00c5: 0x008f, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x00c6: 0x0092, # LATIN CAPITAL LIGATURE AE
0x00c9: 0x0090, # LATIN CAPITAL LETTER E WITH ACUTE
0x00d3: 0x00e0, # LATIN CAPITAL LETTER O WITH ACUTE
0x00d5: 0x00e5, # LATIN CAPITAL LETTER O WITH TILDE
0x00d6: 0x0099, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x00d7: 0x009e, # MULTIPLICATION SIGN
0x00d8: 0x009d, # LATIN CAPITAL LETTER O WITH STROKE
0x00dc: 0x009a, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S (GERMAN)
0x00e4: 0x0084, # LATIN SMALL LETTER A WITH DIAERESIS
0x00e5: 0x0086, # LATIN SMALL LETTER A WITH RING ABOVE
0x00e6: 0x0091, # LATIN SMALL LIGATURE AE
0x00e9: 0x0082, # LATIN SMALL LETTER E WITH ACUTE
0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE
0x00f5: 0x00e4, # LATIN SMALL LETTER O WITH TILDE
0x00f6: 0x0094, # LATIN SMALL LETTER O WITH DIAERESIS
0x00f7: 0x00f6, # DIVISION SIGN
0x00f8: 0x009b, # LATIN SMALL LETTER O WITH STROKE
0x00fc: 0x0081, # LATIN SMALL LETTER U WITH DIAERESIS
0x0100: 0x00a0, # LATIN CAPITAL LETTER A WITH MACRON
0x0101: 0x0083, # LATIN SMALL LETTER A WITH MACRON
0x0104: 0x00b5, # LATIN CAPITAL LETTER A WITH OGONEK
0x0105: 0x00d0, # LATIN SMALL LETTER A WITH OGONEK
0x0106: 0x0080, # LATIN CAPITAL LETTER C WITH ACUTE
0x0107: 0x0087, # LATIN SMALL LETTER C WITH ACUTE
0x010c: 0x00b6, # LATIN CAPITAL LETTER C WITH CARON
0x010d: 0x00d1, # LATIN SMALL LETTER C WITH CARON
0x0112: 0x00ed, # LATIN CAPITAL LETTER E WITH MACRON
0x0113: 0x0089, # LATIN SMALL LETTER E WITH MACRON
0x0116: 0x00b8, # LATIN CAPITAL LETTER E WITH DOT ABOVE
0x0117: 0x00d3, # LATIN SMALL LETTER E WITH DOT ABOVE
0x0118: 0x00b7, # LATIN CAPITAL LETTER E WITH OGONEK
0x0119: 0x00d2, # LATIN SMALL LETTER E WITH OGONEK
0x0122: 0x0095, # LATIN CAPITAL LETTER G WITH CEDILLA
0x0123: 0x0085, # LATIN SMALL LETTER G WITH CEDILLA
0x012a: 0x00a1, # LATIN CAPITAL LETTER I WITH MACRON
0x012b: 0x008c, # LATIN SMALL LETTER I WITH MACRON
0x012e: 0x00bd, # LATIN CAPITAL LETTER I WITH OGONEK
0x012f: 0x00d4, # LATIN SMALL LETTER I WITH OGONEK
0x0136: 0x00e8, # LATIN CAPITAL LETTER K WITH CEDILLA
0x0137: 0x00e9, # LATIN SMALL LETTER K WITH CEDILLA
0x013b: 0x00ea, # LATIN CAPITAL LETTER L WITH CEDILLA
0x013c: 0x00eb, # LATIN SMALL LETTER L WITH CEDILLA
0x0141: 0x00ad, # LATIN CAPITAL LETTER L WITH STROKE
0x0142: 0x0088, # LATIN SMALL LETTER L WITH STROKE
0x0143: 0x00e3, # LATIN CAPITAL LETTER N WITH ACUTE
0x0144: 0x00e7, # LATIN SMALL LETTER N WITH ACUTE
0x0145: 0x00ee, # LATIN CAPITAL LETTER N WITH CEDILLA
0x0146: 0x00ec, # LATIN SMALL LETTER N WITH CEDILLA
0x014c: 0x00e2, # LATIN CAPITAL LETTER O WITH MACRON
0x014d: 0x0093, # LATIN SMALL LETTER O WITH MACRON
0x0156: 0x008a, # LATIN CAPITAL LETTER R WITH CEDILLA
0x0157: 0x008b, # LATIN SMALL LETTER R WITH CEDILLA
0x015a: 0x0097, # LATIN CAPITAL LETTER S WITH ACUTE
0x015b: 0x0098, # LATIN SMALL LETTER S WITH ACUTE
0x0160: 0x00be, # LATIN CAPITAL LETTER S WITH CARON
0x0161: 0x00d5, # LATIN SMALL LETTER S WITH CARON
0x016a: 0x00c7, # LATIN CAPITAL LETTER U WITH MACRON
0x016b: 0x00d7, # LATIN SMALL LETTER U WITH MACRON
0x0172: 0x00c6, # LATIN CAPITAL LETTER U WITH OGONEK
0x0173: 0x00d6, # LATIN SMALL LETTER U WITH OGONEK
0x0179: 0x008d, # LATIN CAPITAL LETTER Z WITH ACUTE
0x017a: 0x00a5, # LATIN SMALL LETTER Z WITH ACUTE
0x017b: 0x00a3, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
0x017c: 0x00a4, # LATIN SMALL LETTER Z WITH DOT ABOVE
0x017d: 0x00cf, # LATIN CAPITAL LETTER Z WITH CARON
0x017e: 0x00d8, # LATIN SMALL LETTER Z WITH CARON
0x2019: 0x00ef, # RIGHT SINGLE QUOTATION MARK
0x201c: 0x00f2, # LEFT DOUBLE QUOTATION MARK
0x201d: 0x00a6, # RIGHT DOUBLE QUOTATION MARK
0x201e: 0x00f7, # DOUBLE LOW-9 QUOTATION MARK
0x2219: 0x00f9, # BULLET OPERATOR
0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL
0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL
0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT
0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT
0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL
0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL
0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT
0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x2580: 0x00df, # UPPER HALF BLOCK
0x2584: 0x00dc, # LOWER HALF BLOCK
0x2588: 0x00db, # FULL BLOCK
0x258c: 0x00dd, # LEFT HALF BLOCK
0x2590: 0x00de, # RIGHT HALF BLOCK
0x2591: 0x00b0, # LIGHT SHADE
0x2592: 0x00b1, # MEDIUM SHADE
0x2593: 0x00b2, # DARK SHADE
0x25a0: 0x00fe, # BLACK SQUARE
}
|
arnaudsj/titanium_mobile | refs/heads/master | support/project.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Unified Titanium Mobile Project Script
#
import os, sys, subprocess, shutil, codecs
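# Example invocation (hypothetical project values), matching the usage
# string printed in main() below:
#   python project.py MyApp com.example.myapp ~/Projects iphone android ~/android-sdk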
def run(args):
return subprocess.Popen(args, stderr=subprocess.PIPE, stdout=subprocess.PIPE).communicate()
def main(args):
argc = len(args)
if argc < 5 or args[1]=='--help':
print "Usage: %s <name> <id> <directory> [iphone,android] [android_sdk]" % os.path.basename(args[0])
sys.exit(1)
name = args[1].decode("utf-8")
appid = args[2].decode("utf-8")
directory = os.path.abspath(os.path.expanduser(args[3].decode("utf-8")))
iphone = False
android = False
android_sdk = None
sdk = None
if args[4] == 'iphone' or (argc > 5 and args[5] == 'iphone'):
iphone = True
if args[4] == 'android' or (argc > 5 and args[5] == 'android'):
android = True
if android:
sys.path.append(os.path.join(os.path.dirname(args[0]), "android"))
from androidsdk import AndroidSDK
android_sdk = args[argc-1].decode("utf-8")
try:
sdk = AndroidSDK(android_sdk)
except Exception, e:
print >>sys.stderr, e
sys.exit(1)
if not os.path.exists(directory):
os.makedirs(directory)
project_dir = os.path.join(directory,name)
if not os.path.exists(project_dir):
os.makedirs(project_dir)
template_dir = os.path.abspath(os.path.dirname(sys._getframe(0).f_code.co_filename))
all_dir = os.path.abspath(os.path.join(template_dir,'all'))
if not os.path.exists(all_dir):
all_dir = template_dir
tiapp = codecs.open(os.path.join(all_dir,'tiapp.xml'),'r','utf-8','replace').read()
tiapp = tiapp.replace('__PROJECT_ID__',appid)
tiapp = tiapp.replace('__PROJECT_NAME__',name)
tiapp = tiapp.replace('__PROJECT_VERSION__','1.0')
tiapp_file = codecs.open(os.path.join(project_dir,'tiapp.xml'),'w+','utf-8','replace')
tiapp_file.write(tiapp)
tiapp_file.close()
# create the titanium resources
resources_dir = os.path.join(project_dir,'Resources')
if not os.path.exists(resources_dir):
os.makedirs(resources_dir)
# write out our gitignore
gitignore = open(os.path.join(project_dir,'.gitignore'),'w')
	# starting in 1.4, we can safely exclude the build folder from git
gitignore.write("tmp\n")
gitignore.close()
if iphone:
iphone_resources = os.path.join(resources_dir,'iphone')
if not os.path.exists(iphone_resources): os.makedirs(iphone_resources)
iphone_gen = os.path.join(template_dir,'iphone','iphone.py')
run([sys.executable, iphone_gen, name, appid, directory])
if android:
android_resources = os.path.join(resources_dir,'android')
if not os.path.exists(android_resources): os.makedirs(android_resources)
android_gen = os.path.join(template_dir,'android','android.py')
run([sys.executable, android_gen, name, appid, directory, android_sdk])
# copy LICENSE and README
for file in ['LICENSE','README']:
shutil.copy(os.path.join(all_dir,file),os.path.join(project_dir,file))
# copy RESOURCES
for file in ['app.js']:
shutil.copy(os.path.join(all_dir,file),os.path.join(resources_dir,file))
# copy IMAGES
for file in ['KS_nav_ui.png', 'KS_nav_views.png']:
shutil.copy(os.path.join(all_dir,file),os.path.join(resources_dir,file))
if __name__ == '__main__':
main(sys.argv)
|
jackytu/newbrandx | refs/heads/rankx | sites/us/apps/shipping/models.py | 34 | from oscar.apps.shipping.models import *
|
hiway/micropython | refs/heads/master | examples/network/http_server_ssl.py | 39 | try:
import usocket as socket
except:
import socket
import ussl as ssl
CONTENT = b"""\
HTTP/1.0 200 OK
Hello #%d from MicroPython!
"""
def main(use_stream=True):
s = socket.socket()
# Binding to all interfaces - server will be accessible to other hosts!
ai = socket.getaddrinfo("0.0.0.0", 8443)
print("Bind address info:", ai)
addr = ai[0][-1]
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(addr)
s.listen(5)
print("Listening, connect your browser to https://<this_host>:8443/")
counter = 0
while True:
res = s.accept()
client_s = res[0]
client_addr = res[1]
print("Client address:", client_addr)
print("Client socket:", client_s)
client_s = ssl.wrap_socket(client_s, server_side=True)
print(client_s)
print("Request:")
if use_stream:
# Both CPython and MicroPython SSLSocket objects support read() and
# write() methods.
            # Browsers are prone to terminating an SSL connection abruptly
            # if they see an unknown certificate, etc. We must continue in
            # such a case - the next request they issue will likely be
            # better behaved and will succeed.
try:
req = client_s.readline()
print(req)
while True:
h = client_s.readline()
if h == b"" or h == b"\r\n":
break
print(h)
if req:
client_s.write(CONTENT % counter)
except Exception as e:
print("Exception serving request:", e)
else:
print(client_s.recv(4096))
client_s.send(CONTENT % counter)
client_s.close()
counter += 1
print()
main()
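# To exercise the server from another machine, something like
# `curl -k https://<this_host>:8443/` should work; -k tells curl to skip
# certificate verification for the self-signed/test certificate.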
|
YannChemin/distRS | refs/heads/master | prog/prog_ETa_Global/Global_ET_Biome_parameters.py | 2 | #!/usr/bin/python
from math import *
def fc( ndvi ):
"""Fraction of vegetation cover"""
ndvimin = 0.05
ndvimax = 0.95
return ( ( ndvi - ndvimin ) / ( ndvimax - ndvimin ) )
def bdpc( ndvi, b1, b2, b3, b4 ):
"""Biome dependent potential conductance (g0 in Zhang et al, 2009. WRR)"""
return (1.0 / (b1 + b2 * exp(-b3*ndvi))+b4)
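# Worked example: with the Savannah parameters used in run() below
# (b1=790.9, b2=8181.8, b3=10.0, b4=0.0) and ndvi=0.3,
# g0 = 1/(790.9 + 8181.8*exp(-3.0)) ~= 8.3e-4 m/s.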
def mTday( tday, tclosemin, topenmax, topt, beta ):
"""Temperature dependent reduction function of the biome dependent potential conductance"""
if tday <= tclosemin:
return 0.01
elif tday >= topenmax:
return 0.01
else:
return ( exp( - ( (tday-topt)/beta )*( (tday-topt)/beta ) ) )
def mVPD( vpd, vpdclose, vpdopen ):
"""VPD dependent reduction function of the biome dependent potential conductance"""
if vpd <= vpdopen:
return 1.0
    elif vpd >= vpdclose:
        # stomata are assumed effectively closed here; a small floor
        # (0.01, matching mTday's floor) keeps gs > 0 so 1/gs and ga/gs
        # stay finite, and is consistent with the ramp below tending to 0
        return 0.01
else:
return ( ( vpdclose - vpd ) / ( vpdclose - vpdopen ) )
def esat( tday ):
"""From FAO56, output is in [Pa]"""
return( 1000 * 0.6108 * exp( 17.27 * tday / (tday + 237.3) ) )
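# Sanity check: esat(25.0) ~= 3168 Pa (~3.17 kPa), matching the FAO56
# saturation vapour pressure at 25 C.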
def vpdeficit( rh, tday ):
"""From FAO56, vpd is esat - eact"""
e_sat = esat( tday )
return ( ( 1.0 - rh ) * e_sat )
def rhumidity( sh, tday, patm ):
"""http://en.wikipedia.org/wiki/Humidity#Specific_humidity
Output [0.0-1.0]"""
e_sat = esat( tday )
print "e_sat\t=",e_sat,"\t[Pa]"
b = ( 0.622 + sh ) * e_sat
return ( sh * patm / b )
def slopesvpcurve( tday ):
"""From FAO56, output is in [Pa/C]"""
e_sat = esat( tday )
return( 4098.0 * e_sat / pow( tday + 237.3, 2 ) )
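# Sanity check: slopesvpcurve(25.0) ~= 189 Pa/C, matching the FAO56
# tabulated slope of the saturation vapour pressure curve at 25 C.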
def rhoair( dem, tday ):
"""Requires T in Kelvin"""
t = tday+273.15
b = ( ( t - ( 0.00627 * dem ) ) / t )
return( 349.467 * pow( b, 5.26 ) / t )
def ETa (biome_type, ndvi, tday, sh, patm, Rn, G, dem):
"""ETa global model from Zhang et al 2010.
A continuous satellite derived global record of land surface evapotranspiration from 1983 to 2006.
WRR 46"""
b4 = 0.0 #except for WSV with NDVI > 0.64
if biome_type == "BENF":
tclosemin = -8.0
topenmax = 40.0
vpdclose = 2800.0
vpdopen = 500.0
topt = 12.0
beta = 25.0
k = 150.0
ga = 0.03
gtot = 0.002
gch = 0.08
b1 = 208.3
b2 = 8333.3
b3 = 10.0
if biome_type == "TENF":
tclosemin = -8.0
topenmax = 40.0
vpdclose = 2800.0
vpdopen = 500.0
topt = 25.0
beta = 25.0
k = 200.0
ga = 0.03
gtot = 0.004
gch = 0.08
b1 = 133.3
b2 = 888.9
b3 = 6.0
if biome_type == "EBF":
tclosemin = -8.0
topenmax = 50.0
vpdclose = 4000.0
vpdopen = 500.0
topt = 40.0
beta = 40.0
k = 300.0
ga = 0.03
gtot = 0.006
gch = 0.01
b1 = 57.7
b2 = 769.2
b3 = 4.5
if biome_type == "DBF":
tclosemin = -6.0
topenmax = 45.0
vpdclose = 2800.0
vpdopen = 650.0
topt = 28.0
beta = 25.0
k = 200.0
ga = 0.04
gtot = 0.002
gch = 0.01
b1 = 85.8
b2 = 694.7
b3 = 4
if biome_type == "CSH":
tclosemin = -8.0
topenmax = 45.0
vpdclose = 3300.0
vpdopen = 500.0
topt = 19.0
beta = 20.0
k = 400.0
ga = 0.01
gtot = 0.001
gch = 0.04
b1 = 202.0
b2 = 4040.4
b3 = 6.5
if biome_type == "OSH":
tclosemin = -8.0
topenmax = 40.0
vpdclose = 3700.0
vpdopen = 500.0
topt = 10.0
beta = 30.0
k = 50.0
ga = 0.005
gtot = 0.012
gch = 0.04
b1 = 178.6
b2 = 178.6
b3 = 8
if biome_type == "WSV" and ndvi < 0.64:
tclosemin = -8.0
topenmax = 50.0
vpdclose = 3200.0
vpdopen = 500.0
topt = 32.0
beta = 28.0
k = 900.0
ga = 0.002
gtot = 0.0018
gch = 0.04
b1 = 0.2
b2 = 24000
b3 = 6.5
if biome_type == "WSV" and ndvi > 0.64:
tclosemin = -8.0
topenmax = 50.0
vpdclose = 3200.0
vpdopen = 500.0
topt = 32.0
beta = 28.0
k = 900.0
ga = 0.002
gtot = 0.0018
gch = 0.04
b1 = 57.1
b2 = 3333.3
b3 = 8.0
b4 = -0.01035
if biome_type == "SV":
tclosemin = -8.0
topenmax = 40.0
vpdclose = 5000.0
vpdopen = 650.0
topt = 32.0
beta = 30.0
k = 800.0
ga = 0.001
gtot = 0.001
gch = 0.04
b1 = 790.9
b2 = 8181.8
b3 = 10.0
if biome_type == "GRS":
tclosemin = -8.0
topenmax = 40.0
vpdclose = 3800.0
vpdopen = 650.0
topt = 20.0
beta = 30.0
k = 500.0
ga = 0.001
gtot = 0.001
gch = 0.04
b1 = 175.0
b2 = 2000
b3 = 6.0
if biome_type == "CRP":
tclosemin = -8.0
topenmax = 45.0
vpdclose = 3800.0
vpdopen = 650.0
topt = 20.0
beta = 30.0
k = 450.0
ga = 0.005
gtot = 0.003
gch = 0.04
b1 = 105.0
b2 = 300.0
b3 = 3.0
#Compute potential conductance for this biome and this NDVI
g0 = bdpc(ndvi,b1,b2,b3,b4)
#Preprocessing for Surface conductance (gs) in PM (FAO56), gc in this article
mtday = mTday( tday, tclosemin, topenmax, topt, beta )
#relative humidity
rh = rhumidity( sh, tday, patm )
print "rh\t=",rh,"\t[-]"
vpd = vpdeficit( rh, tday )
print "vpd\t=",vpd,"\t\t[Pa]"
mvpd = mVPD( vpd, vpdclose, vpdopen )
#Actually computing Surface conductance (gs) in PM (FAO56), gc in this article
gs = g0 * mtday * mvpd
print "rs\t=",1/gs,"\t[s/m]"
#Fraction of vegetation cover
fracveg = fc(ndvi)
print "fc\t=", fracveg, "\t[-]"
#preprocessing for soil Evaporation
latent = 2.45 #MJ/Kg FAO56
MaMw = 0.622 # - FAO56
Cp = 1.013 * 0.001 # MJ/Kg/C FAO56
psi = patm * Cp / (MaMw * latent) #psi = patm * 0.6647 / 1000
print "psi\t=",psi,"\t[Pa/C]"
gtotc = gtot * ((273.15+tday) / 293.13) * (101300.0 / patm)
Delta = slopesvpcurve( tday ) #slope in Pa/C
print "Delta\t=",Delta,"\t[de/dt]"
rho = rhoair( dem, tday )
print "rho\t=",rho,"\t[kg/m3]"
#soil Evaporation
Esoil = pow(rh,vpd/k) * (Delta*(1-fracveg)*(Rn-G)+rho*Cp*vpd*ga) / (Delta+psi*ga/gtotc)
#Canopy evapotranspiration
Ecanopy = (Delta*fracveg*(Rn-G)+rho*Cp*vpd*ga) / (Delta+psi*(1.0+ga/gs))
print "-------------------------------------------------------------"
print "Esoil\t=", Esoil, "\t[mm/d]"
print "Ecanopy\t=", Ecanopy, "\t[mm/d]"
print "-------------------------------------------------------------"
return( (1-fracveg) * Esoil + fracveg * Ecanopy )
def run():
print "-------------------------------------------------------------"
print "ETa (biome_type, ndvi, tday, sh, patm, Rn, G, dem)"
print "-------------------------------------------------------------"
print "biome_type\t= SV\t\t(Savannah)"
print "ndvi\t\t= 0.3\t\t[-]"
print "tday\t\t= 32.0\t\t[C]\t\t(from GCM)"
print "sh\t\t= 0.0189\t[-]\t\t(from GCM)"
print "patm\t\t= 100994.2\t[Pa]\t\t(from GCM)"
print "Rn\t\t= 20.0\t\t[MJ/m2/d]\t(from GCM eq. 236.4 W/m2)"
print "G\t\t= 2.0\t\t[MJ/m2/d]\t(10% Rn eq. 23.64 W/m2)"
print "dem\t\t= 5\t\t[m]\t\t(from SRTM 1Km)"
print "-------------------------------------------------------------"
eta = ETa ("SV", 0.3, 32.0, 0.0189, 100994.2, 20.0, 2.0, 5.0)
print "-------------------------------------------------------------"
print "ETa\t=",eta,"\t[mm/d]"
print "-------------------------------------------------------------"
def info():
print "Zhang, K., Kimball, J.S., Nemani, R.R., Running, S.W. (2010). A continuous satellite-derived global record of land surface evapotranspiration from 1983 to 2006. WRR 46, W09522"
print "-------------------------------------------------------------------------------------------------------------------------------------------------------"
print "to run test use the function run()"
print "-------------------------------------------------------------------------------------------------------------------------------------------------------"
print "to run with your data use the function ETa()"
print "ETa (biome_type, ndvi, tday, sh, patm, Rn, G)"
print "-------------------------------------------------------------"
print "Biome_type\t as defined at the end of this info bubble"
print "ndvi\t\t NDVI value\t\t [-]"
print "tday\t\t day temperature\t [C]"
print "sh\t\t specific humidity\t [-]"
print "patm\t\t atmospheric pressure\t [Pa]"
print "Rn\t\t day net radiation\t [MJ/m2/d]"
print "G\t\t day soil heat flux\t [MJ/m2/d]"
print "-------------------------------------------------------------------------------------------------------------------------------------------------------"
print "IGBP Biome types used in this model"
print "-------------------------------------------------------------------------------------------------------------------------------------------------------"
print "Code\t Description"
print "BENF\t Boreal Evergreen Needleleaf Forest"
print "TENF\t Temperate Evergreen Needleleaf Forest"
print "EBF\t Evergreen Broadleaf Forest"
print "DBF\t Deciduous Broadleaf Forest"
print "CSH\t Closed Shrubland"
print "OSH\t Open Shrubland"
print "WSV\t Woody Savannah"
print "SV\t Savannah"
print "GRS\t Grassland"
print "CRP\t Cropland"
print "-------------------------------------------------------------------------------------------------------------------------------------------------------"
print "IGBP Biome types and configuration of internal parameters of the model"
print "-------------------------------------------------------------------------------------------------------------------------------------------------------"
print "#Code Description TcloseMinC TopenMaxC VPDClosePa VPDOpenPa ToptC BetaC kPa GaMs-1 GtotMs-1 GchMs-1 B1Sm-1 B2Sm-1 B3 b1 b2 b3 b4"
print "#BENF Boreal Evergreen Needleleaf Forest -8 40 2800 500 12 25 150 0.03 0.002 0.08 208.3 8333.3 10"
print "#TENF Temperate Evergreen Needleleaf Forest -8 40 2800 500 25 25 200 0.03 0.004 0.08 133.3 888.9 6"
print "#EBF Evergreen Broadleaf Forest -8 50 4000 500 40 40 300 0.03 0.006 0.01 57.7 769.2 4.5"
print "#DBF Deciduous Broadleaf Forest -6 45 2800 650 28 25 200 0.04 0.002 0.01 85.8 694.7 4"
print "#MF Mixed Forest"
print "#CSH Closed Shrubland -8 45 3300 500 19 20 400 0.01 0.001 0.04 202 4040.4 6.5"
print "#OSH Open Shrubland -8 40 3700 500 10 30 50 0.005 0.012 0.04 178.6 178.6 8"
print "#WSV Woody Savannah -8 50 3200 500 32 28 900 0.002 0.0018 0.04 0.2 24000 6.5 57.1 3333.3 8 -0.01035"
print "#SV Savannah -8 40 5000 650 32 30 800 0.001 0.001 0.04 790.9 8181.8 10"
print "#GRS Grassland -8 40 3800 650 20 30 500 0.001 0.001 0.04 175 2000 6"
print "#CRP Cropland -8 45 3800 650 20 30 450 0.005 0.003 0.04 105 3000 3"
print " #For WSV when NDVI>0.64"
|
srivassumit/servo | refs/heads/master | tests/wpt/css-tests/tools/pywebsocket/src/example/origin_check_wsh.py | 516 | # Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# This example is derived from test/testdata/handlers/origin_check_wsh.py.
def web_socket_do_extra_handshake(request):
if request.ws_origin == 'http://example.com':
return
raise ValueError('Unacceptable origin: %r' % request.ws_origin)
def web_socket_transfer_data(request):
request.connection.write('origin_check_wsh.py is called for %s, %s' %
(request.ws_resource, request.ws_protocol))
# vi:sts=4 sw=4 et
|
lileeyao/acm | refs/heads/master | linked_list/intermediate/copy_list_with_random_pointer.py | 2 | # Definition for singly-linked list with a random pointer.
# class RandomListNode:
# def __init__(self, x):
# self.label = x
# self.next = None
# self.random = None
class Solution:
# @param head, a RandomListNode
# @return a RandomListNode
def copyRandomList(self, head):
if not head: return head
cur = head
#insert a copy node after each node.
while cur:
tmp = RandomListNode(cur.label)
tmp.next = cur.next
cur.next = tmp
cur = tmp.next
#copy random pointer
cur = head
while cur:
tmp = cur.next
if cur.random:
tmp.random = cur.random.next
cur = tmp.next
# decouple
cur = head
dup = head.next
while cur:
tmp = cur.next
cur.next = tmp.next
if tmp.next:
tmp.next = tmp.next.next
cur = cur.next
return dup
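# Usage sketch (assumes the RandomListNode class from the commented-out
# header above):
#   a = RandomListNode(1); b = RandomListNode(2)
#   a.next = b; a.random = b; b.random = a
#   dup = Solution().copyRandomList(a)  # deep copy; dup.random.label == 2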
|
alhashash/odoo | refs/heads/master | addons/l10n_ca/__init__.py | 8 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010 Savoir-faire Linux (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
|
GerHobbelt/civet-webserver | refs/heads/master | conan/build.py | 2 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
from cpt.packager import ConanMultiPackager
from cpt.ci_manager import CIManager
from cpt.printer import Printer
class BuilderSettings(object):
@property
def branch(self):
""" Get branch name
"""
printer = Printer(None)
ci_manager = CIManager(printer)
return ci_manager.get_branch()
@property
def username(self):
""" Set civetweb as package's owner
"""
return os.getenv("CONAN_USERNAME", "civetweb")
@property
def upload(self):
""" Set civetweb repository to be used on upload.
The upload server address could be customized by env var
CONAN_UPLOAD. If not defined, the method will check the branch name.
Only master or CONAN_STABLE_BRANCH_PATTERN will be accepted.
The master branch will be pushed to testing channel, because it does
not match the stable pattern. Otherwise it will upload to stable
channel.
"""
if os.getenv("CONAN_UPLOAD", None) is not None:
return os.getenv("CONAN_UPLOAD")
prog = re.compile(self.stable_branch_pattern)
if self.branch and prog.match(self.branch):
return "https://api.bintray.com/conan/civetweb/conan"
return None
@property
def upload_only_when_stable(self):
""" Force to upload when match stable pattern branch
"""
return os.getenv("CONAN_UPLOAD_ONLY_WHEN_STABLE", True)
@property
def stable_branch_pattern(self):
""" Only upload the package the branch name is like a tag
"""
return os.getenv("CONAN_STABLE_BRANCH_PATTERN", r"v(\d+\.\d+)")
@property
def version(self):
regex = re.compile(self.stable_branch_pattern)
match = regex.match(self.branch)
if match:
return match.group(1)
return "latest"
@property
def reference(self):
""" Read project version from branch name to create Conan referece
"""
return os.getenv("CONAN_REFERENCE", "civetweb/{}".format(self.version))
if __name__ == "__main__":
settings = BuilderSettings()
builder = ConanMultiPackager(
reference=settings.reference,
username=settings.username,
upload=settings.upload,
upload_only_when_stable=settings.upload_only_when_stable,
stable_branch_pattern=settings.stable_branch_pattern,
test_folder=os.path.join("conan", "test_package"))
builder.add_common_builds(pure_c=False)
builder.run()
|
pshowalter/solutions-geoprocessing-toolbox | refs/heads/dev | military_aspects_of_weather/scripts/MultidimensionSupplementalTools/MultidimensionSupplementalTools/Scripts/mds/netcdf/convention/__init__.py | 2 | # -*- coding: utf-8 -*-
from coordinate import *
from coards import *
from cf import *
from conventions import *
from generic import *
CONVENTION_CLASSES = [
CF,
Coards,
Coordinate
]
"""
Classes that implement a netcdf convention.
"""
def select_convention(
dataset,
filter_out_nd_coordinates,
favor_class=None):
"""
Return a Convention specialization that implements the conventions used in
*dataset*.
filter_out_nd_coordinates
Most coordinate variables are one-dimensional. If this argument is
        True, data variables depending on higher-dimensional coordinate
        variables are filtered out. A reason for this may be that your
        application doesn't support such variables.
favor_class
In case *dataset* adheres to multiple supported conventions and
*favor_class* is one of them, then it is used. Otherwise
:py:class:`Conventions` is used.
In case *dataset* doesn't adhere to a supported convention,
:py:class:`Generic` is used. Supported conventions are listed in
:py:data:`CONVENTION_CLASSES`.
"""
assert favor_class is None or favor_class in CONVENTION_CLASSES, favor_class
conventions = []
for convention_class in CONVENTION_CLASSES:
if convention_class.conforms(dataset):
conventions.append(convention_class(dataset,
filter_out_nd_coordinates))
result = None
if len(conventions) == 0:
# Dataset doesn't adhere to one of the supported conventions.
result = Generic(dataset, filter_out_nd_coordinates)
elif len(conventions) == 1:
# Dataset adheres to exactly one supported convention.
result = conventions[0]
else:
# Dataset adheres to more than one supported convention.
if favor_class is not None and favor_class in [type(convention) for
convention in conventions]:
# Select the favored convention.
result = favor_class(dataset, filter_out_nd_coordinates)
else:
# Use all conventions.
result = Conventions(dataset, filter_out_nd_coordinates,
conventions)
return result
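# Minimal usage sketch (assumes `dataset` is an already opened netCDF
# dataset object):
#   convention = select_convention(dataset, filter_out_nd_coordinates=False)
#   # `convention` is now one of the classes above, or Generic/Conventions.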
|
ThiefMaster/werkzeug | refs/heads/master | werkzeug/wsgi.py | 85 | # -*- coding: utf-8 -*-
"""
werkzeug.wsgi
~~~~~~~~~~~~~
This module implements WSGI related helpers.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
import os
import posixpath
import mimetypes
from itertools import chain
from zlib import adler32
from time import time, mktime
from datetime import datetime
from functools import partial, update_wrapper
from werkzeug._compat import iteritems, text_type, string_types, \
implements_iterator, make_literal_wrapper, to_unicode, to_bytes, \
wsgi_get_bytes, try_coerce_native, PY2, BytesIO
from werkzeug._internal import _empty_stream, _encode_idna
from werkzeug.http import is_resource_modified, http_date
from werkzeug.urls import uri_to_iri, url_quote, url_parse, url_join
from werkzeug.filesystem import get_filesystem_encoding
def responder(f):
"""Marks a function as responder. Decorate a function with it and it
will automatically call the return value as WSGI application.
Example::
@responder
def application(environ, start_response):
return Response('Hello World!')
"""
return update_wrapper(lambda *a: f(*a)(*a[-2:]), f)
def get_current_url(environ, root_only=False, strip_querystring=False,
host_only=False, trusted_hosts=None):
"""A handy helper function that recreates the full URL as IRI for the
current request or parts of it. Here's an example:
>>> from werkzeug.test import create_environ
>>> env = create_environ("/?param=foo", "http://localhost/script")
>>> get_current_url(env)
'http://localhost/script/?param=foo'
>>> get_current_url(env, root_only=True)
'http://localhost/script/'
>>> get_current_url(env, host_only=True)
'http://localhost/'
>>> get_current_url(env, strip_querystring=True)
'http://localhost/script/'
    Optionally it verifies that the host is in a list of trusted hosts.
If the host is not in there it will raise a
:exc:`~werkzeug.exceptions.SecurityError`.
    Note that the string returned might contain unicode characters as the
    representation is an IRI, not a URI. If you need an ASCII-only
    representation you can use the :func:`~werkzeug.urls.iri_to_uri`
function:
>>> from werkzeug.urls import iri_to_uri
>>> iri_to_uri(get_current_url(env))
'http://localhost/script/?param=foo'
:param environ: the WSGI environment to get the current URL from.
:param root_only: set `True` if you only want the root URL.
:param strip_querystring: set to `True` if you don't want the querystring.
:param host_only: set to `True` if the host URL should be returned.
:param trusted_hosts: a list of trusted hosts, see :func:`host_is_trusted`
for more information.
"""
tmp = [environ['wsgi.url_scheme'], '://', get_host(environ, trusted_hosts)]
cat = tmp.append
if host_only:
return uri_to_iri(''.join(tmp) + '/')
cat(url_quote(wsgi_get_bytes(environ.get('SCRIPT_NAME', ''))).rstrip('/'))
cat('/')
if not root_only:
cat(url_quote(wsgi_get_bytes(environ.get('PATH_INFO', '')).lstrip(b'/')))
if not strip_querystring:
qs = get_query_string(environ)
if qs:
cat('?' + qs)
return uri_to_iri(''.join(tmp))
def host_is_trusted(hostname, trusted_list):
"""Checks if a host is trusted against a list. This also takes care
of port normalization.
.. versionadded:: 0.9
:param hostname: the hostname to check
:param trusted_list: a list of hostnames to check against. If a
hostname starts with a dot it will match against
all subdomains as well.
"""
if not hostname:
return False
if isinstance(trusted_list, string_types):
trusted_list = [trusted_list]
def _normalize(hostname):
if ':' in hostname:
hostname = hostname.rsplit(':', 1)[0]
return _encode_idna(hostname)
try:
hostname = _normalize(hostname)
except UnicodeError:
return False
for ref in trusted_list:
if ref.startswith('.'):
ref = ref[1:]
suffix_match = True
else:
suffix_match = False
try:
ref = _normalize(ref)
except UnicodeError:
return False
if ref == hostname:
return True
if suffix_match and hostname.endswith('.' + ref):
return True
return False
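# Illustrative behaviour (hypothetical hostnames):
#   host_is_trusted('example.com', ['.example.com'])      -> True
#   host_is_trusted('foo.example.com', ['.example.com'])  -> True (subdomain)
#   host_is_trusted('evil.com', ['.example.com'])         -> False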
def get_host(environ, trusted_hosts=None):
"""Return the real host for the given WSGI environment. This first checks
the `X-Forwarded-Host` header, then the normal `Host` header, and finally
the `SERVER_NAME` environment variable (using the first one it finds).
Optionally it verifies that the host is in a list of trusted hosts.
If the host is not in there it will raise a
:exc:`~werkzeug.exceptions.SecurityError`.
:param environ: the WSGI environment to get the host of.
:param trusted_hosts: a list of trusted hosts, see :func:`host_is_trusted`
for more information.
"""
if 'HTTP_X_FORWARDED_HOST' in environ:
rv = environ['HTTP_X_FORWARDED_HOST'].split(',', 1)[0].strip()
elif 'HTTP_HOST' in environ:
rv = environ['HTTP_HOST']
else:
rv = environ['SERVER_NAME']
if (environ['wsgi.url_scheme'], environ['SERVER_PORT']) not \
in (('https', '443'), ('http', '80')):
rv += ':' + environ['SERVER_PORT']
if trusted_hosts is not None:
if not host_is_trusted(rv, trusted_hosts):
from werkzeug.exceptions import SecurityError
raise SecurityError('Host "%s" is not trusted' % rv)
return rv
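# For a hypothetical environ such as {'HTTP_HOST': 'example.com',
# 'wsgi.url_scheme': 'http', 'SERVER_PORT': '80'}, get_host(environ)
# returns 'example.com' (the default port is not appended).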
def get_content_length(environ):
"""Returns the content length from the WSGI environment as
integer. If it's not available `None` is returned.
.. versionadded:: 0.9
:param environ: the WSGI environ to fetch the content length from.
"""
content_length = environ.get('CONTENT_LENGTH')
if content_length is not None:
try:
return max(0, int(content_length))
except (ValueError, TypeError):
pass
def get_input_stream(environ, safe_fallback=True):
"""Returns the input stream from the WSGI environment and wraps it
in the most sensible way possible. The stream returned is not the
raw WSGI stream in most cases but one that is safe to read from
without taking into account the content length.
.. versionadded:: 0.9
:param environ: the WSGI environ to fetch the stream from.
    :param safe_fallback: indicates whether the function should use an empty
                          stream as safe fallback or just return the original
                          WSGI input stream if it can't wrap it safely. The
                          default is to return an empty stream in those cases.
"""
stream = environ['wsgi.input']
content_length = get_content_length(environ)
# A wsgi extension that tells us if the input is terminated. In
# that case we return the stream unchanged as we know we can safely
# read it until the end.
if environ.get('wsgi.input_terminated'):
return stream
# If we don't have a content length we fall back to an empty stream
# in case of a safe fallback, otherwise we return the stream unchanged.
# The non-safe fallback is not recommended but might be useful in
# some situations.
if content_length is None:
return safe_fallback and _empty_stream or stream
# Otherwise limit the stream to the content length
return LimitedStream(stream, content_length)
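# Sketch with a hypothetical environ: for
# {'wsgi.input': BytesIO(b'data'), 'CONTENT_LENGTH': '4'} this returns a
# LimitedStream capped at 4 bytes.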
def get_query_string(environ):
"""Returns the `QUERY_STRING` from the WSGI environment. This also takes
care about the WSGI decoding dance on Python 3 environments as a
native string. The string returned will be restricted to ASCII
characters.
.. versionadded:: 0.9
:param environ: the WSGI environment object to get the query string from.
"""
qs = wsgi_get_bytes(environ.get('QUERY_STRING', ''))
# QUERY_STRING really should be ascii safe but some browsers
# will send us some unicode stuff (I am looking at you IE).
# In that case we want to urllib quote it badly.
return try_coerce_native(url_quote(qs, safe=':&%=+$!*\'(),'))
def get_path_info(environ, charset='utf-8', errors='replace'):
"""Returns the `PATH_INFO` from the WSGI environment and properly
decodes it. This also takes care about the WSGI decoding dance
on Python 3 environments. if the `charset` is set to `None` a
bytestring is returned.
.. versionadded:: 0.9
:param environ: the WSGI environment object to get the path from.
:param charset: the charset for the path info, or `None` if no
decoding should be performed.
:param errors: the decoding error handling.
"""
path = wsgi_get_bytes(environ.get('PATH_INFO', ''))
return to_unicode(path, charset, errors, allow_none_charset=True)
def get_script_name(environ, charset='utf-8', errors='replace'):
"""Returns the `SCRIPT_NAME` from the WSGI environment and properly
decodes it. This also takes care about the WSGI decoding dance
on Python 3 environments. if the `charset` is set to `None` a
bytestring is returned.
.. versionadded:: 0.9
:param environ: the WSGI environment object to get the path from.
:param charset: the charset for the path, or `None` if no
decoding should be performed.
:param errors: the decoding error handling.
"""
path = wsgi_get_bytes(environ.get('SCRIPT_NAME', ''))
return to_unicode(path, charset, errors, allow_none_charset=True)
def pop_path_info(environ, charset='utf-8', errors='replace'):
"""Removes and returns the next segment of `PATH_INFO`, pushing it onto
`SCRIPT_NAME`. Returns `None` if there is nothing left on `PATH_INFO`.
If the `charset` is set to `None` a bytestring is returned.
If there are empty segments (``'/foo//bar``) these are ignored but
properly pushed to the `SCRIPT_NAME`:
>>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'}
>>> pop_path_info(env)
'a'
>>> env['SCRIPT_NAME']
'/foo/a'
>>> pop_path_info(env)
'b'
>>> env['SCRIPT_NAME']
'/foo/a/b'
.. versionadded:: 0.5
.. versionchanged:: 0.9
The path is now decoded and a charset and encoding
parameter can be provided.
:param environ: the WSGI environment that is modified.
"""
path = environ.get('PATH_INFO')
if not path:
return None
script_name = environ.get('SCRIPT_NAME', '')
# shift multiple leading slashes over
old_path = path
path = path.lstrip('/')
if path != old_path:
script_name += '/' * (len(old_path) - len(path))
if '/' not in path:
environ['PATH_INFO'] = ''
environ['SCRIPT_NAME'] = script_name + path
rv = wsgi_get_bytes(path)
else:
segment, path = path.split('/', 1)
environ['PATH_INFO'] = '/' + path
environ['SCRIPT_NAME'] = script_name + segment
rv = wsgi_get_bytes(segment)
return to_unicode(rv, charset, errors, allow_none_charset=True)
def peek_path_info(environ, charset='utf-8', errors='replace'):
"""Returns the next segment on the `PATH_INFO` or `None` if there
is none. Works like :func:`pop_path_info` without modifying the
environment:
>>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'}
>>> peek_path_info(env)
'a'
>>> peek_path_info(env)
'a'
If the `charset` is set to `None` a bytestring is returned.
.. versionadded:: 0.5
.. versionchanged:: 0.9
The path is now decoded and a charset and encoding
parameter can be provided.
:param environ: the WSGI environment that is checked.
"""
segments = environ.get('PATH_INFO', '').lstrip('/').split('/', 1)
if segments:
return to_unicode(wsgi_get_bytes(segments[0]),
charset, errors, allow_none_charset=True)
def extract_path_info(environ_or_baseurl, path_or_url, charset='utf-8',
errors='replace', collapse_http_schemes=True):
"""Extracts the path info from the given URL (or WSGI environment) and
path. The path info returned is a unicode string, not a bytestring
suitable for a WSGI environment. The URLs might also be IRIs.
If the path info could not be determined, `None` is returned.
Some examples:
>>> extract_path_info('http://example.com/app', '/app/hello')
u'/hello'
>>> extract_path_info('http://example.com/app',
... 'https://example.com/app/hello')
u'/hello'
>>> extract_path_info('http://example.com/app',
... 'https://example.com/app/hello',
... collapse_http_schemes=False) is None
True
Instead of providing a base URL you can also pass a WSGI environment.
.. versionadded:: 0.6
:param environ_or_baseurl: a WSGI environment dict, a base URL or
base IRI. This is the root of the
application.
:param path_or_url: an absolute path from the server root, a
relative path (in which case it's the path info)
or a full URL. Also accepts IRIs and unicode
parameters.
:param charset: the charset for byte data in URLs
:param errors: the error handling on decode
:param collapse_http_schemes: if set to `False` the algorithm does
not assume that http and https on the
same server point to the same
resource.
"""
def _normalize_netloc(scheme, netloc):
parts = netloc.split(u'@', 1)[-1].split(u':', 1)
if len(parts) == 2:
netloc, port = parts
if (scheme == u'http' and port == u'80') or \
(scheme == u'https' and port == u'443'):
port = None
else:
netloc = parts[0]
port = None
if port is not None:
netloc += u':' + port
return netloc
# make sure whatever we are working on is a IRI and parse it
path = uri_to_iri(path_or_url, charset, errors)
if isinstance(environ_or_baseurl, dict):
environ_or_baseurl = get_current_url(environ_or_baseurl,
root_only=True)
base_iri = uri_to_iri(environ_or_baseurl, charset, errors)
base_scheme, base_netloc, base_path = url_parse(base_iri)[:3]
cur_scheme, cur_netloc, cur_path, = \
url_parse(url_join(base_iri, path))[:3]
# normalize the network location
base_netloc = _normalize_netloc(base_scheme, base_netloc)
cur_netloc = _normalize_netloc(cur_scheme, cur_netloc)
# is that IRI even on a known HTTP scheme?
if collapse_http_schemes:
for scheme in base_scheme, cur_scheme:
if scheme not in (u'http', u'https'):
return None
else:
if not (base_scheme in (u'http', u'https') and
base_scheme == cur_scheme):
return None
# are the netlocs compatible?
if base_netloc != cur_netloc:
return None
# are we below the application path?
base_path = base_path.rstrip(u'/')
if not cur_path.startswith(base_path):
return None
return u'/' + cur_path[len(base_path):].lstrip(u'/')
class SharedDataMiddleware(object):
"""A WSGI middleware that provides static content for development
environments or simple server setups. Usage is quite simple::
import os
from werkzeug.wsgi import SharedDataMiddleware
app = SharedDataMiddleware(app, {
'/shared': os.path.join(os.path.dirname(__file__), 'shared')
})
The contents of the folder ``./shared`` will now be available on
``http://example.com/shared/``. This is pretty useful during development
because a standalone media server is not required. One can also mount
files on the root folder and still continue to use the application because
the shared data middleware forwards all unhandled requests to the
application, even if the requests are below one of the shared folders.
If `pkg_resources` is available you can also tell the middleware to serve
files from package data::
app = SharedDataMiddleware(app, {
'/shared': ('myapplication', 'shared_files')
})
This will then serve the ``shared_files`` folder in the `myapplication`
Python package.
The optional `disallow` parameter can be an :func:`~fnmatch.fnmatch`
pattern for files that are not accessible from the web. If `cache` is set
to `False` no caching headers are sent.
Currently the middleware does not support non-ASCII filenames. If the
encoding on the file system happens to match the encoding of the URI it
may work, but this could also be by accident. We strongly suggest using
ASCII-only file names for static files.
The middleware will guess the mimetype using the Python `mimetypes`
module. If it's unable to figure out the mimetype it will fall back
to `fallback_mimetype`.
.. versionchanged:: 0.5
The cache timeout is configurable now.
.. versionadded:: 0.6
The `fallback_mimetype` parameter was added.
:param app: the application to wrap. If you don't want to wrap an
application you can pass it :exc:`NotFound`.
:param exports: a dict of exported files and folders.
:param disallow: an :func:`~fnmatch.fnmatch` pattern.
:param fallback_mimetype: the fallback mimetype for unknown files.
:param cache: enable or disable caching headers.
:param cache_timeout: the cache timeout in seconds for the headers.
"""
def __init__(self, app, exports, disallow=None, cache=True,
cache_timeout=60 * 60 * 12, fallback_mimetype='text/plain'):
self.app = app
self.exports = {}
self.cache = cache
self.cache_timeout = cache_timeout
for key, value in iteritems(exports):
if isinstance(value, tuple):
loader = self.get_package_loader(*value)
elif isinstance(value, string_types):
if os.path.isfile(value):
loader = self.get_file_loader(value)
else:
loader = self.get_directory_loader(value)
else:
raise TypeError('unknown def %r' % value)
self.exports[key] = loader
if disallow is not None:
from fnmatch import fnmatch
self.is_allowed = lambda x: not fnmatch(x, disallow)
self.fallback_mimetype = fallback_mimetype
def is_allowed(self, filename):
"""Subclasses can override this method to disallow the access to
certain files. However by providing `disallow` in the constructor
this method is overwritten.
"""
return True
def _opener(self, filename):
return lambda: (
open(filename, 'rb'),
datetime.utcfromtimestamp(os.path.getmtime(filename)),
int(os.path.getsize(filename))
)
def get_file_loader(self, filename):
return lambda x: (os.path.basename(filename), self._opener(filename))
def get_package_loader(self, package, package_path):
from pkg_resources import DefaultProvider, ResourceManager, \
get_provider
loadtime = datetime.utcnow()
provider = get_provider(package)
manager = ResourceManager()
filesystem_bound = isinstance(provider, DefaultProvider)
def loader(path):
if path is None:
return None, None
path = posixpath.join(package_path, path)
if not provider.has_resource(path):
return None, None
basename = posixpath.basename(path)
if filesystem_bound:
return basename, self._opener(
provider.get_resource_filename(manager, path))
s = provider.get_resource_string(manager, path)
return basename, lambda: (
BytesIO(s),
loadtime,
len(s)
)
return loader
def get_directory_loader(self, directory):
def loader(path):
if path is not None:
path = os.path.join(directory, path)
else:
path = directory
if os.path.isfile(path):
return os.path.basename(path), self._opener(path)
return None, None
return loader
def generate_etag(self, mtime, file_size, real_filename):
if not isinstance(real_filename, bytes):
real_filename = real_filename.encode(get_filesystem_encoding())
return 'wzsdm-%d-%s-%s' % (
mktime(mtime.timetuple()),
file_size,
adler32(real_filename) & 0xffffffff
)
def __call__(self, environ, start_response):
cleaned_path = get_path_info(environ)
if PY2:
cleaned_path = cleaned_path.encode(get_filesystem_encoding())
# sanitize the path for non-Unix systems
cleaned_path = cleaned_path.strip('/')
for sep in os.sep, os.altsep:
if sep and sep != '/':
cleaned_path = cleaned_path.replace(sep, '/')
path = '/' + '/'.join(x for x in cleaned_path.split('/')
if x and x != '..')
file_loader = None
for search_path, loader in iteritems(self.exports):
if search_path == path:
real_filename, file_loader = loader(None)
if file_loader is not None:
break
if not search_path.endswith('/'):
search_path += '/'
if path.startswith(search_path):
real_filename, file_loader = loader(path[len(search_path):])
if file_loader is not None:
break
if file_loader is None or not self.is_allowed(real_filename):
return self.app(environ, start_response)
guessed_type = mimetypes.guess_type(real_filename)
mime_type = guessed_type[0] or self.fallback_mimetype
f, mtime, file_size = file_loader()
headers = [('Date', http_date())]
if self.cache:
timeout = self.cache_timeout
etag = self.generate_etag(mtime, file_size, real_filename)
headers += [
('Etag', '"%s"' % etag),
('Cache-Control', 'max-age=%d, public' % timeout)
]
if not is_resource_modified(environ, etag, last_modified=mtime):
f.close()
start_response('304 Not Modified', headers)
return []
headers.append(('Expires', http_date(time() + timeout)))
else:
headers.append(('Cache-Control', 'public'))
headers.extend((
('Content-Type', mime_type),
('Content-Length', str(file_size)),
('Last-Modified', http_date(mtime))
))
start_response('200 OK', headers)
return wrap_file(environ, f)
class DispatcherMiddleware(object):
"""Allows one to mount middlewares or applications in a WSGI application.
This is useful if you want to combine multiple WSGI applications::
app = DispatcherMiddleware(app, {
'/app2': app2,
'/app3': app3
})
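A request for ``/app2/index`` is then dispatched to ``app2`` with
``SCRIPT_NAME`` extended by ``/app2`` and ``PATH_INFO`` set to
``/index``; paths matching no mount fall through to the wrapped ``app``.
A minimal wiring sketch (``frontend`` and ``backend`` are placeholder
applications, not part of this module)::
application = DispatcherMiddleware(frontend, {
'/backend': backend
})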
"""
def __init__(self, app, mounts=None):
self.app = app
self.mounts = mounts or {}
def __call__(self, environ, start_response):
script = environ.get('PATH_INFO', '')
path_info = ''
while '/' in script:
if script in self.mounts:
app = self.mounts[script]
break
script, last_item = script.rsplit('/', 1)
path_info = '/%s%s' % (last_item, path_info)
else:
app = self.mounts.get(script, self.app)
original_script_name = environ.get('SCRIPT_NAME', '')
environ['SCRIPT_NAME'] = original_script_name + script
environ['PATH_INFO'] = path_info
return app(environ, start_response)
@implements_iterator
class ClosingIterator(object):
"""The WSGI specification requires that all middlewares and gateways
respect the `close` callback of an iterator. Because it is often useful
to add another close action to a returned iterator, and writing a custom
iterator class just for that is tedious, this class can be used instead::
return ClosingIterator(app(environ, start_response), [cleanup_session,
cleanup_locals])
If there is just one close function it can be passed instead of the list.
A closing iterator is not needed if the application uses response objects
and finishes the processing once the response is started::
try:
return response(environ, start_response)
finally:
cleanup_session()
cleanup_locals()
"""
def __init__(self, iterable, callbacks=None):
iterator = iter(iterable)
self._next = partial(next, iterator)
if callbacks is None:
callbacks = []
elif callable(callbacks):
callbacks = [callbacks]
else:
callbacks = list(callbacks)
iterable_close = getattr(iterator, 'close', None)
if iterable_close:
callbacks.insert(0, iterable_close)
self._callbacks = callbacks
def __iter__(self):
return self
def __next__(self):
return self._next()
def close(self):
for callback in self._callbacks:
callback()
def wrap_file(environ, file, buffer_size=8192):
"""Wraps a file. This uses the WSGI server's file wrapper if available
or otherwise the generic :class:`FileWrapper`.
.. versionadded:: 0.5
If the file wrapper from the WSGI server is used it's important to not
iterate over it from inside the application but to pass it through
unchanged. If you want to pass out a file wrapper inside a response
object you have to set :attr:`~BaseResponse.direct_passthrough` to `True`.
More information about file wrappers is available in :pep:`333`.
:param file: a :class:`file`-like object with a :meth:`~file.read` method.
:param buffer_size: number of bytes for one iteration.
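Usage sketch (illustrative only; the path and mimetype are placeholders,
and returning a response object with `direct_passthrough` is usually
preferable)::
def application(environ, start_response):
f = open('/path/to/static.bin', 'rb')
start_response('200 OK', [('Content-Type', 'application/octet-stream')])
return wrap_file(environ, f)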
"""
return environ.get('wsgi.file_wrapper', FileWrapper)(file, buffer_size)
@implements_iterator
class FileWrapper(object):
"""This class can be used to convert a :class:`file`-like object into
an iterable. It yields `buffer_size` blocks until the file is fully
read.
You should not use this class directly but rather use the
:func:`wrap_file` function that uses the WSGI server's file wrapper
support if it's available.
.. versionadded:: 0.5
If you're using this object together with a :class:`BaseResponse` you have
to use the `direct_passthrough` mode.
:param file: a :class:`file`-like object with a :meth:`~file.read` method.
:param buffer_size: number of bytes for one iteration.
"""
def __init__(self, file, buffer_size=8192):
self.file = file
self.buffer_size = buffer_size
def close(self):
if hasattr(self.file, 'close'):
self.file.close()
def seekable(self):
if hasattr(self.file, 'seekable'):
return self.file.seekable()
if hasattr(self.file, 'seek'):
return True
return False
def seek(self, *args):
if hasattr(self.file, 'seek'):
self.file.seek(*args)
def tell(self):
if hasattr(self.file, 'tell'):
return self.file.tell()
return None
def __iter__(self):
return self
def __next__(self):
data = self.file.read(self.buffer_size)
if data:
return data
raise StopIteration()
@implements_iterator
class _RangeWrapper(object):
# private for now, but should we make it public in the future?
"""This class can be used to convert an iterable object into
an iterable that will only yield a piece of the underlying content.
It yields blocks until the underlying stream range is fully read.
The yielded blocks never exceed the block size of the original
iterator, but they can be smaller.
If you're using this object together with a :class:`BaseResponse` you have
to use the `direct_passthrough` mode.
:param iterable: an iterable object with a :meth:`__next__` method.
:param start_byte: byte from which read will start.
:param byte_range: how many bytes to read.
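A minimal sketch (wrapping a plain list of byte chunks; real callers
would pass a response iterable)::
wrapped = _RangeWrapper([b'hello', b'world'], start_byte=2, byte_range=5)
b''.join(wrapped)  # -> b'llowo'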
"""
def __init__(self, iterable, start_byte=0, byte_range=None):
self.iterable = iter(iterable)
self.byte_range = byte_range
self.start_byte = start_byte
self.end_byte = None
if byte_range is not None:
self.end_byte = self.start_byte + self.byte_range
self.read_length = 0
self.seekable = hasattr(iterable, 'seekable') and iterable.seekable()
self.end_reached = False
def __iter__(self):
return self
def _next_chunk(self):
try:
chunk = next(self.iterable)
self.read_length += len(chunk)
return chunk
except StopIteration:
self.end_reached = True
raise
def _first_iteration(self):
chunk = None
if self.seekable:
self.iterable.seek(self.start_byte)
self.read_length = self.iterable.tell()
contextual_read_length = self.read_length
else:
while self.read_length <= self.start_byte:
chunk = self._next_chunk()
if chunk is not None:
chunk = chunk[self.start_byte - self.read_length:]
contextual_read_length = self.start_byte
return chunk, contextual_read_length
def _next(self):
if self.end_reached:
raise StopIteration()
chunk = None
contextual_read_length = self.read_length
if self.read_length == 0:
chunk, contextual_read_length = self._first_iteration()
if chunk is None:
chunk = self._next_chunk()
if self.end_byte is not None and self.read_length >= self.end_byte:
self.end_reached = True
return chunk[:self.end_byte - contextual_read_length]
return chunk
def __next__(self):
chunk = self._next()
if chunk:
return chunk
self.end_reached = True
raise StopIteration()
def close(self):
if hasattr(self.iterable, 'close'):
self.iterable.close()
def _make_chunk_iter(stream, limit, buffer_size):
"""Helper for the line and chunk iter functions."""
if isinstance(stream, (bytes, bytearray, text_type)):
raise TypeError('Passed a string or bytes object instead of '
'a true iterator or stream.')
if not hasattr(stream, 'read'):
for item in stream:
if item:
yield item
return
if not isinstance(stream, LimitedStream) and limit is not None:
stream = LimitedStream(stream, limit)
_read = stream.read
while 1:
item = _read(buffer_size)
if not item:
break
yield item
def make_line_iter(stream, limit=None, buffer_size=10 * 1024,
cap_at_buffer=False):
"""Safely iterates line-based over an input stream. If the input stream
is not a :class:`LimitedStream` the `limit` parameter is mandatory.
This uses the stream's :meth:`~file.read` method internally as opposed
to the :meth:`~file.readline` method that is unsafe and can only be used
in violation of the WSGI specification. The same problem applies to the
`__iter__` function of the input stream which calls :meth:`~file.readline`
without arguments.
If you need line-by-line processing it's strongly recommended to iterate
over the input stream using this helper function.
.. versionchanged:: 0.8
This function now ensures that the limit was reached.
.. versionadded:: 0.9
added support for iterators as input stream.
.. versionadded:: 0.11.10
added support for the `cap_at_buffer` parameter.
:param stream: the stream or iterable to iterate over.
:param limit: the limit in bytes for the stream (usually the
content length). Not necessary if the `stream`
is a :class:`LimitedStream`.
:param buffer_size: The optional buffer size.
:param cap_at_buffer: if this is set, chunks are split if they are longer
than the buffer size. Note that, internally, the
buffer size may be exceeded by up to a factor
of two.
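Illustrative sketch (``BytesIO`` merely stands in for a real WSGI
input stream)::
from io import BytesIO
list(make_line_iter(BytesIO(b'a\nb\nc'), limit=5))
# -> [b'a\n', b'b\n', b'c']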
"""
_iter = _make_chunk_iter(stream, limit, buffer_size)
first_item = next(_iter, '')
if not first_item:
return
s = make_literal_wrapper(first_item)
empty = s('')
cr = s('\r')
lf = s('\n')
crlf = s('\r\n')
_iter = chain((first_item,), _iter)
def _iter_basic_lines():
_join = empty.join
buffer = []
while 1:
new_data = next(_iter, '')
if not new_data:
break
new_buf = []
buf_size = 0
for item in chain(buffer, new_data.splitlines(True)):
new_buf.append(item)
buf_size += len(item)
if item and item[-1:] in crlf:
yield _join(new_buf)
new_buf = []
elif cap_at_buffer and buf_size >= buffer_size:
rv = _join(new_buf)
while len(rv) >= buffer_size:
yield rv[:buffer_size]
rv = rv[buffer_size:]
new_buf = [rv]
buffer = new_buf
if buffer:
yield _join(buffer)
# This hackery is necessary to merge 'foo\r' and '\n' into one item
# of 'foo\r\n' if we were unlucky and we hit a chunk boundary.
previous = empty
for item in _iter_basic_lines():
if item == lf and previous[-1:] == cr:
previous += item
item = empty
if previous:
yield previous
previous = item
if previous:
yield previous
def make_chunk_iter(stream, separator, limit=None, buffer_size=10 * 1024,
cap_at_buffer=False):
"""Works like :func:`make_line_iter` but accepts a separator
which divides chunks. If you want newline based processing
you should use :func:`make_line_iter` instead as it
supports arbitrary newline markers.
.. versionadded:: 0.8
.. versionadded:: 0.9
added support for iterators as input stream.
.. versionadded:: 0.11.10
added support for the `cap_at_buffer` parameter.
:param stream: the stream or iterable to iterate over.
:param separator: the separator that divides chunks.
:param limit: the limit in bytes for the stream (usually the
content length). Not necessary if the `stream`
is otherwise already limited.
:param buffer_size: The optional buffer size.
:param cap_at_buffer: if this is set, chunks are split if they are longer
than the buffer size. Note that, internally, the
buffer size may be exceeded by up to a factor
of two.
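Illustrative sketch (``BytesIO`` again stands in for a real stream)::
from io import BytesIO
list(make_chunk_iter(BytesIO(b'a|b|c'), '|', limit=5))
# -> [b'a', b'b', b'c']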
"""
_iter = _make_chunk_iter(stream, limit, buffer_size)
first_item = next(_iter, '')
if not first_item:
return
_iter = chain((first_item,), _iter)
if isinstance(first_item, text_type):
separator = to_unicode(separator)
_split = re.compile(r'(%s)' % re.escape(separator)).split
_join = u''.join
else:
separator = to_bytes(separator)
_split = re.compile(b'(' + re.escape(separator) + b')').split
_join = b''.join
buffer = []
while 1:
new_data = next(_iter, '')
if not new_data:
break
chunks = _split(new_data)
new_buf = []
buf_size = 0
for item in chain(buffer, chunks):
if item == separator:
yield _join(new_buf)
new_buf = []
buf_size = 0
else:
buf_size += len(item)
new_buf.append(item)
if cap_at_buffer and buf_size >= buffer_size:
rv = _join(new_buf)
while len(rv) >= buffer_size:
yield rv[:buffer_size]
rv = rv[buffer_size:]
new_buf = [rv]
buf_size = len(rv)
buffer = new_buf
if buffer:
yield _join(buffer)
@implements_iterator
class LimitedStream(object):
"""Wraps a stream so that it doesn't read more than n bytes. If the
stream is exhausted and the caller tries to get more bytes from it
:func:`on_exhausted` is called which by default returns an empty
string. The return value of that function is forwarded
to the reader function. So if it returns an empty string
:meth:`read` will return an empty string as well.
The limit however must never be higher than what the stream can
output. Otherwise :meth:`readlines` will try to read past the
limit.
.. admonition:: Note on WSGI compliance
calls to :meth:`readline` and :meth:`readlines` are not
WSGI compliant because they pass a size argument to the
readline methods. Unfortunately the WSGI PEP is not safely
implementable without a size argument to :meth:`readline`
because there is no EOF marker in the stream. As a result
of that the use of :meth:`readline` is discouraged.
For the same reason iterating over the :class:`LimitedStream`
is not portable. It internally calls :meth:`readline`.
We strongly suggest using :meth:`read` only or using the
:func:`make_line_iter` which safely iterates line-based
over a WSGI input stream.
:param stream: the stream to wrap.
:param limit: the limit for the stream, must not be longer than
what the stream can provide if the stream does not
end with `EOF` (like `wsgi.input`)
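Usage sketch (illustrative)::
from io import BytesIO
limited = LimitedStream(BytesIO(b'123456789'), 4)
limited.read()        # -> b'1234'
limited.is_exhausted  # -> True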
"""
def __init__(self, stream, limit):
self._read = stream.read
self._readline = stream.readline
self._pos = 0
self.limit = limit
def __iter__(self):
return self
@property
def is_exhausted(self):
"""If the stream is exhausted this attribute is `True`."""
return self._pos >= self.limit
def on_exhausted(self):
"""This is called when the stream tries to read past the limit.
The return value of this function is returned from the reading
function.
"""
# Read zero bytes from the stream so that we get the
# correct end of stream marker.
return self._read(0)
def on_disconnect(self):
"""What should happen if a disconnect is detected? The return
value of this function is returned from read functions in case
the client went away. By default a
:exc:`~werkzeug.exceptions.ClientDisconnected` exception is raised.
"""
from werkzeug.exceptions import ClientDisconnected
raise ClientDisconnected()
def exhaust(self, chunk_size=1024 * 64):
"""Exhaust the stream. This consumes all the data left until the
limit is reached.
:param chunk_size: the size for a chunk. It will read chunks of
this size until the stream is exhausted and
throw away the results.
"""
to_read = self.limit - self._pos
chunk = chunk_size
while to_read > 0:
chunk = min(to_read, chunk)
self.read(chunk)
to_read -= chunk
def read(self, size=None):
"""Read `size` bytes or if size is not provided everything is read.
:param size: the number of bytes read.
"""
if self._pos >= self.limit:
return self.on_exhausted()
if size is None or size == -1:  # -1 is for consistency with file objects
size = self.limit
to_read = min(self.limit - self._pos, size)
try:
read = self._read(to_read)
except (IOError, ValueError):
return self.on_disconnect()
if to_read and len(read) != to_read:
return self.on_disconnect()
self._pos += len(read)
return read
def readline(self, size=None):
"""Reads one line from the stream."""
if self._pos >= self.limit:
return self.on_exhausted()
if size is None:
size = self.limit - self._pos
else:
size = min(size, self.limit - self._pos)
try:
line = self._readline(size)
except (ValueError, IOError):
return self.on_disconnect()
if size and not line:
return self.on_disconnect()
self._pos += len(line)
return line
def readlines(self, size=None):
"""Reads a file into a list of strings. It calls :meth:`readline`
until the file is read to the end. It does support the optional
`size` argument if the underlying stream supports it for
`readline`.
"""
last_pos = self._pos
result = []
if size is not None:
end = min(self.limit, last_pos + size)
else:
end = self.limit
while 1:
if size is not None:
size -= last_pos - self._pos
if self._pos >= end:
break
result.append(self.readline(size))
if size is not None:
last_pos = self._pos
return result
def tell(self):
"""Returns the position of the stream.
.. versionadded:: 0.9
"""
return self._pos
def __next__(self):
line = self.readline()
if not line:
raise StopIteration()
return line
|
chrisdjscott/Atoman | refs/heads/master | atoman/filtering/filters/tests/test_cropSphere.py | 1 |
"""
Unit tests for the crop sphere filter
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import unittest
import numpy as np
from ....system import lattice
from .. import cropSphereFilter
from .. import base
################################################################################
class TestCropSphereFilter(unittest.TestCase):
"""
Test crop sphere filter
"""
def setUp(self):
"""
Called before each test
"""
# generate lattice
self.lattice = lattice.Lattice()
self.lattice.addAtom("He", [0,0,0], 0)
self.lattice.addAtom("He", [0,0,4], 0)
self.lattice.addAtom("He", [2,0,0], 0)
self.lattice.addAtom("He", [0,2,0], 0)
self.lattice.addAtom("He", [4,0,0], 0)
self.lattice.addAtom("He", [0,0,2], 0)
self.lattice.addAtom("He", [0,4,0], 0)
self.lattice.addAtom("He", [4,4,4], 0)
self.lattice.addAtom("He", [99,99,99], 0)
# filter
self.filter = cropSphereFilter.CropSphereFilter("Crop sphere")
def tearDown(self):
"""
Called after each test
"""
# remove refs
self.lattice = None
self.filter = None
def test_cropSphereFilter(self):
"""
Crop sphere
"""
# TEST 1
# settings - crop atoms inside the sphere (no inversion)
settings = cropSphereFilter.CropSphereFilterSettings()
settings.updateSetting("xCentre", 0.0)
settings.updateSetting("yCentre", 0.0)
settings.updateSetting("zCentre", 0.0)
settings.updateSetting("radius", 2.1)
settings.updateSetting("invertSelection", False)
# set PBC
self.lattice.PBC[:] = 1
# filter input
filterInput = base.FilterInput()
filterInput.inputState = self.lattice
visibleAtoms = np.arange(self.lattice.NAtoms, dtype=np.int32)
filterInput.visibleAtoms = visibleAtoms
filterInput.NScalars = 0
filterInput.fullScalars = np.empty(0, np.float64)
filterInput.NVectors = 0
filterInput.fullVectors = np.empty(0, np.float64)
# call filter
result = self.filter.apply(filterInput, settings)
self.assertIsInstance(result, base.FilterResult)
# make sure num visible is correct
self.assertEqual(len(visibleAtoms), 4)
# make sure correct atoms selected
self.assertTrue(1 in visibleAtoms)
self.assertTrue(4 in visibleAtoms)
self.assertTrue(6 in visibleAtoms)
self.assertTrue(7 in visibleAtoms)
# TEST 2
# settings - inverted selection: crop atoms outside the sphere
settings = cropSphereFilter.CropSphereFilterSettings()
settings.updateSetting("xCentre", 0.0)
settings.updateSetting("yCentre", 0.0)
settings.updateSetting("zCentre", 0.0)
settings.updateSetting("radius", 2.1)
settings.updateSetting("invertSelection", True)
# set PBC
self.lattice.PBC[:] = 1
# filter input
filterInput = base.FilterInput()
filterInput.inputState = self.lattice
visibleAtoms = np.arange(self.lattice.NAtoms, dtype=np.int32)
filterInput.visibleAtoms = visibleAtoms
filterInput.NScalars = 0
filterInput.fullScalars = np.empty(0, np.float64)
filterInput.NVectors = 0
filterInput.fullVectors = np.empty(0, np.float64)
# call filter
result = self.filter.apply(filterInput, settings)
self.assertIsInstance(result, base.FilterResult)
# make sure num visible is correct
self.assertEqual(len(visibleAtoms), 5)
# make sure correct atoms selected
self.assertTrue(0 in visibleAtoms)
self.assertTrue(2 in visibleAtoms)
self.assertTrue(3 in visibleAtoms)
self.assertTrue(5 in visibleAtoms)
self.assertTrue(8 in visibleAtoms)
|
goddardl/gaffer | refs/heads/master | python/Gaffer/UndoContext.py | 2 | ##########################################################################
#
# Copyright (c) 2011, John Haddon. All rights reserved.
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
from _Gaffer import _UndoContext
class UndoContext() :
State = _UndoContext.State
def __init__( self, script, state=_UndoContext.State.Enabled, mergeGroup="" ) :
self.__script = script
self.__state = state
self.__mergeGroup = mergeGroup
def __enter__( self ) :
self.__context = _UndoContext( self.__script, self.__state, self.__mergeGroup )
def __exit__( self, type, value, traceBack ) :
del self.__context
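# Typical usage (an illustrative sketch; ``script`` is assumed to be a
# Gaffer ScriptNode) : wrap modifications so they become a single
# undoable step :
#
# with Gaffer.UndoContext( script ) :
# 	script["node"]["plug"].setValue( 10 )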
|
shaistaansari/django | refs/heads/master | tests/sessions_tests/tests.py | 80 | import base64
import os
import shutil
import string
import sys
import tempfile
import unittest
from datetime import timedelta
from django.conf import settings
from django.contrib.sessions.backends.cache import SessionStore as CacheSession
from django.contrib.sessions.backends.cached_db import \
SessionStore as CacheDBSession
from django.contrib.sessions.backends.db import SessionStore as DatabaseSession
from django.contrib.sessions.backends.file import SessionStore as FileSession
from django.contrib.sessions.backends.signed_cookies import \
SessionStore as CookieSession
from django.contrib.sessions.exceptions import InvalidSessionKey
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sessions.models import Session
from django.contrib.sessions.serializers import (
JSONSerializer, PickleSerializer,
)
from django.core import management
from django.core.cache import caches
from django.core.cache.backends.base import InvalidCacheBackendError
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpResponse
from django.test import (
RequestFactory, TestCase, ignore_warnings, override_settings,
)
from django.test.utils import patch_logger
from django.utils import six, timezone
from django.utils.encoding import force_text
from django.utils.six.moves import http_cookies
class SessionTestsMixin(object):
# This does not inherit from TestCase to avoid any tests being run with this
# class, which wouldn't work, and to allow different TestCase subclasses to
# be used.
backend = None # subclasses must specify
def setUp(self):
self.session = self.backend()
def tearDown(self):
# NB: be careful to delete any sessions created; stale sessions fill up
# the /tmp (with some backends) and eventually overwhelm it after lots
# of runs (think buildbots)
self.session.delete()
def test_new_session(self):
self.assertFalse(self.session.modified)
self.assertFalse(self.session.accessed)
def test_get_empty(self):
self.assertEqual(self.session.get('cat'), None)
def test_store(self):
self.session['cat'] = "dog"
self.assertTrue(self.session.modified)
self.assertEqual(self.session.pop('cat'), 'dog')
def test_pop(self):
self.session['some key'] = 'exists'
# Need to reset these to pretend we haven't accessed it:
self.session.accessed = False
self.session.modified = False
self.assertEqual(self.session.pop('some key'), 'exists')
self.assertTrue(self.session.accessed)
self.assertTrue(self.session.modified)
self.assertEqual(self.session.get('some key'), None)
def test_pop_default(self):
self.assertEqual(self.session.pop('some key', 'does not exist'),
'does not exist')
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
def test_setdefault(self):
self.assertEqual(self.session.setdefault('foo', 'bar'), 'bar')
self.assertEqual(self.session.setdefault('foo', 'baz'), 'bar')
self.assertTrue(self.session.accessed)
self.assertTrue(self.session.modified)
def test_update(self):
self.session.update({'update key': 1})
self.assertTrue(self.session.accessed)
self.assertTrue(self.session.modified)
self.assertEqual(self.session.get('update key', None), 1)
def test_has_key(self):
self.session['some key'] = 1
self.session.modified = False
self.session.accessed = False
self.assertIn('some key', self.session)
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
def test_values(self):
self.assertEqual(list(self.session.values()), [])
self.assertTrue(self.session.accessed)
self.session['some key'] = 1
self.assertEqual(list(self.session.values()), [1])
def test_iterkeys(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
i = six.iterkeys(self.session)
self.assertTrue(hasattr(i, '__iter__'))
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
self.assertEqual(list(i), ['x'])
def test_itervalues(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
i = six.itervalues(self.session)
self.assertTrue(hasattr(i, '__iter__'))
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
self.assertEqual(list(i), [1])
def test_iteritems(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
i = six.iteritems(self.session)
self.assertTrue(hasattr(i, '__iter__'))
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
self.assertEqual(list(i), [('x', 1)])
def test_clear(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.items()), [('x', 1)])
self.session.clear()
self.assertEqual(list(self.session.items()), [])
self.assertTrue(self.session.accessed)
self.assertTrue(self.session.modified)
def test_save(self):
if (hasattr(self.session, '_cache') and 'DummyCache' in
settings.CACHES[settings.SESSION_CACHE_ALIAS]['BACKEND']):
raise unittest.SkipTest("Session saving tests require a real cache backend")
self.session.save()
self.assertTrue(self.session.exists(self.session.session_key))
def test_delete(self):
self.session.save()
self.session.delete(self.session.session_key)
self.assertFalse(self.session.exists(self.session.session_key))
def test_flush(self):
self.session['foo'] = 'bar'
self.session.save()
prev_key = self.session.session_key
self.session.flush()
self.assertFalse(self.session.exists(prev_key))
self.assertNotEqual(self.session.session_key, prev_key)
self.assertIsNone(self.session.session_key)
self.assertTrue(self.session.modified)
self.assertTrue(self.session.accessed)
def test_cycle(self):
self.session['a'], self.session['b'] = 'c', 'd'
self.session.save()
prev_key = self.session.session_key
prev_data = list(self.session.items())
self.session.cycle_key()
self.assertNotEqual(self.session.session_key, prev_key)
self.assertEqual(list(self.session.items()), prev_data)
def test_save_doesnt_clear_data(self):
self.session['a'] = 'b'
self.session.save()
self.assertEqual(self.session['a'], 'b')
def test_invalid_key(self):
# Submitting an invalid session key (either by guessing, or if the db has
# removed the key) results in a new key being generated.
try:
session = self.backend('1')
try:
session.save()
except AttributeError:
self.fail(
"The session object did not save properly. "
"Middleware may be saving cache items without namespaces."
)
self.assertNotEqual(session.session_key, '1')
self.assertEqual(session.get('cat'), None)
session.delete()
finally:
# Some backends leave a stale cache entry for the invalid
# session key; make sure that entry is manually deleted
session.delete('1')
def test_session_key_empty_string_invalid(self):
"""Falsey values (Such as an empty string) are rejected."""
self.session._session_key = ''
self.assertIsNone(self.session.session_key)
def test_session_key_too_short_invalid(self):
"""Strings shorter than 8 characters are rejected."""
self.session._session_key = '1234567'
self.assertIsNone(self.session.session_key)
def test_session_key_valid_string_saved(self):
"""Strings of length 8 and up are accepted and stored."""
self.session._session_key = '12345678'
self.assertEqual(self.session.session_key, '12345678')
def test_session_key_is_read_only(self):
def set_session_key(session):
session.session_key = session._get_new_session_key()
self.assertRaises(AttributeError, set_session_key, self.session)
# Custom session expiry
def test_default_expiry(self):
# A normal session has a max age equal to settings
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
# So does a custom session with an idle expiration time of 0 (but it'll
# expire at browser close)
self.session.set_expiry(0)
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
def test_custom_expiry_seconds(self):
modification = timezone.now()
self.session.set_expiry(10)
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_timedelta(self):
modification = timezone.now()
# Mock timezone.now, because set_expiry calls it on this code path.
original_now = timezone.now
try:
timezone.now = lambda: modification
self.session.set_expiry(timedelta(seconds=10))
finally:
timezone.now = original_now
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_datetime(self):
modification = timezone.now()
self.session.set_expiry(modification + timedelta(seconds=10))
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_reset(self):
self.session.set_expiry(None)
self.session.set_expiry(10)
self.session.set_expiry(None)
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
def test_get_expire_at_browser_close(self):
# Tests get_expire_at_browser_close with different settings and different
# set_expiry calls
with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=False):
self.session.set_expiry(10)
self.assertFalse(self.session.get_expire_at_browser_close())
self.session.set_expiry(0)
self.assertTrue(self.session.get_expire_at_browser_close())
self.session.set_expiry(None)
self.assertFalse(self.session.get_expire_at_browser_close())
with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=True):
self.session.set_expiry(10)
self.assertFalse(self.session.get_expire_at_browser_close())
self.session.set_expiry(0)
self.assertTrue(self.session.get_expire_at_browser_close())
self.session.set_expiry(None)
self.assertTrue(self.session.get_expire_at_browser_close())
def test_decode(self):
# Ensure we can decode what we encode
data = {'a test key': 'a test value'}
encoded = self.session.encode(data)
self.assertEqual(self.session.decode(encoded), data)
def test_decode_failure_logged_to_security(self):
bad_encode = base64.b64encode(b'flaskdj:alkdjf')
with patch_logger('django.security.SuspiciousSession', 'warning') as calls:
self.assertEqual({}, self.session.decode(bad_encode))
# check that the failed decode is logged
self.assertEqual(len(calls), 1)
self.assertIn('corrupted', calls[0])
def test_actual_expiry(self):
# this doesn't work with JSONSerializer (serializing timedelta)
with override_settings(SESSION_SERIALIZER='django.contrib.sessions.serializers.PickleSerializer'):
self.session = self.backend() # reinitialize after overriding settings
# Regression test for #19200
old_session_key = None
new_session_key = None
try:
self.session['foo'] = 'bar'
self.session.set_expiry(-timedelta(seconds=10))
self.session.save()
old_session_key = self.session.session_key
# With an expiry date in the past, the session expires instantly.
new_session = self.backend(self.session.session_key)
new_session_key = new_session.session_key
self.assertNotIn('foo', new_session)
finally:
self.session.delete(old_session_key)
self.session.delete(new_session_key)
def test_session_load_does_not_create_record(self):
"""
Loading an unknown session key does not create a session record.
Creating session records on load is a DOS vulnerability.
"""
if self.backend is CookieSession:
raise unittest.SkipTest("Cookie backend doesn't have an external store to create records in.")
session = self.backend('someunknownkey')
session.load()
self.assertFalse(session.exists(session.session_key))
# provided unknown key was cycled, not reused
self.assertNotEqual(session.session_key, 'someunknownkey')
class DatabaseSessionTests(SessionTestsMixin, TestCase):
backend = DatabaseSession
def test_session_str(self):
"Session repr should be the session key."
self.session['x'] = 1
self.session.save()
session_key = self.session.session_key
s = Session.objects.get(session_key=session_key)
self.assertEqual(force_text(s), session_key)
def test_session_get_decoded(self):
"""
Test we can use Session.get_decoded to retrieve data stored
in the normal way
"""
self.session['x'] = 1
self.session.save()
s = Session.objects.get(session_key=self.session.session_key)
self.assertEqual(s.get_decoded(), {'x': 1})
def test_sessionmanager_save(self):
"""
Test SessionManager.save method
"""
# Create a session
self.session['y'] = 1
self.session.save()
s = Session.objects.get(session_key=self.session.session_key)
# Change it
Session.objects.save(s.session_key, {'y': 2}, s.expire_date)
# Clear cache, so that it will be retrieved from DB
del self.session._session_cache
self.assertEqual(self.session['y'], 2)
@override_settings(SESSION_ENGINE="django.contrib.sessions.backends.db")
def test_clearsessions_command(self):
"""
Test clearsessions command for clearing expired sessions.
"""
self.assertEqual(0, Session.objects.count())
# One object in the future
self.session['foo'] = 'bar'
self.session.set_expiry(3600)
self.session.save()
# One object in the past
other_session = self.backend()
other_session['foo'] = 'bar'
other_session.set_expiry(-3600)
other_session.save()
# Two sessions are in the database before clearsessions...
self.assertEqual(2, Session.objects.count())
management.call_command('clearsessions')
# ... and one is deleted.
self.assertEqual(1, Session.objects.count())
@override_settings(USE_TZ=True)
class DatabaseSessionWithTimeZoneTests(DatabaseSessionTests):
pass
class CacheDBSessionTests(SessionTestsMixin, TestCase):
backend = CacheDBSession
@unittest.skipIf('DummyCache' in
settings.CACHES[settings.SESSION_CACHE_ALIAS]['BACKEND'],
"Session saving tests require a real cache backend")
def test_exists_searches_cache_first(self):
self.session.save()
with self.assertNumQueries(0):
self.assertTrue(self.session.exists(self.session.session_key))
# Some backends might issue a warning
@ignore_warnings(module="django.core.cache.backends.base")
def test_load_overlong_key(self):
self.session._session_key = (string.ascii_letters + string.digits) * 20
self.assertEqual(self.session.load(), {})
@override_settings(SESSION_CACHE_ALIAS='sessions')
def test_non_default_cache(self):
# Refs #21000 -- CacheDB backend should respect SESSION_CACHE_ALIAS.
self.assertRaises(InvalidCacheBackendError, self.backend)
@override_settings(USE_TZ=True)
class CacheDBSessionWithTimeZoneTests(CacheDBSessionTests):
pass
# Don't need DB flushing for these tests, so can use unittest.TestCase as base class
class FileSessionTests(SessionTestsMixin, unittest.TestCase):
backend = FileSession
def setUp(self):
# Do file session tests in an isolated directory, and kill it after we're done.
self.original_session_file_path = settings.SESSION_FILE_PATH
self.temp_session_store = settings.SESSION_FILE_PATH = tempfile.mkdtemp()
# Reset the file session backend's internal caches
if hasattr(self.backend, '_storage_path'):
del self.backend._storage_path
super(FileSessionTests, self).setUp()
def tearDown(self):
super(FileSessionTests, self).tearDown()
settings.SESSION_FILE_PATH = self.original_session_file_path
shutil.rmtree(self.temp_session_store)
@override_settings(
SESSION_FILE_PATH="/if/this/directory/exists/you/have/a/weird/computer")
def test_configuration_check(self):
del self.backend._storage_path
# Make sure the file backend checks for a good storage dir
self.assertRaises(ImproperlyConfigured, self.backend)
def test_invalid_key_backslash(self):
# Ensure we don't allow directory-traversal.
# This is tested directly on _key_to_file, as load() will swallow
# a SuspiciousOperation in the same way as an IOError - by creating
# a new session, making it unclear whether the slashes were detected.
self.assertRaises(InvalidSessionKey,
self.backend()._key_to_file, "a\\b\\c")
def test_invalid_key_forwardslash(self):
# Ensure we don't allow directory-traversal
self.assertRaises(InvalidSessionKey,
self.backend()._key_to_file, "a/b/c")
@override_settings(SESSION_ENGINE="django.contrib.sessions.backends.file")
def test_clearsessions_command(self):
"""
Test clearsessions command for clearing expired sessions.
"""
storage_path = self.backend._get_storage_path()
file_prefix = settings.SESSION_COOKIE_NAME
def count_sessions():
return len([session_file for session_file in os.listdir(storage_path)
if session_file.startswith(file_prefix)])
self.assertEqual(0, count_sessions())
# One object in the future
self.session['foo'] = 'bar'
self.session.set_expiry(3600)
self.session.save()
# One object in the past
other_session = self.backend()
other_session['foo'] = 'bar'
other_session.set_expiry(-3600)
other_session.save()
# Two sessions are in the filesystem before clearsessions...
self.assertEqual(2, count_sessions())
management.call_command('clearsessions')
# ... and one is deleted.
self.assertEqual(1, count_sessions())
class CacheSessionTests(SessionTestsMixin, unittest.TestCase):
backend = CacheSession
# Some backends might issue a warning
@ignore_warnings(module="django.core.cache.backends.base")
def test_load_overlong_key(self):
self.session._session_key = (string.ascii_letters + string.digits) * 20
self.assertEqual(self.session.load(), {})
def test_default_cache(self):
self.session.save()
self.assertNotEqual(caches['default'].get(self.session.cache_key), None)
@override_settings(CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
'sessions': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'session',
},
}, SESSION_CACHE_ALIAS='sessions')
def test_non_default_cache(self):
# Re-initialize the session backend to make use of overridden settings.
self.session = self.backend()
self.session.save()
self.assertEqual(caches['default'].get(self.session.cache_key), None)
self.assertNotEqual(caches['sessions'].get(self.session.cache_key), None)
class SessionMiddlewareTests(TestCase):
@override_settings(SESSION_COOKIE_SECURE=True)
def test_secure_session_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Simulate a request that modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertTrue(
response.cookies[settings.SESSION_COOKIE_NAME]['secure'])
@override_settings(SESSION_COOKIE_HTTPONLY=True)
def test_httponly_session_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Simulate a request that modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertTrue(
response.cookies[settings.SESSION_COOKIE_NAME]['httponly'])
self.assertIn(http_cookies.Morsel._reserved['httponly'],
str(response.cookies[settings.SESSION_COOKIE_NAME]))
@override_settings(SESSION_COOKIE_HTTPONLY=False)
def test_no_httponly_session_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Simulate a request that modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertFalse(response.cookies[settings.SESSION_COOKIE_NAME]['httponly'])
self.assertNotIn(http_cookies.Morsel._reserved['httponly'],
str(response.cookies[settings.SESSION_COOKIE_NAME]))
def test_session_save_on_500(self):
request = RequestFactory().get('/')
response = HttpResponse('Horrible error')
response.status_code = 500
middleware = SessionMiddleware()
# Simulate a request that modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
# Check that the value wasn't saved above.
self.assertNotIn('hello', request.session.load())
def test_session_delete_on_end(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Before deleting, there has to be an existing cookie
request.COOKIES[settings.SESSION_COOKIE_NAME] = 'abc'
# Simulate a request that ends the session
middleware.process_request(request)
request.session.flush()
# Handle the response through the middleware
response = middleware.process_response(request, response)
# Check that the cookie was deleted, not recreated.
# A deleted cookie header looks like:
# Set-Cookie: sessionid=; expires=Thu, 01-Jan-1970 00:00:00 GMT; Max-Age=0; Path=/
self.assertEqual(
'Set-Cookie: {}={}; expires=Thu, 01-Jan-1970 00:00:00 GMT; '
'Max-Age=0; Path=/'.format(
settings.SESSION_COOKIE_NAME,
'""' if sys.version_info >= (3, 5) else '',
),
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
@override_settings(SESSION_COOKIE_DOMAIN='.example.local')
def test_session_delete_on_end_with_custom_domain(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Before deleting, there has to be an existing cookie
request.COOKIES[settings.SESSION_COOKIE_NAME] = 'abc'
# Simulate a request that ends the session
middleware.process_request(request)
request.session.flush()
# Handle the response through the middleware
response = middleware.process_response(request, response)
# Check that the cookie was deleted, not recreated.
# A deleted cookie header with a custom domain looks like:
# Set-Cookie: sessionid=; Domain=.example.local;
# expires=Thu, 01-Jan-1970 00:00:00 GMT; Max-Age=0; Path=/
self.assertEqual(
'Set-Cookie: {}={}; Domain=.example.local; expires=Thu, '
'01-Jan-1970 00:00:00 GMT; Max-Age=0; Path=/'.format(
settings.SESSION_COOKIE_NAME,
'""' if sys.version_info >= (3, 5) else '',
),
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
# Don't need DB flushing for these tests, so can use unittest.TestCase as base class
class CookieSessionTests(SessionTestsMixin, unittest.TestCase):
backend = CookieSession
def test_save(self):
"""
This test tested exists() in the other session backends, but that
doesn't make sense for us.
"""
pass
def test_cycle(self):
"""
This test tested cycle_key() which would create a new session
key for the same session data. But we can't invalidate previously
signed cookies (other than letting them expire naturally) so
testing for this behavior is meaningless.
"""
pass
@unittest.expectedFailure
def test_actual_expiry(self):
# The cookie backend doesn't handle non-default expiry dates, see #19201
super(CookieSessionTests, self).test_actual_expiry()
def test_unpickling_exception(self):
# signed_cookies backend should handle unpickle exceptions gracefully
# by creating a new session
self.assertEqual(self.session.serializer, JSONSerializer)
self.session.save()
self.session.serializer = PickleSerializer
self.session.load()
|
google-code/android-scripting | refs/heads/master | python/src/Lib/plat-mac/Carbon/Snd.py | 82 | from _Snd import *
|
kalcho83/black-hat-python | refs/heads/master | demos/demo_server.py | 1 | #!/usr/bin/env python
# Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
import base64
from binascii import hexlify
import os
import socket
import sys
import threading
import traceback
import paramiko
from paramiko.py3compat import b, u, decodebytes
# setup logging
paramiko.util.log_to_file('demo_server.log')
host_key = paramiko.RSAKey(filename='test_rsa.key')
#host_key = paramiko.DSSKey(filename='test_dss.key')
print('Read key: ' + u(hexlify(host_key.get_fingerprint())))
class Server (paramiko.ServerInterface):
# 'data' is the output of base64.b64encode(key)
# (using the "user_rsa_key" files)
data = (b'AAAAB3NzaC1yc2EAAAABIwAAAIEAyO4it3fHlmGZWJaGrfeHOVY7RWO3P9M7hp'
b'fAu7jJ2d7eothvfeuoRFtJwhUmZDluRdFyhFY/hFAh76PJKGAusIqIQKlkJxMC'
b'KDqIexkgHAfID/6mqvmnSJf0b5W8v5h2pI/stOSwTQ+pxVhwJ9ctYDhRSlF0iT'
b'UWT10hcuO4Ks8=')
good_pub_key = paramiko.RSAKey(data=decodebytes(data))
def __init__(self):
self.event = threading.Event()
def check_channel_request(self, kind, chanid):
if kind == 'session':
return paramiko.OPEN_SUCCEEDED
return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
def check_auth_password(self, username, password):
if (username == 'robey') and (password == 'foo'):
return paramiko.AUTH_SUCCESSFUL
return paramiko.AUTH_FAILED
def check_auth_publickey(self, username, key):
print('Auth attempt with key: ' + u(hexlify(key.get_fingerprint())))
if (username == 'robey') and (key == self.good_pub_key):
return paramiko.AUTH_SUCCESSFUL
return paramiko.AUTH_FAILED
def check_auth_gssapi_with_mic(self, username,
gss_authenticated=paramiko.AUTH_FAILED,
cc_file=None):
"""
.. note::
We are just checking in `AuthHandler` that the given user is a
valid krb5 principal! We don't check if the krb5 principal is
allowed to log in on the server, because there is no way to do that
in Python. So if you develop your own SSH server with paramiko for
a certain platform like Linux, you should call ``krb5_kuserok()`` in
your local Kerberos library to make sure that the krb5 principal
has an account on the server and is allowed to log in as a user.
.. seealso::
`krb5_kuserok() man page
<http://www.unix.com/man-page/all/3/krb5_kuserok/>`_
"""
if gss_authenticated == paramiko.AUTH_SUCCESSFUL:
return paramiko.AUTH_SUCCESSFUL
return paramiko.AUTH_FAILED
def check_auth_gssapi_keyex(self, username,
gss_authenticated=paramiko.AUTH_FAILED,
cc_file=None):
if gss_authenticated == paramiko.AUTH_SUCCESSFUL:
return paramiko.AUTH_SUCCESSFUL
return paramiko.AUTH_FAILED
def enable_auth_gssapi(self):
UseGSSAPI = True
GSSAPICleanupCredentials = False
return UseGSSAPI
def get_allowed_auths(self, username):
return 'gssapi-keyex,gssapi-with-mic,password,publickey'
def check_channel_shell_request(self, channel):
self.event.set()
return True
def check_channel_pty_request(self, channel, term, width, height, pixelwidth,
pixelheight, modes):
return True
DoGSSAPIKeyExchange = True
# bind the server socket and wait for a connection
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(('', 2200))
except Exception as e:
print('*** Bind failed: ' + str(e))
traceback.print_exc()
sys.exit(1)
try:
sock.listen(100)
print('Listening for connection ...')
client, addr = sock.accept()
except Exception as e:
print('*** Listen/accept failed: ' + str(e))
traceback.print_exc()
sys.exit(1)
print('Got a connection!')
try:
t = paramiko.Transport(client, gss_kex=DoGSSAPIKeyExchange)
t.set_gss_host(socket.getfqdn(""))
try:
t.load_server_moduli()
except:
print('(Failed to load moduli -- gex will be unsupported.)')
raise
t.add_server_key(host_key)
server = Server()
try:
t.start_server(server=server)
except paramiko.SSHException:
print('*** SSH negotiation failed.')
sys.exit(1)
# wait for auth
chan = t.accept(20)
if chan is None:
print('*** No channel.')
sys.exit(1)
print('Authenticated!')
server.event.wait(10)
if not server.event.is_set():
print('*** Client never asked for a shell.')
sys.exit(1)
chan.send('\r\n\r\nWelcome to my dorky little BBS!\r\n\r\n')
chan.send('We are on fire all the time! Hooray! Candy corn for everyone!\r\n')
chan.send('Happy birthday to Robot Dave!\r\n\r\n')
chan.send('Username: ')
f = chan.makefile('rU')
username = f.readline().strip('\r\n')
chan.send('\r\nI don\'t like you, ' + username + '.\r\n')
chan.close()
except Exception as e:
print('*** Caught exception: ' + str(e.__class__) + ': ' + str(e))
traceback.print_exc()
try:
t.close()
except:
pass
sys.exit(1)
|
ZLLab-Mooc/edx-platform | refs/heads/named-release/dogwood.rc | common/test/acceptance/fixtures/library.py | 147 | """
Fixture to create a Content Library
"""
from opaque_keys.edx.keys import CourseKey
from . import STUDIO_BASE_URL
from .base import XBlockContainerFixture, FixtureError
class LibraryFixture(XBlockContainerFixture):
"""
Fixture for ensuring that a library exists.
WARNING: This fixture is NOT idempotent. To avoid conflicts
between tests, you should use unique library identifiers for each fixture.
"""
def __init__(self, org, number, display_name):
"""
Configure the library fixture to create a library with the given
org, number, and display name.
"""
super(LibraryFixture, self).__init__()
self.library_info = {
'org': org,
'number': number,
'display_name': display_name
}
self.display_name = display_name
self._library_key = None
def __str__(self):
"""
String representation of the library fixture, useful for debugging.
"""
return "<LibraryFixture: org='{org}', number='{number}'>".format(**self.library_info)
def install(self):
"""
Create the library and XBlocks within the library.
This is NOT an idempotent method; if the library already exists, this will
raise a `FixtureError`. You should use unique library identifiers to avoid
conflicts between tests.
"""
self._create_library()
self._create_xblock_children(self.library_location, self.children)
return self
@property
def library_key(self):
"""
Get the LibraryLocator for this library, as a string.
"""
return self._library_key
@property
def library_location(self):
"""
Return the locator string for the LibraryRoot XBlock that is the root of the library hierarchy.
"""
lib_key = CourseKey.from_string(self._library_key)
return unicode(lib_key.make_usage_key('library', 'library'))
def _create_library(self):
"""
Create the library described in the fixture.
Will fail if the library already exists.
"""
response = self.session.post(
STUDIO_BASE_URL + '/library/',
data=self._encode_post_dict(self.library_info),
headers=self.headers
)
if response.ok:
self._library_key = response.json()['library_key']
else:
try:
err_msg = response.json().get('ErrMsg')
except ValueError:
err_msg = "Unknown Error"
raise FixtureError("Could not create library {}. Status was {}, error was: {}".format(
self.library_info, response.status_code, err_msg
))
def create_xblock(self, parent_loc, xblock_desc):
# Disable publishing for library XBlocks:
xblock_desc.publish = "not-applicable"
return super(LibraryFixture, self).create_xblock(parent_loc, xblock_desc)
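# Usage sketch (org/number/display_name values are illustrative): a test
# builds the fixture, installs it, then reads back the generated key:
#     fixture = LibraryFixture('TestOrg', 'lib1', 'Test Library')
#     fixture.install()
#     lib_key = fixture.library_key
# Because install() is not idempotent, each test run should use a unique
# (org, number) pair to avoid a FixtureError on conflict.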
|
gptech/ansible | refs/heads/devel | lib/ansible/modules/cloud/profitbricks/profitbricks_volume.py | 66 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: profitbricks_volume
short_description: Create or destroy a volume.
description:
    - Allows you to create or remove a volume from a ProfitBricks datacenter. This module has a dependency on profitbricks >= 1.0.0.
version_added: "2.0"
options:
datacenter:
description:
- The datacenter in which to create the volumes.
required: true
name:
description:
- The name of the volumes. You can enumerate the names using auto_increment.
required: true
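  # The 'server' option exists in the argument_spec below but was undocumented;
  # its description here is inferred from the module's _attach_volume() logic.
  server:
    description:
      - The server to which the volume will be attached, specified by name or UUID. If omitted, the volume is created without being attached to a server.
    required: false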
size:
description:
      - The size of the volume in GB.
required: false
default: 10
bus:
description:
- The bus type.
required: false
default: VIRTIO
choices: [ "IDE", "VIRTIO"]
image:
description:
- The system image ID for the volume, e.g. a3eae284-a2fe-11e4-b187-5f1f641608c8. This can also be a snapshot image ID.
required: true
image_password:
description:
- Password set for the administrative user.
required: false
version_added: '2.2'
ssh_keys:
description:
- Public SSH keys allowing access to the virtual machine.
required: false
version_added: '2.2'
disk_type:
description:
- The disk type of the volume.
required: false
default: HDD
choices: [ "HDD", "SSD" ]
licence_type:
description:
- The licence type for the volume. This is used when the image is non-standard.
required: false
default: UNKNOWN
choices: ["LINUX", "WINDOWS", "UNKNOWN" , "OTHER"]
count:
description:
- The number of volumes you wish to create.
required: false
default: 1
auto_increment:
description:
      - Whether or not to increment a single number in the name for created volumes.
default: yes
choices: ["yes", "no"]
instance_ids:
description:
      - List of volume IDs or names; currently only used when state='absent' to remove volumes.
required: false
subscription_user:
description:
- The ProfitBricks username. Overrides the PB_SUBSCRIPTION_ID environment variable.
required: false
subscription_password:
description:
      - The ProfitBricks password. Overrides the PB_PASSWORD environment variable.
required: false
wait:
description:
      - wait for the operation to complete before returning
required: false
default: "yes"
choices: [ "yes", "no" ]
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 600
state:
description:
      - create or terminate volumes
required: false
default: 'present'
choices: ["present", "absent"]
requirements: [ "profitbricks" ]
author: Matt Baldwin ([email protected])
'''
EXAMPLES = '''
# Create Multiple Volumes
- profitbricks_volume:
datacenter: Tardis One
name: vol%02d
count: 5
auto_increment: yes
wait_timeout: 500
state: present
# Remove Volumes
- profitbricks_volume:
datacenter: Tardis One
instance_ids:
- 'vol01'
- 'vol02'
wait_timeout: 500
state: absent
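# Create a volume and attach it to an existing server
# (the server may be given by name or UUID; values here are illustrative)
- profitbricks_volume:
    datacenter: Tardis One
    server: node01
    name: vol%02d
    state: present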
'''
import re
import time
HAS_PB_SDK = True
try:
from profitbricks.client import ProfitBricksService, Volume
except ImportError:
HAS_PB_SDK = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
uuid_match = re.compile(
    r'[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12}', re.I)
def _wait_for_completion(profitbricks, promise, wait_timeout, msg):
if not promise:
return
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time():
time.sleep(5)
operation_result = profitbricks.get_request(
request_id=promise['requestId'],
status=True)
if operation_result['metadata']['status'] == "DONE":
return
        elif operation_result['metadata']['status'] == "FAILED":
            raise Exception(
                'Request failed to complete ' + msg + ' "' + str(
                    promise['requestId']) + '".')
raise Exception(
'Timed out waiting for async operation ' + msg + ' "' + str(
promise['requestId']
) + '" to complete.')
def _create_volume(module, profitbricks, datacenter, name):
size = module.params.get('size')
bus = module.params.get('bus')
image = module.params.get('image')
image_password = module.params.get('image_password')
ssh_keys = module.params.get('ssh_keys')
disk_type = module.params.get('disk_type')
licence_type = module.params.get('licence_type')
wait_timeout = module.params.get('wait_timeout')
wait = module.params.get('wait')
try:
v = Volume(
name=name,
size=size,
bus=bus,
image=image,
image_password=image_password,
ssh_keys=ssh_keys,
disk_type=disk_type,
licence_type=licence_type
)
volume_response = profitbricks.create_volume(datacenter, v)
if wait:
_wait_for_completion(profitbricks, volume_response,
wait_timeout, "_create_volume")
except Exception as e:
module.fail_json(msg="failed to create the volume: %s" % str(e))
return volume_response
def _delete_volume(module, profitbricks, datacenter, volume):
try:
profitbricks.delete_volume(datacenter, volume)
except Exception as e:
module.fail_json(msg="failed to remove the volume: %s" % str(e))
def create_volume(module, profitbricks):
"""
Creates a volume.
This will create a volume in a datacenter.
module : AnsibleModule object
profitbricks: authenticated profitbricks object.
Returns:
True if the volume was created, false otherwise
"""
datacenter = module.params.get('datacenter')
name = module.params.get('name')
auto_increment = module.params.get('auto_increment')
count = module.params.get('count')
datacenter_found = False
failed = True
volumes = []
# Locate UUID for Datacenter
if not (uuid_match.match(datacenter)):
datacenter_list = profitbricks.list_datacenters()
for d in datacenter_list['items']:
dc = profitbricks.get_datacenter(d['id'])
if datacenter == dc['properties']['name']:
datacenter = d['id']
datacenter_found = True
break
if not datacenter_found:
module.fail_json(msg='datacenter could not be found.')
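    # Name templating sketch: with name='vol%02d' and count=3 this yields
    # ['vol01', 'vol02', 'vol03']; a plain name without a %-format spec
    # (e.g. 'vol') is rewritten to 'vol%d' by the TypeError fallback below.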
if auto_increment:
numbers = set()
count_offset = 1
try:
name % 0
except TypeError:
e = get_exception()
if e.message.startswith('not all'):
name = '%s%%d' % name
else:
module.fail_json(msg=e.message)
number_range = xrange(count_offset, count_offset + count + len(numbers))
available_numbers = list(set(number_range).difference(numbers))
names = []
numbers_to_use = available_numbers[:count]
for number in numbers_to_use:
names.append(name % number)
else:
names = [name] * count
for name in names:
create_response = _create_volume(module, profitbricks, str(datacenter), name)
volumes.append(create_response)
_attach_volume(module, profitbricks, datacenter, create_response['id'])
failed = False
results = {
'failed': failed,
'volumes': volumes,
'action': 'create',
'instance_ids': {
'instances': [i['id'] for i in volumes],
}
}
return results
def delete_volume(module, profitbricks):
"""
Removes a volume.
    This will remove a volume from a datacenter.
module : AnsibleModule object
profitbricks: authenticated profitbricks object.
Returns:
True if the volume was removed, false otherwise
"""
if not isinstance(module.params.get('instance_ids'), list) or len(module.params.get('instance_ids')) < 1:
module.fail_json(msg='instance_ids should be a list of virtual machine ids or names, aborting')
datacenter = module.params.get('datacenter')
changed = False
instance_ids = module.params.get('instance_ids')
# Locate UUID for Datacenter
if not (uuid_match.match(datacenter)):
datacenter_list = profitbricks.list_datacenters()
for d in datacenter_list['items']:
dc = profitbricks.get_datacenter(d['id'])
if datacenter == dc['properties']['name']:
datacenter = d['id']
break
    for n in instance_ids:
        if uuid_match.match(n):
            _delete_volume(module, profitbricks, datacenter, n)
            changed = True
else:
volumes = profitbricks.list_volumes(datacenter)
for v in volumes['items']:
if n == v['properties']['name']:
volume_id = v['id']
_delete_volume(module, profitbricks, datacenter, volume_id)
changed = True
return changed
def _attach_volume(module, profitbricks, datacenter, volume):
"""
Attaches a volume.
This will attach a volume to the server.
module : AnsibleModule object
profitbricks: authenticated profitbricks object.
Returns:
True if the volume was attached, false otherwise
"""
server = module.params.get('server')
# Locate UUID for Server
if server:
if not (uuid_match.match(server)):
server_list = profitbricks.list_servers(datacenter)
for s in server_list['items']:
if server == s['properties']['name']:
server = s['id']
break
try:
return profitbricks.attach_volume(datacenter, server, volume)
except Exception:
e = get_exception()
module.fail_json(msg='failed to attach volume: %s' % str(e))
def main():
module = AnsibleModule(
argument_spec=dict(
datacenter=dict(),
server=dict(),
name=dict(),
size=dict(type='int', default=10),
bus=dict(choices=['VIRTIO', 'IDE'], default='VIRTIO'),
image=dict(),
image_password=dict(default=None, no_log=True),
ssh_keys=dict(type='list', default=[]),
disk_type=dict(choices=['HDD', 'SSD'], default='HDD'),
licence_type=dict(default='UNKNOWN'),
count=dict(type='int', default=1),
auto_increment=dict(type='bool', default=True),
instance_ids=dict(type='list', default=[]),
subscription_user=dict(),
subscription_password=dict(no_log=True),
wait=dict(type='bool', default=True),
wait_timeout=dict(type='int', default=600),
state=dict(default='present'),
)
)
    if not HAS_PB_SDK:
        module.fail_json(msg='profitbricks is required for this module')
    if not module.params.get('subscription_user'):
        module.fail_json(msg='subscription_user parameter is required')
if not module.params.get('subscription_password'):
module.fail_json(msg='subscription_password parameter is required')
subscription_user = module.params.get('subscription_user')
subscription_password = module.params.get('subscription_password')
profitbricks = ProfitBricksService(
username=subscription_user,
password=subscription_password)
state = module.params.get('state')
if state == 'absent':
if not module.params.get('datacenter'):
            module.fail_json(msg='datacenter parameter is required for deleting a volume.')
try:
            changed = delete_volume(module, profitbricks)
module.exit_json(changed=changed)
except Exception:
e = get_exception()
module.fail_json(msg='failed to set volume state: %s' % str(e))
elif state == 'present':
if not module.params.get('datacenter'):
module.fail_json(msg='datacenter parameter is required for new instance')
if not module.params.get('name'):
module.fail_json(msg='name parameter is required for new instance')
try:
            volume_dict_array = create_volume(module, profitbricks)
module.exit_json(**volume_dict_array)
except Exception:
e = get_exception()
module.fail_json(msg='failed to set volume state: %s' % str(e))
if __name__ == '__main__':
main()
|
ofer43211/unisubs | refs/heads/staging | apps/teams/migrations/0037_auto__add_field_invite_author.py | 5 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Invite.author'
db.add_column('teams_invite', 'author', self.gf('django.db.models.fields.related.ForeignKey')(default=10000, to=orm['auth.CustomUser']), keep_default=False)
def backwards(self, orm):
# Deleting field 'Invite.author'
db.delete_column('teams_invite', 'author_id')
models = {
'auth.customuser': {
'Meta': {'object_name': 'CustomUser', '_ormbases': ['auth.User']},
'autoplay_preferences': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'award_points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'biography': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'changes_notification': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'follow_new_video': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'last_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'new_message_notification': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'picture': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'blank': 'True'}),
'preferred_language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'valid_email': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'teams.application': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'Application'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'applications'", 'to': "orm['teams.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_applications'", 'to': "orm['auth.CustomUser']"})
},
'teams.invite': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'Invite'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'max_length': '200', 'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'invitations'", 'to': "orm['teams.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_invitations'", 'to': "orm['auth.CustomUser']"})
},
'teams.team': {
'Meta': {'object_name': 'Team'},
'applicants': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'applicated_teams'", 'symmetrical': 'False', 'through': "orm['teams.Application']", 'to': "orm['auth.CustomUser']"}),
'application_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'header_html_text': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'highlight': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'last_notification_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'logo': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'blank': 'True'}),
'membership_policy': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'}),
'page_content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'teams'", 'symmetrical': 'False', 'through': "orm['teams.TeamMember']", 'to': "orm['auth.CustomUser']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'intro_for_teams'", 'null': 'True', 'to': "orm['videos.Video']"}),
'video_policy': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'through': "orm['teams.TeamVideo']", 'symmetrical': 'False'})
},
'teams.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'changes_notification': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_manager': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'members'", 'to': "orm['teams.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"})
},
'teams.teamvideo': {
'Meta': {'unique_together': "(('team', 'video'),)", 'object_name': 'TeamVideo'},
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
'all_languages': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'completed_languages': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.SubtitleLanguage']", 'symmetrical': 'False', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'thumbnail': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"})
},
'teams.teamvideolanguage': {
'Meta': {'unique_together': "(('team_video', 'subtitle_language'),)", 'object_name': 'TeamVideoLanguage'},
'forked': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'is_lingua_franca': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'is_original': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'db_index': 'True'}),
'percent_done': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'subtitle_language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleLanguage']", 'null': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'team_video': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'languages'", 'to': "orm['teams.TeamVideo']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"})
},
'teams.teamvideolanguagepair': {
'Meta': {'object_name': 'TeamVideoLanguagePair'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_0': ('django.db.models.fields.CharField', [], {'max_length': '16', 'db_index': 'True'}),
'language_1': ('django.db.models.fields.CharField', [], {'max_length': '16', 'db_index': 'True'}),
'language_pair': ('django.db.models.fields.CharField', [], {'max_length': '16', 'db_index': 'True'}),
'percent_complete': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'subtitle_language_0': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_video_language_pairs_0'", 'to': "orm['videos.SubtitleLanguage']"}),
'subtitle_language_1': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_video_language_pairs_1'", 'null': 'True', 'to': "orm['videos.SubtitleLanguage']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'team_video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.TeamVideo']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"})
},
'videos.subtitlelanguage': {
'Meta': {'unique_together': "(('video', 'language', 'standard_language'),)", 'object_name': 'SubtitleLanguage'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'followers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'followed_languages'", 'blank': 'True', 'to': "orm['auth.CustomUser']"}),
'had_version': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'has_version': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_forked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_original': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'last_version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleVersion']", 'null': 'True', 'blank': 'True'}),
'percent_done': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'standard_language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleLanguage']", 'null': 'True', 'blank': 'True'}),
'subtitle_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subtitles_fetched_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"}),
'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
'videos.subtitleversion': {
'Meta': {'unique_together': "(('language', 'version_no'),)", 'object_name': 'SubtitleVersion'},
'datetime_started': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_forked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleLanguage']"}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'notification_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'result_of_rollback': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'text_change': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'time_change': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True'}),
'version_no': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'videos.video': {
'Meta': {'object_name': 'Video'},
'allow_community_edits': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'allow_video_urls_edit': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'complete_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'duration': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'edited': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'followers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'followed_videos'", 'blank': 'True', 'to': "orm['auth.CustomUser']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'languages_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
's3_thumbnail': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'blank': 'True'}),
'subtitles_fetched_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'thumbnail': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'video_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'was_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'widget_views_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'writelock_owners'", 'null': 'True', 'to': "orm['auth.CustomUser']"}),
'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
}
}
complete_apps = ['teams']
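# Apply or revert this migration with South (app label 'teams'); South
# accepts any unique prefix of a migration name:
#     ./manage.py migrate teams 0037_auto__add_field_invite_author
#     ./manage.py migrate teams 0036   # migrate back to the previous state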
|
onitake/ansible | refs/heads/devel | lib/ansible/plugins/connection/buildah.py | 12 | # Based on the docker connection plugin
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# Connection plugin for building container images using buildah tool
# https://github.com/projectatomic/buildah
#
# Written by: Tomas Tomecek (https://github.com/TomasTomecek)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
connection: buildah
short_description: Interact with an existing buildah container
description:
- Run commands or put/fetch files to an existing container using buildah tool.
author: Tomas Tomecek ([email protected])
version_added: 2.4
options:
remote_addr:
description:
- The ID of the container you want to access.
default: inventory_hostname
vars:
- name: ansible_host
# keyword:
# - name: hosts
remote_user:
description:
- User specified via name or ID which is used to execute commands inside the container.
ini:
- section: defaults
key: remote_user
env:
- name: ANSIBLE_REMOTE_USER
vars:
- name: ansible_user
# keyword:
# - name: remote_user
"""
import shlex
import shutil
import subprocess
import ansible.constants as C
from ansible.module_utils._text import to_bytes, to_native
from ansible.plugins.connection import ConnectionBase, ensure_connect
from ansible.utils.display import Display
display = Display()
# this _has to be_ named Connection
class Connection(ConnectionBase):
"""
This is a connection plugin for buildah: it uses buildah binary to interact with the containers
"""
# String used to identify this Connection class from other classes
transport = 'buildah'
has_pipelining = True
become_methods = frozenset(C.BECOME_METHODS)
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
self._container_id = self._play_context.remote_addr
self._connected = False
# container filesystem will be mounted here on host
self._mount_point = None
# `buildah inspect` doesn't contain info about what the default user is -- if it's not
# set, it's empty
self.user = self._play_context.remote_user
def _set_user(self):
self._buildah(b"config", [b"--user=" + to_bytes(self.user, errors='surrogate_or_strict')])
def _buildah(self, cmd, cmd_args=None, in_data=None):
"""
run buildah executable
:param cmd: buildah's command to execute (str)
:param cmd_args: list of arguments to pass to the command (list of str/bytes)
:param in_data: data passed to buildah's stdin
:return: return code, stdout, stderr
"""
local_cmd = ['buildah', cmd, '--', self._container_id]
if cmd_args:
local_cmd += cmd_args
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
display.vvv("RUN %s" % (local_cmd,), host=self._container_id)
p = subprocess.Popen(local_cmd, shell=False, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate(input=in_data)
stdout = to_bytes(stdout, errors='surrogate_or_strict')
stderr = to_bytes(stderr, errors='surrogate_or_strict')
return p.returncode, stdout, stderr
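    # Example: _buildah("run", ["whoami"]) spawns roughly
    #     buildah run -- <container-id> whoami
    # and returns (rc, stdout, stderr) with both streams as bytes.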
def _connect(self):
"""
no persistent connection is being maintained, mount container's filesystem
so we can easily access it
"""
super(Connection, self)._connect()
rc, self._mount_point, stderr = self._buildah("mount")
self._mount_point = self._mount_point.strip()
display.vvvvv("MOUNTPOINT %s RC %s STDERR %r" % (self._mount_point, rc, stderr))
self._connected = True
@ensure_connect
def exec_command(self, cmd, in_data=None, sudoable=False):
""" run specified command in a running OCI container using buildah """
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
# shlex.split has a bug with text strings on Python-2.6 and can only handle text strings on Python-3
cmd_args_list = shlex.split(to_native(cmd, errors='surrogate_or_strict'))
rc, stdout, stderr = self._buildah("run", cmd_args_list)
display.vvvvv("STDOUT %r STDERR %r" % (stderr, stderr))
return rc, stdout, stderr
def put_file(self, in_path, out_path):
""" Place a local file located in 'in_path' inside container at 'out_path' """
super(Connection, self).put_file(in_path, out_path)
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self._container_id)
real_out_path = self._mount_point + to_bytes(out_path, errors='surrogate_or_strict')
shutil.copyfile(
to_bytes(in_path, errors='surrogate_or_strict'),
to_bytes(real_out_path, errors='surrogate_or_strict')
)
# alternatively, this can be implemented using `buildah copy`:
# rc, stdout, stderr = self._buildah(
# "copy",
# [to_bytes(in_path, errors='surrogate_or_strict'),
# to_bytes(out_path, errors='surrogate_or_strict')]
# )
def fetch_file(self, in_path, out_path):
""" obtain file specified via 'in_path' from the container and place it at 'out_path' """
super(Connection, self).fetch_file(in_path, out_path)
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self._container_id)
real_in_path = self._mount_point + to_bytes(in_path, errors='surrogate_or_strict')
shutil.copyfile(
to_bytes(real_in_path, errors='surrogate_or_strict'),
to_bytes(out_path, errors='surrogate_or_strict')
)
def close(self):
""" unmount container's filesystem """
super(Connection, self).close()
rc, stdout, stderr = self._buildah("umount")
display.vvvvv("RC %s STDOUT %r STDERR %r" % (rc, stdout, stderr))
self._connected = False
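# Usage sketch (container name is illustrative): given a working container
# created with `buildah from fedora`, a play can target it directly:
#     ansible-playbook -c buildah -i 'fedora-working-container,' playbook.yml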
|