repo_name | path | copies | size | content | license
---|---|---|---|---|---
moondrop-entertainment/django-nonrel-drawp | django/core/management/base.py | 248 | 16452 |
"""
Base classes for writing management commands (named commands which can
be executed through ``django-admin.py`` or ``manage.py``).
"""
import os
import sys
from optparse import make_option, OptionParser
import django
from django.core.exceptions import ImproperlyConfigured
from django.core.management.color import color_style
from django.utils.encoding import smart_str
class CommandError(Exception):
"""
Exception class indicating a problem while executing a management
command.
If this exception is raised during the execution of a management
command, it will be caught and turned into a nicely-printed error
message to the appropriate output stream (i.e., stderr); as a
result, raising this exception (with a sensible description of the
error) is the preferred way to indicate that something has gone
wrong in the execution of a command.
"""
pass
def handle_default_options(options):
"""
Include any default options that all commands should accept here
so that ManagementUtility can handle them before searching for
user commands.
"""
if options.settings:
os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
if options.pythonpath:
sys.path.insert(0, options.pythonpath)
class BaseCommand(object):
"""
The base class from which all management commands ultimately
derive.
Use this class if you want access to all of the mechanisms which
parse the command-line arguments and work out what code to call in
response; if you don't need to change any of that behavior,
consider using one of the subclasses defined in this file.
If you are interested in overriding/customizing various aspects of
the command-parsing and -execution behavior, the normal flow works
as follows:
1. ``django-admin.py`` or ``manage.py`` loads the command class
and calls its ``run_from_argv()`` method.
2. The ``run_from_argv()`` method calls ``create_parser()`` to get
an ``OptionParser`` for the arguments, parses them, performs
any environment changes requested by options like
``pythonpath``, and then calls the ``execute()`` method,
passing the parsed arguments.
3. The ``execute()`` method attempts to carry out the command by
calling the ``handle()`` method with the parsed arguments; any
output produced by ``handle()`` will be printed to standard
output and, if the command is intended to produce a block of
SQL statements, will be wrapped in ``BEGIN`` and ``COMMIT``.
4. If ``handle()`` raised a ``CommandError``, ``execute()`` will
instead print an error message to ``stderr``.
Thus, the ``handle()`` method is typically the starting point for
subclasses; many built-in commands and command types either place
all of their logic in ``handle()``, or perform some additional
parsing work in ``handle()`` and then delegate from it to more
specialized methods as needed.
Several attributes affect behavior at various steps along the way:
``args``
A string listing the arguments accepted by the command,
suitable for use in help messages; e.g., a command which takes
a list of application names might set this to '<appname
appname ...>'.
``can_import_settings``
A boolean indicating whether the command needs to be able to
import Django settings; if ``True``, ``execute()`` will verify
that this is possible before proceeding. Default value is
``True``.
``help``
A short description of the command, which will be printed in
help messages.
``option_list``
This is the list of ``optparse`` options which will be fed
into the command's ``OptionParser`` for parsing arguments.
``output_transaction``
A boolean indicating whether the command outputs SQL
statements; if ``True``, the output will automatically be
wrapped with ``BEGIN;`` and ``COMMIT;``. Default value is
``False``.
``requires_model_validation``
A boolean; if ``True``, validation of installed models will be
performed prior to executing the command. Default value is
``True``. To validate an individual application's models
rather than all applications' models, call
``self.validate(app)`` from ``handle()``, where ``app`` is the
application's Python module.
"""
# Metadata about this command.
option_list = (
make_option('-v', '--verbosity', action='store', dest='verbosity', default='1',
type='choice', choices=['0', '1', '2', '3'],
help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output'),
make_option('--settings',
help='The Python path to a settings module, e.g. "myproject.settings.main". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.'),
make_option('--pythonpath',
help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".'),
make_option('--traceback', action='store_true',
help='Print traceback on exception'),
)
help = ''
args = ''
# Configuration shortcuts that alter various logic.
can_import_settings = True
requires_model_validation = True
output_transaction = False # Whether to wrap the output in a "BEGIN; COMMIT;"
def __init__(self):
self.style = color_style()
def get_version(self):
"""
Return the Django version, which should be correct for all
built-in Django commands. User-supplied commands should
override this method.
"""
return django.get_version()
def usage(self, subcommand):
"""
Return a brief description of how to use this command, by
default from the attribute ``self.help``.
"""
usage = '%%prog %s [options] %s' % (subcommand, self.args)
if self.help:
return '%s\n\n%s' % (usage, self.help)
else:
return usage
def create_parser(self, prog_name, subcommand):
"""
Create and return the ``OptionParser`` which will be used to
parse the arguments to this command.
"""
return OptionParser(prog=prog_name,
usage=self.usage(subcommand),
version=self.get_version(),
option_list=self.option_list)
def print_help(self, prog_name, subcommand):
"""
Print the help message for this command, derived from
``self.usage()``.
"""
parser = self.create_parser(prog_name, subcommand)
parser.print_help()
def run_from_argv(self, argv):
"""
Set up any environment changes requested (e.g., Python path
and Django settings), then run this command.
"""
parser = self.create_parser(argv[0], argv[1])
options, args = parser.parse_args(argv[2:])
handle_default_options(options)
self.execute(*args, **options.__dict__)
def execute(self, *args, **options):
"""
Try to execute this command, performing model validation if
needed (as controlled by the attribute
``self.requires_model_validation``). If the command raises a
``CommandError``, intercept it and print it sensibly to
stderr.
"""
# Switch to English, because django-admin.py creates database content
# like permissions, and those shouldn't contain any translations.
# But only do this if we can assume we have a working settings file,
# because django.utils.translation requires settings.
if self.can_import_settings:
try:
from django.utils import translation
translation.activate('en-us')
except ImportError, e:
# If settings should be available, but aren't,
# raise the error and quit.
sys.stderr.write(smart_str(self.style.ERROR('Error: %s\n' % e)))
sys.exit(1)
try:
self.stdout = options.get('stdout', sys.stdout)
self.stderr = options.get('stderr', sys.stderr)
if self.requires_model_validation:
self.validate()
output = self.handle(*args, **options)
if output:
if self.output_transaction:
# This needs to be imported here, because it relies on
# settings.
from django.db import connections, DEFAULT_DB_ALIAS
connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
if connection.ops.start_transaction_sql():
self.stdout.write(self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()) + '\n')
self.stdout.write(output)
if self.output_transaction:
self.stdout.write('\n' + self.style.SQL_KEYWORD("COMMIT;") + '\n')
except CommandError, e:
self.stderr.write(smart_str(self.style.ERROR('Error: %s\n' % e)))
sys.exit(1)
def validate(self, app=None, display_num_errors=False):
"""
Validates the given app, raising CommandError for any errors.
If app is None, then this will validate all installed apps.
"""
from django.core.management.validation import get_validation_errors
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
s = StringIO()
num_errors = get_validation_errors(s, app)
if num_errors:
s.seek(0)
error_text = s.read()
raise CommandError("One or more models did not validate:\n%s" % error_text)
if display_num_errors:
self.stdout.write("%s error%s found\n" % (num_errors, num_errors != 1 and 's' or ''))
def handle(self, *args, **options):
"""
The actual logic of the command. Subclasses must implement
this method.
"""
raise NotImplementedError()
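# A minimal sketch (not part of this module) of a custom command built on
# BaseCommand, following the flow described in the class docstring above;
# it would live at yourapp/management/commands/greet.py (hypothetical path):
#
#     class Command(BaseCommand):
#         help = 'Prints a greeting for each name given on the command line.'
#         args = '<name name ...>'
#
#         def handle(self, *args, **options):
#             if not args:
#                 raise CommandError('Enter at least one name.')
#             return '\n'.join('Hello, %s!' % name for name in args)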
class AppCommand(BaseCommand):
"""
A management command which takes one or more installed application
names as arguments, and does something with each of them.
Rather than implementing ``handle()``, subclasses must implement
``handle_app()``, which will be called once for each application.
"""
args = '<appname appname ...>'
def handle(self, *app_labels, **options):
from django.db import models
if not app_labels:
raise CommandError('Enter at least one appname.')
try:
app_list = [models.get_app(app_label) for app_label in app_labels]
except (ImproperlyConfigured, ImportError), e:
raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
output = []
for app in app_list:
app_output = self.handle_app(app, **options)
if app_output:
output.append(app_output)
return '\n'.join(output)
def handle_app(self, app, **options):
"""
Perform the command's actions for ``app``, which will be the
Python module corresponding to an application name given on
the command line.
"""
raise NotImplementedError()
class LabelCommand(BaseCommand):
"""
A management command which takes one or more arbitrary arguments
(labels) on the command line, and does something with each of
them.
Rather than implementing ``handle()``, subclasses must implement
``handle_label()``, which will be called once for each label.
If the arguments should be names of installed applications, use
``AppCommand`` instead.
"""
args = '<label label ...>'
label = 'label'
def handle(self, *labels, **options):
if not labels:
raise CommandError('Enter at least one %s.' % self.label)
output = []
for label in labels:
label_output = self.handle_label(label, **options)
if label_output:
output.append(label_output)
return '\n'.join(output)
def handle_label(self, label, **options):
"""
Perform the command's actions for ``label``, which will be the
string as given on the command line.
"""
raise NotImplementedError()
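# An illustrative LabelCommand subclass (an assumption, not from this file):
# handle() above calls handle_label() once per command-line argument.
#
#     class Command(LabelCommand):
#         label = 'filename'
#
#         def handle_label(self, label, **options):
#             return 'Touched %s' % label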
class NoArgsCommand(BaseCommand):
"""
A command which takes no arguments on the command line.
Rather than implementing ``handle()``, subclasses must implement
``handle_noargs()``; ``handle()`` itself is overridden to ensure
no arguments are passed to the command.
Attempting to pass arguments will raise ``CommandError``.
"""
args = ''
def handle(self, *args, **options):
if args:
raise CommandError("Command doesn't accept any arguments")
return self.handle_noargs(**options)
def handle_noargs(self, **options):
"""
Perform this command's actions.
"""
raise NotImplementedError()
def copy_helper(style, app_or_project, name, directory, other_name=''):
"""
Copies either a Django application layout template or a Django project
layout template into the specified directory.
"""
# style -- A color style object (see django.core.management.color).
# app_or_project -- The string 'app' or 'project'.
# name -- The name of the application or project.
# directory -- The directory to which the layout template should be copied.
# other_name -- When copying an application layout, this should be the name
# of the project.
import re
import shutil
other = {'project': 'app', 'app': 'project'}[app_or_project]
if not re.search(r'^[_a-zA-Z]\w*$', name): # If it's not a valid directory name.
# Provide a smart error message, depending on the error.
if not re.search(r'^[_a-zA-Z]', name):
message = 'make sure the name begins with a letter or underscore'
else:
message = 'use only numbers, letters and underscores'
raise CommandError("%r is not a valid %s name. Please %s." % (name, app_or_project, message))
top_dir = os.path.join(directory, name)
try:
os.mkdir(top_dir)
except OSError, e:
raise CommandError(e)
# Determine where the app or project templates are. Use
# django.__path__[0] because we don't know into which directory
# django has been installed.
template_dir = os.path.join(django.__path__[0], 'conf', '%s_template' % app_or_project)
for d, subdirs, files in os.walk(template_dir):
relative_dir = d[len(template_dir)+1:].replace('%s_name' % app_or_project, name)
if relative_dir:
os.mkdir(os.path.join(top_dir, relative_dir))
for subdir in subdirs[:]:
if subdir.startswith('.'):
subdirs.remove(subdir)
for f in files:
if not f.endswith('.py'):
# Ignore .pyc, .pyo, .py.class etc., as they cause various
# breakages.
continue
path_old = os.path.join(d, f)
path_new = os.path.join(top_dir, relative_dir, f.replace('%s_name' % app_or_project, name))
fp_old = open(path_old, 'r')
fp_new = open(path_new, 'w')
fp_new.write(fp_old.read().replace('{{ %s_name }}' % app_or_project, name).replace('{{ %s_name }}' % other, other_name))
fp_old.close()
fp_new.close()
try:
shutil.copymode(path_old, path_new)
_make_writeable(path_new)
except OSError:
sys.stderr.write(style.NOTICE("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new))
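# Hypothetical invocation of copy_helper (names and paths are illustrative):
#
#     from django.core.management.color import color_style
#     copy_helper(color_style(), 'app', 'polls', '/tmp/myproject', 'myproject')
#
# This copies django/conf/app_template into /tmp/myproject/polls, renaming
# files and substituting the {{ app_name }}/{{ project_name }} placeholders.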
def _make_writeable(filename):
"""
Make sure that the file is writeable. Useful if our source is
read-only.
"""
import stat
if sys.platform.startswith('java'):
# On Jython there is no os.access()
return
if not os.access(filename, os.W_OK):
st = os.stat(filename)
new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
os.chmod(filename, new_permissions)
| bsd-3-clause |
paddyvishnubhatt/daranalysis | lib/jinja2/constants.py | 220 | 1626 |
# -*- coding: utf-8 -*-
"""
jinja.constants
~~~~~~~~~~~~~~~
Various constants.
:copyright: (c) 2017 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
#: list of lorem ipsum words used by the lipsum() helper function
LOREM_IPSUM_WORDS = u'''\
a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
auctor augue bibendum blandit class commodo condimentum congue consectetuer
consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
penatibus per pharetra phasellus placerat platea porta porttitor posuere
potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
viverra volutpat vulputate'''
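# A usage sketch (an assumption, not part of this module): this constant is
# consumed by jinja2.utils.generate_lorem_ipsum, which backs the lipsum()
# helper mentioned above.
#
#     from jinja2.utils import generate_lorem_ipsum
#     html = generate_lorem_ipsum(n=2, min=10, max=20)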
| apache-2.0 |
frejanordsiek/hdf5storage | tests/test_dict_like_storage_methods.py | 1 | 7762 |
# Copyright (c) 2013-2021, Freja Nordsiek
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os.path
import random
import tempfile
import numpy as np
import h5py
import pytest
import hdf5storage
from hdf5storage.pathesc import escape_path
from make_randoms import random_name, random_dict, random_int, \
random_str_ascii, random_str_some_unicode, max_dict_key_length
random.seed()
# Need a list of dict-like types, which will depend on Python
# version.
dict_like = ['dict', 'OrderedDict']
# Need a list of previously invalid characters.
invalid_characters = ('\x00', '/')
# Generate a bunch of random key_values_names.
keys_values_names = [('keys', 'values')]
for i in range(5):
names = ('a', 'a')
while names[0] == names[1]:
names = [random_str_ascii(8) for i in range(2)]
keys_values_names.append(names)
# Set the other key types.
other_key_types = ('bytes', 'numpy.bytes_', 'numpy.unicode_')
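# Illustrative example (values assumed) of one parameter set produced by the
# parametrize decorators below:
#   tp = 'dict'
#   option_keywords = {'store_python_metadata': True,
#                      'matlab_compatible': False,
#                      'dict_like_keys_name': 'keys',
#                      'dict_like_values_name': 'values'}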
@pytest.mark.parametrize(
'tp,option_keywords',
[(tp, {'store_python_metadata': pyth_meta,
'matlab_compatible': mat_meta,
'dict_like_keys_name': names[0],
'dict_like_values_name': names[1]})
for tp in dict_like
for pyth_meta in (True, False)
for mat_meta in (True, False)
for names in keys_values_names])
def test_all_valid_str_keys(tp, option_keywords):
options = hdf5storage.Options(**option_keywords)
key_value_names = (options.dict_like_keys_name,
options.dict_like_values_name)
data = random_dict(tp)
for k in key_value_names:
if k in data:
del data[k]
# Make a random name.
name = random_name()
# Write the data to the file with the given name with the provided
# options.
with tempfile.TemporaryDirectory() as folder:
filename = os.path.join(folder, 'data.h5')
hdf5storage.write(data, path=name, filename=filename,
options=options)
with h5py.File(filename, mode='r') as f:
for k in key_value_names:
assert escape_path(k) not in f[name]
for k in data:
assert escape_path(k) in f[name]
@pytest.mark.parametrize(
'tp,ch,option_keywords',
[(tp, ch, {'store_python_metadata': pyth_meta,
'matlab_compatible': mat_meta,
'dict_like_keys_name': names[0],
'dict_like_values_name': names[1]})
for tp in dict_like
for pyth_meta in (True, False)
for mat_meta in (True, False)
for ch in invalid_characters
for names in keys_values_names])
def test_str_key_previously_invalid_char(tp, ch, option_keywords):
options = hdf5storage.Options(**option_keywords)
key_value_names = (options.dict_like_keys_name,
options.dict_like_values_name)
data = random_dict(tp)
for k in key_value_names:
if k in data:
del data[k]
# Add a random invalid str key using the provided character
key = key_value_names[0]
while key in key_value_names:
key = ch.join([random_str_ascii(max_dict_key_length)
for i in range(2)])
data[key] = random_int()
# Make a random name.
name = random_name()
# Write the data to the file with the given name with the provided
# options.
with tempfile.TemporaryDirectory() as folder:
filename = os.path.join(folder, 'data.h5')
hdf5storage.write(data, path=name, filename=filename,
options=options)
with h5py.File(filename, mode='r') as f:
for k in key_value_names:
assert escape_path(k) not in f[name]
for k in data:
assert escape_path(k) in f[name]
@pytest.mark.parametrize(
'tp,other_tp,option_keywords',
[(tp, otp, {'store_python_metadata': pyth_meta,
'matlab_compatible': mat_meta,
'dict_like_keys_name': names[0],
'dict_like_values_name': names[1]})
for tp in dict_like
for pyth_meta in (True, False)
for mat_meta in (True, False)
for otp in other_key_types
for names in keys_values_names])
def test_string_type_non_str_key(tp, other_tp, option_keywords):
options = hdf5storage.Options(**option_keywords)
key_value_names = (options.dict_like_keys_name,
options.dict_like_values_name)
data = random_dict(tp)
for k in key_value_names:
if k in data:
del data[k]
keys = list(data.keys())
key_gen = random_str_some_unicode(max_dict_key_length)
if other_tp == 'numpy.bytes_':
key = np.bytes_(key_gen.encode('UTF-8'))
elif other_tp == 'numpy.unicode_':
key = np.unicode_(key_gen)
elif other_tp == 'bytes':
key = key_gen.encode('UTF-8')
data[key] = random_int()
keys.append(key_gen)
# Make a random name.
name = random_name()
# Write the data to the file with the given name with the provided
# options.
with tempfile.TemporaryDirectory() as folder:
filename = os.path.join(folder, 'data.h5')
hdf5storage.write(data, path=name, filename=filename,
options=options)
with h5py.File(filename, mode='r') as f:
assert set(keys) == set(f[name].keys())
@pytest.mark.parametrize(
'tp,option_keywords',
[(tp, {'store_python_metadata': pyth_meta,
'matlab_compatible': mat_meta,
'dict_like_keys_name': names[0],
'dict_like_values_name': names[1]})
for tp in dict_like
for pyth_meta in (True, False)
for mat_meta in (True, False)
for names in keys_values_names])
def test_int_key(tp, option_keywords):
options = hdf5storage.Options(**option_keywords)
key_value_names = (options.dict_like_keys_name,
options.dict_like_values_name)
data = random_dict(tp)
for k in key_value_names:
if k in data:
del data[k]
key = random_int()
data[key] = random_int()
# Make a random name.
name = random_name()
# Write the data to the file with the given name with the provided
# options.
with tempfile.TemporaryDirectory() as folder:
filename = os.path.join(folder, 'data.h5')
hdf5storage.write(data, path=name, filename=filename,
options=options)
with h5py.File(filename, mode='r') as f:
assert set(key_value_names) == set(f[name].keys())
| bsd-2-clause |
tedelhourani/ansible | lib/ansible/module_utils/facts/system/chroot.py | 40 | 1029 |
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.module_utils.facts.collector import BaseFactCollector
def is_chroot():
is_chroot = None
if os.environ.get('debian_chroot', False):
is_chroot = True
else:
my_root = os.stat('/')
try:
# check if my file system is the root one
proc_root = os.stat('/proc/1/root/.')
is_chroot = my_root.st_ino != proc_root.st_ino or my_root.st_dev != proc_root.st_dev
except Exception:
# Not root, or no /proc; fall back to checking whether root is inode #2
is_chroot = (my_root.st_ino != 2)
return is_chroot
class ChrootFactCollector(BaseFactCollector):
name = 'chroot'
_fact_ids = set(['is_chroot'])
def collect(self, module=None, collected_facts=None):
return {'is_chroot': is_chroot()}
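# A minimal usage sketch (not part of the original module); both arguments
# to collect() are optional:
#
#     collector = ChrootFactCollector()
#     facts = collector.collect()  # e.g. {'is_chroot': False}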
| gpl-3.0 |
greenify/zodiacy | bin/import_to_sql.py | 1 | 1369 |
#!/usr/bin/env python3
# encoding: utf-8
"""import_to_sql.py: Imports the Horoscope database into a SQLite database."""
import argparse
import os
import sqlite3
import json
__author__ = "Project Zodiacy"
__copyright__ = "Copyright 2015, Project Zodiacy"
parser = argparse.ArgumentParser(description="Awesome SQLite importer")
parser.add_argument('-i', '--inFile', dest='inFile',
required=True, help='Input file')
parser.add_argument('-s', '--sqlFile', dest='sqlFile',
required=True, help='SQLite file')
args = parser.parse_args()
# Create SQLite
if os.path.exists(args.sqlFile):
os.remove(args.sqlFile)
with sqlite3.connect(args.sqlFile) as conn, open(args.inFile) as f:
c = conn.cursor()
c.execute('''CREATE TABLE horoscopes
(sign int, keyword text, subject_line text, sms_interp text, interp text, rating int, date text)''')
for horoscopesStr in f:
horoscopes = json.loads(horoscopesStr)
if isinstance(horoscopes, dict):
# WHY does tarot.com use different formats?
horoscopes = horoscopes.values()
for h in horoscopes:
c.execute("INSERT INTO horoscopes VALUES (?,?,?,?,?,?,?)",
(int(h['sign']), h['keyword'], h['subject_line'],
h['sms_interp'], h['interp'], int(h['rating']), h['date']))
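# Hypothetical invocation (file names are illustrative); the input file is
# expected to contain one JSON document per line:
#   python3 import_to_sql.py --inFile horoscopes.jsonl --sqlFile horoscopes.db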
| mit |
Neamar/django | tests/utils_tests/test_baseconv.py | 326 | 1787 |
from unittest import TestCase
from django.utils.baseconv import (
BaseConverter, base2, base16, base36, base56, base62, base64,
)
from django.utils.six.moves import range
class TestBaseConv(TestCase):
def test_baseconv(self):
nums = [-10 ** 10, 10 ** 10] + list(range(-100, 100))
for converter in [base2, base16, base36, base56, base62, base64]:
for i in nums:
self.assertEqual(i, converter.decode(converter.encode(i)))
def test_base11(self):
base11 = BaseConverter('0123456789-', sign='$')
self.assertEqual(base11.encode(1234), '-22')
self.assertEqual(base11.decode('-22'), 1234)
self.assertEqual(base11.encode(-1234), '$-22')
self.assertEqual(base11.decode('$-22'), -1234)
def test_base20(self):
base20 = BaseConverter('0123456789abcdefghij')
self.assertEqual(base20.encode(1234), '31e')
self.assertEqual(base20.decode('31e'), 1234)
self.assertEqual(base20.encode(-1234), '-31e')
self.assertEqual(base20.decode('-31e'), -1234)
def test_base64(self):
self.assertEqual(base64.encode(1234), 'JI')
self.assertEqual(base64.decode('JI'), 1234)
self.assertEqual(base64.encode(-1234), '$JI')
self.assertEqual(base64.decode('$JI'), -1234)
def test_base7(self):
base7 = BaseConverter('cjdhel3', sign='g')
self.assertEqual(base7.encode(1234), 'hejd')
self.assertEqual(base7.decode('hejd'), 1234)
self.assertEqual(base7.encode(-1234), 'ghejd')
self.assertEqual(base7.decode('ghejd'), -1234)
def test_exception(self):
self.assertRaises(ValueError, BaseConverter, 'abc', sign='a')
self.assertIsInstance(BaseConverter('abc', sign='d'), BaseConverter)
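# Illustrative round trip with base36 (digits '0'-'9' then 'a'-'z'):
# 1234 = 34 * 36 + 10, so base36.encode(1234) == 'ya' and
# base36.decode('ya') == 1234.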
| bsd-3-clause |
lovedaybrooke/gender-decoder | db_repository/versions/005_migration.py | 1 | 1581 |
from sqlalchemy import *
from migrate import *
import datetime
from migrate.changeset import schema
pre_meta = MetaData()
post_meta = MetaData()
job_ad = Table('job_ad', pre_meta,
Column('hash', VARCHAR, primary_key=True, nullable=False),
Column('date', TIMESTAMP),
Column('jobAdText', TEXT),
Column('masculine_word_count', INTEGER),
Column('feminine_word_count', INTEGER),
Column('coding', VARCHAR),
Column('masculine_coded_words', TEXT),
Column('feminine_coded_words', TEXT),
)
job_ad = Table('job_ad', post_meta,
Column('hash', String, primary_key=True, nullable=False),
Column('date', DateTime, default=ColumnDefault(datetime.datetime.utcnow)),
Column('ad_text', Text),
Column('masculine_word_count', Integer, default=ColumnDefault(0)),
Column('feminine_word_count', Integer, default=ColumnDefault(0)),
Column('masculine_coded_words', Text),
Column('feminine_coded_words', Text),
Column('coding', String),
)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind
# migrate_engine to your metadata
pre_meta.bind = migrate_engine
post_meta.bind = migrate_engine
pre_meta.tables['job_ad'].columns['jobAdText'].drop()
post_meta.tables['job_ad'].columns['ad_text'].create()
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pre_meta.bind = migrate_engine
post_meta.bind = migrate_engine
pre_meta.tables['job_ad'].columns['jobAdText'].create()
post_meta.tables['job_ad'].columns['ad_text'].drop()
| mit |
CodePath-Parse/MiAR | Firebase/functions/node_modules/firebase-admin/node_modules/grpc/third_party/boringssl/util/generate_build_files.py | 10 | 24976 |
# Copyright (c) 2015, Google Inc.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
# OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Enumerates source files for consumption by various build systems."""
import optparse
import os
import subprocess
import sys
import json
# OS_ARCH_COMBOS maps from OS and platform to the OpenSSL assembly "style" for
# that platform and the extension used by asm files.
OS_ARCH_COMBOS = [
('linux', 'arm', 'linux32', [], 'S'),
('linux', 'aarch64', 'linux64', [], 'S'),
('linux', 'ppc64le', 'ppc64le', [], 'S'),
('linux', 'x86', 'elf', ['-fPIC', '-DOPENSSL_IA32_SSE2'], 'S'),
('linux', 'x86_64', 'elf', [], 'S'),
('mac', 'x86', 'macosx', ['-fPIC', '-DOPENSSL_IA32_SSE2'], 'S'),
('mac', 'x86_64', 'macosx', [], 'S'),
('win', 'x86', 'win32n', ['-DOPENSSL_IA32_SSE2'], 'asm'),
('win', 'x86_64', 'nasm', [], 'asm'),
]
# NON_PERL_FILES enumerates assembly files that are not processed by the
# perlasm system.
NON_PERL_FILES = {
('linux', 'arm'): [
'src/crypto/curve25519/asm/x25519-asm-arm.S',
'src/crypto/poly1305/poly1305_arm_asm.S',
],
('linux', 'x86_64'): [
'src/crypto/curve25519/asm/x25519-asm-x86_64.S',
],
('mac', 'x86_64'): [
'src/crypto/curve25519/asm/x25519-asm-x86_64.S',
],
}
PREFIX = None
def PathOf(x):
return x if not PREFIX else os.path.join(PREFIX, x)
class Android(object):
def __init__(self):
self.header = \
"""# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is created by generate_build_files.py. Do not edit manually.
"""
def PrintVariableSection(self, out, name, files):
out.write('%s := \\\n' % name)
for f in sorted(files):
out.write(' %s\\\n' % f)
out.write('\n')
def WriteFiles(self, files, asm_outputs):
# New Android.bp format
with open('sources.bp', 'w+') as blueprint:
blueprint.write(self.header.replace('#', '//'))
blueprint.write('cc_defaults {\n')
blueprint.write(' name: "libcrypto_sources",\n')
blueprint.write(' srcs: [\n')
for f in sorted(files['crypto']):
blueprint.write(' "%s",\n' % f)
blueprint.write(' ],\n')
blueprint.write(' target: {\n')
for ((osname, arch), asm_files) in asm_outputs:
if osname != 'linux' or arch == 'ppc64le':
continue
if arch == 'aarch64':
arch = 'arm64'
blueprint.write(' android_%s: {\n' % arch)
blueprint.write(' srcs: [\n')
for f in sorted(asm_files):
blueprint.write(' "%s",\n' % f)
blueprint.write(' ],\n')
blueprint.write(' },\n')
if arch == 'x86' or arch == 'x86_64':
blueprint.write(' linux_%s: {\n' % arch)
blueprint.write(' srcs: [\n')
for f in sorted(asm_files):
blueprint.write(' "%s",\n' % f)
blueprint.write(' ],\n')
blueprint.write(' },\n')
blueprint.write(' },\n')
blueprint.write('}\n\n')
blueprint.write('cc_defaults {\n')
blueprint.write(' name: "libssl_sources",\n')
blueprint.write(' srcs: [\n')
for f in sorted(files['ssl']):
blueprint.write(' "%s",\n' % f)
blueprint.write(' ],\n')
blueprint.write('}\n\n')
blueprint.write('cc_defaults {\n')
blueprint.write(' name: "bssl_sources",\n')
blueprint.write(' srcs: [\n')
for f in sorted(files['tool']):
blueprint.write(' "%s",\n' % f)
blueprint.write(' ],\n')
blueprint.write('}\n\n')
blueprint.write('cc_defaults {\n')
blueprint.write(' name: "boringssl_test_support_sources",\n')
blueprint.write(' srcs: [\n')
for f in sorted(files['test_support']):
blueprint.write(' "%s",\n' % f)
blueprint.write(' ],\n')
blueprint.write('}\n\n')
blueprint.write('cc_defaults {\n')
blueprint.write(' name: "boringssl_crypto_test_sources",\n')
blueprint.write(' srcs: [\n')
for f in sorted(files['crypto_test']):
blueprint.write(' "%s",\n' % f)
blueprint.write(' ],\n')
blueprint.write('}\n\n')
blueprint.write('cc_defaults {\n')
blueprint.write(' name: "boringssl_ssl_test_sources",\n')
blueprint.write(' srcs: [\n')
for f in sorted(files['ssl_test']):
blueprint.write(' "%s",\n' % f)
blueprint.write(' ],\n')
blueprint.write('}\n\n')
blueprint.write('cc_defaults {\n')
blueprint.write(' name: "boringssl_tests_sources",\n')
blueprint.write(' srcs: [\n')
for f in sorted(files['test']):
blueprint.write(' "%s",\n' % f)
blueprint.write(' ],\n')
blueprint.write('}\n')
# Legacy Android.mk format, only used by Trusty in new branches
with open('sources.mk', 'w+') as makefile:
makefile.write(self.header)
self.PrintVariableSection(makefile, 'crypto_sources', files['crypto'])
for ((osname, arch), asm_files) in asm_outputs:
if osname != 'linux':
continue
self.PrintVariableSection(
makefile, '%s_%s_sources' % (osname, arch), asm_files)
class Bazel(object):
"""Bazel outputs files suitable for including in Bazel files."""
def __init__(self):
self.firstSection = True
self.header = \
"""# This file is created by generate_build_files.py. Do not edit manually.
"""
def PrintVariableSection(self, out, name, files):
if not self.firstSection:
out.write('\n')
self.firstSection = False
out.write('%s = [\n' % name)
for f in sorted(files):
out.write(' "%s",\n' % PathOf(f))
out.write(']\n')
def WriteFiles(self, files, asm_outputs):
with open('BUILD.generated.bzl', 'w+') as out:
out.write(self.header)
self.PrintVariableSection(out, 'ssl_headers', files['ssl_headers'])
self.PrintVariableSection(
out, 'ssl_internal_headers', files['ssl_internal_headers'])
self.PrintVariableSection(out, 'ssl_sources', files['ssl'])
self.PrintVariableSection(out, 'ssl_c_sources', files['ssl_c'])
self.PrintVariableSection(out, 'ssl_cc_sources', files['ssl_cc'])
self.PrintVariableSection(out, 'crypto_headers', files['crypto_headers'])
self.PrintVariableSection(
out, 'crypto_internal_headers', files['crypto_internal_headers'])
self.PrintVariableSection(out, 'crypto_sources', files['crypto'])
self.PrintVariableSection(out, 'tool_sources', files['tool'])
self.PrintVariableSection(out, 'tool_headers', files['tool_headers'])
for ((osname, arch), asm_files) in asm_outputs:
self.PrintVariableSection(
out, 'crypto_sources_%s_%s' % (osname, arch), asm_files)
with open('BUILD.generated_tests.bzl', 'w+') as out:
out.write(self.header)
out.write('test_support_sources = [\n')
for filename in sorted(files['test_support'] +
files['test_support_headers'] +
files['crypto_internal_headers'] +
files['ssl_internal_headers']):
if os.path.basename(filename) == 'malloc.cc':
continue
out.write(' "%s",\n' % PathOf(filename))
out.write(']\n\n')
self.PrintVariableSection(out, 'crypto_test_sources',
files['crypto_test'])
self.PrintVariableSection(out, 'ssl_test_sources', files['ssl_test'])
out.write('def create_tests(copts, crypto, ssl):\n')
name_counts = {}
for test in files['tests']:
name = os.path.basename(test[0])
name_counts[name] = name_counts.get(name, 0) + 1
first = True
for test in files['tests']:
name = os.path.basename(test[0])
if name_counts[name] > 1:
if '/' in test[1]:
name += '_' + os.path.splitext(os.path.basename(test[1]))[0]
else:
name += '_' + test[1].replace('-', '_')
if not first:
out.write('\n')
first = False
src_prefix = 'src/' + test[0]
for src in files['test']:
if src.startswith(src_prefix):
break
else:
raise ValueError("Can't find source for %s" % test[0])
out.write(' native.cc_test(\n')
out.write(' name = "%s",\n' % name)
out.write(' size = "small",\n')
out.write(' srcs = ["%s"] + test_support_sources,\n' %
PathOf(src))
data_files = []
if len(test) > 1:
out.write(' args = [\n')
for arg in test[1:]:
if '/' in arg:
out.write(' "$(location %s)",\n' %
PathOf(os.path.join('src', arg)))
data_files.append('src/%s' % arg)
else:
out.write(' "%s",\n' % arg)
out.write(' ],\n')
out.write(' copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],\n')
if len(data_files) > 0:
out.write(' data = [\n')
for filename in data_files:
out.write(' "%s",\n' % PathOf(filename))
out.write(' ],\n')
if 'ssl/' in test[0]:
out.write(' deps = [\n')
out.write(' crypto,\n')
out.write(' ssl,\n')
out.write(' ],\n')
else:
out.write(' deps = [crypto],\n')
out.write(' )\n')
class GN(object):
def __init__(self):
self.firstSection = True
self.header = \
"""# Copyright (c) 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file is created by generate_build_files.py. Do not edit manually.
"""
def PrintVariableSection(self, out, name, files):
if not self.firstSection:
out.write('\n')
self.firstSection = False
out.write('%s = [\n' % name)
for f in sorted(files):
out.write(' "%s",\n' % f)
out.write(']\n')
def WriteFiles(self, files, asm_outputs):
with open('BUILD.generated.gni', 'w+') as out:
out.write(self.header)
self.PrintVariableSection(out, 'crypto_sources',
files['crypto'] + files['crypto_headers'] +
files['crypto_internal_headers'])
self.PrintVariableSection(out, 'ssl_sources',
files['ssl'] + files['ssl_headers'] +
files['ssl_internal_headers'])
for ((osname, arch), asm_files) in asm_outputs:
self.PrintVariableSection(
out, 'crypto_sources_%s_%s' % (osname, arch), asm_files)
fuzzers = [os.path.splitext(os.path.basename(fuzzer))[0]
for fuzzer in files['fuzz']]
self.PrintVariableSection(out, 'fuzzers', fuzzers)
with open('BUILD.generated_tests.gni', 'w+') as out:
self.firstSection = True
out.write(self.header)
self.PrintVariableSection(out, 'test_support_sources',
files['test_support'] +
files['test_support_headers'])
self.PrintVariableSection(out, 'crypto_test_sources',
files['crypto_test'])
self.PrintVariableSection(out, 'ssl_test_sources', files['ssl_test'])
out.write('\n')
out.write('template("create_tests") {\n')
all_tests = []
for test in sorted(files['test']):
test_name = 'boringssl_%s' % os.path.splitext(os.path.basename(test))[0]
all_tests.append(test_name)
out.write(' executable("%s") {\n' % test_name)
out.write(' sources = [\n')
out.write(' "%s",\n' % test)
out.write(' ]\n')
out.write(' sources += test_support_sources\n')
out.write(' if (defined(invoker.configs_exclude)) {\n')
out.write(' configs -= invoker.configs_exclude\n')
out.write(' }\n')
out.write(' configs += invoker.configs\n')
out.write(' deps = invoker.deps\n')
out.write(' }\n')
out.write('\n')
out.write(' group(target_name) {\n')
out.write(' deps = [\n')
for test_name in sorted(all_tests):
out.write(' ":%s",\n' % test_name)
out.write(' ]\n')
out.write(' }\n')
out.write('}\n')
class GYP(object):
def __init__(self):
self.header = \
"""# Copyright (c) 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file is created by generate_build_files.py. Do not edit manually.
"""
def PrintVariableSection(self, out, name, files):
out.write(' \'%s\': [\n' % name)
for f in sorted(files):
out.write(' \'%s\',\n' % f)
out.write(' ],\n')
def WriteFiles(self, files, asm_outputs):
with open('boringssl.gypi', 'w+') as gypi:
gypi.write(self.header + '{\n \'variables\': {\n')
self.PrintVariableSection(gypi, 'boringssl_ssl_sources',
files['ssl'] + files['ssl_headers'] +
files['ssl_internal_headers'])
self.PrintVariableSection(gypi, 'boringssl_crypto_sources',
files['crypto'] + files['crypto_headers'] +
files['crypto_internal_headers'])
for ((osname, arch), asm_files) in asm_outputs:
self.PrintVariableSection(gypi, 'boringssl_%s_%s_sources' %
(osname, arch), asm_files)
gypi.write(' }\n}\n')
def FindCMakeFiles(directory):
"""Returns list of all CMakeLists.txt files recursively in directory."""
cmakefiles = []
for (path, _, filenames) in os.walk(directory):
for filename in filenames:
if filename == 'CMakeLists.txt':
cmakefiles.append(os.path.join(path, filename))
return cmakefiles
def NoTests(dent, is_dir):
"""Filter function that can be passed to FindCFiles in order to remove test
sources."""
if is_dir:
return dent != 'test'
return 'test.' not in dent and not dent.startswith('example_')
def OnlyTests(dent, is_dir):
"""Filter function that can be passed to FindCFiles in order to remove
non-test sources."""
if is_dir:
return dent != 'test'
return '_test.' in dent or dent.startswith('example_')
def AllFiles(dent, is_dir):
"""Filter function that can be passed to FindCFiles in order to include all
sources."""
return True
def NotGTestMain(dent, is_dir):
return dent != 'gtest_main.cc'
def SSLHeaderFiles(dent, is_dir):
return dent in ['ssl.h', 'tls1.h', 'ssl23.h', 'ssl3.h', 'dtls1.h']
def FindCFiles(directory, filter_func):
"""Recurses through directory and returns a list of paths to all the C source
files that pass filter_func."""
cfiles = []
for (path, dirnames, filenames) in os.walk(directory):
for filename in filenames:
if not filename.endswith('.c') and not filename.endswith('.cc'):
continue
if not filter_func(filename, False):
continue
cfiles.append(os.path.join(path, filename))
for (i, dirname) in enumerate(dirnames):
if not filter_func(dirname, True):
del dirnames[i]
return cfiles
def FindHeaderFiles(directory, filter_func):
"""Recurses through directory and returns a list of paths to all the header files that pass filter_func."""
hfiles = []
for (path, dirnames, filenames) in os.walk(directory):
for filename in filenames:
if not filename.endswith('.h'):
continue
if not filter_func(filename, False):
continue
hfiles.append(os.path.join(path, filename))
for (i, dirname) in enumerate(dirnames):
if not filter_func(dirname, True):
del dirnames[i]
return hfiles
def ExtractPerlAsmFromCMakeFile(cmakefile):
"""Parses the contents of the CMakeLists.txt file passed as an argument and
returns a list of all the perlasm() directives found in the file."""
perlasms = []
with open(cmakefile) as f:
for line in f:
line = line.strip()
if not line.startswith('perlasm('):
continue
if not line.endswith(')'):
raise ValueError('Bad perlasm line in %s' % cmakefile)
# Remove "perlasm(" from start and ")" from end
params = line[8:-1].split()
if len(params) < 2:
raise ValueError('Bad perlasm line in %s' % cmakefile)
perlasms.append({
'extra_args': params[2:],
'input': os.path.join(os.path.dirname(cmakefile), params[1]),
'output': os.path.join(os.path.dirname(cmakefile), params[0]),
})
return perlasms
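# Illustrative example (hypothetical directive): a CMakeLists.txt line such as
#   perlasm(sha256-x86_64.${ASM_EXT} asm/sha256-x86_64.pl)
# yields {'extra_args': [], 'input': '<dir>/asm/sha256-x86_64.pl',
#         'output': '<dir>/sha256-x86_64.${ASM_EXT}'}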
def ReadPerlAsmOperations():
"""Returns a list of all perlasm() directives found in CMake config files in
src/."""
perlasms = []
cmakefiles = FindCMakeFiles('src')
for cmakefile in cmakefiles:
perlasms.extend(ExtractPerlAsmFromCMakeFile(cmakefile))
return perlasms
def PerlAsm(output_filename, input_filename, perlasm_style, extra_args):
"""Runs the a perlasm script and puts the output into output_filename."""
base_dir = os.path.dirname(output_filename)
if not os.path.isdir(base_dir):
os.makedirs(base_dir)
subprocess.check_call(
['perl', input_filename, perlasm_style] + extra_args + [output_filename])
def ArchForAsmFilename(filename):
"""Returns the architectures that a given asm file should be compiled for
based on substrings in the filename."""
if 'x86_64' in filename or 'avx2' in filename:
return ['x86_64']
elif ('x86' in filename and 'x86_64' not in filename) or '586' in filename:
return ['x86']
elif 'armx' in filename:
return ['arm', 'aarch64']
elif 'armv8' in filename:
return ['aarch64']
elif 'arm' in filename:
return ['arm']
elif 'ppc' in filename:
return ['ppc64le']
else:
raise ValueError('Unknown arch for asm filename: ' + filename)
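# Illustrative mappings (derived from the rules above):
#   ArchForAsmFilename('sha256-armv8.pl') -> ['aarch64']
#   ArchForAsmFilename('aesni-x86_64.pl') -> ['x86_64']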
def WriteAsmFiles(perlasms):
"""Generates asm files from perlasm directives for each supported OS x
platform combination."""
asmfiles = {}
for osarch in OS_ARCH_COMBOS:
(osname, arch, perlasm_style, extra_args, asm_ext) = osarch
key = (osname, arch)
outDir = '%s-%s' % key
for perlasm in perlasms:
filename = os.path.basename(perlasm['input'])
output = perlasm['output']
if not output.startswith('src'):
raise ValueError('output missing src: %s' % output)
output = os.path.join(outDir, output[4:])
if output.endswith('-armx.${ASM_EXT}'):
output = output.replace('-armx',
'-armx64' if arch == 'aarch64' else '-armx32')
output = output.replace('${ASM_EXT}', asm_ext)
if arch in ArchForAsmFilename(filename):
PerlAsm(output, perlasm['input'], perlasm_style,
perlasm['extra_args'] + extra_args)
asmfiles.setdefault(key, []).append(output)
for (key, non_perl_asm_files) in NON_PERL_FILES.iteritems():
asmfiles.setdefault(key, []).extend(non_perl_asm_files)
return asmfiles
def IsGTest(path):
with open(path) as f:
return "#include <gtest/gtest.h>" in f.read()
def main(platforms):
crypto_c_files = FindCFiles(os.path.join('src', 'crypto'), NoTests)
ssl_source_files = FindCFiles(os.path.join('src', 'ssl'), NoTests)
tool_c_files = FindCFiles(os.path.join('src', 'tool'), NoTests)
tool_h_files = FindHeaderFiles(os.path.join('src', 'tool'), AllFiles)
# Generate err_data.c
with open('err_data.c', 'w+') as err_data:
subprocess.check_call(['go', 'run', 'err_data_generate.go'],
cwd=os.path.join('src', 'crypto', 'err'),
stdout=err_data)
crypto_c_files.append('err_data.c')
test_support_c_files = FindCFiles(os.path.join('src', 'crypto', 'test'),
NotGTestMain)
test_support_h_files = (
FindHeaderFiles(os.path.join('src', 'crypto', 'test'), AllFiles) +
FindHeaderFiles(os.path.join('src', 'ssl', 'test'), AllFiles))
test_c_files = []
crypto_test_files = ['src/crypto/test/gtest_main.cc']
# TODO(davidben): Remove this loop once all tests are converted.
for path in FindCFiles(os.path.join('src', 'crypto'), OnlyTests):
if IsGTest(path):
crypto_test_files.append(path)
else:
test_c_files.append(path)
ssl_test_files = FindCFiles(os.path.join('src', 'ssl'), OnlyTests)
ssl_test_files.append('src/crypto/test/gtest_main.cc')
fuzz_c_files = FindCFiles(os.path.join('src', 'fuzz'), NoTests)
ssl_h_files = (
FindHeaderFiles(
os.path.join('src', 'include', 'openssl'),
SSLHeaderFiles))
def NotSSLHeaderFiles(filename, is_dir):
return not SSLHeaderFiles(filename, is_dir)
crypto_h_files = (
FindHeaderFiles(
os.path.join('src', 'include', 'openssl'),
NotSSLHeaderFiles))
ssl_internal_h_files = FindHeaderFiles(os.path.join('src', 'ssl'), NoTests)
crypto_internal_h_files = FindHeaderFiles(
os.path.join('src', 'crypto'), NoTests)
with open('src/util/all_tests.json', 'r') as f:
tests = json.load(f)
# For now, GTest-based tests are specified manually.
tests = [test for test in tests if test[0] not in ['crypto/crypto_test',
'decrepit/decrepit_test',
'ssl/ssl_test']]
test_binaries = set([test[0] for test in tests])
test_sources = set([
test.replace('.cc', '').replace('.c', '').replace(
'src/',
'')
for test in test_c_files])
if test_binaries != test_sources:
print 'Test sources and configured tests do not match'
a = test_binaries.difference(test_sources)
if len(a) > 0:
print 'These tests are configured without sources: ' + str(a)
b = test_sources.difference(test_binaries)
if len(b) > 0:
print 'These test sources are not configured: ' + str(b)
files = {
'crypto': crypto_c_files,
'crypto_headers': crypto_h_files,
'crypto_internal_headers': crypto_internal_h_files,
'crypto_test': sorted(crypto_test_files),
'fuzz': fuzz_c_files,
'ssl': ssl_source_files,
'ssl_c': [s for s in ssl_source_files if s.endswith('.c')],
'ssl_cc': [s for s in ssl_source_files if s.endswith('.cc')],
'ssl_headers': ssl_h_files,
'ssl_internal_headers': ssl_internal_h_files,
'ssl_test': sorted(ssl_test_files),
'tool': tool_c_files,
'tool_headers': tool_h_files,
'test': test_c_files,
'test_support': test_support_c_files,
'test_support_headers': test_support_h_files,
'tests': tests,
}
asm_outputs = sorted(WriteAsmFiles(ReadPerlAsmOperations()).iteritems())
for platform in platforms:
platform.WriteFiles(files, asm_outputs)
return 0
if __name__ == '__main__':
parser = optparse.OptionParser(usage='Usage: %prog [--prefix=<path>]'
' [android|bazel|gn|gyp]')
parser.add_option('--prefix', dest='prefix',
help='For Bazel, prepend argument to all source files')
options, args = parser.parse_args(sys.argv[1:])
PREFIX = options.prefix
if not args:
parser.print_help()
sys.exit(1)
platforms = []
for s in args:
if s == 'android':
platforms.append(Android())
elif s == 'bazel':
platforms.append(Bazel())
elif s == 'gn':
platforms.append(GN())
elif s == 'gyp':
platforms.append(GYP())
else:
parser.print_help()
sys.exit(1)
sys.exit(main(platforms))
| apache-2.0 |
doismellburning/edx-platform | common/djangoapps/student/migrations/0006_expand_meta_field.py | 188 | 9246 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'UserProfile.meta'
db.alter_column('auth_userprofile', 'meta', self.gf('django.db.models.fields.TextField')())
def backwards(self, orm):
# Changing field 'UserProfile.meta'
db.alter_column('auth_userprofile', 'meta', self.gf('django.db.models.fields.CharField')(max_length=255))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'student.pendingemailchange': {
'Meta': {'object_name': 'PendingEmailChange'},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_email': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.pendingnamechange': {
'Meta': {'object_name': 'PendingNameChange'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'rationale': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.registration': {
'Meta': {'object_name': 'Registration', 'db_table': "'auth_registration'"},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.userprofile': {
'Meta': {'object_name': 'UserProfile', 'db_table': "'auth_userprofile'"},
'courseware': ('django.db.models.fields.CharField', [], {'default': "'course.xml'", 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'meta': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"})
},
'student.usertestgroup': {
'Meta': {'object_name': 'UserTestGroup'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'db_index': 'True', 'symmetrical': 'False'})
}
}
complete_apps = ['student']
|
agpl-3.0
|
majora2007/plexpy
|
lib/unidecode/x006.py
|
252
|
3961
|
data = (
'[?]', # 0x00
'[?]', # 0x01
'[?]', # 0x02
'[?]', # 0x03
'[?]', # 0x04
'[?]', # 0x05
'[?]', # 0x06
'[?]', # 0x07
'[?]', # 0x08
'[?]', # 0x09
'[?]', # 0x0a
'[?]', # 0x0b
',', # 0x0c
'[?]', # 0x0d
'[?]', # 0x0e
'[?]', # 0x0f
'[?]', # 0x10
'[?]', # 0x11
'[?]', # 0x12
'[?]', # 0x13
'[?]', # 0x14
'[?]', # 0x15
'[?]', # 0x16
'[?]', # 0x17
'[?]', # 0x18
'[?]', # 0x19
'[?]', # 0x1a
';', # 0x1b
'[?]', # 0x1c
'[?]', # 0x1d
'[?]', # 0x1e
'?', # 0x1f
'[?]', # 0x20
'', # 0x21
'a', # 0x22
'\'', # 0x23
'w\'', # 0x24
'', # 0x25
'y\'', # 0x26
'', # 0x27
'b', # 0x28
'@', # 0x29
't', # 0x2a
'th', # 0x2b
'j', # 0x2c
'H', # 0x2d
'kh', # 0x2e
'd', # 0x2f
'dh', # 0x30
'r', # 0x31
'z', # 0x32
's', # 0x33
'sh', # 0x34
'S', # 0x35
'D', # 0x36
'T', # 0x37
'Z', # 0x38
'`', # 0x39
'G', # 0x3a
'[?]', # 0x3b
'[?]', # 0x3c
'[?]', # 0x3d
'[?]', # 0x3e
'[?]', # 0x3f
'', # 0x40
'f', # 0x41
'q', # 0x42
'k', # 0x43
'l', # 0x44
'm', # 0x45
'n', # 0x46
'h', # 0x47
'w', # 0x48
'~', # 0x49
'y', # 0x4a
'an', # 0x4b
'un', # 0x4c
'in', # 0x4d
'a', # 0x4e
'u', # 0x4f
'i', # 0x50
'W', # 0x51
'', # 0x52
'', # 0x53
'\'', # 0x54
'\'', # 0x55
'[?]', # 0x56
'[?]', # 0x57
'[?]', # 0x58
'[?]', # 0x59
'[?]', # 0x5a
'[?]', # 0x5b
'[?]', # 0x5c
'[?]', # 0x5d
'[?]', # 0x5e
'[?]', # 0x5f
'0', # 0x60
'1', # 0x61
'2', # 0x62
'3', # 0x63
'4', # 0x64
'5', # 0x65
'6', # 0x66
'7', # 0x67
'8', # 0x68
'9', # 0x69
'%', # 0x6a
'.', # 0x6b
',', # 0x6c
'*', # 0x6d
'[?]', # 0x6e
'[?]', # 0x6f
'', # 0x70
'\'', # 0x71
'\'', # 0x72
'\'', # 0x73
'', # 0x74
'\'', # 0x75
'\'w', # 0x76
'\'u', # 0x77
'\'y', # 0x78
'tt', # 0x79
'tth', # 0x7a
'b', # 0x7b
't', # 0x7c
'T', # 0x7d
'p', # 0x7e
'th', # 0x7f
'bh', # 0x80
'\'h', # 0x81
'H', # 0x82
'ny', # 0x83
'dy', # 0x84
'H', # 0x85
'ch', # 0x86
'cch', # 0x87
'dd', # 0x88
'D', # 0x89
'D', # 0x8a
'Dt', # 0x8b
'dh', # 0x8c
'ddh', # 0x8d
'd', # 0x8e
'D', # 0x8f
'D', # 0x90
'rr', # 0x91
'R', # 0x92
'R', # 0x93
'R', # 0x94
'R', # 0x95
'R', # 0x96
'R', # 0x97
'j', # 0x98
'R', # 0x99
'S', # 0x9a
'S', # 0x9b
'S', # 0x9c
'S', # 0x9d
'S', # 0x9e
'T', # 0x9f
'GH', # 0xa0
'F', # 0xa1
'F', # 0xa2
'F', # 0xa3
'v', # 0xa4
'f', # 0xa5
'ph', # 0xa6
'Q', # 0xa7
'Q', # 0xa8
'kh', # 0xa9
'k', # 0xaa
'K', # 0xab
'K', # 0xac
'ng', # 0xad
'K', # 0xae
'g', # 0xaf
'G', # 0xb0
'N', # 0xb1
'G', # 0xb2
'G', # 0xb3
'G', # 0xb4
'L', # 0xb5
'L', # 0xb6
'L', # 0xb7
'L', # 0xb8
'N', # 0xb9
'N', # 0xba
'N', # 0xbb
'N', # 0xbc
'N', # 0xbd
'h', # 0xbe
'Ch', # 0xbf
'hy', # 0xc0
'h', # 0xc1
'H', # 0xc2
'@', # 0xc3
'W', # 0xc4
'oe', # 0xc5
'oe', # 0xc6
'u', # 0xc7
'yu', # 0xc8
'yu', # 0xc9
'W', # 0xca
'v', # 0xcb
'y', # 0xcc
'Y', # 0xcd
'Y', # 0xce
'W', # 0xcf
'', # 0xd0
'', # 0xd1
'y', # 0xd2
'y\'', # 0xd3
'.', # 0xd4
'ae', # 0xd5
'', # 0xd6
'', # 0xd7
'', # 0xd8
'', # 0xd9
'', # 0xda
'', # 0xdb
'', # 0xdc
'@', # 0xdd
'#', # 0xde
'', # 0xdf
'', # 0xe0
'', # 0xe1
'', # 0xe2
'', # 0xe3
'', # 0xe4
'', # 0xe5
'', # 0xe6
'', # 0xe7
'', # 0xe8
'^', # 0xe9
'', # 0xea
'', # 0xeb
'', # 0xec
'', # 0xed
'[?]', # 0xee
'[?]', # 0xef
'0', # 0xf0
'1', # 0xf1
'2', # 0xf2
'3', # 0xf3
'4', # 0xf4
'5', # 0xf5
'6', # 0xf6
'7', # 0xf7
'8', # 0xf8
'9', # 0xf9
'Sh', # 0xfa
'D', # 0xfb
'Gh', # 0xfc
'&', # 0xfd
'+m', # 0xfe
)
|
gpl-3.0
|
Serag8/Bachelor
|
google_appengine/lib/oauth2client/oauth2client/util.py
|
20
|
5617
|
#!/usr/bin/env python
#
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Common utility library."""
__author__ = [
'[email protected] (Rafe Kaplan)',
'[email protected] (Guido van Rossum)',
]
__all__ = [
'positional',
'POSITIONAL_WARNING',
'POSITIONAL_EXCEPTION',
'POSITIONAL_IGNORE',
]
import inspect
import logging
import types
import urllib
import urlparse
logger = logging.getLogger(__name__)
POSITIONAL_WARNING = 'WARNING'
POSITIONAL_EXCEPTION = 'EXCEPTION'
POSITIONAL_IGNORE = 'IGNORE'
POSITIONAL_SET = frozenset([POSITIONAL_WARNING, POSITIONAL_EXCEPTION,
POSITIONAL_IGNORE])
positional_parameters_enforcement = POSITIONAL_WARNING
def positional(max_positional_args):
"""A decorator to declare that only the first N arguments my be positional.
  This decorator makes it easy to support Python 3 style keyword-only
  parameters. For example, in Python 3 it is possible to write:
  def fn(pos1, *, kwonly1=None, kwonly2=None):
...
  All parameters after * must be passed as keywords:
fn(10, 'kw1', 'kw2') # Raises exception.
fn(10, kwonly1='kw1') # Ok.
Example:
To define a function like above, do:
@positional(1)
def fn(pos1, kwonly1=None, kwonly2=None):
...
If no default value is provided to a keyword argument, it becomes a required
keyword argument:
@positional(0)
def fn(required_kw):
...
This must be called with the keyword parameter:
fn() # Raises exception.
fn(10) # Raises exception.
fn(required_kw=10) # Ok.
When defining instance or class methods always remember to account for
'self' and 'cls':
class MyClass(object):
@positional(2)
def my_method(self, pos1, kwonly1=None):
...
@classmethod
@positional(2)
def my_method(cls, pos1, kwonly1=None):
...
The positional decorator behavior is controlled by
util.positional_parameters_enforcement, which may be set to
POSITIONAL_EXCEPTION, POSITIONAL_WARNING or POSITIONAL_IGNORE to raise an
exception, log a warning, or do nothing, respectively, if a declaration is
violated.
Args:
    max_positional_args: Maximum number of positional arguments. All
      parameters after this index must be keyword only.
Returns:
    A decorator that prevents arguments after max_positional_args from being
    used as positional parameters.
Raises:
    TypeError if a keyword-only argument is provided as a positional
parameter, but only if util.positional_parameters_enforcement is set to
POSITIONAL_EXCEPTION.
"""
def positional_decorator(wrapped):
def positional_wrapper(*args, **kwargs):
if len(args) > max_positional_args:
plural_s = ''
if max_positional_args != 1:
plural_s = 's'
message = '%s() takes at most %d positional argument%s (%d given)' % (
wrapped.__name__, max_positional_args, plural_s, len(args))
if positional_parameters_enforcement == POSITIONAL_EXCEPTION:
raise TypeError(message)
elif positional_parameters_enforcement == POSITIONAL_WARNING:
logger.warning(message)
else: # IGNORE
pass
return wrapped(*args, **kwargs)
return positional_wrapper
if isinstance(max_positional_args, (int, long)):
return positional_decorator
else:
args, _, _, defaults = inspect.getargspec(max_positional_args)
return positional(len(args) - len(defaults))(max_positional_args)
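# Illustrative usage (not part of the original module). With the default
# POSITIONAL_WARNING enforcement, the extra positional argument below is
# logged as a warning; under POSITIONAL_EXCEPTION the same call would raise
# TypeError.
#
#   @positional(1)
#   def fn(pos1, kwonly1=None):
#     return pos1, kwonly1
#
#   fn(10)              # ok
#   fn(10, 20)          # warns: fn() takes at most 1 positional argument (2 given)
#   fn(10, kwonly1=20)  # ok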
def scopes_to_string(scopes):
"""Converts scope value to a string.
If scopes is a string then it is simply passed through. If scopes is an
iterable then a string is returned that is all the individual scopes
concatenated with spaces.
Args:
scopes: string or iterable of strings, the scopes.
Returns:
The scopes formatted as a single string.
"""
if isinstance(scopes, types.StringTypes):
return scopes
else:
return ' '.join(scopes)
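# Illustrative (not in the original source):
#   scopes_to_string('email') == 'email'
#   scopes_to_string(['email', 'profile']) == 'email profile'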
def dict_to_tuple_key(dictionary):
"""Converts a dictionary to a tuple that can be used as an immutable key.
The resulting key is always sorted so that logically equivalent dictionaries
always produce an identical tuple for a key.
Args:
dictionary: the dictionary to use as the key.
Returns:
    A tuple representing the dictionary in its naturally sorted ordering.
"""
return tuple(sorted(dictionary.items()))
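# Illustrative (not in the original source):
#   dict_to_tuple_key({'b': 2, 'a': 1}) == (('a', 1), ('b', 2))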
def _add_query_parameter(url, name, value):
"""Adds a query parameter to a url.
Replaces the current value if it already exists in the URL.
Args:
url: string, url to add the query parameter to.
name: string, query parameter name.
value: string, query parameter value.
Returns:
    The url with the query parameter added or replaced. Does not update the
    url if value is None.
"""
if value is None:
return url
else:
parsed = list(urlparse.urlparse(url))
q = dict(urlparse.parse_qsl(parsed[4]))
q[name] = value
parsed[4] = urllib.urlencode(q)
return urlparse.urlunparse(parsed)
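# Illustrative (not in the original source):
#   _add_query_parameter('http://example.com/path?a=1', 'b', '2')
#   -> 'http://example.com/path?a=1&b=2'
# (parameter order may vary, since the query string is rebuilt from a dict)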
|
mit
|
Daniel-CA/odoo
|
addons/report_webkit/__init__.py
|
382
|
1593
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010 Camptocamp SA (http://www.camptocamp.com)
# All Right Reserved
#
# Author : Nicolas Bessi (Camptocamp)
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
import header
import company
import report_helper
import webkit_report
import ir_report
import wizard
import convert
import report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
Mattze96/youtube-dl
|
youtube_dl/extractor/mixcloud.py
|
91
|
4042
|
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_urllib_parse_unquote
from ..utils import (
ExtractorError,
HEADRequest,
str_to_int,
)
class MixcloudIE(InfoExtractor):
_VALID_URL = r'^(?:https?://)?(?:www\.)?mixcloud\.com/([^/]+)/([^/]+)'
IE_NAME = 'mixcloud'
_TESTS = [{
'url': 'http://www.mixcloud.com/dholbach/cryptkeeper/',
'info_dict': {
'id': 'dholbach-cryptkeeper',
'ext': 'mp3',
'title': 'Cryptkeeper',
'description': 'After quite a long silence from myself, finally another Drum\'n\'Bass mix with my favourite current dance floor bangers.',
'uploader': 'Daniel Holbach',
'uploader_id': 'dholbach',
'thumbnail': 're:https?://.*\.jpg',
'view_count': int,
'like_count': int,
},
}, {
'url': 'http://www.mixcloud.com/gillespeterson/caribou-7-inch-vinyl-mix-chat/',
'info_dict': {
'id': 'gillespeterson-caribou-7-inch-vinyl-mix-chat',
'ext': 'mp3',
'title': 'Caribou 7 inch Vinyl Mix & Chat',
'description': 'md5:2b8aec6adce69f9d41724647c65875e8',
'uploader': 'Gilles Peterson Worldwide',
'uploader_id': 'gillespeterson',
'thumbnail': 're:https?://.*/images/',
'view_count': int,
'like_count': int,
},
}]
def _check_url(self, url, track_id, ext):
try:
            # We only want to know whether the request succeeded;
            # don't download the whole file.
self._request_webpage(
HEADRequest(url), track_id,
'Trying %s URL' % ext)
return True
except ExtractorError:
return False
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
uploader = mobj.group(1)
cloudcast_name = mobj.group(2)
track_id = compat_urllib_parse_unquote('-'.join((uploader, cloudcast_name)))
webpage = self._download_webpage(url, track_id)
preview_url = self._search_regex(
r'\s(?:data-preview-url|m-preview)="([^"]+)"', webpage, 'preview url')
song_url = preview_url.replace('/previews/', '/c/originals/')
if not self._check_url(song_url, track_id, 'mp3'):
song_url = song_url.replace('.mp3', '.m4a').replace('originals/', 'm4a/64/')
if not self._check_url(song_url, track_id, 'm4a'):
raise ExtractorError('Unable to extract track url')
PREFIX = (
r'm-play-on-spacebar[^>]+'
r'(?:\s+[a-zA-Z0-9-]+(?:="[^"]+")?)*?\s+')
title = self._html_search_regex(
PREFIX + r'm-title="([^"]+)"', webpage, 'title')
thumbnail = self._proto_relative_url(self._html_search_regex(
PREFIX + r'm-thumbnail-url="([^"]+)"', webpage, 'thumbnail',
fatal=False))
uploader = self._html_search_regex(
PREFIX + r'm-owner-name="([^"]+)"',
webpage, 'uploader', fatal=False)
uploader_id = self._search_regex(
r'\s+"profile": "([^"]+)",', webpage, 'uploader id', fatal=False)
description = self._og_search_description(webpage)
like_count = str_to_int(self._search_regex(
r'\bbutton-favorite\b[^>]+m-ajax-toggle-count="([^"]+)"',
webpage, 'like count', fatal=False))
view_count = str_to_int(self._search_regex(
[r'<meta itemprop="interactionCount" content="UserPlays:([0-9]+)"',
r'/listeners/?">([0-9,.]+)</a>'],
webpage, 'play count', fatal=False))
return {
'id': track_id,
'title': title,
'url': song_url,
'description': description,
'thumbnail': thumbnail,
'uploader': uploader,
'uploader_id': uploader_id,
'view_count': view_count,
'like_count': like_count,
}
|
unlicense
|
agiliq/fundraiser
|
customadmin/tests.py
|
2
|
1215
|
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
from django.test.client import Client
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
class CustomAdminTestcase(TestCase):
def setUp(self):
self.c = Client()
self.user = User.objects.create_user(
username="admin", email="[email protected]", password="admin")
def test_indexView(self):
response = self.c.get(reverse("customadmin:customadmin_index"))
self.assertEqual(302, response.status_code)
self.c.login(username="admin", password="admin")
response = self.c.get(reverse("customadmin:customadmin_index"))
self.assertEqual(200, response.status_code)
def test_unapprovedusers(self):
response = self.c.get(reverse("customadmin:unapproved"))
self.assertEqual(302, response.status_code)
self.c.login(username="admin", password="admin")
response = self.c.get(reverse("customadmin:unapproved"))
self.assertEqual(200, response.status_code)
|
bsd-3-clause
|
purplewall1206/shadowsocks
|
tests/test_udp_src.py
|
1009
|
2482
|
#!/usr/bin/python
import socket
import socks
SERVER_IP = '127.0.0.1'
SERVER_PORT = 1081
if __name__ == '__main__':
# Test 1: same source port IPv4
sock_out = socks.socksocket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
sock_out.set_proxy(socks.SOCKS5, SERVER_IP, SERVER_PORT)
sock_out.bind(('127.0.0.1', 9000))
sock_in1 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
sock_in2 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
sock_in1.bind(('127.0.0.1', 9001))
sock_in2.bind(('127.0.0.1', 9002))
sock_out.sendto(b'data', ('127.0.0.1', 9001))
result1 = sock_in1.recvfrom(8)
sock_out.sendto(b'data', ('127.0.0.1', 9002))
result2 = sock_in2.recvfrom(8)
sock_out.close()
sock_in1.close()
sock_in2.close()
# make sure they're from the same source port
assert result1 == result2
# Test 2: same source port IPv6
# try again from the same port but IPv6
sock_out = socks.socksocket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
sock_out.set_proxy(socks.SOCKS5, SERVER_IP, SERVER_PORT)
sock_out.bind(('127.0.0.1', 9000))
sock_in1 = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM,
socket.SOL_UDP)
sock_in2 = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM,
socket.SOL_UDP)
sock_in1.bind(('::1', 9001))
sock_in2.bind(('::1', 9002))
sock_out.sendto(b'data', ('::1', 9001))
result1 = sock_in1.recvfrom(8)
sock_out.sendto(b'data', ('::1', 9002))
result2 = sock_in2.recvfrom(8)
sock_out.close()
sock_in1.close()
sock_in2.close()
# make sure they're from the same source port
assert result1 == result2
# Test 3: different source ports IPv6
sock_out = socks.socksocket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
sock_out.set_proxy(socks.SOCKS5, SERVER_IP, SERVER_PORT)
sock_out.bind(('127.0.0.1', 9003))
sock_in1 = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM,
socket.SOL_UDP)
sock_in1.bind(('::1', 9001))
sock_out.sendto(b'data', ('::1', 9001))
result3 = sock_in1.recvfrom(8)
# make sure they're from different source ports
assert result1 != result3
sock_out.close()
sock_in1.close()
|
apache-2.0
|
ianblenke/awsebcli
|
ebcli/bundled/botocore/vendored/requests/packages/charade/escprober.py
|
206
|
3273
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel,
ISO2022KRSMModel)
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .compat import wrap_ord
class EscCharSetProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mCodingSM = [
CodingStateMachine(HZSMModel),
CodingStateMachine(ISO2022CNSMModel),
CodingStateMachine(ISO2022JPSMModel),
CodingStateMachine(ISO2022KRSMModel)
]
self.reset()
def reset(self):
CharSetProber.reset(self)
for codingSM in self._mCodingSM:
if not codingSM:
continue
codingSM.active = True
codingSM.reset()
self._mActiveSM = len(self._mCodingSM)
self._mDetectedCharset = None
def get_charset_name(self):
return self._mDetectedCharset
def get_confidence(self):
if self._mDetectedCharset:
return 0.99
else:
return 0.00
def feed(self, aBuf):
for c in aBuf:
# PY3K: aBuf is a byte array, so c is an int, not a byte
for codingSM in self._mCodingSM:
if not codingSM:
continue
if not codingSM.active:
continue
codingState = codingSM.next_state(wrap_ord(c))
if codingState == constants.eError:
codingSM.active = False
self._mActiveSM -= 1
if self._mActiveSM <= 0:
self._mState = constants.eNotMe
return self.get_state()
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
self._mDetectedCharset = codingSM.get_coding_state_machine() # nopep8
return self.get_state()
return self.get_state()
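# Illustrative usage (not part of the original module); exact results depend
# on the escsm state machine models:
#   prober = EscCharSetProber()
#   prober.feed(b'\x1b$B')       # an ISO-2022-JP escape sequence
#   prober.get_charset_name()    # should report 'ISO-2022-JP' once eItsMe fires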
|
apache-2.0
|
shawnbow/git-repo
|
subcmds/grep.py
|
89
|
7931
|
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import sys
from color import Coloring
from command import PagedCommand
from git_command import git_require, GitCommand
class GrepColoring(Coloring):
def __init__(self, config):
Coloring.__init__(self, config, 'grep')
self.project = self.printer('project', attr='bold')
class Grep(PagedCommand):
common = True
helpSummary = "Print lines matching a pattern"
helpUsage = """
%prog {pattern | -e pattern} [<project>...]
"""
helpDescription = """
Search for the specified patterns in all project files.
Boolean Options
---------------
The following options can appear as often as necessary to express
the pattern to locate:
-e PATTERN
--and, --or, --not, -(, -)
Further, the -r/--revision option may be specified multiple times
in order to scan multiple trees. If the same file matches in more
than one tree, only the first result is reported, prefixed by the
revision name it was found under.
Examples
--------
Look for a line that has '#define' and either 'MAX_PATH' or 'PATH_MAX':
repo grep -e '#define' --and -\\( -e MAX_PATH -e PATH_MAX \\)
Look for a line that has 'NODE' or 'Unexpected' in files that
contain a line that matches both expressions:
repo grep --all-match -e NODE -e Unexpected
"""
def _Options(self, p):
def carry(option,
opt_str,
value,
parser):
pt = getattr(parser.values, 'cmd_argv', None)
if pt is None:
pt = []
setattr(parser.values, 'cmd_argv', pt)
if opt_str == '-(':
pt.append('(')
elif opt_str == '-)':
pt.append(')')
else:
pt.append(opt_str)
if value is not None:
pt.append(value)
g = p.add_option_group('Sources')
g.add_option('--cached',
action='callback', callback=carry,
help='Search the index, instead of the work tree')
g.add_option('-r', '--revision',
dest='revision', action='append', metavar='TREEish',
help='Search TREEish, instead of the work tree')
g = p.add_option_group('Pattern')
g.add_option('-e',
action='callback', callback=carry,
metavar='PATTERN', type='str',
help='Pattern to search for')
g.add_option('-i', '--ignore-case',
action='callback', callback=carry,
help='Ignore case differences')
g.add_option('-a', '--text',
action='callback', callback=carry,
help="Process binary files as if they were text")
g.add_option('-I',
action='callback', callback=carry,
help="Don't match the pattern in binary files")
g.add_option('-w', '--word-regexp',
action='callback', callback=carry,
help='Match the pattern only at word boundaries')
g.add_option('-v', '--invert-match',
action='callback', callback=carry,
help='Select non-matching lines')
g.add_option('-G', '--basic-regexp',
action='callback', callback=carry,
help='Use POSIX basic regexp for patterns (default)')
g.add_option('-E', '--extended-regexp',
action='callback', callback=carry,
help='Use POSIX extended regexp for patterns')
g.add_option('-F', '--fixed-strings',
action='callback', callback=carry,
help='Use fixed strings (not regexp) for pattern')
g = p.add_option_group('Pattern Grouping')
g.add_option('--all-match',
action='callback', callback=carry,
help='Limit match to lines that have all patterns')
g.add_option('--and', '--or', '--not',
action='callback', callback=carry,
help='Boolean operators to combine patterns')
g.add_option('-(', '-)',
action='callback', callback=carry,
help='Boolean operator grouping')
g = p.add_option_group('Output')
g.add_option('-n',
action='callback', callback=carry,
help='Prefix the line number to matching lines')
g.add_option('-C',
action='callback', callback=carry,
metavar='CONTEXT', type='str',
help='Show CONTEXT lines around match')
g.add_option('-B',
action='callback', callback=carry,
metavar='CONTEXT', type='str',
help='Show CONTEXT lines before match')
g.add_option('-A',
action='callback', callback=carry,
metavar='CONTEXT', type='str',
help='Show CONTEXT lines after match')
g.add_option('-l', '--name-only', '--files-with-matches',
action='callback', callback=carry,
help='Show only file names containing matching lines')
g.add_option('-L', '--files-without-match',
action='callback', callback=carry,
help='Show only file names not containing matching lines')
def Execute(self, opt, args):
out = GrepColoring(self.manifest.manifestProject.config)
cmd_argv = ['grep']
if out.is_on and git_require((1, 6, 3)):
cmd_argv.append('--color')
cmd_argv.extend(getattr(opt, 'cmd_argv', []))
if '-e' not in cmd_argv:
if not args:
self.Usage()
cmd_argv.append('-e')
cmd_argv.append(args[0])
args = args[1:]
projects = self.GetProjects(args)
full_name = False
if len(projects) > 1:
cmd_argv.append('--full-name')
full_name = True
have_rev = False
if opt.revision:
if '--cached' in cmd_argv:
print('fatal: cannot combine --cached and --revision', file=sys.stderr)
sys.exit(1)
have_rev = True
cmd_argv.extend(opt.revision)
cmd_argv.append('--')
bad_rev = False
have_match = False
for project in projects:
p = GitCommand(project,
cmd_argv,
bare = False,
capture_stdout = True,
capture_stderr = True)
if p.Wait() != 0:
# no results
#
if p.stderr:
if have_rev and 'fatal: ambiguous argument' in p.stderr:
bad_rev = True
else:
out.project('--- project %s ---' % project.relpath)
out.nl()
out.write("%s", p.stderr)
out.nl()
continue
have_match = True
# We cut the last element, to avoid a blank line.
#
r = p.stdout.split('\n')
r = r[0:-1]
if have_rev and full_name:
for line in r:
rev, line = line.split(':', 1)
out.write("%s", rev)
out.write(':')
out.project(project.relpath)
out.write('/')
out.write("%s", line)
out.nl()
elif full_name:
for line in r:
out.project(project.relpath)
out.write('/')
out.write("%s", line)
out.nl()
else:
for line in r:
print(line)
if have_match:
sys.exit(0)
elif have_rev and bad_rev:
for r in opt.revision:
print("error: can't search revision %s" % r, file=sys.stderr)
sys.exit(1)
else:
sys.exit(1)
|
apache-2.0
|
ovaskevich/convnet-nu-discovery
|
python_util/data.py
|
180
|
7803
|
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as n
from numpy.random import randn, rand, random_integers
import os
import re  # used by DataProvider.BATCH_REGEX; may also be re-exported by util, but explicit is clearer
from threading import Thread
from util import *
BATCH_META_FILE = "batches.meta"
class DataLoaderThread(Thread):
def __init__(self, path, tgt):
Thread.__init__(self)
self.path = path
self.tgt = tgt
def run(self):
self.tgt += [unpickle(self.path)]
class DataProvider:
    BATCH_REGEX = re.compile(r'^data_batch_(\d+)(\.\d+)?$')
def __init__(self, data_dir, batch_range=None, init_epoch=1, init_batchnum=None, dp_params={}, test=False):
        if batch_range is None:
batch_range = DataProvider.get_batch_nums(data_dir)
if init_batchnum is None or init_batchnum not in batch_range:
init_batchnum = batch_range[0]
self.data_dir = data_dir
self.batch_range = batch_range
self.curr_epoch = init_epoch
self.curr_batchnum = init_batchnum
self.dp_params = dp_params
self.batch_meta = self.get_batch_meta(data_dir)
self.data_dic = None
self.test = test
self.batch_idx = batch_range.index(init_batchnum)
def get_next_batch(self):
if self.data_dic is None or len(self.batch_range) > 1:
self.data_dic = self.get_batch(self.curr_batchnum)
epoch, batchnum = self.curr_epoch, self.curr_batchnum
self.advance_batch()
return epoch, batchnum, self.data_dic
def get_batch(self, batch_num):
fname = self.get_data_file_name(batch_num)
if os.path.isdir(fname): # batch in sub-batches
sub_batches = sorted(os.listdir(fname), key=alphanum_key)
#print sub_batches
num_sub_batches = len(sub_batches)
tgts = [[] for i in xrange(num_sub_batches)]
threads = [DataLoaderThread(os.path.join(fname, s), tgt) for (s, tgt) in zip(sub_batches, tgts)]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
return [t[0] for t in tgts]
return unpickle(self.get_data_file_name(batch_num))
def get_data_dims(self,idx=0):
return self.batch_meta['num_vis'] if idx == 0 else 1
def advance_batch(self):
self.batch_idx = self.get_next_batch_idx()
self.curr_batchnum = self.batch_range[self.batch_idx]
if self.batch_idx == 0: # we wrapped
self.curr_epoch += 1
def get_next_batch_idx(self):
return (self.batch_idx + 1) % len(self.batch_range)
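    # Illustrative: with batch_range=[1, 2, 3] and batch_idx=2, the next idx
    # wraps to 0, which advance_batch() above treats as the start of a new
    # epoch.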
def get_next_batch_num(self):
return self.batch_range[self.get_next_batch_idx()]
# get filename of current batch
def get_data_file_name(self, batchnum=None):
if batchnum is None:
batchnum = self.curr_batchnum
return os.path.join(self.data_dir, 'data_batch_%d' % batchnum)
@classmethod
def get_instance(cls, data_dir, batch_range=None, init_epoch=1, init_batchnum=None, type="default", dp_params={}, test=False):
# why the fuck can't i reference DataProvider in the original definition?
#cls.dp_classes['default'] = DataProvider
type = type or DataProvider.get_batch_meta(data_dir)['dp_type'] # allow data to decide data provider
if type.startswith("dummy-"):
name = "-".join(type.split('-')[:-1]) + "-n"
if name not in dp_types:
raise DataProviderException("No such data provider: %s" % type)
_class = dp_classes[name]
dims = int(type.split('-')[-1])
return _class(dims)
elif type in dp_types:
_class = dp_classes[type]
return _class(data_dir, batch_range, init_epoch, init_batchnum, dp_params, test)
raise DataProviderException("No such data provider: %s" % type)
@classmethod
def register_data_provider(cls, name, desc, _class):
if name in dp_types:
raise DataProviderException("Data provider %s already registered" % name)
dp_types[name] = desc
dp_classes[name] = _class
@staticmethod
def get_batch_meta(data_dir):
return unpickle(os.path.join(data_dir, BATCH_META_FILE))
@staticmethod
def get_batch_filenames(srcdir):
return sorted([f for f in os.listdir(srcdir) if DataProvider.BATCH_REGEX.match(f)], key=alphanum_key)
@staticmethod
def get_batch_nums(srcdir):
names = DataProvider.get_batch_filenames(srcdir)
return sorted(list(set(int(DataProvider.BATCH_REGEX.match(n).group(1)) for n in names)))
@staticmethod
def get_num_batches(srcdir):
return len(DataProvider.get_batch_nums(srcdir))
class DummyDataProvider(DataProvider):
def __init__(self, data_dim):
#self.data_dim = data_dim
self.batch_range = [1]
self.batch_meta = {'num_vis': data_dim, 'data_in_rows':True}
self.curr_epoch = 1
self.curr_batchnum = 1
self.batch_idx = 0
def get_next_batch(self):
epoch, batchnum = self.curr_epoch, self.curr_batchnum
self.advance_batch()
data = rand(512, self.get_data_dims()).astype(n.single)
return self.curr_epoch, self.curr_batchnum, {'data':data}
class LabeledDataProvider(DataProvider):
def __init__(self, data_dir, batch_range=None, init_epoch=1, init_batchnum=None, dp_params={}, test=False):
DataProvider.__init__(self, data_dir, batch_range, init_epoch, init_batchnum, dp_params, test)
def get_num_classes(self):
return len(self.batch_meta['label_names'])
class LabeledDummyDataProvider(DummyDataProvider):
def __init__(self, data_dim, num_classes=10, num_cases=7):
#self.data_dim = data_dim
self.batch_range = [1]
self.batch_meta = {'num_vis': data_dim,
'label_names': [str(x) for x in range(num_classes)],
'data_in_rows':True}
self.num_cases = num_cases
self.num_classes = num_classes
self.curr_epoch = 1
self.curr_batchnum = 1
self.batch_idx=0
self.data = None
def get_num_classes(self):
return self.num_classes
def get_next_batch(self):
epoch, batchnum = self.curr_epoch, self.curr_batchnum
self.advance_batch()
if self.data is None:
data = rand(self.num_cases, self.get_data_dims()).astype(n.single) # <--changed to rand
labels = n.require(n.c_[random_integers(0,self.num_classes-1,self.num_cases)], requirements='C', dtype=n.single)
self.data, self.labels = data, labels
else:
data, labels = self.data, self.labels
# print data.shape, labels.shape
return self.curr_epoch, self.curr_batchnum, [data.T, labels.T ]
dp_types = {"dummy-n": "Dummy data provider for n-dimensional data",
"dummy-labeled-n": "Labeled dummy data provider for n-dimensional data"}
dp_classes = {"dummy-n": DummyDataProvider,
"dummy-labeled-n": LabeledDummyDataProvider}
class DataProviderException(Exception):
pass
|
apache-2.0
|
edx-solutions/edx-platform
|
lms/djangoapps/courseware/tests/test_rules.py
|
4
|
1680
|
"""
Tests for permissions defined in courseware.rules
"""
import ddt
import six
from django.test import TestCase
from opaque_keys.edx.locator import CourseLocator
from course_modes.tests.factories import CourseModeFactory
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
@ddt.ddt
class PermissionTests(TestCase):
"""
Tests for permissions defined in courseware.rules
"""
def setUp(self):
super(PermissionTests, self).setUp()
self.user = UserFactory()
self.course_id = CourseLocator('MITx', '000', 'Perm_course')
CourseModeFactory(mode_slug='verified', course_id=self.course_id)
CourseModeFactory(mode_slug='masters', course_id=self.course_id)
CourseModeFactory(mode_slug='professional', course_id=self.course_id)
CourseEnrollment.unenroll(self.user, self.course_id)
def tearDown(self):
super(PermissionTests, self).tearDown()
self.user.delete()
@ddt.data(
(None, False),
('audit', False),
('verified', True),
('masters', True),
('professional', True),
('no-id-professional', False),
)
@ddt.unpack
def test_proctoring_perm(self, mode, should_have_perm):
"""
Test that the user has the edx_proctoring.can_take_proctored_exam permission
"""
if mode is not None:
CourseEnrollment.enroll(self.user, self.course_id, mode=mode)
has_perm = self.user.has_perm('edx_proctoring.can_take_proctored_exam',
{'course_id': six.text_type(self.course_id)})
assert has_perm == should_have_perm
|
agpl-3.0
|
henriquegemignani/randovania
|
randovania/game_connection/connection_base.py
|
1
|
1872
|
from enum import Enum
from typing import Optional, Callable, Awaitable, List, NamedTuple, Dict, Tuple
from randovania.game_description.resources.item_resource_info import ItemResourceInfo
from randovania.game_description.resources.pickup_entry import PickupEntry
class GameConnectionStatus(Enum):
Disconnected = "disconnected"
UnknownGame = "unknown-game"
WrongGame = "wrong-game"
WrongHash = "wrong-hash"
TitleScreen = "title-screen"
TrackerOnly = "tracker-only"
InGame = "in-game"
@property
def pretty_text(self) -> str:
return _pretty_connection_status[self]
_pretty_connection_status = {
GameConnectionStatus.Disconnected: "Disconnected",
GameConnectionStatus.UnknownGame: "Unknown game",
GameConnectionStatus.WrongGame: "Wrong game",
GameConnectionStatus.WrongHash: "Correct game, wrong seed hash",
GameConnectionStatus.TitleScreen: "Title screen",
GameConnectionStatus.TrackerOnly: "Tracker only",
GameConnectionStatus.InGame: "In-game",
}
class InventoryItem(NamedTuple):
amount: int
capacity: int
class ConnectionBase:
_location_collected_listener: Optional[Callable[[int], Awaitable[None]]] = None
@property
def current_status(self) -> GameConnectionStatus:
raise NotImplementedError()
def get_current_inventory(self) -> Dict[ItemResourceInfo, InventoryItem]:
raise NotImplementedError()
def set_permanent_pickups(self, pickups: List[Tuple[str, PickupEntry]]):
raise NotImplementedError()
def set_location_collected_listener(self, listener: Optional[Callable[[int], Awaitable[None]]]):
self._location_collected_listener = listener
async def _emit_location_collected(self, location: int):
if self._location_collected_listener is not None:
await self._location_collected_listener(location)
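# A minimal sketch (not part of the original module) of how a backend might
# subclass ConnectionBase; the class name below is hypothetical:
#
#   class DummyConnection(ConnectionBase):
#       @property
#       def current_status(self) -> GameConnectionStatus:
#           return GameConnectionStatus.Disconnected
#
#       def get_current_inventory(self) -> Dict[ItemResourceInfo, InventoryItem]:
#           return {}
#
#       def set_permanent_pickups(self, pickups: List[Tuple[str, PickupEntry]]):
#           pass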
|
gpl-3.0
|
bxshi/gem5
|
src/mem/slicc/ast/OodAST.py
|
90
|
1802
|
#
# Copyright (c) 2011 Mark D. Hill and David A. Wood
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from slicc.ast.ExprAST import ExprAST
class OodAST(ExprAST):
def __init__(self, slicc):
super(OodAST, self).__init__(slicc)
def __repr__(self):
return "[Ood:]"
def generate(self, code):
code += "NULL"
return "OOD"
|
bsd-3-clause
|
marcusrehm/serenata-de-amor
|
rosie/rosie/chamber_of_deputies/classifiers/monthly_subquota_limit_classifier.py
|
2
|
6711
|
import numpy as np
import pandas as pd
from sklearn.base import TransformerMixin
class MonthlySubquotaLimitClassifier(TransformerMixin):
"""
Monthly Subquota Limit classifier.
Dataset
-------
issue_date : datetime column
Date when the expense was made.
month : int column
The quota month matching the expense request.
net_value : float column
The value of the expense.
subquota_number : category column
A number to classify a category of expenses.
year : int column
The quota year matching the expense request.
"""
KEYS = ['applicant_id', 'month', 'year']
COLS = ['applicant_id',
'issue_date',
'month',
'net_value',
'subquota_number',
'year']
def fit(self, X):
self.X = X
self._X = self.X[self.COLS].copy()
self.__create_columns()
return self
def transform(self, X=None):
self.limits = [
{
# Automotive vehicle renting or charter (From 12/2013 to 03/2015)
'data': self._X.query('(subquota_number == "120") & '
'(reimbursement_month >= datetime(2013, 12, 1)) & '
'(reimbursement_month <= datetime(2015, 3, 1))'),
'monthly_limit': 1000000,
},
{
# Automotive vehicle renting or charter (From 04/2015 to 04/2017)
'data': self._X.query('(subquota_number == "120") & '
'(reimbursement_month >= datetime(2015, 4, 1)) & '
'(reimbursement_month <= datetime(2017, 4, 1))'),
'monthly_limit': 1090000,
},
{
# Automotive vehicle renting or charter (From 05/2017)
'data': self._X.query('(subquota_number == "120") & '
'(reimbursement_month >= datetime(2017, 5, 1))'),
'monthly_limit': 1271300,
},
{
# Taxi, toll and parking (From 12/2013 to 03/2015)
'data': self._X.query('(subquota_number == "122") & '
'(reimbursement_month >= datetime(2013, 12, 1)) & '
'(reimbursement_month <= datetime(2015, 3, 1))'),
'monthly_limit': 250000,
},
{
# Taxi, toll and parking (From 04/2015)
'data': self._X.query('(subquota_number == "122") & '
'(reimbursement_month >= datetime(2015, 4, 1))'),
'monthly_limit': 270000,
},
{
# Fuels and lubricants (From 07/2009 to 03/2015)
'data': self._X.query('(subquota_number == "3") & '
'(reimbursement_month >= datetime(2009, 7, 1)) & '
'(reimbursement_month <= datetime(2015, 3, 1))'),
'monthly_limit': 450000,
},
{
# Fuels and lubricants (From 04/2015 to 08/2015)
'data': self._X.query('(subquota_number == "3") & '
'(reimbursement_month >= datetime(2015, 4, 1)) & '
'(reimbursement_month <= datetime(2015, 8, 1))'),
'monthly_limit': 490000,
},
{
# Fuels and lubricants (From 9/2015)
'data': self._X.query('(subquota_number == "3") & '
'(reimbursement_month >= datetime(2015, 9, 1))'),
'monthly_limit': 600000,
},
{
# Security service provided by specialized company (From 07/2009 to 4/2014)
'data': self._X.query('(subquota_number == "8") & '
'(reimbursement_month >= datetime(2009, 7, 1)) & '
'(reimbursement_month <= datetime(2014, 4, 1))'),
'monthly_limit': 450000,
},
{
# Security service provided by specialized company (From 05/2014 to 3/2015)
'data': self._X.query('(subquota_number == "8") & '
'(reimbursement_month >= datetime(2014, 5, 1)) & '
'(reimbursement_month <= datetime(2015, 3, 1))'),
'monthly_limit': 800000,
},
{
# Security service provided by specialized company (From 04/2015)
'data': self._X.query('(subquota_number == "8") & '
'(reimbursement_month >= datetime(2015, 4, 1))'),
'monthly_limit': 870000,
},
{
# Participation in course, talk or similar event (From 10/2015)
'data': self._X.query('(subquota_number == "137") & '
'(reimbursement_month >= datetime(2015, 10, 1))'),
'monthly_limit': 769716,
},
]
return self
def predict(self, X=None):
self._X['is_over_monthly_subquota_limit'] = False
for metadata in self.limits:
data, monthly_limit = metadata['data'], metadata['monthly_limit']
if len(data):
surplus_reimbursements = self.__find_surplus_reimbursements(data, monthly_limit)
self._X.loc[surplus_reimbursements.index,
'is_over_monthly_subquota_limit'] = True
results = self._X.loc[self.X.index, 'is_over_monthly_subquota_limit']
return np.r_[results]
def predict_proba(self, X=None):
return 1.
def __create_columns(self):
self._X['net_value_int'] = (self._X['net_value'] * 100).apply(int)
self._X['coerced_issue_date'] = \
pd.to_datetime(self._X['issue_date'], errors='coerce')
self._X.sort_values('coerced_issue_date', kind='mergesort', inplace=True)
reimbursement_month = self._X[['year', 'month']].copy()
reimbursement_month['day'] = 1
self._X['reimbursement_month'] = pd.to_datetime(reimbursement_month)
def __find_surplus_reimbursements(self, data, monthly_limit):
grouped = data.groupby(self.KEYS).apply(self.__create_cumsum_cols)
return grouped[grouped['cumsum_net_value'] > monthly_limit]
def __create_cumsum_cols(self, subset):
subset['cumsum_net_value'] = subset['net_value_int'].cumsum()
return subset
|
mit
|
gmcastil/numpy
|
numpy/tests/test_reloading.py
|
68
|
1038
|
from __future__ import division, absolute_import, print_function
import sys
from numpy.testing import assert_raises, assert_, run_module_suite
if sys.version_info[:2] >= (3, 4):
from importlib import reload
else:
from imp import reload
def test_numpy_reloading():
# gh-7844. Also check that relevant globals retain their identity.
import numpy as np
import numpy._globals
_NoValue = np._NoValue
VisibleDeprecationWarning = np.VisibleDeprecationWarning
ModuleDeprecationWarning = np.ModuleDeprecationWarning
reload(np)
assert_(_NoValue is np._NoValue)
assert_(ModuleDeprecationWarning is np.ModuleDeprecationWarning)
assert_(VisibleDeprecationWarning is np.VisibleDeprecationWarning)
assert_raises(RuntimeError, reload, numpy._globals)
reload(np)
assert_(_NoValue is np._NoValue)
assert_(ModuleDeprecationWarning is np.ModuleDeprecationWarning)
assert_(VisibleDeprecationWarning is np.VisibleDeprecationWarning)
if __name__ == "__main__":
run_module_suite()
|
bsd-3-clause
|
jbhamilton/Which-CSS
|
codebase/tinycss/tests/test_decoding.py
|
6
|
3375
|
# coding: utf8
"""
Tests for decoding bytes to Unicode
-----------------------------------
:copyright: (c) 2012 by Simon Sapin.
:license: BSD, see LICENSE for more details.
"""
from __future__ import unicode_literals
import pytest
from tinycss.decoding import decode
def params(css, encoding, use_bom=False, expect_error=False, **kwargs):
"""Nicer syntax to make a tuple."""
return css, encoding, use_bom, expect_error, kwargs
@pytest.mark.parametrize(('css', 'encoding', 'use_bom', 'expect_error',
'kwargs'), [
params('', 'utf8'), # default to utf8
params('𐂃', 'utf8'),
    params('é', 'latin1'),  # utf8 fails, fall back on latin1
params('£', 'ShiftJIS', expect_error=True),
params('£', 'ShiftJIS', protocol_encoding='Shift-JIS'),
params('£', 'ShiftJIS', linking_encoding='Shift-JIS'),
params('£', 'ShiftJIS', document_encoding='Shift-JIS'),
params('£', 'ShiftJIS', protocol_encoding='utf8',
document_encoding='ShiftJIS'),
params('@charset "utf8"; £', 'ShiftJIS', expect_error=True),
params('@charset "utf£8"; £', 'ShiftJIS', expect_error=True),
params('@charset "unknown-encoding"; £', 'ShiftJIS', expect_error=True),
params('@charset "utf8"; £', 'ShiftJIS', document_encoding='ShiftJIS'),
params('£', 'ShiftJIS', linking_encoding='utf8',
document_encoding='ShiftJIS'),
params('@charset "utf-32"; 𐂃', 'utf-32-be'),
params('@charset "Shift-JIS"; £', 'ShiftJIS'),
params('@charset "ISO-8859-8"; £', 'ShiftJIS', expect_error=True),
params('𐂃', 'utf-16-le', expect_error=True), # no BOM
params('𐂃', 'utf-16-le', use_bom=True),
params('𐂃', 'utf-32-be', expect_error=True),
params('𐂃', 'utf-32-be', use_bom=True),
params('𐂃', 'utf-32-be', document_encoding='utf-32-be'),
params('𐂃', 'utf-32-be', linking_encoding='utf-32-be'),
params('@charset "utf-32-le"; 𐂃', 'utf-32-be',
use_bom=True, expect_error=True),
# protocol_encoding takes precedence over @charset
params('@charset "ISO-8859-8"; £', 'ShiftJIS',
protocol_encoding='Shift-JIS'),
params('@charset "unknown-encoding"; £', 'ShiftJIS',
protocol_encoding='Shift-JIS'),
params('@charset "Shift-JIS"; £', 'ShiftJIS',
protocol_encoding='utf8'),
# @charset takes precedence over document_encoding
params('@charset "Shift-JIS"; £', 'ShiftJIS',
document_encoding='ISO-8859-8'),
# @charset takes precedence over linking_encoding
params('@charset "Shift-JIS"; £', 'ShiftJIS',
linking_encoding='ISO-8859-8'),
# linking_encoding takes precedence over document_encoding
params('£', 'ShiftJIS',
linking_encoding='Shift-JIS', document_encoding='ISO-8859-8'),
])
def test_decode(css, encoding, use_bom, expect_error, kwargs):
# Workaround PyPy and CPython 3.0 bug: https://bugs.pypy.org/issue1094
css = css.encode('utf16').decode('utf16')
if use_bom:
source = '\ufeff' + css
else:
source = css
css_bytes = source.encode(encoding)
result, result_encoding = decode(css_bytes, **kwargs)
if expect_error:
assert result != css, 'Unexpected unicode success'
else:
assert result == css, 'Unexpected unicode error'
|
apache-2.0
|
janocat/odoo
|
addons/product/__init__.py
|
443
|
1120
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import product
import pricelist
import report
import partner
import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
EdLogan18/logan-repository
|
plugin.video.logan/mechanize/_request.py
|
133
|
1561
|
"""Integration with Python standard library module urllib2: Request class.
Copyright 2004-2006 John J Lee <[email protected]>
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD or ZPL 2.1 licenses (see the file
COPYING.txt included with the distribution).
"""
import logging
import _rfc3986
import _sockettimeout
import _urllib2_fork
warn = logging.getLogger("mechanize").warning
class Request(_urllib2_fork.Request):
def __init__(self, url, data=None, headers={},
origin_req_host=None, unverifiable=False, visit=None,
timeout=_sockettimeout._GLOBAL_DEFAULT_TIMEOUT):
# In mechanize 0.2, the interpretation of a unicode url argument will
# change: A unicode url argument will be interpreted as an IRI, and a
# bytestring as a URI. For now, we accept unicode or bytestring. We
# don't insist that the value is always a URI (specifically, must only
# contain characters which are legal), because that might break working
# code (who knows what bytes some servers want to see, especially with
# browser plugins for internationalised URIs).
if not _rfc3986.is_clean_uri(url):
warn("url argument is not a URI "
"(contains illegal characters) %r" % url)
_urllib2_fork.Request.__init__(self, url, data, headers)
self.selector = None
self.visit = visit
self.timeout = timeout
def __str__(self):
return "<Request for %s>" % self.get_full_url()
|
gpl-2.0
|
jacshfr/mozilla-bedrock
|
vendor-local/lib/python/south/management/commands/startmigration.py
|
129
|
2113
|
"""
Now-obsolete startmigration command.
"""
from __future__ import print_function
from optparse import make_option
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--model', action='append', dest='added_model_list', type='string',
            help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --model parameters.'),
make_option('--add-field', action='append', dest='added_field_list', type='string',
help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
make_option('--add-index', action='append', dest='added_index_list', type='string',
help='Generate an Add Index migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
make_option('--initial', action='store_true', dest='initial', default=False,
help='Generate the initial schema for the app.'),
make_option('--auto', action='store_true', dest='auto', default=False,
help='Attempt to automatically detect differences from the last migration.'),
make_option('--freeze', action='append', dest='freeze_list', type='string',
help='Freeze the specified model(s). Pass in either an app name (to freeze the whole app) or a single model, as appname.modelname.'),
make_option('--stdout', action='store_true', dest='stdout', default=False,
help='Print the migration to stdout instead of writing it to a file.'),
)
help = "Deprecated command"
def handle(self, app=None, name="", added_model_list=None, added_field_list=None, initial=False, freeze_list=None, auto=False, stdout=False, added_index_list=None, **options):
print("The 'startmigration' command is now deprecated; please use the new 'schemamigration' and 'datamigration' commands.")
|
mpl-2.0
|
hostmaster/ansible-modules-core
|
system/group.py
|
81
|
13394
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Stephen Fromm <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: group
author: "Stephen Fromm (@sfromm)"
version_added: "0.0.2"
short_description: Add or remove groups
requirements: [ groupadd, groupdel, groupmod ]
description:
- Manage presence of groups on a host.
options:
name:
required: true
description:
- Name of the group to manage.
gid:
required: false
description:
- Optional I(GID) to set for the group.
state:
required: false
default: "present"
choices: [ present, absent ]
description:
- Whether the group should be present or not on the remote host.
system:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- If I(yes), indicates that the group created is a system group.
'''
EXAMPLES = '''
# Example group command from Ansible Playbooks
- group: name=somegroup state=present
'''
import grp
import os  # used by execute_command's syslog tag below
import platform
import syslog
class Group(object):
"""
This is a generic Group manipulation class that is subclassed
based on platform.
A subclass may wish to override the following action methods:-
- group_del()
- group_add()
- group_mod()
All subclasses MUST define platform and distribution (which may be None).
"""
platform = 'Generic'
distribution = None
GROUPFILE = '/etc/group'
def __new__(cls, *args, **kwargs):
return load_platform_subclass(Group, args, kwargs)
def __init__(self, module):
self.module = module
self.state = module.params['state']
self.name = module.params['name']
self.gid = module.params['gid']
self.system = module.params['system']
self.syslogging = False
def execute_command(self, cmd):
if self.syslogging:
syslog.openlog('ansible-%s' % os.path.basename(__file__))
syslog.syslog(syslog.LOG_NOTICE, 'Command %s' % '|'.join(cmd))
return self.module.run_command(cmd)
def group_del(self):
cmd = [self.module.get_bin_path('groupdel', True), self.name]
return self.execute_command(cmd)
def group_add(self, **kwargs):
cmd = [self.module.get_bin_path('groupadd', True)]
for key in kwargs:
if key == 'gid' and kwargs[key] is not None:
cmd.append('-g')
cmd.append(kwargs[key])
elif key == 'system' and kwargs[key] == True:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def group_mod(self, **kwargs):
cmd = [self.module.get_bin_path('groupmod', True)]
info = self.group_info()
for key in kwargs:
if key == 'gid':
if kwargs[key] is not None and info[2] != int(kwargs[key]):
cmd.append('-g')
cmd.append(kwargs[key])
if len(cmd) == 1:
return (None, '', '')
if self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
def group_exists(self):
try:
if grp.getgrnam(self.name):
return True
except KeyError:
return False
def group_info(self):
if not self.group_exists():
return False
try:
info = list(grp.getgrnam(self.name))
except KeyError:
return False
return info
# ===========================================
class SunOS(Group):
"""
This is a SunOS Group manipulation class. Solaris doesn't have
the 'system' group concept.
This overrides the following methods from the generic class:-
- group_add()
"""
platform = 'SunOS'
distribution = None
GROUPFILE = '/etc/group'
def group_add(self, **kwargs):
cmd = [self.module.get_bin_path('groupadd', True)]
for key in kwargs:
if key == 'gid' and kwargs[key] is not None:
cmd.append('-g')
cmd.append(kwargs[key])
cmd.append(self.name)
return self.execute_command(cmd)
# ===========================================
class AIX(Group):
"""
    This is an AIX Group manipulation class.
This overrides the following methods from the generic class:-
- group_del()
- group_add()
- group_mod()
"""
platform = 'AIX'
distribution = None
GROUPFILE = '/etc/group'
def group_del(self):
cmd = [self.module.get_bin_path('rmgroup', True), self.name]
return self.execute_command(cmd)
def group_add(self, **kwargs):
cmd = [self.module.get_bin_path('mkgroup', True)]
for key in kwargs:
if key == 'gid' and kwargs[key] is not None:
cmd.append('id='+kwargs[key])
elif key == 'system' and kwargs[key] == True:
cmd.append('-a')
cmd.append(self.name)
return self.execute_command(cmd)
def group_mod(self, **kwargs):
cmd = [self.module.get_bin_path('chgroup', True)]
info = self.group_info()
for key in kwargs:
if key == 'gid':
if kwargs[key] is not None and info[2] != int(kwargs[key]):
cmd.append('id='+kwargs[key])
if len(cmd) == 1:
return (None, '', '')
if self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
# ===========================================
class FreeBsdGroup(Group):
"""
This is a FreeBSD Group manipulation class.
This overrides the following methods from the generic class:-
- group_del()
- group_add()
- group_mod()
"""
platform = 'FreeBSD'
distribution = None
GROUPFILE = '/etc/group'
def group_del(self):
cmd = [self.module.get_bin_path('pw', True), 'groupdel', self.name]
return self.execute_command(cmd)
def group_add(self, **kwargs):
cmd = [self.module.get_bin_path('pw', True), 'groupadd', self.name]
if self.gid is not None:
cmd.append('-g')
cmd.append('%d' % int(self.gid))
return self.execute_command(cmd)
def group_mod(self, **kwargs):
cmd = [self.module.get_bin_path('pw', True), 'groupmod', self.name]
info = self.group_info()
cmd_len = len(cmd)
if self.gid is not None and int(self.gid) != info[2]:
cmd.append('-g')
cmd.append('%d' % int(self.gid))
# modify the group if cmd will do anything
if cmd_len != len(cmd):
if self.module.check_mode:
return (0, '', '')
return self.execute_command(cmd)
return (None, '', '')
# ===========================================
class DarwinGroup(Group):
"""
This is a Mac OS X Darwin Group manipulation class.
This overrides the following methods from the generic class:-
- group_del()
- group_add()
- group_mod()
    Group manipulation is done using dseditgroup(1).
"""
platform = 'Darwin'
distribution = None
def group_add(self, **kwargs):
cmd = [self.module.get_bin_path('dseditgroup', True)]
cmd += [ '-o', 'create' ]
if self.gid is not None:
cmd += [ '-i', self.gid ]
cmd += [ '-L', self.name ]
(rc, out, err) = self.execute_command(cmd)
return (rc, out, err)
def group_del(self):
cmd = [self.module.get_bin_path('dseditgroup', True)]
cmd += [ '-o', 'delete' ]
cmd += [ '-L', self.name ]
(rc, out, err) = self.execute_command(cmd)
return (rc, out, err)
def group_mod(self, gid=None):
info = self.group_info()
if self.gid is not None and int(self.gid) != info[2]:
cmd = [self.module.get_bin_path('dseditgroup', True)]
cmd += [ '-o', 'edit' ]
if gid is not None:
cmd += [ '-i', gid ]
cmd += [ '-L', self.name ]
(rc, out, err) = self.execute_command(cmd)
return (rc, out, err)
return (None, '', '')
class OpenBsdGroup(Group):
"""
    This is an OpenBSD Group manipulation class.
This overrides the following methods from the generic class:-
- group_del()
- group_add()
- group_mod()
"""
platform = 'OpenBSD'
distribution = None
GROUPFILE = '/etc/group'
def group_del(self):
cmd = [self.module.get_bin_path('groupdel', True), self.name]
return self.execute_command(cmd)
def group_add(self, **kwargs):
cmd = [self.module.get_bin_path('groupadd', True)]
if self.gid is not None:
cmd.append('-g')
cmd.append('%d' % int(self.gid))
cmd.append(self.name)
return self.execute_command(cmd)
def group_mod(self, **kwargs):
cmd = [self.module.get_bin_path('groupmod', True)]
info = self.group_info()
cmd_len = len(cmd)
if self.gid is not None and int(self.gid) != info[2]:
cmd.append('-g')
cmd.append('%d' % int(self.gid))
if len(cmd) == 1:
return (None, '', '')
if self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
# ===========================================
class NetBsdGroup(Group):
"""
This is a NetBSD Group manipulation class.
This overrides the following methods from the generic class:-
- group_del()
- group_add()
- group_mod()
"""
platform = 'NetBSD'
distribution = None
GROUPFILE = '/etc/group'
def group_del(self):
cmd = [self.module.get_bin_path('groupdel', True), self.name]
return self.execute_command(cmd)
def group_add(self, **kwargs):
cmd = [self.module.get_bin_path('groupadd', True)]
if self.gid is not None:
cmd.append('-g')
cmd.append('%d' % int(self.gid))
cmd.append(self.name)
return self.execute_command(cmd)
def group_mod(self, **kwargs):
cmd = [self.module.get_bin_path('groupmod', True)]
info = self.group_info()
cmd_len = len(cmd)
if self.gid is not None and int(self.gid) != info[2]:
cmd.append('-g')
cmd.append('%d' % int(self.gid))
if len(cmd) == 1:
return (None, '', '')
if self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
# ===========================================
def main():
module = AnsibleModule(
argument_spec = dict(
state=dict(default='present', choices=['present', 'absent'], type='str'),
name=dict(required=True, type='str'),
gid=dict(default=None, type='str'),
system=dict(default=False, type='bool'),
),
supports_check_mode=True
)
group = Group(module)
if group.syslogging:
syslog.openlog('ansible-%s' % os.path.basename(__file__))
syslog.syslog(syslog.LOG_NOTICE, 'Group instantiated - platform %s' % group.platform)
        if group.distribution:
syslog.syslog(syslog.LOG_NOTICE, 'Group instantiated - distribution %s' % group.distribution)
rc = None
out = ''
err = ''
result = {}
result['name'] = group.name
result['state'] = group.state
if group.state == 'absent':
if group.group_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = group.group_del()
if rc != 0:
module.fail_json(name=group.name, msg=err)
elif group.state == 'present':
if not group.group_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = group.group_add(gid=group.gid, system=group.system)
else:
(rc, out, err) = group.group_mod(gid=group.gid)
if rc is not None and rc != 0:
module.fail_json(name=group.name, msg=err)
if rc is None:
result['changed'] = False
else:
result['changed'] = True
if out:
result['stdout'] = out
if err:
result['stderr'] = err
if group.group_exists():
info = group.group_info()
result['system'] = group.system
result['gid'] = info[2]
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
main()
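# Hedged playbook sketch (illustrative only, not part of this module):
#   - name: Ensure the "deploy" group exists with gid 1050
#     group:
#       name: deploy
#       gid: 1050
#       state: present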
|
gpl-3.0
|
Cinntax/home-assistant
|
homeassistant/components/geniushub/sensor.py
|
1
|
3755
|
"""Support for Genius Hub sensor devices."""
from datetime import timedelta
from typing import Any, Awaitable, Dict
from homeassistant.const import DEVICE_CLASS_BATTERY
from homeassistant.util.dt import utc_from_timestamp, utcnow
from . import DOMAIN, GeniusEntity
GH_HAS_BATTERY = ["Room Thermostat", "Genius Valve", "Room Sensor", "Radiator Valve"]
GH_LEVEL_MAPPING = {
"error": "Errors",
"warning": "Warnings",
"information": "Information",
}
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Genius Hub sensor entities."""
client = hass.data[DOMAIN]["client"]
sensors = [GeniusBattery(d) for d in client.device_objs if d.type in GH_HAS_BATTERY]
issues = [GeniusIssue(client, i) for i in list(GH_LEVEL_MAPPING)]
async_add_entities(sensors + issues, update_before_add=True)
class GeniusBattery(GeniusEntity):
"""Representation of a Genius Hub sensor."""
def __init__(self, device) -> None:
"""Initialize the sensor."""
super().__init__()
self._device = device
self._name = f"{device.type} {device.id}"
@property
def icon(self) -> str:
"""Return the icon of the sensor."""
values = self._device._raw["childValues"] # pylint: disable=protected-access
last_comms = utc_from_timestamp(values["lastComms"]["val"])
if "WakeUp_Interval" in values:
interval = timedelta(seconds=values["WakeUp_Interval"]["val"])
else:
interval = timedelta(minutes=20)
if last_comms < utcnow() - interval * 3:
return "mdi:battery-unknown"
battery_level = self._device.data["state"]["batteryLevel"]
if battery_level == 255:
return "mdi:battery-unknown"
if battery_level < 40:
return "mdi:battery-alert"
icon = "mdi:battery"
if battery_level <= 95:
icon += f"-{int(round(battery_level / 10 - 0.01)) * 10}"
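            # e.g. battery_level == 72 -> 72 / 10 - 0.01 = 7.19 -> round() -> 7 -> "mdi:battery-70"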
return icon
@property
def device_class(self) -> str:
"""Return the device class of the sensor."""
return DEVICE_CLASS_BATTERY
@property
def unit_of_measurement(self) -> str:
"""Return the unit of measurement of the sensor."""
return "%"
@property
def state(self) -> str:
"""Return the state of the sensor."""
level = self._device.data["state"].get("batteryLevel", 255)
return level if level != 255 else 0
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the device state attributes."""
attrs = {}
attrs["assigned_zone"] = self._device.data["assignedZones"][0]["name"]
# pylint: disable=protected-access
last_comms = self._device._raw["childValues"]["lastComms"]["val"]
attrs["last_comms"] = utc_from_timestamp(last_comms).isoformat()
return {**attrs}
class GeniusIssue(GeniusEntity):
"""Representation of a Genius Hub sensor."""
def __init__(self, hub, level) -> None:
"""Initialize the sensor."""
super().__init__()
self._hub = hub
self._name = GH_LEVEL_MAPPING[level]
self._level = level
self._issues = []
@property
def state(self) -> str:
"""Return the number of issues."""
return len(self._issues)
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the device state attributes."""
return {f"{self._level}_list": self._issues}
async def async_update(self) -> Awaitable[None]:
"""Process the sensor's state data."""
self._issues = [
i["description"] for i in self._hub.issues if i["level"] == self._level
]
|
apache-2.0
|
valorbit/valorbit
|
share/seeds/generate-seeds.py
|
128
|
4187
|
#!/usr/bin/python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
from __future__ import print_function, division
from base64 import b32decode
from binascii import a2b_hex
import sys, os
import re
# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])
def name_to_ipv6(addr):
if len(addr)>6 and addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) != 16-len(pchOnionCat):
            raise ValueError('Invalid onion %s' % addr)
return pchOnionCat + vchAddr
elif '.' in addr: # IPv4
return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
elif ':' in addr: # IPv6
sub = [[], []] # prefix, suffix
x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
continue
x += 1 # :: skips to suffix
assert(x < 2)
else: # two bytes per component
val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
elif addr.startswith('0x'): # IPv4-in-little-endian
return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
else:
raise ValueError('Could not parse address %s' % addr)
def parse_spec(s, defaultport):
    match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
if match: # ipv6
host = match.group(1)
port = match.group(2)
else:
(host,_,port) = s.partition(':')
if not port:
port = defaultport
else:
port = int(port)
host = name_to_ipv6(host)
return (host,port)
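# Illustrative examples (assumed inputs, not from the original script):
#   parse_spec('1.2.3.4', 15714)    -> (pchIPv4 + bytearray([1, 2, 3, 4]), 15714)
#   parse_spec('[::1]:8333', 15714) -> (16-byte IPv6 loopback address, 8333)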
def process_nodes(g, f, structname, defaultport):
g.write('static SeedSpec6 %s[] = {\n' % structname)
first = True
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
if not first:
g.write(',\n')
first = False
(host,port) = parse_spec(line, defaultport)
hoststr = ','.join(('0x%02x' % b) for b in host)
g.write(' {{%s}, %i}' % (hoststr, port))
g.write('\n};\n')
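# For example, a nodes_main.txt line "1.2.3.4:15714" is emitted as:
#   {{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x01,0x02,0x03,0x04}, 15714}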
def main():
if len(sys.argv)<2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef H_CHAINPARAMSSEEDS\n')
g.write('#define H_CHAINPARAMSSEEDS\n')
g.write('// List of fixed seed nodes for the bitcoin network\n')
g.write('// AUTOGENERATED by contrib/devtools/generate-seeds.py\n\n')
g.write('// Each line contains a 16-byte IPv6 address and a port.\n')
g.write('// IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n')
with open(os.path.join(indir,'nodes_main.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_main', 15714)
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_test', 25714)
g.write('#endif\n')
if __name__ == '__main__':
main()
|
mit
|
knabar/openmicroscopy
|
components/tools/OmeroPy/src/omero/install/bzip2_tool.py
|
15
|
1301
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Function for enabling/disabling the bzip2.dll which
comes with PyTables.
Copyright 2009 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import os
import sys
def bzip2_tool(disable=False):
"""
Renames the bzip2.dll library which comes with PyTables.
"""
import tables
f = tables.__file__
p = os.path.dirname(f)
p = os.path.abspath(p)
b = os.path.join(p, "bzip2.dll")
d = os.path.join(p, "bzip2_DISABLED.dll")
if disable:
_swap(b, d)
else:
_swap(d, b)
def _swap(f, t):
if not os.path.exists(f):
print "%s doesn't exist" % f
sys.exit(0)
os.rename(f, t)
if __name__ == "__main__":
try:
if len(sys.argv) == 2:
which = sys.argv[1]
if which == "disable":
which = True
elif which == "enable":
which = False
else:
print "Unknown command: ", which
sys.exit(2)
bzip2_tool(disable=which)
sys.exit(0)
except Exception, e:
print "bzip2_tool failed: ", e
sys.exit(1)
print "Usage: %s disable|enable" % sys.argv[0]
sys.exit(2)
|
gpl-2.0
|
thorgate/django-esteid
|
esteid/authentication/views.py
|
1
|
3854
|
import logging
from typing import Type, TYPE_CHECKING
from django.http import HttpRequest, JsonResponse
from esteid.exceptions import ActionInProgress
from esteid.mixins import DjangoRestCompatibilityMixin, SessionViewMixin
from .authenticator import Authenticator
from .types import AuthenticationResult
try:
from rest_framework.exceptions import ValidationError as DRFValidationError
except ImportError:
# If rest framework is not installed, create a stub class so the isinstance check is always false
class DRFValidationError:
pass
if TYPE_CHECKING:
# Make type checkers aware of request.session attribute which is missing on the HttpRequest class
from django.contrib.sessions import base_session
class RequestType(HttpRequest):
session: base_session.AbstractBaseSession
data: dict
logger = logging.getLogger(__name__)
class AuthenticationViewMixin(SessionViewMixin):
# these come from the `url()` definition as in `View.as_view(authentication_method='...')`, either one is enough
authentication_method: str = None
authenticator: Type[Authenticator] = None
def on_auth_success(self, request, data: AuthenticationResult):
"""
A hook to make use of the authentication data once the process is complete.
May be used to store the data into session, authenticate a user etc.
"""
pass
def success_response(self, request, data: AuthenticationResult):
"""Customizable response on success"""
return JsonResponse({**data, "status": self.Status.SUCCESS})
def select_authenticator_class(self) -> Type["Authenticator"]:
if self.authenticator is not None:
return self.authenticator
return Authenticator.select_authenticator(self.authentication_method)
def start_session(self, request: "RequestType", *args, **kwargs):
"""
Initiates an authentication session.
"""
auth_class = self.select_authenticator_class()
authenticator = auth_class.start_session(request.session, request.data)
do_cleanup = True
try:
result = authenticator.authenticate()
except ActionInProgress as e:
do_cleanup = False
# return SUCCESS to indicate that the upstream service successfully accepted the request
return JsonResponse({"status": self.Status.SUCCESS, **e.data}, status=e.status)
else:
# Handle a theoretical case of immediate authentication
self.on_auth_success(request, result)
return JsonResponse({**result, "status": self.Status.SUCCESS})
finally:
if do_cleanup:
authenticator.cleanup()
def finish_session(self, request: "RequestType", *args, **kwargs):
"""
Checks the status of an authentication session
"""
authenticator_class = self.select_authenticator_class()
authenticator = authenticator_class.load_session(request.session)
do_cleanup = True
try:
result = authenticator.poll()
except ActionInProgress as e:
do_cleanup = False
return JsonResponse({"status": self.Status.PENDING, **e.data}, status=e.status)
else:
self.on_auth_success(request, result)
return self.success_response(request, result)
finally:
if do_cleanup:
authenticator.cleanup()
class AuthenticationViewRestMixin(AuthenticationViewMixin):
"""
To be used with rest-framework's APIView.
"""
class AuthenticationViewDjangoMixin(DjangoRestCompatibilityMixin, AuthenticationViewMixin):
"""
To be used with plain Django class-based views (No rest-framework).
Adds `data` attribute to the request with the POST or JSON data.
"""
|
bsd-3-clause
|
shabab12/edx-platform
|
common/test/acceptance/pages/lms/index.py
|
11
|
1751
|
# -*- coding: utf-8 -*-
"""
LMS index (home) page.
"""
from bok_choy.page_object import PageObject
from . import BASE_URL
BANNER_SELECTOR = 'section.home header div.outer-wrapper div.title .heading-group h1'
INTRO_VIDEO_SELECTOR = 'div.play-intro'
VIDEO_MODAL_SELECTOR = 'section#video-modal.modal.home-page-video-modal.video-modal'
class IndexPage(PageObject):
    """
    LMS index (home) page, the default landing page for Open edX users when they are not logged in
    """
    url = "{base}/".format(base=BASE_URL)
    def __init__(self, browser):
        """Initialize the page.
        Arguments:
            browser (Browser): The browser instance.
        """
        super(IndexPage, self).__init__(browser)
def is_browser_on_page(self):
"""
        Returns True if the banner heading is visible and shows the Open edX welcome text
"""
element = self.q(css=BANNER_SELECTOR)
return element.visible and element.text[0].startswith("Welcome to the Open edX")
@property
def banner_element(self):
"""
        Returns a browser query object representing the banner element
"""
return self.q(css=BANNER_SELECTOR)
@property
def intro_video_element(self):
"""
        Returns a browser query object representing the intro video element
"""
return self.q(css=INTRO_VIDEO_SELECTOR)
@property
def video_modal_element(self):
"""
Returns a browser query object representing the video modal element
"""
return self.q(css=VIDEO_MODAL_SELECTOR)
@property
def footer_links(self):
"""Return a list of the text of the links in the page footer."""
return self.q(css='.nav-colophon a').attrs('text')
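# Hedged usage sketch: IndexPage(browser).visit() navigates to the home page
# (visit() is inherited from bok_choy's PageObject), after which properties
# such as footer_links can be queried.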
|
agpl-3.0
|
Poojawa/tgstation-fork
|
tools/midi2piano/midi/midi.py
|
102
|
65440
|
#! /usr/bin/python3
# unsupported 20091104 ...
# ['set_sequence_number', dtime, sequence]
# ['raw_data', dtime, raw]
r'''
This module offers functions: concatenate_scores(), grep(),
merge_scores(), mix_scores(), midi2opus(), midi2score(), opus2midi(),
opus2score(), play_score(), score2midi(), score2opus(), score2stats(),
score_type(), segment(), timeshift() and to_millisecs(),
where "midi" means the MIDI-file bytes (as can be put in a .mid file,
or piped into aplaymidi), and "opus" and "score" are list-structures
as inspired by Sean Burke's MIDI-Perl CPAN module.
Download MIDI.py from http://www.pjb.com.au/midi/free/MIDI.py
and put it in your PYTHONPATH. MIDI.py depends on Python3.
There is also a call-compatible translation into Lua of this
module: see http://www.pjb.com.au/comp/lua/MIDI.html
The "opus" is a direct translation of the midi-file-events, where
the times are delta-times, in ticks, since the previous event.
The "score" is more human-centric; it uses absolute times, and
combines the separate note_on and note_off events into one "note"
event, with a duration:
['note', start_time, duration, channel, note, velocity] # in a "score"
EVENTS (in an "opus" structure)
['note_off', dtime, channel, note, velocity] # in an "opus"
['note_on', dtime, channel, note, velocity] # in an "opus"
['key_after_touch', dtime, channel, note, velocity]
['control_change', dtime, channel, controller(0-127), value(0-127)]
['patch_change', dtime, channel, patch]
['channel_after_touch', dtime, channel, velocity]
['pitch_wheel_change', dtime, channel, pitch_wheel]
['text_event', dtime, text]
['copyright_text_event', dtime, text]
['track_name', dtime, text]
['instrument_name', dtime, text]
['lyric', dtime, text]
['marker', dtime, text]
['cue_point', dtime, text]
['text_event_08', dtime, text]
['text_event_09', dtime, text]
['text_event_0a', dtime, text]
['text_event_0b', dtime, text]
['text_event_0c', dtime, text]
['text_event_0d', dtime, text]
['text_event_0e', dtime, text]
['text_event_0f', dtime, text]
['end_track', dtime]
['set_tempo', dtime, tempo]
['smpte_offset', dtime, hr, mn, se, fr, ff]
['time_signature', dtime, nn, dd, cc, bb]
['key_signature', dtime, sf, mi]
['sequencer_specific', dtime, raw]
['raw_meta_event', dtime, command(0-255), raw]
['sysex_f0', dtime, raw]
['sysex_f7', dtime, raw]
['song_position', dtime, song_pos]
['song_select', dtime, song_number]
['tune_request', dtime]
DATA TYPES
channel = a value 0 to 15
controller = 0 to 127 (see http://www.pjb.com.au/muscript/gm.html#cc )
dtime = time measured in "ticks", 0 to 268435455
velocity = a value 0 (soft) to 127 (loud)
note = a value 0 to 127 (middle-C is 60)
patch = 0 to 127 (see http://www.pjb.com.au/muscript/gm.html )
pitch_wheel = a value -8192 to 8191 (0x1FFF)
raw = 0 or more bytes of binary data
sequence_number = a value 0 to 65,535 (0xFFFF)
song_pos = a value 0 to 16,383 (0x3FFF)
song_number = a value 0 to 127
tempo = microseconds per crochet (quarter-note), 0 to 16777215
text = a string of 0 or more bytes of ASCII text
ticks = the number of ticks per crochet (quarter-note)
GOING THROUGH A SCORE WITHIN A PYTHON PROGRAM
channels = {2,3,5,8,13}
itrack = 1 # skip 1st element which is ticks
while itrack < len(score):
for event in score[itrack]:
if event[0] == 'note': # for example,
pass # do something to all notes
# or, to work on events in only particular channels...
channel_index = MIDI.Event2channelindex.get(event[0], False)
if channel_index and (event[channel_index] in channels):
pass # do something to channels 2,3,5,8 and 13
itrack += 1
'''
import sys, struct, os, copy
# sys.stdout = os.fdopen(sys.stdout.fileno(), 'wb')
Version = '6.2'
VersionDate = '20150101'
# 20150101 6.2 all text events can be 8-bit; let user get the right encoding
# 20141231 6.1 fix _some_text_event; sequencer_specific data can be 8-bit
# 20141230 6.0 synth_specific data can be 8-bit
# 20120504 5.9 add the contents of mid_opus_tracks()
# 20120208 5.8 fix num_notes_by_channel() ; should be a dict
# 20120129 5.7 _encode handles empty tracks; score2stats num_notes_by_channel
# 20111111 5.6 fix patch 45 and 46 in Number2patch, should be Harp
# 20110129 5.5 add mix_opus_tracks() and event2alsaseq()
# 20110126 5.4 "previous message repeated N times" to save space on stderr
# 20110125 5.2 opus2score terminates unended notes at the end of the track
# 20110124 5.1 the warnings in midi2opus display track_num
# 20110122 5.0 if garbage, midi2opus returns the opus so far
# 20110119 4.9 non-ascii chars stripped out of the text_events
# 20110110 4.8 note_on with velocity=0 treated as a note-off
# 20110108 4.6 unknown F-series event correctly eats just one byte
# 20101010 4.2 segment() uses start_time, end_time named params
# 20101005 4.1 timeshift() must not pad the set_tempo command
# 20101003 4.0 pitch2note_event must be chapitch2note_event
# 20100918 3.9 set_sequence_number supported, FWIW
# 20100913 3.7 many small bugfixes; passes all tests
# 20100910 3.6 concatenate_scores enforce ticks=1000, just like merge_scores
# 20100908 3.5 minor bugs fixed in score2stats
# 20091104 3.4 tune_request now supported
# 20091104 3.3 fixed bug in decoding song_position and song_select
# 20091104 3.2 unsupported: set_sequence_number tune_request raw_data
# 20091101 3.1 document how to traverse a score within Python
# 20091021 3.0 fixed bug in score2stats detecting GM-mode = 0
# 20091020 2.9 score2stats reports GM-mode and bank msb,lsb events
# 20091019 2.8 in merge_scores, channel 9 must remain channel 9 (in GM)
# 20091018 2.7 handles empty tracks gracefully
# 20091015 2.6 grep() selects channels
# 20091010 2.5 merge_scores reassigns channels to avoid conflicts
# 20091010 2.4 fixed bug in to_millisecs which now only does opusses
# 20091010 2.3 score2stats returns channels & patch_changes, by_track & total
# 20091010 2.2 score2stats() returns also pitches and percussion dicts
# 20091010 2.1 bugs: >= not > in segment, to notice patch_change at time 0
# 20091010 2.0 bugs: spurious pop(0) in _decode sysex
# 20091008 1.9 bugs: ISO decoding in sysex; str( not int( in note-off warning
# 20091008 1.8 add concatenate_scores()
# 20091006 1.7 score2stats() measures nticks and ticks_per_quarter
# 20091004 1.6 first mix_scores() and merge_scores()
# 20090424 1.5 timeshift() bugfix: earliest only sees events after from_time
# 20090330 1.4 timeshift() has also a from_time argument
# 20090322 1.3 timeshift() has also a start_time argument
# 20090319 1.2 add segment() and timeshift()
# 20090301 1.1 add to_millisecs()
_previous_warning = '' # 5.4
_previous_times = 0 # 5.4
#------------------------------- Encoding stuff --------------------------
def opus2midi(opus=[]):
r'''The argument is a list: the first item in the list is the "ticks"
parameter, the others are the tracks. Each track is a list
of midi-events, and each event is itself a list; see above.
opus2midi() returns a bytestring of the MIDI, which can then be
written either to a file opened in binary mode (mode='wb'),
or to stdout by means of: sys.stdout.buffer.write()
my_opus = [
96,
[ # track 0:
['patch_change', 0, 1, 8], # and these are the events...
['note_on', 5, 1, 25, 96],
['note_off', 96, 1, 25, 0],
['note_on', 0, 1, 29, 96],
['note_off', 96, 1, 29, 0],
], # end of track 0
]
my_midi = opus2midi(my_opus)
sys.stdout.buffer.write(my_midi)
'''
if len(opus) < 2:
opus=[1000, [],]
tracks = copy.deepcopy(opus)
ticks = int(tracks.pop(0))
ntracks = len(tracks)
if ntracks == 1:
format = 0
else:
format = 1
my_midi = b"MThd\x00\x00\x00\x06"+struct.pack('>HHH',format,ntracks,ticks)
for track in tracks:
events = _encode(track)
my_midi += b'MTrk' + struct.pack('>I',len(events)) + events
_clean_up_warnings()
return my_midi
def score2opus(score=None):
r'''
The argument is a list: the first item in the list is the "ticks"
parameter, the others are the tracks. Each track is a list
of score-events, and each event is itself a list. A score-event
is similar to an opus-event (see above), except that in a score:
1) the times are expressed as an absolute number of ticks
from the track's start time
2) the pairs of 'note_on' and 'note_off' events in an "opus"
are abstracted into a single 'note' event in a "score":
['note', start_time, duration, channel, pitch, velocity]
score2opus() returns a list specifying the equivalent "opus".
my_score = [
96,
[ # track 0:
['patch_change', 0, 1, 8],
['note', 5, 96, 1, 25, 96],
['note', 101, 96, 1, 29, 96]
], # end of track 0
]
my_opus = score2opus(my_score)
'''
if len(score) < 2:
score=[1000, [],]
tracks = copy.deepcopy(score)
ticks = int(tracks.pop(0))
opus_tracks = []
for scoretrack in tracks:
time2events = dict([])
for scoreevent in scoretrack:
if scoreevent[0] == 'note':
note_on_event = ['note_on',scoreevent[1],
scoreevent[3],scoreevent[4],scoreevent[5]]
note_off_event = ['note_off',scoreevent[1]+scoreevent[2],
scoreevent[3],scoreevent[4],scoreevent[5]]
if time2events.get(note_on_event[1]):
time2events[note_on_event[1]].append(note_on_event)
else:
time2events[note_on_event[1]] = [note_on_event,]
if time2events.get(note_off_event[1]):
time2events[note_off_event[1]].append(note_off_event)
else:
time2events[note_off_event[1]] = [note_off_event,]
continue
if time2events.get(scoreevent[1]):
time2events[scoreevent[1]].append(scoreevent)
else:
time2events[scoreevent[1]] = [scoreevent,]
sorted_times = [] # list of keys
for k in time2events.keys():
sorted_times.append(k)
sorted_times.sort()
sorted_events = [] # once-flattened list of values sorted by key
for time in sorted_times:
sorted_events.extend(time2events[time])
abs_time = 0
for event in sorted_events: # convert abs times => delta times
delta_time = event[1] - abs_time
abs_time = event[1]
event[1] = delta_time
opus_tracks.append(sorted_events)
opus_tracks.insert(0,ticks)
_clean_up_warnings()
return opus_tracks
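# Worked example: score2opus() applied to the my_score in the docstring above
# yields exactly the my_opus shown in opus2midi() -- delta-times 0, 5, 96, 0, 96
# for the patch_change / note_on / note_off sequence.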
def score2midi(score=None):
r'''
Translates a "score" into MIDI, using score2opus() then opus2midi()
'''
return opus2midi(score2opus(score))
#--------------------------- Decoding stuff ------------------------
def midi2opus(midi=b''):
r'''Translates MIDI into a "opus". For a description of the
"opus" format, see opus2midi()
'''
my_midi=bytearray(midi)
if len(my_midi) < 4:
_clean_up_warnings()
return [1000,[],]
id = bytes(my_midi[0:4])
if id != b'MThd':
_warn("midi2opus: midi starts with "+str(id)+" instead of 'MThd'")
_clean_up_warnings()
return [1000,[],]
[length, format, tracks_expected, ticks] = struct.unpack(
'>IHHH', bytes(my_midi[4:14]))
if length != 6:
_warn("midi2opus: midi header length was "+str(length)+" instead of 6")
_clean_up_warnings()
return [1000,[],]
my_opus = [ticks,]
my_midi = my_midi[14:]
track_num = 1 # 5.1
while len(my_midi) >= 8:
track_type = bytes(my_midi[0:4])
if track_type != b'MTrk':
_warn('midi2opus: Warning: track #'+str(track_num)+' type is '+str(track_type)+" instead of b'MTrk'")
[track_length] = struct.unpack('>I', my_midi[4:8])
my_midi = my_midi[8:]
if track_length > len(my_midi):
_warn('midi2opus: track #'+str(track_num)+' length '+str(track_length)+' is too large')
_clean_up_warnings()
return my_opus # 5.0
my_midi_track = my_midi[0:track_length]
my_track = _decode(my_midi_track)
my_opus.append(my_track)
my_midi = my_midi[track_length:]
track_num += 1 # 5.1
_clean_up_warnings()
return my_opus
def opus2score(opus=[]):
r'''For a description of the "opus" and "score" formats,
see opus2midi() and score2opus().
'''
if len(opus) < 2:
_clean_up_warnings()
return [1000,[],]
tracks = copy.deepcopy(opus) # couple of slices probably quicker...
ticks = int(tracks.pop(0))
score = [ticks,]
for opus_track in tracks:
ticks_so_far = 0
score_track = []
chapitch2note_on_events = dict([]) # 4.0
for opus_event in opus_track:
ticks_so_far += opus_event[1]
if opus_event[0] == 'note_off' or (opus_event[0] == 'note_on' and opus_event[4] == 0): # 4.8
cha = opus_event[2]
pitch = opus_event[3]
key = cha*128 + pitch
if chapitch2note_on_events.get(key):
new_event = chapitch2note_on_events[key].pop(0)
new_event[2] = ticks_so_far - new_event[1]
score_track.append(new_event)
elif pitch > 127:
_warn('opus2score: note_off with no note_on, bad pitch='+str(pitch))
else:
_warn('opus2score: note_off with no note_on cha='+str(cha)+' pitch='+str(pitch))
elif opus_event[0] == 'note_on':
cha = opus_event[2]
pitch = opus_event[3]
key = cha*128 + pitch
new_event = ['note',ticks_so_far,0,cha,pitch, opus_event[4]]
if chapitch2note_on_events.get(key):
chapitch2note_on_events[key].append(new_event)
else:
chapitch2note_on_events[key] = [new_event,]
else:
opus_event[1] = ticks_so_far
score_track.append(opus_event)
        # check for unterminated notes (Oisín) -- 5.2
for chapitch in chapitch2note_on_events:
note_on_events = chapitch2note_on_events[chapitch]
for new_e in note_on_events:
new_e[2] = ticks_so_far - new_e[1]
score_track.append(new_e)
_warn("opus2score: note_on with no note_off cha="+str(new_e[3])+' pitch='+str(new_e[4])+'; adding note_off at end')
score.append(score_track)
_clean_up_warnings()
return score
def midi2score(midi=b''):
r'''
Translates MIDI into a "score", using midi2opus() then opus2score()
'''
return opus2score(midi2opus(midi))
def midi2ms_score(midi=b''):
r'''
Translates MIDI into a "score" with one beat per second and one
tick per millisecond, using midi2opus() then to_millisecs()
then opus2score()
'''
return opus2score(to_millisecs(midi2opus(midi)))
#------------------------ Other Transformations ---------------------
def to_millisecs(old_opus=None):
    r'''Recalibrates all the times in an "opus" to use one beat
per second and one tick per millisecond. This makes it
hard to retrieve any information about beats or barlines,
but it does make it easy to mix different scores together.
'''
if old_opus == None:
return [1000,[],]
try:
old_tpq = int(old_opus[0])
except IndexError: # 5.0
_warn('to_millisecs: the opus '+str(type(old_opus))+' has no elements')
return [1000,[],]
new_opus = [1000,]
millisec_per_old_tick = 1000.0 / old_tpq # float: will be rounded later
itrack = 1
while itrack < len(old_opus):
millisec_so_far = 0.0
previous_millisec_so_far = 0.0
new_track = [['set_tempo',0,1000000],] # new "crochet" is 1 sec
for old_event in old_opus[itrack]:
if old_event[0] == 'note':
raise TypeError('to_millisecs needs an opus, not a score')
new_event = copy.deepcopy(old_event)
millisec_so_far += (millisec_per_old_tick * old_event[1])
new_event[1] = round(millisec_so_far - previous_millisec_so_far)
if old_event[0] == 'set_tempo':
millisec_per_old_tick = old_event[2] / (1000.0 * old_tpq)
else:
previous_millisec_so_far = millisec_so_far
new_track.append(new_event)
new_opus.append(new_track)
itrack += 1
_clean_up_warnings()
return new_opus
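# For example, with old_tpq = 96, a 'set_tempo' of 500000 microseconds/crochet
# makes each subsequent old tick worth 500000 / (1000.0 * 96) ~= 5.21 ms.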
def event2alsaseq(event=None): # 5.5
r'''Converts an event into the format needed by the alsaseq module,
http://pp.com.mx/python/alsaseq
The type of track (opus or score) is autodetected.
'''
pass
def grep(score=None, channels=None):
r'''Returns a "score" containing only the channels specified
'''
if score == None:
return [1000,[],]
ticks = score[0]
new_score = [ticks,]
if channels == None:
return new_score
channels = set(channels)
global Event2channelindex
itrack = 1
while itrack < len(score):
new_score.append([])
for event in score[itrack]:
channel_index = Event2channelindex.get(event[0], False)
if channel_index:
if event[channel_index] in channels:
new_score[itrack].append(event)
else:
new_score[itrack].append(event)
itrack += 1
return new_score
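# For example, grep(my_score, channels=[9]) keeps only percussion-channel
# events; channel-less meta events (set_tempo, track_name, ...) are kept as-is.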
def play_score(score=None):
r'''Converts the "score" to midi, and feeds it into 'aplaymidi -'
'''
if score == None:
return
import subprocess
pipe = subprocess.Popen(['aplaymidi','-'], stdin=subprocess.PIPE)
if score_type(score) == 'opus':
pipe.stdin.write(opus2midi(score))
else:
pipe.stdin.write(score2midi(score))
pipe.stdin.close()
def timeshift(score=None, shift=None, start_time=None, from_time=0, tracks={0,1,2,3,4,5,6,7,8,10,12,13,14,15}):
r'''Returns a "score" shifted in time by "shift" ticks, or shifted
so that the first event starts at "start_time" ticks.
If "from_time" is specified, only those events in the score
that begin after it are shifted. If "start_time" is less than
"from_time" (or "shift" is negative), then the intermediate
notes are deleted, though patch-change events are preserved.
If "tracks" are specified, then only those tracks get shifted.
"tracks" can be a list, tuple or set; it gets converted to set
internally.
It is deprecated to specify both "shift" and "start_time".
If this does happen, timeshift() will print a warning to
stderr and ignore the "shift" argument.
If "shift" is negative and sufficiently large that it would
leave some event with a negative tick-value, then the score
is shifted so that the first event occurs at time 0. This
also occurs if "start_time" is negative, and is also the
default if neither "shift" nor "start_time" are specified.
'''
#_warn('tracks='+str(tracks))
if score == None or len(score) < 2:
return [1000, [],]
new_score = [score[0],]
my_type = score_type(score)
if my_type == '':
return new_score
if my_type == 'opus':
_warn("timeshift: opus format is not supported\n")
# _clean_up_scores() 6.2; doesn't exist! what was it supposed to do?
return new_score
if not (shift == None) and not (start_time == None):
_warn("timeshift: shift and start_time specified: ignoring shift\n")
shift = None
if shift == None:
if (start_time == None) or (start_time < 0):
start_time = 0
# shift = start_time - from_time
i = 1 # ignore first element (ticks)
tracks = set(tracks) # defend against tuples and lists
earliest = 1000000000
if not (start_time == None) or shift < 0: # first find the earliest event
while i < len(score):
if len(tracks) and not ((i-1) in tracks):
i += 1
continue
for event in score[i]:
if event[1] < from_time:
continue # just inspect the to_be_shifted events
if event[1] < earliest:
earliest = event[1]
i += 1
if earliest > 999999999:
earliest = 0
if shift == None:
shift = start_time - earliest
elif (earliest + shift) < 0:
start_time = 0
shift = 0 - earliest
i = 1 # ignore first element (ticks)
while i < len(score):
if len(tracks) == 0 or not ((i-1) in tracks): # 3.8
new_score.append(score[i])
i += 1
continue
new_track = []
for event in score[i]:
new_event = list(event)
#if new_event[1] == 0 and shift > 0 and new_event[0] != 'note':
# pass
#elif new_event[1] >= from_time:
if new_event[1] >= from_time:
# 4.1 must not rightshift set_tempo
if new_event[0] != 'set_tempo' or shift<0:
new_event[1] += shift
elif (shift < 0) and (new_event[1] >= (from_time+shift)):
continue
new_track.append(new_event)
if len(new_track) > 0:
new_score.append(new_track)
i += 1
_clean_up_warnings()
return new_score
def segment(score=None, start_time=None, end_time=None, start=0, end=100000000,
tracks={0,1,2,3,4,5,6,7,8,10,11,12,13,14,15}):
r'''Returns a "score" which is a segment of the one supplied
as the argument, beginning at "start_time" ticks and ending
at "end_time" ticks (or at the end if "end_time" is not supplied).
If the set "tracks" is specified, only those tracks will
be returned.
'''
if score == None or len(score) < 2:
return [1000, [],]
if start_time == None: # as of 4.2 start_time is recommended
start_time = start # start is legacy usage
if end_time == None: # likewise
end_time = end
new_score = [score[0],]
my_type = score_type(score)
if my_type == '':
return new_score
if my_type == 'opus':
# more difficult (disconnecting note_on's from their note_off's)...
_warn("segment: opus format is not supported\n")
_clean_up_warnings()
return new_score
i = 1 # ignore first element (ticks); we count in ticks anyway
tracks = set(tracks) # defend against tuples and lists
while i < len(score):
if len(tracks) and not ((i-1) in tracks):
i += 1
continue
new_track = []
channel2patch_num = {} # keep most recent patch change before start
channel2patch_time = {}
set_tempo_num = 1000000 # keep most recent tempo change before start
set_tempo_time = 0
earliest_note_time = end_time
for event in score[i]:
if event[0] == 'patch_change':
patch_time = channel2patch_time.get(event[2]) or 0
if (event[1] < start_time) and (event[1] >= patch_time): # 2.0
channel2patch_num[event[2]] = event[3]
channel2patch_time[event[2]] = event[1]
if event[0] == 'set_tempo':
if (event[1] < start_time) and (event[1] >= set_tempo_time):
set_tempo_num = event[2]
set_tempo_time = event[1]
if (event[1] >= start_time) and (event[1] <= end_time):
new_track.append(event)
if (event[0] == 'note') and (event[1] < earliest_note_time):
earliest_note_time = event[1]
if len(new_track) > 0:
for c in channel2patch_num:
new_track.append(['patch_change',start_time,c,channel2patch_num[c]])
new_track.append(['set_tempo', start_time, set_tempo_num])
new_score.append(new_track)
i += 1
_clean_up_warnings()
return new_score
def score_type(opus_or_score=None):
r'''Returns a string, either 'opus' or 'score' or ''
'''
if opus_or_score == None or str(type(opus_or_score)).find('list')<0 or len(opus_or_score) < 2:
return ''
i = 1 # ignore first element
while i < len(opus_or_score):
for event in opus_or_score[i]:
if event[0] == 'note':
return 'score'
elif event[0] == 'note_on':
return 'opus'
i += 1
return ''
def concatenate_scores(scores):
r'''Concatenates a list of scores into one score.
If the scores differ in their "ticks" parameter,
they will all get converted to millisecond-tick format.
'''
# the deepcopys are needed if the input_score's are refs to the same obj
# e.g. if invoked by midisox's repeat()
input_scores = _consistentise_ticks(scores) # 3.7
output_score = copy.deepcopy(input_scores[0])
for input_score in input_scores[1:]:
output_stats = score2stats(output_score)
delta_ticks = output_stats['nticks']
itrack = 1
while itrack < len(input_score):
if itrack >= len(output_score): # new output track if doesn't exist
output_score.append([])
for event in input_score[itrack]:
output_score[itrack].append(copy.deepcopy(event))
output_score[itrack][-1][1] += delta_ticks
itrack += 1
return output_score
def merge_scores(scores):
r'''Merges a list of scores into one score. A merged score comprises
all of the tracks from all of the input scores; un-merging is possible
by selecting just some of the tracks. If the scores differ in their
"ticks" parameter, they will all get converted to millisecond-tick
format. merge_scores attempts to resolve channel-conflicts,
but there are of course only 15 available channels...
'''
input_scores = _consistentise_ticks(scores) # 3.6
output_score = [1000]
channels_so_far = set()
all_channels = {0,1,2,3,4,5,6,7,8,10,11,12,13,14,15}
global Event2channelindex
for input_score in input_scores:
new_channels = set(score2stats(input_score).get('channels_total', []))
new_channels.discard(9) # 2.8 cha9 must remain cha9 (in GM)
for channel in channels_so_far & new_channels:
            # consistently choose lowest available, to ease testing
free_channels = list(all_channels - (channels_so_far|new_channels))
if len(free_channels) > 0:
free_channels.sort()
free_channel = free_channels[0]
else:
free_channel = None
break
itrack = 1
while itrack < len(input_score):
for input_event in input_score[itrack]:
channel_index=Event2channelindex.get(input_event[0],False)
if channel_index and input_event[channel_index]==channel:
input_event[channel_index] = free_channel
itrack += 1
channels_so_far.add(free_channel)
channels_so_far |= new_channels
output_score.extend(input_score[1:])
return output_score
def _ticks(event):
return event[1]
def mix_opus_tracks(input_tracks): # 5.5
r'''Mixes an array of tracks into one track. A mixed track
cannot be un-mixed. It is assumed that the tracks share the same
ticks parameter and the same tempo.
Mixing score-tracks is trivial (just insert all events into one array).
Mixing opus-tracks is only slightly harder, but it's common enough
that a dedicated function is useful.
'''
output_score = [1000, []]
for input_track in input_tracks: # 5.8
input_score = opus2score([1000, input_track])
for event in input_score[1]:
output_score[1].append(event)
output_score[1].sort(key=_ticks)
output_opus = score2opus(output_score)
return output_opus[1]
def mix_scores(scores):
r'''Mixes a list of scores into one one-track score.
A mixed score cannot be un-mixed. Hopefully the scores
have no undesirable channel-conflicts between them.
If the scores differ in their "ticks" parameter,
they will all get converted to millisecond-tick format.
'''
input_scores = _consistentise_ticks(scores) # 3.6
output_score = [1000, []]
for input_score in input_scores:
for input_track in input_score[1:]:
output_score[1].extend(input_track)
return output_score
def score2stats(opus_or_score=None):
r'''Returns a dict of some basic stats about the score, like
bank_select (list of tuples (msb,lsb)),
channels_by_track (list of lists), channels_total (set),
general_midi_mode (list),
ntracks, nticks, patch_changes_by_track (list of dicts),
num_notes_by_channel (list of numbers),
patch_changes_total (set),
percussion (dict histogram of channel 9 events),
pitches (dict histogram of pitches on channels other than 9),
pitch_range_by_track (list, by track, of two-member-tuples),
pitch_range_sum (sum over tracks of the pitch_ranges),
'''
bank_select_msb = -1
bank_select_lsb = -1
bank_select = []
channels_by_track = []
channels_total = set([])
general_midi_mode = []
num_notes_by_channel = dict([])
patches_used_by_track = []
patches_used_total = set([])
patch_changes_by_track = []
patch_changes_total = set([])
percussion = dict([]) # histogram of channel 9 "pitches"
pitches = dict([]) # histogram of pitch-occurrences channels 0-8,10-15
    pitch_range_sum = 0 # sum, over tracks, of the pitch-ranges
pitch_range_by_track = []
is_a_score = True
if opus_or_score == None:
return {'bank_select':[], 'channels_by_track':[], 'channels_total':[],
'general_midi_mode':[], 'ntracks':0, 'nticks':0,
'num_notes_by_channel':dict([]),
'patch_changes_by_track':[], 'patch_changes_total':[],
'percussion':{}, 'pitches':{}, 'pitch_range_by_track':[],
'ticks_per_quarter':0, 'pitch_range_sum':0}
ticks_per_quarter = opus_or_score[0]
i = 1 # ignore first element, which is ticks
nticks = 0
while i < len(opus_or_score):
highest_pitch = 0
lowest_pitch = 128
channels_this_track = set([])
patch_changes_this_track = dict({})
for event in opus_or_score[i]:
if event[0] == 'note':
num_notes_by_channel[event[3]] = num_notes_by_channel.get(event[3],0) + 1
if event[3] == 9:
percussion[event[4]] = percussion.get(event[4],0) + 1
else:
pitches[event[4]] = pitches.get(event[4],0) + 1
if event[4] > highest_pitch:
highest_pitch = event[4]
if event[4] < lowest_pitch:
lowest_pitch = event[4]
channels_this_track.add(event[3])
channels_total.add(event[3])
finish_time = event[1] + event[2]
if finish_time > nticks:
nticks = finish_time
elif event[0] == 'note_off' or (event[0] == 'note_on' and event[4] == 0): # 4.8
finish_time = event[1]
if finish_time > nticks:
nticks = finish_time
elif event[0] == 'note_on':
is_a_score = False
num_notes_by_channel[event[2]] = num_notes_by_channel.get(event[2],0) + 1
if event[2] == 9:
percussion[event[3]] = percussion.get(event[3],0) + 1
else:
pitches[event[3]] = pitches.get(event[3],0) + 1
if event[3] > highest_pitch:
highest_pitch = event[3]
if event[3] < lowest_pitch:
lowest_pitch = event[3]
channels_this_track.add(event[2])
channels_total.add(event[2])
elif event[0] == 'patch_change':
patch_changes_this_track[event[2]] = event[3]
patch_changes_total.add(event[3])
elif event[0] == 'control_change':
if event[3] == 0: # bank select MSB
bank_select_msb = event[4]
elif event[3] == 32: # bank select LSB
bank_select_lsb = event[4]
if bank_select_msb >= 0 and bank_select_lsb >= 0:
bank_select.append((bank_select_msb,bank_select_lsb))
bank_select_msb = -1
bank_select_lsb = -1
elif event[0] == 'sysex_f0':
if _sysex2midimode.get(event[2], -1) >= 0:
general_midi_mode.append(_sysex2midimode.get(event[2]))
if is_a_score:
if event[1] > nticks:
nticks = event[1]
else:
nticks += event[1]
if lowest_pitch == 128:
lowest_pitch = 0
channels_by_track.append(channels_this_track)
patch_changes_by_track.append(patch_changes_this_track)
pitch_range_by_track.append((lowest_pitch,highest_pitch))
pitch_range_sum += (highest_pitch-lowest_pitch)
i += 1
return {'bank_select':bank_select,
'channels_by_track':channels_by_track,
'channels_total':channels_total,
'general_midi_mode':general_midi_mode,
'ntracks':len(opus_or_score)-1,
'nticks':nticks,
'num_notes_by_channel':num_notes_by_channel,
'patch_changes_by_track':patch_changes_by_track,
'patch_changes_total':patch_changes_total,
'percussion':percussion,
'pitches':pitches,
'pitch_range_by_track':pitch_range_by_track,
'pitch_range_sum':pitch_range_sum,
'ticks_per_quarter':ticks_per_quarter}
#----------------------------- Event stuff --------------------------
_sysex2midimode = {
"\x7E\x7F\x09\x01\xF7": 1,
"\x7E\x7F\x09\x02\xF7": 0,
"\x7E\x7F\x09\x03\xF7": 2,
}
# Some public-access tuples:
MIDI_events = tuple('''note_off note_on key_after_touch
control_change patch_change channel_after_touch
pitch_wheel_change'''.split())
Text_events = tuple('''text_event copyright_text_event
track_name instrument_name lyric marker cue_point text_event_08
text_event_09 text_event_0a text_event_0b text_event_0c
text_event_0d text_event_0e text_event_0f'''.split())
Nontext_meta_events = tuple('''end_track set_tempo
smpte_offset time_signature key_signature sequencer_specific
raw_meta_event sysex_f0 sysex_f7 song_position song_select
tune_request'''.split())
# unsupported: raw_data
# Actually, 'tune_request' is an F-series event, not strictly a meta-event...
Meta_events = Text_events + Nontext_meta_events
All_events = MIDI_events + Meta_events
# And three dictionaries:
Number2patch = { # General MIDI patch numbers:
0:'Acoustic Grand',
1:'Bright Acoustic',
2:'Electric Grand',
3:'Honky-Tonk',
4:'Electric Piano 1',
5:'Electric Piano 2',
6:'Harpsichord',
7:'Clav',
8:'Celesta',
9:'Glockenspiel',
10:'Music Box',
11:'Vibraphone',
12:'Marimba',
13:'Xylophone',
14:'Tubular Bells',
15:'Dulcimer',
16:'Drawbar Organ',
17:'Percussive Organ',
18:'Rock Organ',
19:'Church Organ',
20:'Reed Organ',
21:'Accordion',
22:'Harmonica',
23:'Tango Accordion',
24:'Acoustic Guitar(nylon)',
25:'Acoustic Guitar(steel)',
26:'Electric Guitar(jazz)',
27:'Electric Guitar(clean)',
28:'Electric Guitar(muted)',
29:'Overdriven Guitar',
30:'Distortion Guitar',
31:'Guitar Harmonics',
32:'Acoustic Bass',
33:'Electric Bass(finger)',
34:'Electric Bass(pick)',
35:'Fretless Bass',
36:'Slap Bass 1',
37:'Slap Bass 2',
38:'Synth Bass 1',
39:'Synth Bass 2',
40:'Violin',
41:'Viola',
42:'Cello',
43:'Contrabass',
44:'Tremolo Strings',
45:'Pizzicato Strings',
46:'Orchestral Harp',
47:'Timpani',
48:'String Ensemble 1',
49:'String Ensemble 2',
50:'SynthStrings 1',
51:'SynthStrings 2',
52:'Choir Aahs',
53:'Voice Oohs',
54:'Synth Voice',
55:'Orchestra Hit',
56:'Trumpet',
57:'Trombone',
58:'Tuba',
59:'Muted Trumpet',
60:'French Horn',
61:'Brass Section',
62:'SynthBrass 1',
63:'SynthBrass 2',
64:'Soprano Sax',
65:'Alto Sax',
66:'Tenor Sax',
67:'Baritone Sax',
68:'Oboe',
69:'English Horn',
70:'Bassoon',
71:'Clarinet',
72:'Piccolo',
73:'Flute',
74:'Recorder',
75:'Pan Flute',
76:'Blown Bottle',
77:'Shakuhachi',
78:'Whistle',
79:'Ocarina',
80:'Lead 1 (square)',
81:'Lead 2 (sawtooth)',
82:'Lead 3 (calliope)',
83:'Lead 4 (chiff)',
84:'Lead 5 (charang)',
85:'Lead 6 (voice)',
86:'Lead 7 (fifths)',
87:'Lead 8 (bass+lead)',
88:'Pad 1 (new age)',
89:'Pad 2 (warm)',
90:'Pad 3 (polysynth)',
91:'Pad 4 (choir)',
92:'Pad 5 (bowed)',
93:'Pad 6 (metallic)',
94:'Pad 7 (halo)',
95:'Pad 8 (sweep)',
96:'FX 1 (rain)',
97:'FX 2 (soundtrack)',
98:'FX 3 (crystal)',
99:'FX 4 (atmosphere)',
100:'FX 5 (brightness)',
101:'FX 6 (goblins)',
102:'FX 7 (echoes)',
103:'FX 8 (sci-fi)',
104:'Sitar',
105:'Banjo',
106:'Shamisen',
107:'Koto',
108:'Kalimba',
109:'Bagpipe',
110:'Fiddle',
111:'Shanai',
112:'Tinkle Bell',
113:'Agogo',
114:'Steel Drums',
115:'Woodblock',
116:'Taiko Drum',
117:'Melodic Tom',
118:'Synth Drum',
119:'Reverse Cymbal',
120:'Guitar Fret Noise',
121:'Breath Noise',
122:'Seashore',
123:'Bird Tweet',
124:'Telephone Ring',
125:'Helicopter',
126:'Applause',
127:'Gunshot',
}
Notenum2percussion = { # General MIDI Percussion (on Channel 9):
35:'Acoustic Bass Drum',
36:'Bass Drum 1',
37:'Side Stick',
38:'Acoustic Snare',
39:'Hand Clap',
40:'Electric Snare',
41:'Low Floor Tom',
42:'Closed Hi-Hat',
43:'High Floor Tom',
44:'Pedal Hi-Hat',
45:'Low Tom',
46:'Open Hi-Hat',
47:'Low-Mid Tom',
48:'Hi-Mid Tom',
49:'Crash Cymbal 1',
50:'High Tom',
51:'Ride Cymbal 1',
52:'Chinese Cymbal',
53:'Ride Bell',
54:'Tambourine',
55:'Splash Cymbal',
56:'Cowbell',
57:'Crash Cymbal 2',
58:'Vibraslap',
59:'Ride Cymbal 2',
60:'Hi Bongo',
61:'Low Bongo',
62:'Mute Hi Conga',
63:'Open Hi Conga',
64:'Low Conga',
65:'High Timbale',
66:'Low Timbale',
67:'High Agogo',
68:'Low Agogo',
69:'Cabasa',
70:'Maracas',
71:'Short Whistle',
72:'Long Whistle',
73:'Short Guiro',
74:'Long Guiro',
75:'Claves',
76:'Hi Wood Block',
77:'Low Wood Block',
78:'Mute Cuica',
79:'Open Cuica',
80:'Mute Triangle',
81:'Open Triangle',
}
Event2channelindex = { 'note':3, 'note_off':2, 'note_on':2,
'key_after_touch':2, 'control_change':2, 'patch_change':2,
'channel_after_touch':2, 'pitch_wheel_change':2
}
################################################################
# The code below this line is full of frightening things, all to
# do with the actual encoding and decoding of binary MIDI data.
def _twobytes2int(byte_a):
r'''decode a 16 bit quantity from two bytes,'''
return (byte_a[1] | (byte_a[0] << 8))
def _int2twobytes(int_16bit):
r'''encode a 16 bit quantity into two bytes,'''
return bytes([(int_16bit>>8) & 0xFF, int_16bit & 0xFF])
def _read_14_bit(byte_a):
r'''decode a 14 bit quantity from two bytes,'''
return (byte_a[0] | (byte_a[1] << 7))
def _write_14_bit(int_14bit):
r'''encode a 14 bit quantity into two bytes,'''
return bytes([int_14bit & 0x7F, (int_14bit>>7) & 0x7F])
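# For example, _write_14_bit(0x2000) == bytes([0x00, 0x40]); 0x2000 is the
# centre value that 'pitch_wheel_change' decoding subtracts (see _decode below).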
def _ber_compressed_int(integer):
r'''BER compressed integer (not an ASN.1 BER, see perlpacktut for
details). Its bytes represent an unsigned integer in base 128,
most significant digit first, with as few digits as possible.
Bit eight (the high bit) is set on each byte except the last.
'''
ber = bytearray(b'')
seven_bits = 0x7F & integer
ber.insert(0, seven_bits) # XXX surely should convert to a char ?
integer >>= 7
while integer > 0:
seven_bits = 0x7F & integer
ber.insert(0, 0x80|seven_bits) # XXX surely should convert to a char ?
integer >>= 7
return ber
def _unshift_ber_int(ba):
r'''Given a bytearray, returns a tuple of (the ber-integer at the
start, and the remainder of the bytearray).
'''
byte = ba.pop(0)
integer = 0
while True:
integer += (byte & 0x7F)
if not (byte & 0x80):
return ((integer, ba))
if not len(ba):
_warn('_unshift_ber_int: no end-of-integer found')
return ((0, ba))
byte = ba.pop(0)
integer <<= 7
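# For example, _ber_compressed_int(200) == bytearray(b'\x81\x48'), and
# _unshift_ber_int(bytearray(b'\x81\x48')) == (200, bytearray(b'')).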
def _clean_up_warnings(): # 5.4
# Call this before returning from any publicly callable function
# whenever there's a possibility that a warning might have been printed
# by the function, or by any private functions it might have called.
global _previous_times
global _previous_warning
if _previous_times > 1:
print(' previous message repeated '+str(_previous_times)+' times', file=sys.stderr)
elif _previous_times > 0:
print(' previous message repeated', file=sys.stderr)
_previous_times = 0
_previous_warning = ''
def _warn(s=''):
global _previous_times
global _previous_warning
if s == _previous_warning: # 5.4
_previous_times = _previous_times + 1
else:
_clean_up_warnings()
print(str(s), file=sys.stderr)
_previous_warning = s
def _some_text_event(which_kind=0x01, text='some_text'):
# if which_kind == 0x7F: # 6.1 sequencer_specific data can be 8-bit
data = bytes(text, encoding='ISO-8859-1') # 6.2 and also text data!
# else: data = bytes(text, encoding='ascii')
return b'\xFF'+bytes((which_kind,))+_ber_compressed_int(len(data))+data
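# For example, _some_text_event(0x03, 'Piano') == b'\xff\x03\x05Piano'
# (0xFF meta prefix, command 0x03 = track_name, BER length 5, then the text).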
def _consistentise_ticks(scores): # 3.6
# used by mix_scores, merge_scores, concatenate_scores
if len(scores) == 1:
return copy.deepcopy(scores)
are_consistent = True
ticks = scores[0][0]
iscore = 1
while iscore < len(scores):
if scores[iscore][0] != ticks:
are_consistent = False
break
iscore += 1
if are_consistent:
return copy.deepcopy(scores)
new_scores = []
iscore = 0
while iscore < len(scores):
score = scores[iscore]
new_scores.append(opus2score(to_millisecs(score2opus(score))))
iscore += 1
return new_scores
###########################################################################
def _decode(trackdata=b'', exclude=None, include=None,
event_callback=None, exclusive_event_callback=None, no_eot_magic=False):
r'''Decodes MIDI track data into an opus-style list of events.
The options:
'exclude' is a list of event types which will be ignored SHOULD BE A SET
'include' (and no exclude), makes exclude a list
of all possible events, /minus/ what include specifies
'event_callback' is a coderef
'exclusive_event_callback' is a coderef
'''
trackdata = bytearray(trackdata)
if exclude == None:
exclude = []
if include == None:
include = []
if include and not exclude:
exclude = All_events
include = set(include)
exclude = set(exclude)
# Pointer = 0; not used here; we eat through the bytearray instead.
event_code = -1; # used for running status
event_count = 0;
events = []
while(len(trackdata)):
# loop while there's anything to analyze ...
eot = False # When True, the event registrar aborts this loop
event_count += 1
E = []
# E for events - we'll feed it to the event registrar at the end.
# Slice off the delta time code, and analyze it
[time, remainder] = _unshift_ber_int(trackdata)
# Now let's see what we can make of the command
first_byte = trackdata.pop(0) & 0xFF
if (first_byte < 0xF0): # It's a MIDI event
if (first_byte & 0x80):
event_code = first_byte
else:
# It wants running status; use last event_code value
trackdata.insert(0, first_byte)
if (event_code == -1):
_warn("Running status not set; Aborting track.")
return []
command = event_code & 0xF0
channel = event_code & 0x0F
if (command == 0xF6): # 0-byte argument
pass
elif (command == 0xC0 or command == 0xD0): # 1-byte argument
parameter = trackdata.pop(0) # could be B
else: # 2-byte argument could be BB or 14-bit
parameter = (trackdata.pop(0), trackdata.pop(0))
#################################################################
# MIDI events
if (command == 0x80):
if 'note_off' in exclude:
continue
E = ['note_off', time, channel, parameter[0], parameter[1]]
elif (command == 0x90):
if 'note_on' in exclude:
continue
E = ['note_on', time, channel, parameter[0], parameter[1]]
elif (command == 0xA0):
if 'key_after_touch' in exclude:
continue
E = ['key_after_touch',time,channel,parameter[0],parameter[1]]
elif (command == 0xB0):
if 'control_change' in exclude:
continue
E = ['control_change',time,channel,parameter[0],parameter[1]]
elif (command == 0xC0):
if 'patch_change' in exclude:
continue
E = ['patch_change', time, channel, parameter]
elif (command == 0xD0):
if 'channel_after_touch' in exclude:
continue
E = ['channel_after_touch', time, channel, parameter]
elif (command == 0xE0):
if 'pitch_wheel_change' in exclude:
continue
E = ['pitch_wheel_change', time, channel,
_read_14_bit(parameter)-0x2000]
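                # e.g. parameter bytes (0x00, 0x40) give _read_14_bit = 0x2000,
                # i.e. a value of 0, the wheel's centre position (assuming the
                # usual LSB-first 14-bit layout).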
else:
_warn("Shouldn't get here; command="+hex(command))
elif (first_byte == 0xFF): # It's a Meta-Event! ##################
#[command, length, remainder] =
# unpack("xCwa*", substr(trackdata, $Pointer, 6));
#Pointer += 6 - len(remainder);
# # Move past JUST the length-encoded.
command = trackdata.pop(0) & 0xFF
[length, trackdata] = _unshift_ber_int(trackdata)
if (command == 0x00):
if (length == 2):
E = ['set_sequence_number',time,_twobytes2int(trackdata)]
else:
_warn('set_sequence_number: length must be 2, not '+str(length))
E = ['set_sequence_number', time, 0]
elif command >= 0x01 and command <= 0x0f: # Text events
# 6.2 take it in bytes; let the user get the right encoding.
# text_str = trackdata[0:length].decode('ascii','ignore')
text_str = trackdata[0:length].decode('ISO-8859-1')
# Defined text events
if (command == 0x01):
E = ['text_event', time, text_str]
elif (command == 0x02):
E = ['copyright_text_event', time, text_str]
elif (command == 0x03):
E = ['track_name', time, text_str]
elif (command == 0x04):
E = ['instrument_name', time, text_str]
elif (command == 0x05):
E = ['lyric', time, text_str]
elif (command == 0x06):
E = ['marker', time, text_str]
elif (command == 0x07):
E = ['cue_point', time, text_str]
# Reserved but apparently unassigned text events
elif (command == 0x08):
E = ['text_event_08', time, text_str]
elif (command == 0x09):
E = ['text_event_09', time, text_str]
elif (command == 0x0a):
E = ['text_event_0a', time, text_str]
elif (command == 0x0b):
E = ['text_event_0b', time, text_str]
elif (command == 0x0c):
E = ['text_event_0c', time, text_str]
elif (command == 0x0d):
E = ['text_event_0d', time, text_str]
elif (command == 0x0e):
E = ['text_event_0e', time, text_str]
elif (command == 0x0f):
E = ['text_event_0f', time, text_str]
# Now the sticky events -------------------------------------
elif (command == 0x2F):
E = ['end_track', time]
# The code for handling this, oddly, comes LATER,
# in the event registrar.
elif (command == 0x51): # DTime, Microseconds/Crochet
if length != 3:
_warn('set_tempo event, but length='+str(length))
E = ['set_tempo', time,
struct.unpack(">I", b'\x00'+trackdata[0:3])[0]]
elif (command == 0x54):
if length != 5: # DTime, HR, MN, SE, FR, FF
_warn('smpte_offset event, but length='+str(length))
E = ['smpte_offset',time] + list(struct.unpack(">BBBBB",trackdata[0:5]))
elif (command == 0x58):
if length != 4: # DTime, NN, DD, CC, BB
_warn('time_signature event, but length='+str(length))
E = ['time_signature', time]+list(trackdata[0:4])
elif (command == 0x59):
if length != 2: # DTime, SF(signed), MI
_warn('key_signature event, but length='+str(length))
E = ['key_signature',time] + list(struct.unpack(">bB",trackdata[0:2]))
elif (command == 0x7F):
E = ['sequencer_specific',time,
trackdata[0:length].decode('ISO-8859-1')] # 6.0
else:
E = ['raw_meta_event', time, command,
trackdata[0:length].decode('ISO-8859-1')] # 6.0
#"[uninterpretable meta-event command of length length]"
# DTime, Command, Binary Data
# It's uninterpretable; record it as raw_data.
# Pointer += length; # Now move Pointer
trackdata = trackdata[length:]
######################################################################
elif (first_byte == 0xF0 or first_byte == 0xF7):
# Note that sysexes in MIDI /files/ are different than sysexes
# in MIDI transmissions!! The vast majority of system exclusive
# messages will just use the F0 format. For instance, the
# transmitted message F0 43 12 00 07 F7 would be stored in a
# MIDI file as F0 05 43 12 00 07 F7. As mentioned above, it is
# required to include the F7 at the end so that the reader of the
# MIDI file knows that it has read the entire message. (But the F7
# is omitted if this is a non-final block in a multiblock sysex;
# but the F7 (if there) is counted in the message's declared
# length, so we don't have to think about it anyway.)
#command = trackdata.pop(0)
[length, trackdata] = _unshift_ber_int(trackdata)
if first_byte == 0xF0:
# 20091008 added ISO-8859-1 to get an 8-bit str
E = ['sysex_f0', time, trackdata[0:length].decode('ISO-8859-1')]
else:
E = ['sysex_f7', time, trackdata[0:length].decode('ISO-8859-1')]
trackdata = trackdata[length:]
######################################################################
# Now, the MIDI file spec says:
# <track data> = <MTrk event>+
# <MTrk event> = <delta-time> <event>
# <event> = <MIDI event> | <sysex event> | <meta-event>
# I know that, on the wire, <MIDI event> can include note_on,
# note_off, and all the other 8x to Ex events, AND Fx events
# other than F0, F7, and FF -- namely, <song position msg>,
# <song select msg>, and <tune request>.
#
        # Whether these can occur in MIDI files is not clearly specified
        # by the MIDI file spec.  So, I'm going to assume that
# they CAN, in practice, occur. I don't know whether it's
# proper for you to actually emit these into a MIDI file.
elif (first_byte == 0xF2): # DTime, Beats
# <song position msg> ::= F2 <data pair>
E = ['song_position', time, _read_14_bit(trackdata[:2])]
trackdata = trackdata[2:]
elif (first_byte == 0xF3): # <song select msg> ::= F3 <data singlet>
# E = ['song_select', time, struct.unpack('>B',trackdata.pop(0))[0]]
E = ['song_select', time, trackdata[0]]
trackdata = trackdata[1:]
# DTime, Thing (what?! song number? whatever ...)
elif (first_byte == 0xF6): # DTime
E = ['tune_request', time]
# What would a tune request be doing in a MIDI /file/?
#########################################################
# ADD MORE META-EVENTS HERE. TODO:
# f1 -- MTC Quarter Frame Message. One data byte follows
# the Status; it's the time code value, from 0 to 127.
# f8 -- MIDI clock. no data.
# fa -- MIDI start. no data.
# fb -- MIDI continue. no data.
# fc -- MIDI stop. no data.
# fe -- Active sense. no data.
# f4 f5 f9 fd -- unallocated
r'''
elif (first_byte > 0xF0) { # Some unknown kinda F-series event ####
# Here we only produce a one-byte piece of raw data.
# But the encoder for 'raw_data' accepts any length of it.
E = [ 'raw_data',
time, substr(trackdata,Pointer,1) ]
# DTime and the Data (in this case, the one Event-byte)
++Pointer; # itself
'''
elif first_byte > 0xF0: # Some unknown F-series event
# Here we only produce a one-byte piece of raw data.
            E = ['raw_data', time, trackdata[0:1].decode('ISO-8859-1')]
trackdata = trackdata[1:]
else: # Fallthru.
_warn("Aborting track. Command-byte first_byte="+hex(first_byte))
break
# End of the big if-group
######################################################################
# THE EVENT REGISTRAR...
if E and (E[0] == 'end_track'):
# This is the code for exceptional handling of the EOT event.
eot = True
if not no_eot_magic:
if E[1] > 0: # a null text-event to carry the delta-time
E = ['text_event', E[1], '']
else:
E = [] # EOT with a delta-time of 0; ignore it.
if E and not (E[0] in exclude):
#if ( $exclusive_event_callback ):
# &{ $exclusive_event_callback }( @E );
#else:
# &{ $event_callback }( @E ) if $event_callback;
events.append(E)
if eot:
break
# End of the big "Event" while-block
return events
###########################################################################
def _encode(events_lol, unknown_callback=None, never_add_eot=False,
no_eot_magic=False, no_running_status=False):
# encode an event structure, presumably for writing to a file
# Calling format:
# $data_r = MIDI::Event::encode( \@event_lol, { options } );
# Takes a REFERENCE to an event structure (a LoL)
# Returns an (unblessed) REFERENCE to track data.
# If you want to use this to encode a /single/ event,
# you still have to do it as a reference to an event structure (a LoL)
# that just happens to have just one event. I.e.,
# encode( [ $event ] ) or encode( [ [ 'note_on', 100, 5, 42, 64] ] )
# If you're doing this, consider the never_add_eot track option, as in
# print MIDI ${ encode( [ $event], { 'never_add_eot' => 1} ) };
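    # In this Python port the equivalent call is simply (a sketch):
    #   track_bytes = _encode([['note_on', 100, 5, 42, 64]])
    # which returns the encoded track data as bytes.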
data = [] # what I'll store the chunks of byte-data in
# This is so my end_track magic won't corrupt the original
events = copy.deepcopy(events_lol)
if not never_add_eot:
# One way or another, tack on an 'end_track'
if events:
last = events[-1]
if not (last[0] == 'end_track'): # no end_track already
if (last[0] == 'text_event' and len(last[2]) == 0):
# 0-length text event at track-end.
if no_eot_magic:
# Exceptional case: don't mess with track-final
# 0-length text_events; just peg on an end_track
events.append(['end_track', 0])
else:
# NORMAL CASE: replace with an end_track, leaving DTime
last[0] = 'end_track'
else:
# last event was neither 0-length text_event nor end_track
events.append(['end_track', 0])
else: # an eventless track!
events = [['end_track', 0],]
# maybe_running_status = not no_running_status # unused? 4.7
last_status = -1
for event_r in (events):
E = copy.deepcopy(event_r)
# otherwise the shifting'd corrupt the original
if not E:
continue
event = E.pop(0)
if not len(event):
continue
dtime = int(E.pop(0))
# print('event='+str(event)+' dtime='+str(dtime))
event_data = ''
if ( # MIDI events -- eligible for running status
event == 'note_on'
or event == 'note_off'
or event == 'control_change'
or event == 'key_after_touch'
or event == 'patch_change'
or event == 'channel_after_touch'
or event == 'pitch_wheel_change' ):
# This block is where we spend most of the time. Gotta be tight.
if (event == 'note_off'):
status = 0x80 | (int(E[0]) & 0x0F)
parameters = struct.pack('>BB', int(E[1])&0x7F, int(E[2])&0x7F)
elif (event == 'note_on'):
status = 0x90 | (int(E[0]) & 0x0F)
parameters = struct.pack('>BB', int(E[1])&0x7F, int(E[2])&0x7F)
elif (event == 'key_after_touch'):
status = 0xA0 | (int(E[0]) & 0x0F)
parameters = struct.pack('>BB', int(E[1])&0x7F, int(E[2])&0x7F)
elif (event == 'control_change'):
status = 0xB0 | (int(E[0]) & 0x0F)
parameters = struct.pack('>BB', int(E[1])&0xFF, int(E[2])&0xFF)
elif (event == 'patch_change'):
status = 0xC0 | (int(E[0]) & 0x0F)
parameters = struct.pack('>B', int(E[1]) & 0xFF)
elif (event == 'channel_after_touch'):
status = 0xD0 | (int(E[0]) & 0x0F)
parameters = struct.pack('>B', int(E[1]) & 0xFF)
elif (event == 'pitch_wheel_change'):
status = 0xE0 | (int(E[0]) & 0x0F)
parameters = _write_14_bit(int(E[1]) + 0x2000)
else:
_warn("BADASS FREAKOUT ERROR 31415!")
# And now the encoding
# w = BER compressed integer (not ASN.1 BER, see perlpacktut for
# details). Its bytes represent an unsigned integer in base 128,
# most significant digit first, with as few digits as possible.
# Bit eight (the high bit) is set on each byte except the last.
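            # Worked example: 200 = 1*128 + 72, so it encodes as the two
            # bytes 0x81 0x48 (0x01 with the high bit set, then 0x48);
            # any value below 128 encodes as a single byte.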
data.append(_ber_compressed_int(dtime))
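            # Running status: when the status byte would repeat, MIDI allows
            # it to be omitted, so e.g. consecutive note_on events on the
            # same channel need only one 0x9n byte between them.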
if (status != last_status) or no_running_status:
data.append(struct.pack('>B', status))
data.append(parameters)
last_status = status
continue
else:
# Not a MIDI event.
# All the code in this block could be more efficient,
# but this is not where the code needs to be tight.
# print "zaz $event\n";
last_status = -1
if event == 'raw_meta_event':
event_data = _some_text_event(int(E[0]), E[1])
elif (event == 'set_sequence_number'): # 3.9
event_data = b'\xFF\x00\x02'+_int2twobytes(E[0])
# Text meta-events...
# a case for a dict, I think (pjb) ...
elif (event == 'text_event'):
event_data = _some_text_event(0x01, E[0])
elif (event == 'copyright_text_event'):
event_data = _some_text_event(0x02, E[0])
elif (event == 'track_name'):
event_data = _some_text_event(0x03, E[0])
elif (event == 'instrument_name'):
event_data = _some_text_event(0x04, E[0])
elif (event == 'lyric'):
event_data = _some_text_event(0x05, E[0])
elif (event == 'marker'):
event_data = _some_text_event(0x06, E[0])
elif (event == 'cue_point'):
event_data = _some_text_event(0x07, E[0])
elif (event == 'text_event_08'):
event_data = _some_text_event(0x08, E[0])
elif (event == 'text_event_09'):
event_data = _some_text_event(0x09, E[0])
elif (event == 'text_event_0a'):
event_data = _some_text_event(0x0A, E[0])
elif (event == 'text_event_0b'):
event_data = _some_text_event(0x0B, E[0])
elif (event == 'text_event_0c'):
event_data = _some_text_event(0x0C, E[0])
elif (event == 'text_event_0d'):
event_data = _some_text_event(0x0D, E[0])
elif (event == 'text_event_0e'):
event_data = _some_text_event(0x0E, E[0])
elif (event == 'text_event_0f'):
event_data = _some_text_event(0x0F, E[0])
# End of text meta-events
elif (event == 'end_track'):
event_data = b"\xFF\x2F\x00"
elif (event == 'set_tempo'):
#event_data = struct.pack(">BBwa*", 0xFF, 0x51, 3,
# substr( struct.pack('>I', E[0]), 1, 3))
event_data = b'\xFF\x51\x03'+struct.pack('>I',E[0])[1:]
elif (event == 'smpte_offset'):
# event_data = struct.pack(">BBwBBBBB", 0xFF, 0x54, 5, E[0:5] )
event_data = struct.pack(">BBBbBBBB", 0xFF,0x54,0x05,E[0],E[1],E[2],E[3],E[4])
elif (event == 'time_signature'):
# event_data = struct.pack(">BBwBBBB", 0xFF, 0x58, 4, E[0:4] )
event_data = struct.pack(">BBBbBBB", 0xFF, 0x58, 0x04, E[0],E[1],E[2],E[3])
elif (event == 'key_signature'):
event_data = struct.pack(">BBBbB", 0xFF, 0x59, 0x02, E[0],E[1])
elif (event == 'sequencer_specific'):
# event_data = struct.pack(">BBwa*", 0xFF,0x7F, len(E[0]), E[0])
event_data = _some_text_event(0x7F, E[0])
# End of Meta-events
# Other Things...
elif (event == 'sysex_f0'):
#event_data = struct.pack(">Bwa*", 0xF0, len(E[0]), E[0])
#B=bitstring w=BER-compressed-integer a=null-padded-ascii-str
event_data = bytearray(b'\xF0')+_ber_compressed_int(len(E[0]))+bytearray(bytes(E[0],encoding='ISO-8859-1'))
elif (event == 'sysex_f7'):
#event_data = struct.pack(">Bwa*", 0xF7, len(E[0]), E[0])
event_data = bytearray(b'\xF7')+_ber_compressed_int(len(E[0]))+bytearray(bytes(E[0],encoding='ISO-8859-1'))
elif (event == 'song_position'):
event_data = b"\xF2" + _write_14_bit( E[0] )
elif (event == 'song_select'):
event_data = struct.pack('>BB', 0xF3, E[0] )
elif (event == 'tune_request'):
event_data = b"\xF6"
elif (event == 'raw_data'):
_warn("_encode: raw_data event not supported")
# event_data = E[0]
continue
# End of Other Stuff
else:
# The Big Fallthru
if unknown_callback:
# push(@data, &{ $unknown_callback }( @$event_r ))
pass
else:
_warn("Unknown event: "+str(event))
                # To suppress the complaint here, just set
# 'unknown_callback' => sub { return () }
continue
#print "Event $event encoded part 2\n"
        if isinstance(event_data, str):
event_data = bytearray(event_data.encode('Latin1', 'ignore'))
if len(event_data): # how could $event_data be empty
# data.append(struct.pack('>wa*', dtime, event_data))
# print(' event_data='+str(event_data))
data.append(_ber_compressed_int(dtime)+event_data)
return b''.join(data)
|
agpl-3.0
|
5GExchange/escape
|
mininet/mininet/cli_vnf.py
|
2
|
5976
|
"""
Extends Mininet CLI with sub-commands for interacting with VNFs.
Example session:
mininet> vnf info
mininet> ee status
mininet> ee help
mininet> ee
mininet:ee> status
mininet:ee> help
mininet:ee>
mininet>
"""
from os import kill
from time import sleep
from cmd import Cmd
import sys
import curses
from mininet.log import info, output, error
from mininet.util import quietRun
from mininet.vnfcatalog import Catalog
class SubCmd( Cmd ):
"Base-class for sub-commands."
def __init__( self ):
Cmd.__init__( self )
def complete_further(self, text, origline, begidx, endidx):
"""Return possible completions for 'text'.
Suitable for hierarchical completion.
"""
# based on Cmd.complete()
line = origline.lstrip()
stripped = len(origline) - len(line)
begidx = begidx - stripped
endidx = endidx - stripped
if begidx>0:
cmd, args, foo = self.parseline(line)
if cmd == '':
compfunc = self.completedefault
else:
try:
compfunc = getattr(self, 'complete_' + cmd)
except AttributeError:
compfunc = self.completedefault
else:
compfunc = self.completenames
return compfunc(text, line, begidx, endidx)
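    # A parent interpreter can delegate hierarchical completion like this
    # (a hypothetical sketch; 'ee' being the sub-command):
    #   def complete_ee( self, text, line, begidx, endidx ):
    #       return EE( self.mn ).complete_further( text, line, begidx, endidx )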
def emptyline( self ):
"Exit from the sub-interpreter."
return True
def do_exit( self, _line ):
"Exit"
return 'exited by user command'
def do_quit( self, line ):
"Exit"
return self.do_exit( line )
def do_EOF( self, line ):
"Exit"
output( '\n' )
return self.do_exit( line )
class EE( SubCmd ):
"Subcommands for interacting with EEs."
prompt = 'mininet:ee> '
def __init__( self, mininet, stdin=sys.stdin, script=None ):
self.mn = mininet
SubCmd.__init__( self )
def do_status( self, _line ):
"Print the status of Execution Environments (VNF containers)."
for ee in self.mn.ees:
if ee.vnfPid:
try:
kill( ee.vnfPid, 0 )
output('%s is running %s at PID %s\n' %
(ee.name, ee.startCmd, ee.vnfPid))
            except OSError:
ee.vnfPid = 0
output('%s is idle\n' % ee.name)
else:
output('%s is idle\n' % ee.name)
def do_stop( self, _line ):
"Stop VNFs running inside EEs."
self.mn.stopVNFs()
def do_start( self, _line ):
"Start VNFs associated with EEs."
self.mn.startVNFs()
def do_restart( self, _line ):
"Restart VNFs associated with EEs."
self.mn.restartVNFs()
def do_top( self, _line ):
"Show resource usage for EEs."
cmd = 'grep cpu /proc/stat'
mcmd = 'grep MemTotal /proc/meminfo'
mem_total = int( quietRun( mcmd ).split()[-2] )
last_cpu_usage = {}
last_cpu_total = {}
for ee in self.mn.ees:
last_cpu_usage[ee] = 1
last_cpu_total[ee] = 1
screen = curses.initscr()
curses.noecho()
screen.clear()
#curses.curs_set(0)
h1 = "\t[ CPU ]\t[ MEM ] "
h2 = " NAME\t[ SHARE ABS REL ]\t[ TOTAL kB USED kB ABS REL ]"
        try:
            while True:
                vpos = 2
                hpos = 0
                screen.addstr(0, 0, h1)
                screen.addstr(1, 0, h2)
                for ee in self.mn.ees:
                    procresult = quietRun( cmd ).split()
                    cpu_total = sum( map(int, procresult[1:10]) )
                    cpu_usage = ee.cGetCpuUsage()
                    diff = float(cpu_usage-last_cpu_usage[ee])
                    total_diff = float(cpu_total-last_cpu_total[ee])
                    cpu_usage_abs = 100.0 * diff / total_diff
                    cpu_usage_rel = cpu_usage_abs / ee.frac
                    mem_usage = ee.cGetMemUsage()
                    mem_usage_abs = 100.0 * mem_usage / mem_total
                    mem_usage_rel = mem_usage_abs / ee.frac
                    s = " %s" % ee.name
                    s += '\t%7.1f' % ee.frac
                    s += ' %6.1f' % cpu_usage_abs
                    s += ' %6.1f' % cpu_usage_rel
                    s += '\t%10d' % mem_total
                    s += ' %7d' % mem_usage
                    s += ' %5.1f' % mem_usage_abs
                    s += ' %5.1f' % mem_usage_rel
                    screen.addstr(vpos, hpos, s)
                    last_cpu_usage[ee] = cpu_usage
                    last_cpu_total[ee] = cpu_total
                    vpos += 1
                screen.addstr(vpos, hpos, '')
                screen.refresh()
                sleep(1)
        except KeyboardInterrupt:
            pass
        finally:
            curses.endwin()   # restore the terminal even on interrupt
class VNF( SubCmd ):
"Subcommands for interacting with VNFs."
prompt = 'mininet:vnf> '
def __init__( self, mininet, stdin=sys.stdin, script=None ):
self.mn = mininet
SubCmd.__init__( self )
def do_info( self, line ):
"Print short info about a VNF or all of them if none is specified."
vnf_catalog = Catalog().get_db()
for metadata in vnf_catalog.itervalues():
try:
if metadata['name'] == line.strip():
for k, v in metadata.iteritems():
output('%s: %s\n' % (k, v))
break
except KeyError:
pass
else:
for metadata in vnf_catalog.itervalues():
try:
info = metadata.get('description', '').split('\n')[0]
output('%s: %s\n' % (metadata['name'], info))
except KeyError:
pass
def complete_info( self, text, line, begidx, endidx):
names = Catalog().get_db().keys()
return [n for n in names if n.startswith(text)]
def do_reload( self, line):
"Reload VNF catalog"
Catalog().load(line)
|
apache-2.0
|
menardorama/ReadyNAS-Add-ons
|
headphones-1.0.0/debian/headphones/apps/headphones/lib/unidecode/x0a3.py
|
253
|
4521
|
data = (
'nzup', # 0x00
'nzurx', # 0x01
'nzur', # 0x02
'nzyt', # 0x03
'nzyx', # 0x04
'nzy', # 0x05
'nzyp', # 0x06
'nzyrx', # 0x07
'nzyr', # 0x08
'sit', # 0x09
'six', # 0x0a
'si', # 0x0b
'sip', # 0x0c
'siex', # 0x0d
'sie', # 0x0e
'siep', # 0x0f
'sat', # 0x10
'sax', # 0x11
'sa', # 0x12
'sap', # 0x13
'suox', # 0x14
'suo', # 0x15
'suop', # 0x16
'sot', # 0x17
'sox', # 0x18
'so', # 0x19
'sop', # 0x1a
'sex', # 0x1b
'se', # 0x1c
'sep', # 0x1d
'sut', # 0x1e
'sux', # 0x1f
'su', # 0x20
'sup', # 0x21
'surx', # 0x22
'sur', # 0x23
'syt', # 0x24
'syx', # 0x25
'sy', # 0x26
'syp', # 0x27
'syrx', # 0x28
'syr', # 0x29
'ssit', # 0x2a
'ssix', # 0x2b
'ssi', # 0x2c
'ssip', # 0x2d
'ssiex', # 0x2e
'ssie', # 0x2f
'ssiep', # 0x30
'ssat', # 0x31
'ssax', # 0x32
'ssa', # 0x33
'ssap', # 0x34
'ssot', # 0x35
'ssox', # 0x36
'sso', # 0x37
'ssop', # 0x38
'ssex', # 0x39
'sse', # 0x3a
'ssep', # 0x3b
'ssut', # 0x3c
'ssux', # 0x3d
'ssu', # 0x3e
'ssup', # 0x3f
'ssyt', # 0x40
'ssyx', # 0x41
'ssy', # 0x42
'ssyp', # 0x43
'ssyrx', # 0x44
'ssyr', # 0x45
'zhat', # 0x46
'zhax', # 0x47
'zha', # 0x48
'zhap', # 0x49
'zhuox', # 0x4a
'zhuo', # 0x4b
'zhuop', # 0x4c
'zhot', # 0x4d
'zhox', # 0x4e
'zho', # 0x4f
'zhop', # 0x50
'zhet', # 0x51
'zhex', # 0x52
'zhe', # 0x53
'zhep', # 0x54
'zhut', # 0x55
'zhux', # 0x56
'zhu', # 0x57
'zhup', # 0x58
'zhurx', # 0x59
'zhur', # 0x5a
'zhyt', # 0x5b
'zhyx', # 0x5c
'zhy', # 0x5d
'zhyp', # 0x5e
'zhyrx', # 0x5f
'zhyr', # 0x60
'chat', # 0x61
'chax', # 0x62
'cha', # 0x63
'chap', # 0x64
'chuot', # 0x65
'chuox', # 0x66
'chuo', # 0x67
'chuop', # 0x68
'chot', # 0x69
'chox', # 0x6a
'cho', # 0x6b
'chop', # 0x6c
'chet', # 0x6d
'chex', # 0x6e
'che', # 0x6f
'chep', # 0x70
'chux', # 0x71
'chu', # 0x72
'chup', # 0x73
'churx', # 0x74
'chur', # 0x75
'chyt', # 0x76
'chyx', # 0x77
'chy', # 0x78
'chyp', # 0x79
'chyrx', # 0x7a
'chyr', # 0x7b
'rrax', # 0x7c
'rra', # 0x7d
'rruox', # 0x7e
'rruo', # 0x7f
'rrot', # 0x80
'rrox', # 0x81
'rro', # 0x82
'rrop', # 0x83
'rret', # 0x84
'rrex', # 0x85
'rre', # 0x86
'rrep', # 0x87
'rrut', # 0x88
'rrux', # 0x89
'rru', # 0x8a
'rrup', # 0x8b
'rrurx', # 0x8c
'rrur', # 0x8d
'rryt', # 0x8e
'rryx', # 0x8f
'rry', # 0x90
'rryp', # 0x91
'rryrx', # 0x92
'rryr', # 0x93
'nrat', # 0x94
'nrax', # 0x95
'nra', # 0x96
'nrap', # 0x97
'nrox', # 0x98
'nro', # 0x99
'nrop', # 0x9a
'nret', # 0x9b
'nrex', # 0x9c
'nre', # 0x9d
'nrep', # 0x9e
'nrut', # 0x9f
'nrux', # 0xa0
'nru', # 0xa1
'nrup', # 0xa2
'nrurx', # 0xa3
'nrur', # 0xa4
'nryt', # 0xa5
'nryx', # 0xa6
'nry', # 0xa7
'nryp', # 0xa8
'nryrx', # 0xa9
'nryr', # 0xaa
'shat', # 0xab
'shax', # 0xac
'sha', # 0xad
'shap', # 0xae
'shuox', # 0xaf
'shuo', # 0xb0
'shuop', # 0xb1
'shot', # 0xb2
'shox', # 0xb3
'sho', # 0xb4
'shop', # 0xb5
'shet', # 0xb6
'shex', # 0xb7
'she', # 0xb8
'shep', # 0xb9
'shut', # 0xba
'shux', # 0xbb
'shu', # 0xbc
'shup', # 0xbd
'shurx', # 0xbe
'shur', # 0xbf
'shyt', # 0xc0
'shyx', # 0xc1
'shy', # 0xc2
'shyp', # 0xc3
'shyrx', # 0xc4
'shyr', # 0xc5
'rat', # 0xc6
'rax', # 0xc7
'ra', # 0xc8
'rap', # 0xc9
'ruox', # 0xca
'ruo', # 0xcb
'ruop', # 0xcc
'rot', # 0xcd
'rox', # 0xce
'ro', # 0xcf
'rop', # 0xd0
'rex', # 0xd1
're', # 0xd2
'rep', # 0xd3
'rut', # 0xd4
'rux', # 0xd5
'ru', # 0xd6
'rup', # 0xd7
'rurx', # 0xd8
'rur', # 0xd9
'ryt', # 0xda
'ryx', # 0xdb
'ry', # 0xdc
'ryp', # 0xdd
'ryrx', # 0xde
'ryr', # 0xdf
'jit', # 0xe0
'jix', # 0xe1
'ji', # 0xe2
'jip', # 0xe3
'jiet', # 0xe4
'jiex', # 0xe5
'jie', # 0xe6
'jiep', # 0xe7
'juot', # 0xe8
'juox', # 0xe9
'juo', # 0xea
'juop', # 0xeb
'jot', # 0xec
'jox', # 0xed
'jo', # 0xee
'jop', # 0xef
'jut', # 0xf0
'jux', # 0xf1
'ju', # 0xf2
'jup', # 0xf3
'jurx', # 0xf4
'jur', # 0xf5
'jyt', # 0xf6
'jyx', # 0xf7
'jy', # 0xf8
'jyp', # 0xf9
'jyrx', # 0xfa
'jyr', # 0xfb
'qit', # 0xfc
'qix', # 0xfd
'qi', # 0xfe
'qip', # 0xff
)
|
gpl-2.0
|
jswoboda/SimISR
|
beamtools/setupGUI.py
|
2
|
13468
|
#!/usr/bin/env python
"""
This GUI can be used to create setup files for the SimISR. The user can set up
the parameters and set up the beam pattern. The user can also bring in an older setup
file, change the settings and then save out a new version.
@author: Greg Starr
"""
#from Tkinter import *
import Tkinter as Tk
import tkFileDialog
import pickBeams as pb
import pdb
import scipy as sp
from SimISR.utilFunctions import makeconfigfile,readconfigfile
class App():
def __init__(self,root):
self.root = root
self.root.title("SimISR")
# title
self.titleframe = Tk.Frame(self.root)
self.titleframe.grid(row=0,columnspan=3)
self.menubar = Tk.Menu(self.titleframe)
# filemenu stuff
self.filemenu = Tk.Menu(self.menubar, tearoff=0)
self.filemenu.add_command(label="Load", command=self.loadfile)
self.filemenu.add_command(label="Save", command=self.savefile)
self.menubar.add_cascade(label="File", menu=self.filemenu)
self.root.config(menu=self.menubar)
# frame label
self.frame = Tk.LabelFrame(self.root, text="Sim Params", padx=5, pady=5)
self.frame.grid(row=1,column=0, sticky="e")
#Gui label
self.leb = Tk.Label(self.titleframe, text="Radar Data Sim GUI",font=("Helvetica", 16))
self.leb.grid()
rown = 4
#IPP stuff
self.ipp = Tk.Entry(self.frame)
self.ipp.grid(row=rown,column=1)
self.ipplabel = Tk.Label(self.frame,text="IPP (sec)")
self.ipplabel.grid(row=rown,column=0)
rown+=1
# Range limits
self.rangelimlow = Tk.Entry(self.frame)
self.rangelimhigh = Tk.Entry(self.frame)
self.rangelimlow.grid(row=rown,column=1)
self.rangelimhigh.grid(row=rown,column=2)
self.rangelabel = Tk.Label(self.frame,text="Range Gate Limits (km)")
self.rangelabel.grid(row=rown)
rown+=1
# pulse length
self.pulselength = Tk.Entry(self.frame)
self.pulselength.grid(row=rown,column=1)
self.pulselengthlabel = Tk.Label(self.frame,text="Pulse Length (us)")
self.pulselengthlabel.grid(row=rown)
rown+=1
# Sampling rate
self.t_s = Tk.Entry(self.frame)
self.t_s.grid(row=rown,column=1)
self.t_slabel = Tk.Label(self.frame,text="Sampling Time (us)")
self.t_slabel.grid(row=rown)
rown+=1
# Pulse type update
self.pulsetype = Tk.StringVar()
self.pulsetype.set("Long")
self.pulsetypelabel = Tk.Label(self.frame,text="Pulse Type")
self.pulsetypelabel.grid(row=rown)
self.pulsetypemenu = Tk.OptionMenu(self.frame, self.pulsetype,"Long","Barker",command=self.set_defaults)
self.pulsetypemenu.grid(row=rown,column=1,sticky='w')
rown+=1
# Integration Time
self.tint = Tk.Entry(self.frame)
self.tint.grid(row=rown,column=1)
self.tintlabel = Tk.Label(self.frame,text="Integration time (s)")
self.tintlabel.grid(row=rown)
rown+=1
# Fitter time interval
self.fitinter = Tk.Entry(self.frame)
self.fitinter.grid(row=rown,column=1)
self.fitinterlabel = Tk.Label(self.frame,text="Time interval between fits (s)")
self.fitinterlabel.grid(row=rown)
rown+=1
# Fitter time Limit
self.timelim = Tk.Entry(self.frame)
self.timelim.grid(row=rown,column=1)
self.timelimlabel = Tk.Label(self.frame,text="Simulation Time limit (s)")
self.timelimlabel.grid(row=rown)
rown+=1
# Number of noise samples per pulse
self.nns = Tk.Entry(self.frame)
self.nns.grid(row=rown,column=1)
self.nnslabel = Tk.Label(self.frame,text="noise samples per pulse")
self.nnslabel.grid(row=rown)
rown+=1
# Number of noise pulses
# XXX May get rid of this
self.nnp = Tk.Entry(self.frame)
self.nnp.grid(row=rown,column=1)
self.nnplabel = Tk.Label(self.frame,text="number of noise pulses")
self.nnplabel.grid(row=rown)
rown+=1
# Data type
self.dtype = Tk.StringVar()
self.dtype.set("complex128")
self.dtypelabel = Tk.Label(self.frame,text="Raw Data Type")
self.dtypelabel.grid(row=rown)
self.dtypemenu = Tk.OptionMenu(self.frame, self.dtype,"complex64","complex128")
self.dtypemenu.grid(row=rown,column=1,sticky='w')
rown+=1
        # Upsampling factor for the ambiguity function
# XXX May get rid of this.
self.ambupsamp = Tk.Entry(self.frame)
self.ambupsamp.grid(row=rown,column=1)
self.ambupsamplabel = Tk.Label(self.frame,text="Up sampling factor for ambiguity function")
self.ambupsamplabel.grid(row=rown)
rown+=1
# Species
self.species = Tk.Entry(self.frame)
self.species.grid(row=rown,column=1)
self.specieslabel = Tk.Label(self.frame,text="Species N2+, N+, O+, NO+, H+, O2+, e-")
self.specieslabel.grid(row=rown)
rown+=1
# Number of samples per spectrum
self.numpoints = Tk.Entry(self.frame)
self.numpoints.grid(row=rown,column=1)
        self.numpointslabel = Tk.Label(self.frame,text="Number of Samples for Spectrum")
self.numpointslabel.grid(row=rown)
rown+=1
# Start file for set up
self.startfile = Tk.Entry(self.frame)
self.startfile.grid(row=rown,column=1)
self.startfilelabel = Tk.Label(self.frame,text="Start File")
self.startfilelabel.grid(row=rown)
rown+=1
# Fitting Type
self.fittype = Tk.StringVar()
self.fittype.set("Spectrum")
self.fittypelabel = Tk.Label(self.frame,text="Fit type")
self.fittypelabel.grid(row=rown)
self.fittypemenu = Tk.OptionMenu(self.frame, self.fittype,"Spectrum","ACF")
self.fittypemenu.grid(row=rown,column=1,sticky='w')
rown+=1
# outangles output
self.outangles = Tk.Entry(self.frame)
self.outangles.grid(row=rown,column=1)
        self.outangleslabel = Tk.Label(self.frame,text="Beams integrated together, separated by commas")
self.outangleslabel.grid(row=rown)
# Beam selector GUI
self.frame2 = Tk.LabelFrame(self.root,text="Beam Selector",padx=5,pady=5)
self.frame2.grid(row=1,column=1, sticky="e")
self.pickbeams = pb.Gui(self.frame2)
# self.timelim=DoubleVar()
self.set_defaults()
self.paramdic = {'IPP':self.ipp,
'TimeLim':self.timelim,
'RangeLims':[self.rangelimlow,self.rangelimhigh],
'Pulselength':self.pulselength,
't_s': self.t_s,
'Pulsetype':self.pulsetype,
'Tint':self.tint,
'Fitinter':self.fitinter,
'NNs': self.nns,
'NNp':self.nnp,
'dtype':self.dtype,
'ambupsamp':self.ambupsamp,
'species':self.species,
'numpoints':self.numpoints,
'startfile':self.startfile,
'FitType':self.fittype }
def set_defaults(self,*args):
"""Set the default files for the data."""
self.ipp.delete(0, 'end')
self.ipp.insert(0,'8.7e-3')
self.tint.delete(0,'end')
self.tint.insert(0,'180')
self.fitinter.delete(0,'end')
self.fitinter.insert(0,'180')
self.species.delete(0, 'end')
self.species.insert(0,'O+ e-')
self.numpoints.delete(0, 'end')
self.numpoints.insert(0,'128')
self.ambupsamp.delete(0, 'end')
self.ambupsamp.insert(0,'1')
self.timelim.delete(0, 'end')
self.timelim.insert(0,'540')
# noise
self.nns.delete(0,'end')
self.nns.insert(0,'28')
self.nnp.delete(0,'end')
self.nnp.insert(0,'100')
# For different pulse types
self.rangelimlow.delete(0, 'end')
self.rangelimhigh.delete(0, 'end')
self.pulselength.delete(0, 'end')
self.t_s.delete(0, 'end')
if self.pulsetype.get().lower()=='long':
self.rangelimlow.insert(0,'150')
self.rangelimhigh.insert(0,'500')
self.pulselength.insert(0,'280')
self.t_s.insert(0,'20')
elif self.pulsetype.get().lower()=='barker':
self.rangelimlow.insert(0,'50')
self.rangelimhigh.insert(0,'150')
self.t_s.insert(0,'10')
self.pulselength.insert(0,'130')
def savefile(self):
"""Saves the parameters out"""
fn = tkFileDialog.asksaveasfilename(title="Save File",filetypes=[('INI','.ini'),('PICKLE','.pickle')])
blist = self.pickbeams.output
self.pickbeams.buttonClick(fn)
radarname = self.pickbeams.var.get()
posspec = ['N2+', 'N+', 'O+', 'NO+', 'H+', 'O2+','e-' ]
specieslist = self.species.get().lower().split()
newlist =[x for x in posspec if x.lower() in specieslist]
if 'e-' not in newlist:newlist.append('e-')
simparams ={'IPP':float(self.ipp.get()),
'TimeLim':float(self.timelim.get()),
'RangeLims':[int(float(self.rangelimlow.get())),int(float(self.rangelimhigh.get()))],
'Pulselength':1e-6*float(self.pulselength.get()),
't_s': 1e-6*float(self.t_s.get()),
'Pulsetype':self.pulsetype.get(),
'Tint':float(self.tint.get()),
'Fitinter':float(self.fitinter.get()),
'NNs': int(float(self.nns.get())),
'NNp':int(float(self.nnp.get())),
'dtype':{'complex128':sp.complex128,'complex64':sp.complex64}[self.dtype.get()],
'ambupsamp':int(float(self.ambupsamp.get())),
'species':newlist,
'numpoints':int(float(self.numpoints.get())),
'startfile':self.startfile.get(),
'FitType': self.fittype.get()}
if len(self.outangles.get())>0:
outlist1 = self.outangles.get().split(',')
simparams['outangles']=[[ float(j) for j in i.lstrip().rstrip().split(' ')] for i in outlist1]
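            # e.g. an outangles entry of "0 1, 2 3" yields [[0.0, 1.0], [2.0, 3.0]]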
makeconfigfile(fn,blist,radarname,simparams)
def loadfile(self):
"""Imports parameters from old files"""
fn = tkFileDialog.askopenfilename(title="Load File",filetypes=[('INI','.ini'),('PICKLE','.pickle')])
try:
sensdict,simparams = readconfigfile(fn)
rdrnames = {'PFISR':'PFISR','pfisr':'PFISR','risr':'RISR-N','RISR-N':'RISR-N','RISR':'RISR-N'}
currdr = rdrnames[sensdict['Name']]
fitnfound = True
for i in simparams:
try:
if i=='RangeLims':
self.paramdic[i][0].delete(0,Tk.END)
self.paramdic[i][1].delete(0,Tk.END)
self.paramdic[i][0].insert(0,str(simparams[i][0]))
self.paramdic[i][1].insert(0,str(simparams[i][1]))
elif i=='species':
self.paramdic[i].delete(0,Tk.END)
string=''
if isinstance(simparams[i],list):
for a in simparams[i]:
string+=a
string+=" "
else:
string = simparams[i]
self.paramdic[i].insert(0,string)
elif i=='Pulselength' or i=='t_s':
self.paramdic[i].delete(0,Tk.END)
num = float(simparams[i])*10**6
self.paramdic[i].insert(0,str(num))
                    elif i== 'FitType':
                        self.fittype.set(simparams[i])
                        fitnfound=False
else:
self.paramdic[i].delete(0,Tk.END)
self.paramdic[i].insert(0,str(simparams[i]))
except:
if simparams[i]==sp.complex128:
self.paramdic[i].set('complex128')
elif simparams[i]==sp.complex64:
self.paramdic[i].set('complex64')
elif i in self.paramdic:
self.paramdic[i].set(simparams[i])
            if fitnfound:
                self.fittype.set('Spectrum')
self.pickbeams.var.set(currdr)
self.pickbeams.Changefile()
self.pickbeams.addbeamlist(simparams['angles'])
except:
print "Failed to import file."
def runsetupgui():
root = Tk.Tk()
app = App(root)
root.mainloop()
if __name__ == "__main__":
root = Tk.Tk()
app = App(root)
root.mainloop()
|
mit
|
teeple/pns_server
|
work/install/Python-2.7.4/Lib/test/test_uuid.py
|
84
|
20782
|
from unittest import TestCase
from test import test_support
import uuid
def importable(name):
try:
__import__(name)
return True
except:
return False
class TestUUID(TestCase):
last_node = None
source2node = {}
def test_UUID(self):
equal = self.assertEqual
ascending = []
for (string, curly, hex, bytes, bytes_le, fields, integer, urn,
time, clock_seq, variant, version) in [
('00000000-0000-0000-0000-000000000000',
'{00000000-0000-0000-0000-000000000000}',
'00000000000000000000000000000000',
'\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0',
'\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0',
(0, 0, 0, 0, 0, 0),
0,
'urn:uuid:00000000-0000-0000-0000-000000000000',
0, 0, uuid.RESERVED_NCS, None),
('00010203-0405-0607-0809-0a0b0c0d0e0f',
'{00010203-0405-0607-0809-0a0b0c0d0e0f}',
'000102030405060708090a0b0c0d0e0f',
'\0\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\x0d\x0e\x0f',
'\x03\x02\x01\0\x05\x04\x07\x06\x08\t\n\x0b\x0c\x0d\x0e\x0f',
(0x00010203L, 0x0405, 0x0607, 8, 9, 0x0a0b0c0d0e0fL),
0x000102030405060708090a0b0c0d0e0fL,
'urn:uuid:00010203-0405-0607-0809-0a0b0c0d0e0f',
0x607040500010203L, 0x809, uuid.RESERVED_NCS, None),
('02d9e6d5-9467-382e-8f9b-9300a64ac3cd',
'{02d9e6d5-9467-382e-8f9b-9300a64ac3cd}',
'02d9e6d59467382e8f9b9300a64ac3cd',
'\x02\xd9\xe6\xd5\x94\x67\x38\x2e\x8f\x9b\x93\x00\xa6\x4a\xc3\xcd',
'\xd5\xe6\xd9\x02\x67\x94\x2e\x38\x8f\x9b\x93\x00\xa6\x4a\xc3\xcd',
(0x02d9e6d5L, 0x9467, 0x382e, 0x8f, 0x9b, 0x9300a64ac3cdL),
0x02d9e6d59467382e8f9b9300a64ac3cdL,
'urn:uuid:02d9e6d5-9467-382e-8f9b-9300a64ac3cd',
0x82e946702d9e6d5L, 0xf9b, uuid.RFC_4122, 3),
('12345678-1234-5678-1234-567812345678',
'{12345678-1234-5678-1234-567812345678}',
'12345678123456781234567812345678',
'\x12\x34\x56\x78'*4,
'\x78\x56\x34\x12\x34\x12\x78\x56\x12\x34\x56\x78\x12\x34\x56\x78',
(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678),
0x12345678123456781234567812345678,
'urn:uuid:12345678-1234-5678-1234-567812345678',
0x678123412345678L, 0x1234, uuid.RESERVED_NCS, None),
('6ba7b810-9dad-11d1-80b4-00c04fd430c8',
'{6ba7b810-9dad-11d1-80b4-00c04fd430c8}',
'6ba7b8109dad11d180b400c04fd430c8',
'\x6b\xa7\xb8\x10\x9d\xad\x11\xd1\x80\xb4\x00\xc0\x4f\xd4\x30\xc8',
'\x10\xb8\xa7\x6b\xad\x9d\xd1\x11\x80\xb4\x00\xc0\x4f\xd4\x30\xc8',
(0x6ba7b810L, 0x9dad, 0x11d1, 0x80, 0xb4, 0x00c04fd430c8L),
0x6ba7b8109dad11d180b400c04fd430c8L,
'urn:uuid:6ba7b810-9dad-11d1-80b4-00c04fd430c8',
0x1d19dad6ba7b810L, 0xb4, uuid.RFC_4122, 1),
('6ba7b811-9dad-11d1-80b4-00c04fd430c8',
'{6ba7b811-9dad-11d1-80b4-00c04fd430c8}',
'6ba7b8119dad11d180b400c04fd430c8',
'\x6b\xa7\xb8\x11\x9d\xad\x11\xd1\x80\xb4\x00\xc0\x4f\xd4\x30\xc8',
'\x11\xb8\xa7\x6b\xad\x9d\xd1\x11\x80\xb4\x00\xc0\x4f\xd4\x30\xc8',
(0x6ba7b811L, 0x9dad, 0x11d1, 0x80, 0xb4, 0x00c04fd430c8L),
0x6ba7b8119dad11d180b400c04fd430c8L,
'urn:uuid:6ba7b811-9dad-11d1-80b4-00c04fd430c8',
0x1d19dad6ba7b811L, 0xb4, uuid.RFC_4122, 1),
('6ba7b812-9dad-11d1-80b4-00c04fd430c8',
'{6ba7b812-9dad-11d1-80b4-00c04fd430c8}',
'6ba7b8129dad11d180b400c04fd430c8',
'\x6b\xa7\xb8\x12\x9d\xad\x11\xd1\x80\xb4\x00\xc0\x4f\xd4\x30\xc8',
'\x12\xb8\xa7\x6b\xad\x9d\xd1\x11\x80\xb4\x00\xc0\x4f\xd4\x30\xc8',
(0x6ba7b812L, 0x9dad, 0x11d1, 0x80, 0xb4, 0x00c04fd430c8L),
0x6ba7b8129dad11d180b400c04fd430c8L,
'urn:uuid:6ba7b812-9dad-11d1-80b4-00c04fd430c8',
0x1d19dad6ba7b812L, 0xb4, uuid.RFC_4122, 1),
('6ba7b814-9dad-11d1-80b4-00c04fd430c8',
'{6ba7b814-9dad-11d1-80b4-00c04fd430c8}',
'6ba7b8149dad11d180b400c04fd430c8',
'\x6b\xa7\xb8\x14\x9d\xad\x11\xd1\x80\xb4\x00\xc0\x4f\xd4\x30\xc8',
'\x14\xb8\xa7\x6b\xad\x9d\xd1\x11\x80\xb4\x00\xc0\x4f\xd4\x30\xc8',
(0x6ba7b814L, 0x9dad, 0x11d1, 0x80, 0xb4, 0x00c04fd430c8L),
0x6ba7b8149dad11d180b400c04fd430c8L,
'urn:uuid:6ba7b814-9dad-11d1-80b4-00c04fd430c8',
0x1d19dad6ba7b814L, 0xb4, uuid.RFC_4122, 1),
('7d444840-9dc0-11d1-b245-5ffdce74fad2',
'{7d444840-9dc0-11d1-b245-5ffdce74fad2}',
'7d4448409dc011d1b2455ffdce74fad2',
'\x7d\x44\x48\x40\x9d\xc0\x11\xd1\xb2\x45\x5f\xfd\xce\x74\xfa\xd2',
'\x40\x48\x44\x7d\xc0\x9d\xd1\x11\xb2\x45\x5f\xfd\xce\x74\xfa\xd2',
(0x7d444840L, 0x9dc0, 0x11d1, 0xb2, 0x45, 0x5ffdce74fad2L),
0x7d4448409dc011d1b2455ffdce74fad2L,
'urn:uuid:7d444840-9dc0-11d1-b245-5ffdce74fad2',
0x1d19dc07d444840L, 0x3245, uuid.RFC_4122, 1),
('e902893a-9d22-3c7e-a7b8-d6e313b71d9f',
'{e902893a-9d22-3c7e-a7b8-d6e313b71d9f}',
'e902893a9d223c7ea7b8d6e313b71d9f',
'\xe9\x02\x89\x3a\x9d\x22\x3c\x7e\xa7\xb8\xd6\xe3\x13\xb7\x1d\x9f',
'\x3a\x89\x02\xe9\x22\x9d\x7e\x3c\xa7\xb8\xd6\xe3\x13\xb7\x1d\x9f',
(0xe902893aL, 0x9d22, 0x3c7e, 0xa7, 0xb8, 0xd6e313b71d9fL),
0xe902893a9d223c7ea7b8d6e313b71d9fL,
'urn:uuid:e902893a-9d22-3c7e-a7b8-d6e313b71d9f',
0xc7e9d22e902893aL, 0x27b8, uuid.RFC_4122, 3),
('eb424026-6f54-4ef8-a4d0-bb658a1fc6cf',
'{eb424026-6f54-4ef8-a4d0-bb658a1fc6cf}',
'eb4240266f544ef8a4d0bb658a1fc6cf',
'\xeb\x42\x40\x26\x6f\x54\x4e\xf8\xa4\xd0\xbb\x65\x8a\x1f\xc6\xcf',
'\x26\x40\x42\xeb\x54\x6f\xf8\x4e\xa4\xd0\xbb\x65\x8a\x1f\xc6\xcf',
(0xeb424026L, 0x6f54, 0x4ef8, 0xa4, 0xd0, 0xbb658a1fc6cfL),
0xeb4240266f544ef8a4d0bb658a1fc6cfL,
'urn:uuid:eb424026-6f54-4ef8-a4d0-bb658a1fc6cf',
0xef86f54eb424026L, 0x24d0, uuid.RFC_4122, 4),
('f81d4fae-7dec-11d0-a765-00a0c91e6bf6',
'{f81d4fae-7dec-11d0-a765-00a0c91e6bf6}',
'f81d4fae7dec11d0a76500a0c91e6bf6',
'\xf8\x1d\x4f\xae\x7d\xec\x11\xd0\xa7\x65\x00\xa0\xc9\x1e\x6b\xf6',
'\xae\x4f\x1d\xf8\xec\x7d\xd0\x11\xa7\x65\x00\xa0\xc9\x1e\x6b\xf6',
(0xf81d4faeL, 0x7dec, 0x11d0, 0xa7, 0x65, 0x00a0c91e6bf6L),
0xf81d4fae7dec11d0a76500a0c91e6bf6L,
'urn:uuid:f81d4fae-7dec-11d0-a765-00a0c91e6bf6',
0x1d07decf81d4faeL, 0x2765, uuid.RFC_4122, 1),
('fffefdfc-fffe-fffe-fffe-fffefdfcfbfa',
'{fffefdfc-fffe-fffe-fffe-fffefdfcfbfa}',
'fffefdfcfffefffefffefffefdfcfbfa',
'\xff\xfe\xfd\xfc\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xfd\xfc\xfb\xfa',
'\xfc\xfd\xfe\xff\xfe\xff\xfe\xff\xff\xfe\xff\xfe\xfd\xfc\xfb\xfa',
(0xfffefdfcL, 0xfffe, 0xfffe, 0xff, 0xfe, 0xfffefdfcfbfaL),
0xfffefdfcfffefffefffefffefdfcfbfaL,
'urn:uuid:fffefdfc-fffe-fffe-fffe-fffefdfcfbfa',
0xffefffefffefdfcL, 0x3ffe, uuid.RESERVED_FUTURE, None),
('ffffffff-ffff-ffff-ffff-ffffffffffff',
'{ffffffff-ffff-ffff-ffff-ffffffffffff}',
'ffffffffffffffffffffffffffffffff',
'\xff'*16,
'\xff'*16,
(0xffffffffL, 0xffffL, 0xffffL, 0xff, 0xff, 0xffffffffffffL),
0xffffffffffffffffffffffffffffffffL,
'urn:uuid:ffffffff-ffff-ffff-ffff-ffffffffffff',
0xfffffffffffffffL, 0x3fff, uuid.RESERVED_FUTURE, None),
]:
equivalents = []
# Construct each UUID in several different ways.
for u in [uuid.UUID(string), uuid.UUID(curly), uuid.UUID(hex),
uuid.UUID(bytes=bytes), uuid.UUID(bytes_le=bytes_le),
uuid.UUID(fields=fields), uuid.UUID(int=integer),
uuid.UUID(urn)]:
# Test all conversions and properties of the UUID object.
equal(str(u), string)
equal(int(u), integer)
equal(u.bytes, bytes)
equal(u.bytes_le, bytes_le)
equal(u.fields, fields)
equal(u.time_low, fields[0])
equal(u.time_mid, fields[1])
equal(u.time_hi_version, fields[2])
equal(u.clock_seq_hi_variant, fields[3])
equal(u.clock_seq_low, fields[4])
equal(u.node, fields[5])
equal(u.hex, hex)
equal(u.int, integer)
equal(u.urn, urn)
equal(u.time, time)
equal(u.clock_seq, clock_seq)
equal(u.variant, variant)
equal(u.version, version)
equivalents.append(u)
# Different construction methods should give the same UUID.
for u in equivalents:
for v in equivalents:
equal(u, v)
ascending.append(u)
# Test comparison of UUIDs.
for i in range(len(ascending)):
for j in range(len(ascending)):
equal(cmp(i, j), cmp(ascending[i], ascending[j]))
# Test sorting of UUIDs (above list is in ascending order).
resorted = ascending[:]
resorted.reverse()
resorted.sort()
equal(ascending, resorted)
def test_exceptions(self):
badvalue = lambda f: self.assertRaises(ValueError, f)
badtype = lambda f: self.assertRaises(TypeError, f)
# Badly formed hex strings.
badvalue(lambda: uuid.UUID(''))
badvalue(lambda: uuid.UUID('abc'))
badvalue(lambda: uuid.UUID('1234567812345678123456781234567'))
badvalue(lambda: uuid.UUID('123456781234567812345678123456789'))
badvalue(lambda: uuid.UUID('123456781234567812345678z2345678'))
# Badly formed bytes.
badvalue(lambda: uuid.UUID(bytes='abc'))
badvalue(lambda: uuid.UUID(bytes='\0'*15))
badvalue(lambda: uuid.UUID(bytes='\0'*17))
# Badly formed bytes_le.
badvalue(lambda: uuid.UUID(bytes_le='abc'))
badvalue(lambda: uuid.UUID(bytes_le='\0'*15))
badvalue(lambda: uuid.UUID(bytes_le='\0'*17))
# Badly formed fields.
badvalue(lambda: uuid.UUID(fields=(1,)))
badvalue(lambda: uuid.UUID(fields=(1, 2, 3, 4, 5)))
badvalue(lambda: uuid.UUID(fields=(1, 2, 3, 4, 5, 6, 7)))
# Field values out of range.
badvalue(lambda: uuid.UUID(fields=(-1, 0, 0, 0, 0, 0)))
badvalue(lambda: uuid.UUID(fields=(0x100000000L, 0, 0, 0, 0, 0)))
badvalue(lambda: uuid.UUID(fields=(0, -1, 0, 0, 0, 0)))
badvalue(lambda: uuid.UUID(fields=(0, 0x10000L, 0, 0, 0, 0)))
badvalue(lambda: uuid.UUID(fields=(0, 0, -1, 0, 0, 0)))
badvalue(lambda: uuid.UUID(fields=(0, 0, 0x10000L, 0, 0, 0)))
badvalue(lambda: uuid.UUID(fields=(0, 0, 0, -1, 0, 0)))
badvalue(lambda: uuid.UUID(fields=(0, 0, 0, 0x100L, 0, 0)))
badvalue(lambda: uuid.UUID(fields=(0, 0, 0, 0, -1, 0)))
badvalue(lambda: uuid.UUID(fields=(0, 0, 0, 0, 0x100L, 0)))
badvalue(lambda: uuid.UUID(fields=(0, 0, 0, 0, 0, -1)))
badvalue(lambda: uuid.UUID(fields=(0, 0, 0, 0, 0, 0x1000000000000L)))
# Version number out of range.
badvalue(lambda: uuid.UUID('00'*16, version=0))
badvalue(lambda: uuid.UUID('00'*16, version=6))
# Integer value out of range.
badvalue(lambda: uuid.UUID(int=-1))
badvalue(lambda: uuid.UUID(int=1<<128L))
# Must supply exactly one of hex, bytes, fields, int.
h, b, f, i = '00'*16, '\0'*16, (0, 0, 0, 0, 0, 0), 0
uuid.UUID(h)
uuid.UUID(hex=h)
uuid.UUID(bytes=b)
uuid.UUID(bytes_le=b)
uuid.UUID(fields=f)
uuid.UUID(int=i)
# Wrong number of arguments (positional).
badtype(lambda: uuid.UUID())
badtype(lambda: uuid.UUID(h, b))
badtype(lambda: uuid.UUID(h, b, b))
badtype(lambda: uuid.UUID(h, b, b, f))
badtype(lambda: uuid.UUID(h, b, b, f, i))
# Duplicate arguments.
for hh in [[], [('hex', h)]]:
for bb in [[], [('bytes', b)]]:
for bble in [[], [('bytes_le', b)]]:
for ii in [[], [('int', i)]]:
for ff in [[], [('fields', f)]]:
args = dict(hh + bb + bble + ii + ff)
if len(args) != 0:
badtype(lambda: uuid.UUID(h, **args))
if len(args) != 1:
badtype(lambda: uuid.UUID(**args))
# Immutability.
u = uuid.UUID(h)
badtype(lambda: setattr(u, 'hex', h))
badtype(lambda: setattr(u, 'bytes', b))
badtype(lambda: setattr(u, 'bytes_le', b))
badtype(lambda: setattr(u, 'fields', f))
badtype(lambda: setattr(u, 'int', i))
badtype(lambda: setattr(u, 'time_low', 0))
badtype(lambda: setattr(u, 'time_mid', 0))
badtype(lambda: setattr(u, 'time_hi_version', 0))
badtype(lambda: setattr(u, 'clock_seq_hi_variant', 0))
badtype(lambda: setattr(u, 'clock_seq_low', 0))
badtype(lambda: setattr(u, 'node', 0))
def check_node(self, node, source):
message = "%012x is not an RFC 4122 node ID" % node
self.assertTrue(0 < node, message)
self.assertTrue(node < (1L << 48), message)
TestUUID.source2node[source] = node
if TestUUID.last_node:
if TestUUID.last_node != node:
msg = "different sources disagree on node:\n"
for s, n in TestUUID.source2node.iteritems():
msg += " from source %r, node was %012x\n" % (s, n)
# There's actually no reason to expect the MAC addresses
# to agree across various methods -- e.g., a box may have
# multiple network interfaces, and different ways of getting
# a MAC address may favor different HW.
##self.fail(msg)
else:
TestUUID.last_node = node
def test_ifconfig_getnode(self):
import sys
import os
if os.name == 'posix':
node = uuid._ifconfig_getnode()
if node is not None:
self.check_node(node, 'ifconfig')
def test_ipconfig_getnode(self):
import os
if os.name == 'nt':
node = uuid._ipconfig_getnode()
if node is not None:
self.check_node(node, 'ipconfig')
def test_netbios_getnode(self):
if importable('win32wnet') and importable('netbios'):
self.check_node(uuid._netbios_getnode(), 'netbios')
def test_random_getnode(self):
node = uuid._random_getnode()
# Least significant bit of first octet must be set.
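        # (Per RFC 4122 section 4.5, a randomly generated node ID sets the
        # multicast bit so it can never collide with a real IEEE 802 MAC
        # address.)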
self.assertTrue(node & 0x010000000000)
self.assertTrue(node < (1L << 48))
def test_unixdll_getnode(self):
import sys
import os
if importable('ctypes') and os.name == 'posix':
try: # Issues 1481, 3581: _uuid_generate_time() might be None.
self.check_node(uuid._unixdll_getnode(), 'unixdll')
except TypeError:
pass
def test_windll_getnode(self):
import os
if importable('ctypes') and os.name == 'nt':
self.check_node(uuid._windll_getnode(), 'windll')
def test_getnode(self):
import sys
node1 = uuid.getnode()
self.check_node(node1, "getnode1")
# Test it again to ensure consistency.
node2 = uuid.getnode()
self.check_node(node2, "getnode2")
self.assertEqual(node1, node2)
def test_uuid1(self):
# uuid1 requires ctypes.
try:
import ctypes
except ImportError:
return
equal = self.assertEqual
# Make sure uuid1() generates UUIDs that are actually version 1.
for u in [uuid.uuid1() for i in range(10)]:
equal(u.variant, uuid.RFC_4122)
equal(u.version, 1)
# Make sure the generated UUIDs are actually unique.
uuids = {}
for u in [uuid.uuid1() for i in range(1000)]:
uuids[u] = 1
equal(len(uuids.keys()), 1000)
# Make sure the supplied node ID appears in the UUID.
u = uuid.uuid1(0)
equal(u.node, 0)
u = uuid.uuid1(0x123456789abc)
equal(u.node, 0x123456789abc)
u = uuid.uuid1(0xffffffffffff)
equal(u.node, 0xffffffffffff)
# Make sure the supplied clock sequence appears in the UUID.
u = uuid.uuid1(0x123456789abc, 0)
equal(u.node, 0x123456789abc)
equal(((u.clock_seq_hi_variant & 0x3f) << 8) | u.clock_seq_low, 0)
u = uuid.uuid1(0x123456789abc, 0x1234)
equal(u.node, 0x123456789abc)
equal(((u.clock_seq_hi_variant & 0x3f) << 8) |
u.clock_seq_low, 0x1234)
u = uuid.uuid1(0x123456789abc, 0x3fff)
equal(u.node, 0x123456789abc)
equal(((u.clock_seq_hi_variant & 0x3f) << 8) |
u.clock_seq_low, 0x3fff)
def test_uuid3(self):
equal = self.assertEqual
# Test some known version-3 UUIDs.
for u, v in [(uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org'),
'6fa459ea-ee8a-3ca4-894e-db77e160355e'),
(uuid.uuid3(uuid.NAMESPACE_URL, 'http://python.org/'),
'9fe8e8c4-aaa8-32a9-a55c-4535a88b748d'),
(uuid.uuid3(uuid.NAMESPACE_OID, '1.3.6.1'),
'dd1a1cef-13d5-368a-ad82-eca71acd4cd1'),
(uuid.uuid3(uuid.NAMESPACE_X500, 'c=ca'),
'658d3002-db6b-3040-a1d1-8ddd7d189a4d'),
]:
equal(u.variant, uuid.RFC_4122)
equal(u.version, 3)
equal(u, uuid.UUID(v))
equal(str(u), v)
def test_uuid4(self):
# uuid4 requires ctypes.
try:
import ctypes
except ImportError:
return
equal = self.assertEqual
# Make sure uuid4() generates UUIDs that are actually version 4.
for u in [uuid.uuid4() for i in range(10)]:
equal(u.variant, uuid.RFC_4122)
equal(u.version, 4)
# Make sure the generated UUIDs are actually unique.
uuids = {}
for u in [uuid.uuid4() for i in range(1000)]:
uuids[u] = 1
equal(len(uuids.keys()), 1000)
def test_uuid5(self):
equal = self.assertEqual
# Test some known version-5 UUIDs.
for u, v in [(uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org'),
'886313e1-3b8a-5372-9b90-0c9aee199e5d'),
(uuid.uuid5(uuid.NAMESPACE_URL, 'http://python.org/'),
'4c565f0d-3f5a-5890-b41b-20cf47701c5e'),
(uuid.uuid5(uuid.NAMESPACE_OID, '1.3.6.1'),
'1447fa61-5277-5fef-a9b3-fbc6e44f4af3'),
(uuid.uuid5(uuid.NAMESPACE_X500, 'c=ca'),
'cc957dd1-a972-5349-98cd-874190002798'),
]:
equal(u.variant, uuid.RFC_4122)
equal(u.version, 5)
equal(u, uuid.UUID(v))
equal(str(u), v)
def testIssue8621(self):
import os
import sys
if os.name != 'posix':
return
# On at least some versions of OSX uuid.uuid4 generates
# the same sequence of UUIDs in the parent and any
# children started using fork.
fds = os.pipe()
pid = os.fork()
if pid == 0:
os.close(fds[0])
value = uuid.uuid4()
os.write(fds[1], value.hex)
os._exit(0)
else:
os.close(fds[1])
parent_value = uuid.uuid4().hex
os.waitpid(pid, 0)
child_value = os.read(fds[0], 100)
self.assertNotEqual(parent_value, child_value)
def test_main():
test_support.run_unittest(TestUUID)
if __name__ == '__main__':
test_main()
|
gpl-2.0
|
zhuguihua/linux
|
tools/perf/scripts/python/export-to-postgresql.py
|
238
|
25591
|
# export-to-postgresql.py: export perf data to a postgresql database
# Copyright (c) 2014, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
import os
import sys
import struct
import datetime
# To use this script you will need to have installed package python-pyside which
# provides LGPL-licensed Python bindings for Qt. You will also need the package
# libqt4-sql-psql for Qt postgresql support.
#
# The script assumes postgresql is running on the local machine and that the
# user has postgresql permissions to create databases. Examples of installing
# postgresql and adding such a user are:
#
# fedora:
#
# $ sudo yum install postgresql postgresql-server python-pyside qt-postgresql
# $ sudo su - postgres -c initdb
# $ sudo service postgresql start
# $ sudo su - postgres
# $ createuser <your user id here>
# Shall the new role be a superuser? (y/n) y
#
# ubuntu:
#
# $ sudo apt-get install postgresql
# $ sudo su - postgres
# $ createuser <your user id here>
# Shall the new role be a superuser? (y/n) y
#
# An example of using this script with Intel PT:
#
# $ perf record -e intel_pt//u ls
# $ perf script -s ~/libexec/perf-core/scripts/python/export-to-postgresql.py pt_example branches calls
# 2015-05-29 12:49:23.464364 Creating database...
# 2015-05-29 12:49:26.281717 Writing to intermediate files...
# 2015-05-29 12:49:27.190383 Copying to database...
# 2015-05-29 12:49:28.140451 Removing intermediate files...
# 2015-05-29 12:49:28.147451 Adding primary keys
# 2015-05-29 12:49:28.655683 Adding foreign keys
# 2015-05-29 12:49:29.365350 Done
#
# To browse the database, psql can be used e.g.
#
# $ psql pt_example
# pt_example=# select * from samples_view where id < 100;
# pt_example=# \d+
# pt_example=# \d+ samples_view
# pt_example=# \q
#
# An example of using the database is provided by the script
# call-graph-from-postgresql.py. Refer to that script for details.
#
# Tables:
#
# The tables largely correspond to perf tools' data structures. They are largely self-explanatory.
#
# samples
#
# 'samples' is the main table. It represents what instruction was executing at a point in time
# when something (a selected event) happened. The memory address is the instruction pointer or 'ip'.
#
# calls
#
# 'calls' represents function calls and is related to 'samples' by 'call_id' and 'return_id'.
# 'calls' is only created when the 'calls' option to this script is specified.
#
# call_paths
#
# 'call_paths' represents all the call stacks. Each 'call' has an associated record in 'call_paths'.
# 'call_paths' is only created when the 'calls' option to this script is specified.
#
# branch_types
#
# 'branch_types' provides descriptions for each type of branch.
#
# comm_threads
#
# 'comm_threads' shows how 'comms' relates to 'threads'.
#
# comms
#
# 'comms' contains a record for each 'comm' - the name given to the executable that is running.
#
# dsos
#
# 'dsos' contains a record for each executable file or library.
#
# machines
#
# 'machines' can be used to distinguish virtual machines if virtualization is supported.
#
# selected_events
#
# 'selected_events' contains a record for each kind of event that has been sampled.
#
# symbols
#
# 'symbols' contains a record for each symbol. Only symbols that have samples are present.
#
# threads
#
# 'threads' contains a record for each thread.
#
# Views:
#
# Most of the tables have views for more friendly display. The views are:
#
# calls_view
# call_paths_view
# comm_threads_view
# dsos_view
# machines_view
# samples_view
# symbols_view
# threads_view
#
# More examples of browsing the database with psql:
# Note that some of the examples are not the most optimal SQL query.
# Note that call information is only available if the script's 'calls' option has been used.
#
# Top 10 function calls (not aggregated by symbol):
#
# SELECT * FROM calls_view ORDER BY elapsed_time DESC LIMIT 10;
#
# Top 10 function calls (aggregated by symbol):
#
# SELECT symbol_id,(SELECT name FROM symbols WHERE id = symbol_id) AS symbol,
# SUM(elapsed_time) AS tot_elapsed_time,SUM(branch_count) AS tot_branch_count
# FROM calls_view GROUP BY symbol_id ORDER BY tot_elapsed_time DESC LIMIT 10;
#
# Note that the branch count gives a rough estimation of cpu usage, so functions
# that took a long time but have a relatively low branch count must have spent time
# waiting.
#
# Find symbols by pattern matching on part of the name (e.g. names containing 'alloc'):
#
# SELECT * FROM symbols_view WHERE name LIKE '%alloc%';
#
# Top 10 function calls for a specific symbol (e.g. whose symbol_id is 187):
#
# SELECT * FROM calls_view WHERE symbol_id = 187 ORDER BY elapsed_time DESC LIMIT 10;
#
# Show function calls made by function in the same context (i.e. same call path) (e.g. one with call_path_id 254):
#
# SELECT * FROM calls_view WHERE parent_call_path_id = 254;
#
# Show branches made during a function call (e.g. where call_id is 29357 and return_id is 29370 and tid is 29670)
#
# SELECT * FROM samples_view WHERE id >= 29357 AND id <= 29370 AND tid = 29670 AND event LIKE 'branches%';
#
# Show transactions:
#
# SELECT * FROM samples_view WHERE event = 'transactions';
#
# Note that a transaction start has 'in_tx' true, whereas a transaction end has 'in_tx' false.
# Transaction aborts have branch_type_name 'transaction abort'.
#
# Show transaction aborts:
#
# SELECT * FROM samples_view WHERE event = 'transactions' AND branch_type_name = 'transaction abort';
#
# To print a call stack requires walking the call_paths table. For example this python script:
# #!/usr/bin/python2
#
# import sys
# from PySide.QtSql import *
#
# if __name__ == '__main__':
# if (len(sys.argv) < 3):
# print >> sys.stderr, "Usage is: printcallstack.py <database name> <call_path_id>"
# raise Exception("Too few arguments")
# dbname = sys.argv[1]
# call_path_id = sys.argv[2]
# db = QSqlDatabase.addDatabase('QPSQL')
# db.setDatabaseName(dbname)
# if not db.open():
# raise Exception("Failed to open database " + dbname + " error: " + db.lastError().text())
# query = QSqlQuery(db)
# print " id ip symbol_id symbol dso_id dso_short_name"
# while call_path_id != 0 and call_path_id != 1:
# ret = query.exec_('SELECT * FROM call_paths_view WHERE id = ' + str(call_path_id))
# if not ret:
# raise Exception("Query failed: " + query.lastError().text())
# if not query.next():
# raise Exception("Query failed")
# print "{0:>6} {1:>10} {2:>9} {3:<30} {4:>6} {5:<30}".format(query.value(0), query.value(1), query.value(2), query.value(3), query.value(4), query.value(5))
# call_path_id = query.value(6)
from PySide.QtSql import *
# Need to access PostgreSQL C library directly to use COPY FROM STDIN
from ctypes import *
libpq = CDLL("libpq.so.5")
PQconnectdb = libpq.PQconnectdb
PQconnectdb.restype = c_void_p
PQfinish = libpq.PQfinish
PQstatus = libpq.PQstatus
PQexec = libpq.PQexec
PQexec.restype = c_void_p
PQresultStatus = libpq.PQresultStatus
PQputCopyData = libpq.PQputCopyData
PQputCopyData.argtypes = [ c_void_p, c_void_p, c_int ]
PQputCopyEnd = libpq.PQputCopyEnd
PQputCopyEnd.argtypes = [ c_void_p, c_void_p ]
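# A minimal sketch of how these bindings are driven (conninfo string, buffer
# and table name are illustrative):
#   conn = PQconnectdb("dbname = pt_example")
#   if PQstatus(conn):      # non-zero ConnStatusType means failure
#       raise Exception("Failed to connect")
#   PQexec(conn, "COPY samples FROM STDIN")
#   PQputCopyData(conn, data_buf, len(data_buf))
#   PQputCopyEnd(conn, None)
#   PQfinish(conn)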
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
# These perf imports are not used at present
#from perf_trace_context import *
#from Core import *
perf_db_export_mode = True
perf_db_export_calls = False
def usage():
print >> sys.stderr, "Usage is: export-to-postgresql.py <database name> [<columns>] [<calls>]"
print >> sys.stderr, "where: columns 'all' or 'branches'"
print >> sys.stderr, " calls 'calls' => create calls table"
raise Exception("Too few arguments")
if (len(sys.argv) < 2):
usage()
dbname = sys.argv[1]
if (len(sys.argv) >= 3):
columns = sys.argv[2]
else:
columns = "all"
if columns not in ("all", "branches"):
usage()
branches = (columns == "branches")
if (len(sys.argv) >= 4):
if (sys.argv[3] == "calls"):
perf_db_export_calls = True
else:
usage()
output_dir_name = os.getcwd() + "/" + dbname + "-perf-data"
os.mkdir(output_dir_name)
def do_query(q, s):
if (q.exec_(s)):
return
raise Exception("Query failed: " + q.lastError().text())
print datetime.datetime.today(), "Creating database..."
db = QSqlDatabase.addDatabase('QPSQL')
query = QSqlQuery(db)
db.setDatabaseName('postgres')
db.open()
try:
do_query(query, 'CREATE DATABASE ' + dbname)
except:
os.rmdir(output_dir_name)
raise
query.finish()
query.clear()
db.close()
db.setDatabaseName(dbname)
db.open()
query = QSqlQuery(db)
do_query(query, 'SET client_min_messages TO WARNING')
do_query(query, 'CREATE TABLE selected_events ('
'id bigint NOT NULL,'
'name varchar(80))')
do_query(query, 'CREATE TABLE machines ('
'id bigint NOT NULL,'
'pid integer,'
'root_dir varchar(4096))')
do_query(query, 'CREATE TABLE threads ('
'id bigint NOT NULL,'
'machine_id bigint,'
'process_id bigint,'
'pid integer,'
'tid integer)')
do_query(query, 'CREATE TABLE comms ('
'id bigint NOT NULL,'
'comm varchar(16))')
do_query(query, 'CREATE TABLE comm_threads ('
'id bigint NOT NULL,'
'comm_id bigint,'
'thread_id bigint)')
do_query(query, 'CREATE TABLE dsos ('
'id bigint NOT NULL,'
'machine_id bigint,'
'short_name varchar(256),'
'long_name varchar(4096),'
'build_id varchar(64))')
do_query(query, 'CREATE TABLE symbols ('
'id bigint NOT NULL,'
'dso_id bigint,'
'sym_start bigint,'
'sym_end bigint,'
'binding integer,'
'name varchar(2048))')
do_query(query, 'CREATE TABLE branch_types ('
'id integer NOT NULL,'
'name varchar(80))')
if branches:
do_query(query, 'CREATE TABLE samples ('
'id bigint NOT NULL,'
'evsel_id bigint,'
'machine_id bigint,'
'thread_id bigint,'
'comm_id bigint,'
'dso_id bigint,'
'symbol_id bigint,'
'sym_offset bigint,'
'ip bigint,'
'time bigint,'
'cpu integer,'
'to_dso_id bigint,'
'to_symbol_id bigint,'
'to_sym_offset bigint,'
'to_ip bigint,'
'branch_type integer,'
'in_tx boolean)')
else:
do_query(query, 'CREATE TABLE samples ('
'id bigint NOT NULL,'
'evsel_id bigint,'
'machine_id bigint,'
'thread_id bigint,'
'comm_id bigint,'
'dso_id bigint,'
'symbol_id bigint,'
'sym_offset bigint,'
'ip bigint,'
'time bigint,'
'cpu integer,'
'to_dso_id bigint,'
'to_symbol_id bigint,'
'to_sym_offset bigint,'
'to_ip bigint,'
'period bigint,'
'weight bigint,'
'transaction bigint,'
'data_src bigint,'
'branch_type integer,'
'in_tx boolean)')
if perf_db_export_calls:
do_query(query, 'CREATE TABLE call_paths ('
'id bigint NOT NULL,'
'parent_id bigint,'
'symbol_id bigint,'
'ip bigint)')
do_query(query, 'CREATE TABLE calls ('
'id bigint NOT NULL,'
'thread_id bigint,'
'comm_id bigint,'
'call_path_id bigint,'
'call_time bigint,'
'return_time bigint,'
'branch_count bigint,'
'call_id bigint,'
'return_id bigint,'
'parent_call_path_id bigint,'
'flags integer)')
do_query(query, 'CREATE VIEW machines_view AS '
'SELECT '
'id,'
'pid,'
'root_dir,'
'CASE WHEN id=0 THEN \'unknown\' WHEN pid=-1 THEN \'host\' ELSE \'guest\' END AS host_or_guest'
' FROM machines')
do_query(query, 'CREATE VIEW dsos_view AS '
'SELECT '
'id,'
'machine_id,'
'(SELECT host_or_guest FROM machines_view WHERE id = machine_id) AS host_or_guest,'
'short_name,'
'long_name,'
'build_id'
' FROM dsos')
do_query(query, 'CREATE VIEW symbols_view AS '
'SELECT '
'id,'
'name,'
'(SELECT short_name FROM dsos WHERE id=dso_id) AS dso,'
'dso_id,'
'sym_start,'
'sym_end,'
'CASE WHEN binding=0 THEN \'local\' WHEN binding=1 THEN \'global\' ELSE \'weak\' END AS binding'
' FROM symbols')
do_query(query, 'CREATE VIEW threads_view AS '
'SELECT '
'id,'
'machine_id,'
'(SELECT host_or_guest FROM machines_view WHERE id = machine_id) AS host_or_guest,'
'process_id,'
'pid,'
'tid'
' FROM threads')
do_query(query, 'CREATE VIEW comm_threads_view AS '
'SELECT '
'comm_id,'
'(SELECT comm FROM comms WHERE id = comm_id) AS command,'
'thread_id,'
'(SELECT pid FROM threads WHERE id = thread_id) AS pid,'
'(SELECT tid FROM threads WHERE id = thread_id) AS tid'
' FROM comm_threads')
if perf_db_export_calls:
do_query(query, 'CREATE VIEW call_paths_view AS '
'SELECT '
'c.id,'
'to_hex(c.ip) AS ip,'
'c.symbol_id,'
'(SELECT name FROM symbols WHERE id = c.symbol_id) AS symbol,'
'(SELECT dso_id FROM symbols WHERE id = c.symbol_id) AS dso_id,'
'(SELECT dso FROM symbols_view WHERE id = c.symbol_id) AS dso_short_name,'
'c.parent_id,'
'to_hex(p.ip) AS parent_ip,'
'p.symbol_id AS parent_symbol_id,'
'(SELECT name FROM symbols WHERE id = p.symbol_id) AS parent_symbol,'
'(SELECT dso_id FROM symbols WHERE id = p.symbol_id) AS parent_dso_id,'
'(SELECT dso FROM symbols_view WHERE id = p.symbol_id) AS parent_dso_short_name'
' FROM call_paths c INNER JOIN call_paths p ON p.id = c.parent_id')
do_query(query, 'CREATE VIEW calls_view AS '
'SELECT '
'calls.id,'
'thread_id,'
'(SELECT pid FROM threads WHERE id = thread_id) AS pid,'
'(SELECT tid FROM threads WHERE id = thread_id) AS tid,'
'(SELECT comm FROM comms WHERE id = comm_id) AS command,'
'call_path_id,'
'to_hex(ip) AS ip,'
'symbol_id,'
'(SELECT name FROM symbols WHERE id = symbol_id) AS symbol,'
'call_time,'
'return_time,'
'return_time - call_time AS elapsed_time,'
'branch_count,'
'call_id,'
'return_id,'
'CASE WHEN flags=1 THEN \'no call\' WHEN flags=2 THEN \'no return\' WHEN flags=3 THEN \'no call/return\' ELSE \'\' END AS flags,'
'parent_call_path_id'
' FROM calls INNER JOIN call_paths ON call_paths.id = call_path_id')
do_query(query, 'CREATE VIEW samples_view AS '
'SELECT '
'id,'
'time,'
'cpu,'
'(SELECT pid FROM threads WHERE id = thread_id) AS pid,'
'(SELECT tid FROM threads WHERE id = thread_id) AS tid,'
'(SELECT comm FROM comms WHERE id = comm_id) AS command,'
'(SELECT name FROM selected_events WHERE id = evsel_id) AS event,'
'to_hex(ip) AS ip_hex,'
'(SELECT name FROM symbols WHERE id = symbol_id) AS symbol,'
'sym_offset,'
'(SELECT short_name FROM dsos WHERE id = dso_id) AS dso_short_name,'
'to_hex(to_ip) AS to_ip_hex,'
'(SELECT name FROM symbols WHERE id = to_symbol_id) AS to_symbol,'
'to_sym_offset,'
'(SELECT short_name FROM dsos WHERE id = to_dso_id) AS to_dso_short_name,'
'(SELECT name FROM branch_types WHERE id = branch_type) AS branch_type_name,'
'in_tx'
' FROM samples')
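# The intermediate files use PostgreSQL's binary COPY format: an 11-byte
# signature, a 32-bit flags field and a 32-bit header extension length,
# followed by one tuple per row, and terminated by a 16-bit field count
# of -1 (the "\377\377" trailer below).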
file_header = struct.pack("!11sii", "PGCOPY\n\377\r\n\0", 0, 0)
file_trailer = "\377\377"
def open_output_file(file_name):
path_name = output_dir_name + "/" + file_name
file = open(path_name, "w+")
file.write(file_header)
return file
def close_output_file(file):
file.write(file_trailer)
file.close()
def copy_output_file_direct(file, table_name):
close_output_file(file)
sql = "COPY " + table_name + " FROM '" + file.name + "' (FORMAT 'binary')"
do_query(query, sql)
# Use COPY FROM STDIN because security may prevent postgres from accessing the files directly
def copy_output_file(file, table_name):
conn = PQconnectdb("dbname = " + dbname)
if (PQstatus(conn)):
raise Exception("COPY FROM STDIN PQconnectdb failed")
file.write(file_trailer)
file.seek(0)
sql = "COPY " + table_name + " FROM STDIN (FORMAT 'binary')"
res = PQexec(conn, sql)
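# PGRES_COPY_IN is 4 in libpq's ExecStatusType enum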
if (PQresultStatus(res) != 4):
raise Exception("COPY FROM STDIN PQexec failed")
data = file.read(65536)
while (len(data)):
ret = PQputCopyData(conn, data, len(data))
if (ret != 1):
raise Exception("COPY FROM STDIN PQputCopyData failed, error " + str(ret))
data = file.read(65536)
ret = PQputCopyEnd(conn, None)
if (ret != 1):
raise Exception("COPY FROM STDIN PQputCopyEnd failed, error " + str(ret))
PQfinish(conn)
def remove_output_file(file):
name = file.name
file.close()
os.unlink(name)
evsel_file = open_output_file("evsel_table.bin")
machine_file = open_output_file("machine_table.bin")
thread_file = open_output_file("thread_table.bin")
comm_file = open_output_file("comm_table.bin")
comm_thread_file = open_output_file("comm_thread_table.bin")
dso_file = open_output_file("dso_table.bin")
symbol_file = open_output_file("symbol_table.bin")
branch_type_file = open_output_file("branch_type_table.bin")
sample_file = open_output_file("sample_table.bin")
if perf_db_export_calls:
call_path_file = open_output_file("call_path_table.bin")
call_file = open_output_file("call_table.bin")
def trace_begin():
print datetime.datetime.today(), "Writing to intermediate files..."
# id == 0 means unknown. It is easier to create records for them than replace the zeroes with NULLs
evsel_table(0, "unknown")
machine_table(0, 0, "unknown")
thread_table(0, 0, 0, -1, -1)
comm_table(0, "unknown")
dso_table(0, 0, "unknown", "unknown", "")
symbol_table(0, 0, 0, 0, 0, "unknown")
sample_table(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
if perf_db_export_calls:
call_path_table(0, 0, 0, 0)
unhandled_count = 0
def trace_end():
print datetime.datetime.today(), "Copying to database..."
copy_output_file(evsel_file, "selected_events")
copy_output_file(machine_file, "machines")
copy_output_file(thread_file, "threads")
copy_output_file(comm_file, "comms")
copy_output_file(comm_thread_file, "comm_threads")
copy_output_file(dso_file, "dsos")
copy_output_file(symbol_file, "symbols")
copy_output_file(branch_type_file, "branch_types")
copy_output_file(sample_file, "samples")
if perf_db_export_calls:
copy_output_file(call_path_file, "call_paths")
copy_output_file(call_file, "calls")
print datetime.datetime.today(), "Removing intermediate files..."
remove_output_file(evsel_file)
remove_output_file(machine_file)
remove_output_file(thread_file)
remove_output_file(comm_file)
remove_output_file(comm_thread_file)
remove_output_file(dso_file)
remove_output_file(symbol_file)
remove_output_file(branch_type_file)
remove_output_file(sample_file)
if perf_db_export_calls:
remove_output_file(call_path_file)
remove_output_file(call_file)
os.rmdir(output_dir_name)
print datetime.datetime.today(), "Adding primary keys"
do_query(query, 'ALTER TABLE selected_events ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE machines ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE threads ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE comms ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE comm_threads ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE dsos ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE symbols ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE branch_types ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE samples ADD PRIMARY KEY (id)')
if perf_db_export_calls:
do_query(query, 'ALTER TABLE call_paths ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE calls ADD PRIMARY KEY (id)')
print datetime.datetime.today(), "Adding foreign keys"
do_query(query, 'ALTER TABLE threads '
'ADD CONSTRAINT machinefk FOREIGN KEY (machine_id) REFERENCES machines (id),'
'ADD CONSTRAINT processfk FOREIGN KEY (process_id) REFERENCES threads (id)')
do_query(query, 'ALTER TABLE comm_threads '
'ADD CONSTRAINT commfk FOREIGN KEY (comm_id) REFERENCES comms (id),'
'ADD CONSTRAINT threadfk FOREIGN KEY (thread_id) REFERENCES threads (id)')
do_query(query, 'ALTER TABLE dsos '
'ADD CONSTRAINT machinefk FOREIGN KEY (machine_id) REFERENCES machines (id)')
do_query(query, 'ALTER TABLE symbols '
'ADD CONSTRAINT dsofk FOREIGN KEY (dso_id) REFERENCES dsos (id)')
do_query(query, 'ALTER TABLE samples '
'ADD CONSTRAINT evselfk FOREIGN KEY (evsel_id) REFERENCES selected_events (id),'
'ADD CONSTRAINT machinefk FOREIGN KEY (machine_id) REFERENCES machines (id),'
'ADD CONSTRAINT threadfk FOREIGN KEY (thread_id) REFERENCES threads (id),'
'ADD CONSTRAINT commfk FOREIGN KEY (comm_id) REFERENCES comms (id),'
'ADD CONSTRAINT dsofk FOREIGN KEY (dso_id) REFERENCES dsos (id),'
'ADD CONSTRAINT symbolfk FOREIGN KEY (symbol_id) REFERENCES symbols (id),'
'ADD CONSTRAINT todsofk FOREIGN KEY (to_dso_id) REFERENCES dsos (id),'
'ADD CONSTRAINT tosymbolfk FOREIGN KEY (to_symbol_id) REFERENCES symbols (id)')
if perf_db_export_calls:
do_query(query, 'ALTER TABLE call_paths '
'ADD CONSTRAINT parentfk FOREIGN KEY (parent_id) REFERENCES call_paths (id),'
'ADD CONSTRAINT symbolfk FOREIGN KEY (symbol_id) REFERENCES symbols (id)')
do_query(query, 'ALTER TABLE calls '
'ADD CONSTRAINT threadfk FOREIGN KEY (thread_id) REFERENCES threads (id),'
'ADD CONSTRAINT commfk FOREIGN KEY (comm_id) REFERENCES comms (id),'
'ADD CONSTRAINT call_pathfk FOREIGN KEY (call_path_id) REFERENCES call_paths (id),'
'ADD CONSTRAINT callfk FOREIGN KEY (call_id) REFERENCES samples (id),'
'ADD CONSTRAINT returnfk FOREIGN KEY (return_id) REFERENCES samples (id),'
'ADD CONSTRAINT parent_call_pathfk FOREIGN KEY (parent_call_path_id) REFERENCES call_paths (id)')
do_query(query, 'CREATE INDEX pcpid_idx ON calls (parent_call_path_id)')
if (unhandled_count):
print datetime.datetime.today(), "Warning: ", unhandled_count, " unhandled events"
print datetime.datetime.today(), "Done"
def trace_unhandled(event_name, context, event_fields_dict):
global unhandled_count
unhandled_count += 1
def sched__sched_switch(*x):
pass
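# Each *_table function below writes one PGCOPY tuple: "!h" is the big-endian
# 16-bit field count, and each field is a 32-bit byte length ("i") followed by
# its value ("q" for bigint, "i" for integer, "B" for boolean, raw bytes for
# strings).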
def evsel_table(evsel_id, evsel_name, *x):
n = len(evsel_name)
fmt = "!hiqi" + str(n) + "s"
value = struct.pack(fmt, 2, 8, evsel_id, n, evsel_name)
evsel_file.write(value)
def machine_table(machine_id, pid, root_dir, *x):
n = len(root_dir)
fmt = "!hiqiii" + str(n) + "s"
value = struct.pack(fmt, 3, 8, machine_id, 4, pid, n, root_dir)
machine_file.write(value)
def thread_table(thread_id, machine_id, process_id, pid, tid, *x):
value = struct.pack("!hiqiqiqiiii", 5, 8, thread_id, 8, machine_id, 8, process_id, 4, pid, 4, tid)
thread_file.write(value)
def comm_table(comm_id, comm_str, *x):
n = len(comm_str)
fmt = "!hiqi" + str(n) + "s"
value = struct.pack(fmt, 2, 8, comm_id, n, comm_str)
comm_file.write(value)
def comm_thread_table(comm_thread_id, comm_id, thread_id, *x):
fmt = "!hiqiqiq"
value = struct.pack(fmt, 3, 8, comm_thread_id, 8, comm_id, 8, thread_id)
comm_thread_file.write(value)
def dso_table(dso_id, machine_id, short_name, long_name, build_id, *x):
n1 = len(short_name)
n2 = len(long_name)
n3 = len(build_id)
fmt = "!hiqiqi" + str(n1) + "si" + str(n2) + "si" + str(n3) + "s"
value = struct.pack(fmt, 5, 8, dso_id, 8, machine_id, n1, short_name, n2, long_name, n3, build_id)
dso_file.write(value)
def symbol_table(symbol_id, dso_id, sym_start, sym_end, binding, symbol_name, *x):
n = len(symbol_name)
fmt = "!hiqiqiqiqiii" + str(n) + "s"
value = struct.pack(fmt, 6, 8, symbol_id, 8, dso_id, 8, sym_start, 8, sym_end, 4, binding, n, symbol_name)
symbol_file.write(value)
def branch_type_table(branch_type, name, *x):
n = len(name)
fmt = "!hiii" + str(n) + "s"
value = struct.pack(fmt, 2, 4, branch_type, n, name)
branch_type_file.write(value)
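# A samples row has 17 fields in 'branches' mode and 21 otherwise, matching
# the two CREATE TABLE variants above.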
def sample_table(sample_id, evsel_id, machine_id, thread_id, comm_id, dso_id, symbol_id, sym_offset, ip, time, cpu, to_dso_id, to_symbol_id, to_sym_offset, to_ip, period, weight, transaction, data_src, branch_type, in_tx, *x):
if branches:
value = struct.pack("!hiqiqiqiqiqiqiqiqiqiqiiiqiqiqiqiiiB", 17, 8, sample_id, 8, evsel_id, 8, machine_id, 8, thread_id, 8, comm_id, 8, dso_id, 8, symbol_id, 8, sym_offset, 8, ip, 8, time, 4, cpu, 8, to_dso_id, 8, to_symbol_id, 8, to_sym_offset, 8, to_ip, 4, branch_type, 1, in_tx)
else:
value = struct.pack("!hiqiqiqiqiqiqiqiqiqiqiiiqiqiqiqiqiqiqiqiiiB", 21, 8, sample_id, 8, evsel_id, 8, machine_id, 8, thread_id, 8, comm_id, 8, dso_id, 8, symbol_id, 8, sym_offset, 8, ip, 8, time, 4, cpu, 8, to_dso_id, 8, to_symbol_id, 8, to_sym_offset, 8, to_ip, 8, period, 8, weight, 8, transaction, 8, data_src, 4, branch_type, 1, in_tx)
sample_file.write(value)
def call_path_table(cp_id, parent_id, symbol_id, ip, *x):
fmt = "!hiqiqiqiq"
value = struct.pack(fmt, 4, 8, cp_id, 8, parent_id, 8, symbol_id, 8, ip)
call_path_file.write(value)
def call_return_table(cr_id, thread_id, comm_id, call_path_id, call_time, return_time, branch_count, call_id, return_id, parent_call_path_id, flags, *x):
fmt = "!hiqiqiqiqiqiqiqiqiqiqii"
value = struct.pack(fmt, 11, 8, cr_id, 8, thread_id, 8, comm_id, 8, call_path_id, 8, call_time, 8, return_time, 8, branch_count, 8, call_id, 8, return_id, 8, parent_call_path_id, 4, flags)
call_file.write(value)
|
gpl-2.0
|
indashnet/InDashNet.Open.UN2000
|
android/external/chromium_org/tools/metrics/histograms/find_unmapped_histograms.py
|
56
|
7483
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Scans the Chromium source for histograms that are absent from histograms.xml.
This is a heuristic scan, so a clean run of this script does not guarantee that
all histograms in the Chromium source are properly mapped. Notably, field
trials are entirely ignored by this script.
"""
import commands
import extract_histograms
import logging
import optparse
import os
import re
import sys
ADJACENT_C_STRING_REGEX = re.compile(r"""
(" # Opening quotation mark
[^"]*) # Literal string contents
" # Closing quotation mark
\s* # Any number of spaces
" # Another opening quotation mark
""", re.VERBOSE)
CONSTANT_REGEX = re.compile(r"""
(\w*::)? # Optional namespace
k[A-Z] # Match a constant identifier: 'k' followed by an uppercase letter
\w* # Match the rest of the constant identifier
$ # Make sure there's only the identifier, nothing else
""", re.VERBOSE)
HISTOGRAM_REGEX = re.compile(r"""
UMA_HISTOGRAM # Match the shared prefix for standard UMA histogram macros
\w* # Match the rest of the macro name, e.g. '_ENUMERATION'
\( # Match the opening parenthesis for the macro
\s* # Match any whitespace -- especially, any newlines
([^,]*) # Capture the first parameter to the macro
, # Match the comma that delineates the first parameter
""", re.VERBOSE)
class DirectoryNotFoundException(Exception):
"""Base class to distinguish locally defined exceptions from standard ones."""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
def changeWorkingDirectory(target_directory):
"""Changes the working directory to the given |target_directory|, which
defaults to the root of the Chromium checkout.
Returns:
None
Raises:
DirectoryNotFoundException if the target directory cannot be found.
"""
working_directory = os.getcwd()
pos = working_directory.find(target_directory)
if pos < 0:
raise DirectoryNotFoundException('Could not find root directory "' +
target_directory + '". ' +
'Please run this script within your ' +
'Chromium checkout.')
os.chdir(working_directory[:pos + len(target_directory)])
def collapseAdjacentCStrings(string):
"""Collapses any adjacent C strings into a single string.
Useful to re-combine strings that were split across multiple lines to satisfy
the 80-col restriction.
Args:
string: The string to recombine, e.g. '"Foo"\n "bar"'
Returns:
The collapsed string, e.g. "Foobar" for an input of '"Foo"\n "bar"'
"""
while True:
collapsed = ADJACENT_C_STRING_REGEX.sub(r'\1', string, count=1)
if collapsed == string:
return collapsed
string = collapsed
def logNonLiteralHistogram(filename, histogram):
"""Logs a statement warning about a non-literal histogram name found in the
Chromium source.
Filters out known acceptable exceptions.
Args:
filename: The filename for the file containing the histogram, e.g.
'chrome/browser/memory_details.cc'
histogram: The expression that evaluates to the name of the histogram, e.g.
'"FakeHistogram" + variant'
Returns:
None
"""
# Ignore histogram macros, which typically contain backslashes so that they
# can be formatted across lines.
if '\\' in histogram:
return
# Field trials are unique within a session, so are effectively constants.
if histogram.startswith('base::FieldTrial::MakeName'):
return
# Ignore histogram names that have been pulled out into C++ constants.
if CONSTANT_REGEX.match(histogram):
return
# TODO(isherman): This is still a little noisy... needs further filtering to
# reduce the noise.
logging.warning('%s contains non-literal histogram name <%s>', filename,
histogram)
def readChromiumHistograms():
"""Searches the Chromium source for all histogram names.
Also prints warnings for any invocations of the UMA_HISTOGRAM_* macros with
names that might vary during a single run of the app.
Returns:
A set containing any found literal histogram names.
"""
logging.info('Scanning Chromium source for histograms...')
# Use git grep to find all invocations of the UMA_HISTOGRAM_* macros.
# Examples:
# 'path/to/foo.cc:420: UMA_HISTOGRAM_COUNTS_100("FooGroup.FooName",'
# 'path/to/bar.cc:632: UMA_HISTOGRAM_ENUMERATION('
locations = commands.getoutput('git gs UMA_HISTOGRAM').split('\n')
filenames = set([location.split(':')[0] for location in locations])
histograms = set()
for filename in filenames:
contents = ''
with open(filename, 'r') as f:
contents = f.read()
matches = set(HISTOGRAM_REGEX.findall(contents))
for histogram in matches:
histogram = collapseAdjacentCStrings(histogram)
# Must begin and end with a quotation mark.
if histogram[0] != '"' or histogram[-1] != '"':
logNonLiteralHistogram(filename, histogram)
continue
# Must not include any quotation marks other than at the beginning or end.
histogram_stripped = histogram.strip('"')
if '"' in histogram_stripped:
logNonLiteralHistogram(filename, histogram)
continue
histograms.add(histogram_stripped)
return histograms
def readXmlHistograms(histograms_file_location):
"""Parses all histogram names from histograms.xml.
Returns:
A set containing the parsed histogram names.
"""
logging.info('Reading histograms from %s...' % histograms_file_location)
histograms = extract_histograms.ExtractHistograms(histograms_file_location)
return set(extract_histograms.ExtractNames(histograms))
def main():
# Parse command line options
parser = optparse.OptionParser()
parser.add_option(
'--root-directory', dest='root_directory', default='src',
help='scan within DIRECTORY for histograms [optional, defaults to "src/"]',
metavar='DIRECTORY')
parser.add_option(
'--histograms-file', dest='histograms_file_location',
default='tools/metrics/histograms/histograms.xml',
help='read histogram definitions from FILE (relative to --root-directory) '
'[optional, defaults to "tools/metrics/histograms/histograms.xml"]',
metavar='FILE')
(options, args) = parser.parse_args()
if args:
parser.print_help()
sys.exit(1)
logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
try:
changeWorkingDirectory(options.root_directory)
except DirectoryNotFoundException as e:
logging.error(e)
sys.exit(1)
chromium_histograms = readChromiumHistograms()
xml_histograms = readXmlHistograms(options.histograms_file_location)
unmapped_histograms = sorted(chromium_histograms - xml_histograms)
if len(unmapped_histograms):
logging.info('')
logging.info('')
logging.info('Histograms in Chromium but not in %s:' %
options.histograms_file_location)
logging.info('-------------------------------------------------')
for histogram in unmapped_histograms:
logging.info(' %s', histogram)
else:
logging.info('Success! No unmapped histograms found.')
if __name__ == '__main__':
main()
|
apache-2.0
|
cpennington/edx-platform
|
openedx/core/djangoapps/user_authn/views/tests/test_reset_password.py
|
1
|
23912
|
"""
Test the various password reset flows
"""
import json
import re
import unicodedata
import unittest
import ddt
from django.conf import settings
from django.contrib.auth.hashers import UNUSABLE_PASSWORD_PREFIX, make_password
from django.contrib.auth.models import AnonymousUser, User
from django.contrib.auth.tokens import default_token_generator
from django.contrib.auth.views import INTERNAL_RESET_SESSION_TOKEN, PasswordResetConfirmView
from django.contrib.sessions.middleware import SessionMiddleware
from django.core import mail
from django.core.cache import cache
from django.http import Http404
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django.urls import reverse
from django.utils.http import int_to_base36
from mock import Mock, patch
from oauth2_provider import models as dot_models
from six.moves import range
from openedx.core.djangoapps.oauth_dispatch.tests import factories as dot_factories
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangolib.testing.utils import skip_unless_lms
from openedx.core.djangoapps.user_api.config.waffle import PREVENT_AUTH_USER_WRITES, SYSTEM_MAINTENANCE_MSG, waffle
from openedx.core.djangoapps.user_api.models import UserRetirementRequest
from openedx.core.djangoapps.user_api.tests.test_views import UserAPITestCase
from openedx.core.djangoapps.user_api.accounts import EMAIL_MAX_LENGTH, EMAIL_MIN_LENGTH
from openedx.core.djangoapps.user_authn.views.password_reset import (
SETTING_CHANGE_INITIATED, password_reset,
PasswordResetConfirmWrapper)
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase
from student.tests.factories import UserFactory
from student.tests.test_configuration_overrides import fake_get_value
from student.tests.test_email import mock_render_to_string
from util.password_policy_validators import create_validator_config
from util.testing import EventTestMixin
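# RequestFactory does not run middleware, so attach a working session to the
# request by hand before exercising the password reset views.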
def process_request(request):
middleware = SessionMiddleware()
middleware.process_request(request)
request.session.save()
@unittest.skipUnless(
settings.ROOT_URLCONF == "lms.urls",
"reset password tests should only run in LMS"
)
@ddt.ddt
class ResetPasswordTests(EventTestMixin, CacheIsolationTestCase):
"""
Tests that clicking reset password sends email, and doesn't activate the user
"""
request_factory = RequestFactory()
ENABLED_CACHES = ['default']
def setUp(self): # pylint: disable=arguments-differ
super(ResetPasswordTests, self).setUp('openedx.core.djangoapps.user_authn.views.password_reset.tracker')
self.user = UserFactory.create()
self.user.is_active = False
self.user.save()
self.token = default_token_generator.make_token(self.user)
self.uidb36 = int_to_base36(self.user.id)
self.user_bad_passwd = UserFactory.create()
self.user_bad_passwd.is_active = False
self.user_bad_passwd.password = UNUSABLE_PASSWORD_PREFIX
self.user_bad_passwd.save()
@patch(
'openedx.core.djangoapps.user_authn.views.password_reset.render_to_string',
Mock(side_effect=mock_render_to_string, autospec=True)
)
def test_user_bad_password_reset(self):
"""
Tests password reset behavior for user with password marked UNUSABLE_PASSWORD_PREFIX
"""
bad_pwd_req = self.request_factory.post('/password_reset/', {'email': self.user_bad_passwd.email})
bad_pwd_resp = password_reset(bad_pwd_req)
# If they've got an unusable password, we return a successful response code
self.assertEqual(bad_pwd_resp.status_code, 200)
obj = json.loads(bad_pwd_resp.content.decode('utf-8'))
self.assertEqual(obj, {
'success': True,
'value': "('registration/password_reset_done.html', [])",
})
self.assert_no_events_were_emitted()
@patch(
'openedx.core.djangoapps.user_authn.views.password_reset.render_to_string',
Mock(side_effect=mock_render_to_string, autospec=True)
)
def test_nonexist_email_password_reset(self):
"""
Now test the exception case of reset_password called with an invalid email.
"""
bad_email_req = self.request_factory.post('/password_reset/', {'email': self.user.email + "makeItFail"})
bad_email_resp = password_reset(bad_email_req)
# Note: even if the email is bad, we return a successful response code
# This prevents someone potentially trying to "brute-force" find out which
# emails are and aren't registered with edX
self.assertEqual(bad_email_resp.status_code, 200)
obj = json.loads(bad_email_resp.content.decode('utf-8'))
self.assertEqual(obj, {
'success': True,
'value': "('registration/password_reset_done.html', [])",
})
self.assert_no_events_were_emitted()
@patch(
'openedx.core.djangoapps.user_authn.views.password_reset.render_to_string',
Mock(side_effect=mock_render_to_string, autospec=True)
)
def test_password_reset_ratelimited(self):
"""
Try (and fail) resetting the password 30 times in a row on a non-existent email address
"""
cache.clear()
for i in range(30):
good_req = self.request_factory.post('/password_reset/', {
'email': 'thisdoesnotexist{0}@foo.com'.format(i)
})
good_resp = password_reset(good_req)
self.assertEqual(good_resp.status_code, 200)
# then the rate limiter should kick in and return an HttpForbidden response
bad_req = self.request_factory.post('/password_reset/', {'email': '[email protected]'})
bad_resp = password_reset(bad_req)
self.assertEqual(bad_resp.status_code, 403)
self.assert_no_events_were_emitted()
cache.clear()
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
@ddt.data(('plain_text', "You're receiving this e-mail because you requested a password reset"),
('html', "You're receiving this e-mail because you requested a password reset"))
@ddt.unpack
def test_reset_password_email(self, body_type, expected_output):
"""Tests contents of reset password email, and that user is not active"""
good_req = self.request_factory.post('/password_reset/', {'email': self.user.email})
good_req.user = self.user
good_req.site = Mock(domain='example.com')
dot_application = dot_factories.ApplicationFactory(user=self.user)
dot_access_token = dot_factories.AccessTokenFactory(user=self.user, application=dot_application)
dot_factories.RefreshTokenFactory(user=self.user, application=dot_application, access_token=dot_access_token)
good_resp = password_reset(good_req)
self.assertEqual(good_resp.status_code, 200)
self.assertFalse(dot_models.AccessToken.objects.filter(user=self.user).exists())
self.assertFalse(dot_models.RefreshToken.objects.filter(user=self.user).exists())
obj = json.loads(good_resp.content.decode('utf-8'))
self.assertTrue(obj['success'])
self.assertIn('e-mailed you instructions for setting your password', obj['value'])
from_email = configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
sent_message = mail.outbox[0]
bodies = {
'plain_text': sent_message.body,
'html': sent_message.alternatives[0][0],
}
body = bodies[body_type]
self.assertIn("Password reset", sent_message.subject)
self.assertIn(expected_output, body)
self.assertEqual(sent_message.from_email, from_email)
self.assertEqual(len(sent_message.to), 1)
self.assertIn(self.user.email, sent_message.to)
self.assert_event_emitted(
SETTING_CHANGE_INITIATED, user_id=self.user.id, setting=u'password', old=None, new=None,
)
# Test that the user is not active
self.user = User.objects.get(pk=self.user.pk)
self.assertFalse(self.user.is_active)
self.assertIn('password_reset_confirm/', body)
re.search(r'password_reset_confirm/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/', body).groupdict()
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
@ddt.data((False, 'http://'), (True, 'https://'))
@ddt.unpack
def test_reset_password_email_https(self, is_secure, protocol):
"""
Tests that the right url protocol is included in the reset password link
"""
req = self.request_factory.post(
'/password_reset/', {'email': self.user.email}
)
req.site = Mock(domain='example.com')
req.is_secure = Mock(return_value=is_secure)
req.user = self.user
password_reset(req)
sent_message = mail.outbox[0]
msg = sent_message.body
expected_msg = "Please go to the following page and choose a new password:\n\n" + protocol
self.assertIn(expected_msg, msg)
self.assert_event_emitted(
SETTING_CHANGE_INITIATED, user_id=self.user.id, setting=u'password', old=None, new=None
)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
@ddt.data(('Crazy Awesome Site', 'Crazy Awesome Site'), ('edX', 'edX'))
@ddt.unpack
def test_reset_password_email_site(self, site_name, platform_name):
"""
Tests that the right url domain and platform name is included in
the reset password email
"""
with patch("django.conf.settings.PLATFORM_NAME", platform_name):
with patch("django.conf.settings.SITE_NAME", site_name):
req = self.request_factory.post(
'/password_reset/', {'email': self.user.email}
)
req.user = self.user
req.site = Mock(domain='example.com')
password_reset(req)
sent_message = mail.outbox[0]
msg = sent_message.body
reset_msg = u"you requested a password reset for your user account at {}"
reset_msg = reset_msg.format(site_name)
self.assertIn(reset_msg, msg)
sign_off = u"The {} Team".format(platform_name)
self.assertIn(sign_off, msg)
self.assert_event_emitted(
SETTING_CHANGE_INITIATED, user_id=self.user.id, setting=u'password', old=None, new=None
)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
@patch("openedx.core.djangoapps.site_configuration.helpers.get_value", fake_get_value)
@ddt.data('plain_text', 'html')
def test_reset_password_email_configuration_override(self, body_type):
"""
Tests that the platform name and sender address from the site
configuration override are used in the reset password email
"""
req = self.request_factory.post(
'/password_reset/', {'email': self.user.email}
)
req.get_host = Mock(return_value=None)
req.site = Mock(domain='example.com')
req.user = self.user
with patch('crum.get_current_request', return_value=req):
password_reset(req)
sent_message = mail.outbox[0]
bodies = {
'plain_text': sent_message.body,
'html': sent_message.alternatives[0][0],
}
body = bodies[body_type]
reset_msg = u"you requested a password reset for your user account at {}".format(
fake_get_value('PLATFORM_NAME')
)
self.assertIn(reset_msg, body)
self.assert_event_emitted(
SETTING_CHANGE_INITIATED, user_id=self.user.id, setting=u'password', old=None, new=None
)
self.assertEqual(sent_message.from_email, "[email protected]")
@ddt.data(
('invalidUid', 'invalid_token'),
(None, 'invalid_token'),
('invalidUid', None),
)
@ddt.unpack
def test_reset_password_bad_token(self, uidb36, token):
"""
Tests bad token and uidb36 in password reset
"""
if uidb36 is None:
uidb36 = self.uidb36
if token is None:
token = self.token
bad_request = self.request_factory.get(
reverse(
"password_reset_confirm",
kwargs={"uidb36": uidb36, "token": token}
)
)
process_request(bad_request)
bad_request.user = AnonymousUser()
PasswordResetConfirmWrapper.as_view()(bad_request, uidb36=uidb36, token=token)
self.user = User.objects.get(pk=self.user.pk)
self.assertFalse(self.user.is_active)
def test_reset_password_good_token(self):
"""
Tests good token and uidb36 in password reset
"""
url = reverse(
"password_reset_confirm",
kwargs={"uidb36": self.uidb36, "token": self.token}
)
good_reset_req = self.request_factory.get(url)
process_request(good_reset_req)
good_reset_req.user = self.user
PasswordResetConfirmWrapper.as_view()(good_reset_req, uidb36=self.uidb36, token=self.token)
self.user = User.objects.get(pk=self.user.pk)
self.assertTrue(self.user.is_active)
def test_reset_password_good_token_with_anonymous_user(self):
"""
Tests good token and uidb36 in password reset for anonymous user
"""
url = reverse(
"password_reset_confirm",
kwargs={"uidb36": self.uidb36, "token": self.token}
)
good_reset_req = self.request_factory.get(url)
process_request(good_reset_req)
good_reset_req.user = AnonymousUser()
PasswordResetConfirmWrapper.as_view()(good_reset_req, uidb36=self.uidb36, token=self.token)
self.user = User.objects.get(pk=self.user.pk)
self.assertTrue(self.user.is_active)
def test_password_reset_fail(self):
"""
Tests that if we provide mismatched passwords, user is not marked as active.
"""
self.assertFalse(self.user.is_active)
url = reverse(
'password_reset_confirm',
kwargs={'uidb36': self.uidb36, 'token': self.token}
)
request_params = {'new_password1': 'password1', 'new_password2': 'password2'}
confirm_request = self.request_factory.post(url, data=request_params)
process_request(confirm_request)
confirm_request.user = self.user
# Make a password reset request with mismatching passwords.
resp = PasswordResetConfirmWrapper.as_view()(confirm_request, uidb36=self.uidb36, token=self.token)
# Verify the response status code is: 200 with password reset fail and also verify that
# the user is not marked as active.
self.assertEqual(resp.status_code, 200)
self.assertFalse(User.objects.get(pk=self.user.pk).is_active)
def test_password_reset_retired_user_fail(self):
"""
Tests that if a retired user attempts to reset their password, it fails.
"""
self.assertFalse(self.user.is_active)
# Retire the user.
UserRetirementRequest.create_retirement_request(self.user)
url = reverse(
'password_reset_confirm',
kwargs={'uidb36': self.uidb36, 'token': self.token}
)
reset_req = self.request_factory.get(url)
reset_req.user = self.user
resp = PasswordResetConfirmWrapper.as_view()(reset_req, uidb36=self.uidb36, token=self.token)
# Verify the response status code is: 200 with password reset fail and also verify that
# the user is not marked as active.
self.assertEqual(resp.status_code, 200)
self.assertFalse(User.objects.get(pk=self.user.pk).is_active)
def test_password_reset_prevent_auth_user_writes(self):
with waffle().override(PREVENT_AUTH_USER_WRITES, True):
url = reverse(
"password_reset_confirm",
kwargs={"uidb36": self.uidb36, "token": self.token}
)
for request in [self.request_factory.get(url), self.request_factory.post(url)]:
request.user = self.user
response = PasswordResetConfirmWrapper.as_view()(request, uidb36=self.uidb36, token=self.token)
assert response.context_data['err_msg'] == SYSTEM_MAINTENANCE_MSG
self.user.refresh_from_db()
assert not self.user.is_active
def test_password_reset_normalize_password(self):
# pylint: disable=anomalous-unicode-escape-in-string
"""
Tests that if we provide a password that is not properly normalized, it is
saved using our NFKC normalization method.
In this test, the input password is u'p\u212bssword'. It should be normalized to u'p\xc5ssword'
"""
url = reverse(
"password_reset_confirm",
kwargs={"uidb36": self.uidb36, "token": self.token}
)
password = u'p\u212bssword'
request_params = {'new_password1': password, 'new_password2': password}
confirm_request = self.request_factory.post(url, data=request_params)
process_request(confirm_request)
confirm_request.session[INTERNAL_RESET_SESSION_TOKEN] = self.token
confirm_request.user = self.user
__ = PasswordResetConfirmWrapper.as_view()(confirm_request, uidb36=self.uidb36, token=self.token)
user = User.objects.get(pk=self.user.pk)
salt_val = user.password.split('$')[1]
expected_user_password = make_password(unicodedata.normalize('NFKC', u'p\u212bssword'), salt_val)
self.assertEqual(expected_user_password, user.password)
@override_settings(AUTH_PASSWORD_VALIDATORS=[
create_validator_config('util.password_policy_validators.MinimumLengthValidator', {'min_length': 2}),
create_validator_config('util.password_policy_validators.MaximumLengthValidator', {'max_length': 10})
])
@ddt.data(
{
'password': '1',
'error_message': 'This password is too short. It must contain at least 2 characters.',
},
{
'password': '01234567891',
'error_message': 'This password is too long. It must contain no more than 10 characters.',
}
)
def test_password_reset_with_invalid_length(self, password_dict):
"""
Tests that if we provide a password shorter than PASSWORD_MIN_LENGTH,
or longer than PASSWORD_MAX_LENGTH, password reset will fail with an error message.
"""
url = reverse(
'password_reset_confirm',
kwargs={'uidb36': self.uidb36, 'token': self.token}
)
request_params = {'new_password1': password_dict['password'], 'new_password2': password_dict['password']}
confirm_request = self.request_factory.post(url, data=request_params)
confirm_request.user = self.user
# Make a password reset request with minimum/maximum passwords characters.
response = PasswordResetConfirmWrapper.as_view()(confirm_request, uidb36=self.uidb36, token=self.token)
self.assertEqual(response.context_data['err_msg'], password_dict['error_message'])
@patch.object(PasswordResetConfirmView, 'dispatch')
@patch("openedx.core.djangoapps.site_configuration.helpers.get_value", fake_get_value)
def test_reset_password_good_token_configuration_override(self, reset_confirm):
"""
Tests password reset confirmation page for site configuration override.
"""
url = reverse(
"password_reset_confirm",
kwargs={"uidb36": self.uidb36, "token": self.token}
)
good_reset_req = self.request_factory.get(url)
process_request(good_reset_req)
good_reset_req.user = self.user
PasswordResetConfirmWrapper.as_view()(good_reset_req, uidb36=self.uidb36, token=self.token)
confirm_kwargs = reset_confirm.call_args[1]
self.assertEqual(confirm_kwargs['extra_context']['platform_name'], 'Fake University')
self.user = User.objects.get(pk=self.user.pk)
self.assertTrue(self.user.is_active)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
@ddt.data('Crazy Awesome Site', 'edX')
def test_reset_password_email_subject(self, platform_name):
"""
Tests that the right platform name is included in
the reset password email subject
"""
with patch("django.conf.settings.PLATFORM_NAME", platform_name):
req = self.request_factory.post(
'/password_reset/', {'email': self.user.email}
)
req.user = self.user
req.site = Mock(domain='example.com')
password_reset(req)
sent_message = mail.outbox[0]
subj = sent_message.subject
self.assertIn(platform_name, subj)
def test_reset_password_with_other_user_link(self):
"""
Tests that user should not be able to reset password through other user's token
"""
reset_url = reverse(
"password_reset_confirm",
kwargs={"uidb36": self.uidb36, "token": self.token}
)
reset_request = self.request_factory.get(reset_url)
reset_request.user = UserFactory.create()
self.assertRaises(Http404, PasswordResetConfirmWrapper.as_view(), reset_request, uidb36=self.uidb36,
token=self.token)
@ddt.ddt
@skip_unless_lms
class PasswordResetViewTest(UserAPITestCase):
"""Tests of the user API's password reset endpoint. """
def setUp(self):
super(PasswordResetViewTest, self).setUp()
self.url = reverse("user_api_password_reset")
@ddt.data("get", "post")
def test_auth_disabled(self, method):
self.assertAuthDisabled(method, self.url)
def test_allowed_methods(self):
self.assertAllowedMethods(self.url, ["GET", "HEAD", "OPTIONS"])
def test_put_not_allowed(self):
response = self.client.put(self.url)
self.assertHttpMethodNotAllowed(response)
def test_delete_not_allowed(self):
response = self.client.delete(self.url)
self.assertHttpMethodNotAllowed(response)
def test_patch_not_allowed(self):
response = self.client.patch(self.url)
self.assertHttpMethodNotAllowed(response)
def test_password_reset_form(self):
# Retrieve the password reset form
response = self.client.get(self.url, content_type="application/json")
self.assertHttpOK(response)
# Verify that the form description matches what we expect
form_desc = json.loads(response.content.decode('utf-8'))
self.assertEqual(form_desc["method"], "post")
self.assertEqual(form_desc["submit_url"], reverse("password_change_request"))
self.assertEqual(form_desc["fields"], [
{
"name": "email",
"defaultValue": "",
"type": "email",
"required": True,
"label": "Email",
"placeholder": "[email protected]",
"instructions": u"The email address you used to register with {platform_name}".format(
platform_name=settings.PLATFORM_NAME
),
"restrictions": {
"min_length": EMAIL_MIN_LENGTH,
"max_length": EMAIL_MAX_LENGTH
},
"errorMessages": {},
"supplementalText": "",
"supplementalLink": "",
}
])
|
agpl-3.0
|
lpsinger/healpy
|
healpy/test/test_pixelfunc.py
|
5
|
8263
|
from ..pixelfunc import *
from .._query_disc import boundaries
from .._pixelfunc import ringinfo, pix2ring, isnsideok
import numpy as np
import unittest
class TestPixelFunc(unittest.TestCase):
def setUp(self):
# data fixture
self.theta0 = [1.52911759, 0.78550497, 1.57079633, 0.05103658, 3.09055608]
self.phi0 = [0.0, 0.78539816, 1.61988371, 0.78539816, 0.78539816]
self.lon0 = np.degrees(self.phi0)
self.lat0 = 90.0 - np.degrees(self.theta0)
def test_nside2npix(self):
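# a HEALPix map has npix = 12 * nside**2 pixels, e.g. 12 * 512**2 == 3145728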
self.assertEqual(nside2npix(512), 3145728)
self.assertEqual(nside2npix(1024), 12582912)
def test_nside2resol(self):
self.assertAlmostEqual(nside2resol(512, arcmin=True), 6.87097282363)
self.assertAlmostEqual(nside2resol(1024, arcmin=True), 3.43548641181)
def test_max_pixrad(self):
self.assertAlmostEqual(max_pixrad(512), 2.0870552355e-03)
self.assertAlmostEqual(
max_pixrad(512, degrees=True), np.rad2deg(2.0870552355e-03)
)
def test_nside2pixarea(self):
self.assertAlmostEqual(nside2pixarea(512), 3.9947416351188569e-06)
def test_ang2pix_ring(self):
# ensure nside = 1 << 23 is correctly calculated
# by checking that the original theta, phi are restored.
# NOTE: nside needs to be sufficiently large!
id = ang2pix(1048576 * 8, self.theta0, self.phi0, nest=False)
theta1, phi1 = pix2ang(1048576 * 8, id, nest=False)
np.testing.assert_array_almost_equal(theta1, self.theta0)
np.testing.assert_array_almost_equal(phi1, self.phi0)
def test_ang2pix_ring_outofrange(self):
# Healpy_Base2 works up to nside = 2**29.
# Check that a ValueError is raised for nside = 2**30.
self.assertRaises(
ValueError, ang2pix, 1 << 30, self.theta0, self.phi0, nest=False
)
def test_ang2pix_nest(self):
# ensure nside = 1 << 23 is correctly calculated
# by checking that the original theta, phi are restored.
# NOTE: nside needs to be sufficiently large!
# NOTE: with Healpy_Base this will fail because nside
# is limited to 1 << 13 with Healpy_Base.
id = ang2pix(1048576 * 8, self.theta0, self.phi0, nest=True)
theta1, phi1 = pix2ang(1048576 * 8, id, nest=True)
np.testing.assert_array_almost_equal(theta1, self.theta0)
np.testing.assert_array_almost_equal(phi1, self.phi0)
self.assertTrue(np.allclose(theta1, self.theta0))
self.assertTrue(np.allclose(phi1, self.phi0))
def test_ang2pix_nest_outofrange_doesntcrash(self):
# Healpy_Base2 works up to nside = 2**29.
# Check that a ValueError is raised for nside = 2**30.
self.assertRaises(
ValueError, ang2pix, 1 << 30, self.theta0, self.phi0, nest=True
)
def test_ang2pix_negative_theta(self):
self.assertRaises(ValueError, ang2pix, 32, -1, 0)
def test_ang2pix_lonlat(self):
# Need to decrease the precision of the check because degrees, not radians, are used
id = ang2pix(1048576 * 8, self.lon0, self.lat0, nest=False, lonlat=True)
lon1, lat1 = pix2ang(1048576 * 8, id, nest=False, lonlat=True)
np.testing.assert_array_almost_equal(lon1, self.lon0, decimal=5)
np.testing.assert_array_almost_equal(lat1, self.lat0, decimal=5)
# Now test nested
id = ang2pix(1048576 * 8, self.theta0, self.phi0, nest=True)
theta1, phi1 = pix2ang(1048576 * 8, id, nest=True)
np.testing.assert_array_almost_equal(theta1, self.theta0)
np.testing.assert_array_almost_equal(phi1, self.phi0)
def test_vec2pix_lonlat(self):
# Need to decrease the precision of the check because degrees, not radians, are used
vec = ang2vec(self.lon0, self.lat0, lonlat=True)
lon1, lat1 = vec2ang(vec, lonlat=True)
np.testing.assert_array_almost_equal(lon1, self.lon0, decimal=5)
np.testing.assert_array_almost_equal(lat1, self.lat0, decimal=5)
def test_get_interp_val_lonlat(self):
m = np.arange(12.0)
val0 = get_interp_val(m, self.theta0, self.phi0)
val1 = get_interp_val(m, self.lon0, self.lat0, lonlat=True)
np.testing.assert_array_almost_equal(val0, val1)
def test_get_interp_weights(self):
p0, w0 = (np.array([0, 1, 4, 5]), np.array([1.0, 0.0, 0.0, 0.0]))
# phi not specified, theta assumed to be pixel
p1, w1 = get_interp_weights(1, 0)
np.testing.assert_array_almost_equal(p0, p1)
np.testing.assert_array_almost_equal(w0, w1)
# If phi is not specified, lonlat should do nothing
p1, w1 = get_interp_weights(1, 0, lonlat=True)
np.testing.assert_array_almost_equal(p0, p1)
np.testing.assert_array_almost_equal(w0, w1)
p0, w0 = (np.array([1, 2, 3, 0]), np.array([0.25, 0.25, 0.25, 0.25]))
p1, w1 = get_interp_weights(1, 0, 0)
np.testing.assert_array_almost_equal(p0, p1)
np.testing.assert_array_almost_equal(w0, w1)
p1, w1 = get_interp_weights(1, 0, 90, lonlat=True)
np.testing.assert_array_almost_equal(p0, p1)
np.testing.assert_array_almost_equal(w0, w1)
def test_get_all_neighbours(self):
ipix0 = np.array([8, 4, 0, -1, 1, 6, 9, -1])
ipix1 = get_all_neighbours(1, np.pi / 2, np.pi / 2)
ipix2 = get_all_neighbours(1, 90, 0, lonlat=True)
np.testing.assert_array_almost_equal(ipix0, ipix1)
np.testing.assert_array_almost_equal(ipix0, ipix2)
def test_fit_dipole(self):
nside = 32
npix = nside2npix(nside)
d = [0.3, 0.5, 0.2]
vec = np.transpose(pix2vec(nside, np.arange(npix)))
signal = np.dot(vec, d)
mono, dipole = fit_dipole(signal)
self.assertAlmostEqual(mono, 0.0)
self.assertAlmostEqual(d[0], dipole[0])
self.assertAlmostEqual(d[1], dipole[1])
self.assertAlmostEqual(d[2], dipole[2])
def test_boundaries(self):
"""Test whether the boundary shapes look sane"""
for lgNside in range(1, 5):
nside = 1 << lgNside
for pix in range(nside2npix(nside)):
for res in range(1, 50, 7):
num = 4 * res # Expected number of points
for nest in (True, False):
points = boundaries(nside, pix, res, nest=nest)
self.assertTrue(points.shape == (3, num))
dist = np.linalg.norm(
points[:, : num - 1] - points[:, 1:]
) # distance between points
self.assertTrue((dist != 0).all())
dmin = np.min(dist)
dmax = np.max(dist)
self.assertTrue(dmax / dmin <= 2.0)
def test_ring(self):
for lgNside in range(1, 5):
nside = 1 << lgNside
numPix = nside2npix(nside)
numRings = 4 * nside - 1 # Expected number of rings
for nest in (True, False):
pix = np.arange(numPix, dtype=np.int64)
ring = pix2ring(nside, pix, nest=nest)
self.assertTrue(pix.shape == ring.shape)
self.assertTrue(len(set(ring)) == numRings)
if not nest:
first = ring[: numPix - 1]
second = ring[1:]
self.assertTrue(
np.logical_or(first == second, first == second - 1).all()
)
def test_accept_ma_allows_only_keywords(self):
""" Test whether the accept_ma wrapper accepts calls using only keywords."""
ma = np.zeros(12 * 16 ** 2)
try:
ud_grade(map_in=ma, nside_out=32)
except IndexError:
self.fail("IndexError raised")
def test_isnsideok(self):
""" Test the isnsideok."""
self.assertTrue(isnsideok(nside=1, nest=False))
self.assertTrue(isnsideok(nside=16, nest=True))
self.assertTrue(not isnsideok(nside=-16, nest=True))
self.assertTrue(not isnsideok(nside=-16, nest=False))
self.assertTrue(not isnsideok(nside=13, nest=True))
if __name__ == "__main__":
unittest.main()
|
gpl-2.0
|
rcharp/toyota-flask
|
venv/lib/python2.7/site-packages/markupsafe/tests.py
|
674
|
6107
|
# -*- coding: utf-8 -*-
import gc
import sys
import unittest
from markupsafe import Markup, escape, escape_silent
from markupsafe._compat import text_type
class MarkupTestCase(unittest.TestCase):
def test_adding(self):
# adding two strings should escape the unsafe one
unsafe = '<script type="application/x-some-script">alert("foo");</script>'
safe = Markup('<em>username</em>')
assert unsafe + safe == text_type(escape(unsafe)) + text_type(safe)
def test_string_interpolation(self):
# string interpolations are safe to use too
assert Markup('<em>%s</em>') % '<bad user>' == \
'<em>&lt;bad user&gt;</em>'
assert Markup('<em>%(username)s</em>') % {
'username': '<bad user>'
} == '<em>&lt;bad user&gt;</em>'
assert Markup('%i') % 3.14 == '3'
assert Markup('%.2f') % 3.14 == '3.14'
def test_type_behavior(self):
# an escaped object is markup too
assert type(Markup('foo') + 'bar') is Markup
# and it implements __html__ by returning itself
x = Markup("foo")
assert x.__html__() is x
def test_html_interop(self):
# it also knows how to treat __html__ objects
class Foo(object):
def __html__(self):
return '<em>awesome</em>'
def __unicode__(self):
return 'awesome'
__str__ = __unicode__
assert Markup(Foo()) == '<em>awesome</em>'
assert Markup('<strong>%s</strong>') % Foo() == \
'<strong><em>awesome</em></strong>'
def test_tuple_interpol(self):
self.assertEqual(Markup('<em>%s:%s</em>') % (
'<foo>',
'<bar>',
), Markup(u'<em>&lt;foo&gt;:&lt;bar&gt;</em>'))
def test_dict_interpol(self):
self.assertEqual(Markup('<em>%(foo)s</em>') % {
'foo': '<foo>',
}, Markup(u'<em>&lt;foo&gt;</em>'))
self.assertEqual(Markup('<em>%(foo)s:%(bar)s</em>') % {
'foo': '<foo>',
'bar': '<bar>',
}, Markup(u'<em>&lt;foo&gt;:&lt;bar&gt;</em>'))
def test_escaping(self):
# escaping and unescaping
assert escape('"<>&\'') == '&#34;&lt;&gt;&amp;&#39;'
assert Markup("<em>Foo &amp; Bar</em>").striptags() == "Foo & Bar"
assert Markup("&lt;test&gt;").unescape() == "<test>"
def test_formatting(self):
for actual, expected in (
(Markup('%i') % 3.14, '3'),
(Markup('%.2f') % 3.14159, '3.14'),
(Markup('%s %s %s') % ('<', 123, '>'), '&lt; 123 &gt;'),
(Markup('<em>{awesome}</em>').format(awesome='<awesome>'),
'<em>&lt;awesome&gt;</em>'),
(Markup('{0[1][bar]}').format([0, {'bar': '<bar/>'}]),
'&lt;bar/&gt;'),
(Markup('{0[1][bar]}').format([0, {'bar': Markup('<bar/>')}]),
'<bar/>')):
assert actual == expected, "%r should be %r!" % (actual, expected)
# This is new in 2.7
if sys.version_info >= (2, 7):
def test_formatting_empty(self):
formatted = Markup('{}').format(0)
assert formatted == Markup('0')
def test_custom_formatting(self):
class HasHTMLOnly(object):
def __html__(self):
return Markup('<foo>')
class HasHTMLAndFormat(object):
def __html__(self):
return Markup('<foo>')
def __html_format__(self, spec):
return Markup('<FORMAT>')
assert Markup('{0}').format(HasHTMLOnly()) == Markup('<foo>')
assert Markup('{0}').format(HasHTMLAndFormat()) == Markup('<FORMAT>')
def test_complex_custom_formatting(self):
class User(object):
def __init__(self, id, username):
self.id = id
self.username = username
def __html_format__(self, format_spec):
if format_spec == 'link':
return Markup('<a href="/user/{0}">{1}</a>').format(
self.id,
self.__html__(),
)
elif format_spec:
raise ValueError('Invalid format spec')
return self.__html__()
def __html__(self):
return Markup('<span class=user>{0}</span>').format(self.username)
user = User(1, 'foo')
assert Markup('<p>User: {0:link}').format(user) == \
Markup('<p>User: <a href="/user/1"><span class=user>foo</span></a>')
def test_all_set(self):
import markupsafe as markup
for item in markup.__all__:
getattr(markup, item)
def test_escape_silent(self):
assert escape_silent(None) == Markup()
assert escape(None) == Markup(None)
assert escape_silent('<foo>') == Markup(u'&lt;foo&gt;')
def test_splitting(self):
self.assertEqual(Markup('a b').split(), [
Markup('a'),
Markup('b')
])
self.assertEqual(Markup('a b').rsplit(), [
Markup('a'),
Markup('b')
])
self.assertEqual(Markup('a\nb').splitlines(), [
Markup('a'),
Markup('b')
])
def test_mul(self):
self.assertEqual(Markup('a') * 3, Markup('aaa'))
class MarkupLeakTestCase(unittest.TestCase):
def test_markup_leaks(self):
counts = set()
for count in range(20):
for item in range(1000):
escape("foo")
escape("<foo>")
escape(u"foo")
escape(u"<foo>")
counts.add(len(gc.get_objects()))
assert len(counts) == 1, 'ouch, c extension seems to leak objects'
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(MarkupTestCase))
# this test only tests the c extension
if not hasattr(escape, 'func_code'):
suite.addTest(unittest.makeSuite(MarkupLeakTestCase))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
# vim:sts=4:sw=4:et:
|
apache-2.0
|
vertigo235/Sick-Beard-XEM
|
lib/hachoir_parser/misc/pifv.py
|
90
|
8492
|
"""
EFI Platform Initialization Firmware Volume parser.
Author: Alexandre Boeglin
Creation date: 08 jul 2007
"""
from lib.hachoir_parser import Parser
from lib.hachoir_core.field import (FieldSet,
UInt8, UInt16, UInt24, UInt32, UInt64, Enum,
CString, String, PaddingBytes, RawBytes, NullBytes)
from lib.hachoir_core.endian import LITTLE_ENDIAN
from lib.hachoir_core.tools import paddingSize, humanFilesize
from lib.hachoir_parser.common.win32 import GUID
EFI_SECTION_COMPRESSION = 0x1
EFI_SECTION_GUID_DEFINED = 0x2
EFI_SECTION_PE32 = 0x10
EFI_SECTION_PIC = 0x11
EFI_SECTION_TE = 0x12
EFI_SECTION_DXE_DEPEX = 0x13
EFI_SECTION_VERSION = 0x14
EFI_SECTION_USER_INTERFACE = 0x15
EFI_SECTION_COMPATIBILITY16 = 0x16
EFI_SECTION_FIRMWARE_VOLUME_IMAGE = 0x17
EFI_SECTION_FREEFORM_SUBTYPE_GUID = 0x18
EFI_SECTION_RAW = 0x19
EFI_SECTION_PEI_DEPEX = 0x1b
EFI_SECTION_TYPE = {
EFI_SECTION_COMPRESSION: "Encapsulation section where other sections" \
+ " are compressed",
EFI_SECTION_GUID_DEFINED: "Encapsulation section where other sections" \
+ " have format defined by a GUID",
EFI_SECTION_PE32: "PE32+ Executable image",
EFI_SECTION_PIC: "Position-Independent Code",
EFI_SECTION_TE: "Terse Executable image",
EFI_SECTION_DXE_DEPEX: "DXE Dependency Expression",
EFI_SECTION_VERSION: "Version, Text and Numeric",
EFI_SECTION_USER_INTERFACE: "User-Friendly name of the driver",
EFI_SECTION_COMPATIBILITY16: "DOS-style 16-bit EXE",
EFI_SECTION_FIRMWARE_VOLUME_IMAGE: "PI Firmware Volume image",
EFI_SECTION_FREEFORM_SUBTYPE_GUID: "Raw data with GUID in header to" \
+ " define format",
EFI_SECTION_RAW: "Raw data",
EFI_SECTION_PEI_DEPEX: "PEI Dependency Expression",
}
EFI_FV_FILETYPE_RAW = 0x1
EFI_FV_FILETYPE_FREEFORM = 0x2
EFI_FV_FILETYPE_SECURITY_CORE = 0x3
EFI_FV_FILETYPE_PEI_CORE = 0x4
EFI_FV_FILETYPE_DXE_CORE = 0x5
EFI_FV_FILETYPE_PEIM = 0x6
EFI_FV_FILETYPE_DRIVER = 0x7
EFI_FV_FILETYPE_COMBINED_PEIM_DRIVER = 0x8
EFI_FV_FILETYPE_APPLICATION = 0x9
EFI_FV_FILETYPE_FIRMWARE_VOLUME_IMAGE = 0xb
EFI_FV_FILETYPE_FFS_PAD = 0xf0
EFI_FV_FILETYPE = {
EFI_FV_FILETYPE_RAW: "Binary data",
EFI_FV_FILETYPE_FREEFORM: "Sectioned data",
EFI_FV_FILETYPE_SECURITY_CORE: "Platform core code used during the SEC" \
+ " phase",
EFI_FV_FILETYPE_PEI_CORE: "PEI Foundation",
EFI_FV_FILETYPE_DXE_CORE: "DXE Foundation",
EFI_FV_FILETYPE_PEIM: "PEI module (PEIM)",
EFI_FV_FILETYPE_DRIVER: "DXE driver",
EFI_FV_FILETYPE_COMBINED_PEIM_DRIVER: "Combined PEIM/DXE driver",
EFI_FV_FILETYPE_APPLICATION: "Application",
EFI_FV_FILETYPE_FIRMWARE_VOLUME_IMAGE: "Firmware volume image",
EFI_FV_FILETYPE_FFS_PAD: "Pad File For FFS",
}
for x in xrange(0xc0, 0xe0):
EFI_FV_FILETYPE[x] = "OEM File"
for x in xrange(0xe0, 0xf0):
EFI_FV_FILETYPE[x] = "Debug/Test File"
for x in xrange(0xf1, 0x100):
EFI_FV_FILETYPE[x] = "Firmware File System Specific File"
class BlockMap(FieldSet):
static_size = 8*8
def createFields(self):
yield UInt32(self, "num_blocks")
yield UInt32(self, "len")
def createDescription(self):
return "%d blocks of %s" % (
self["num_blocks"].value, humanFilesize(self["len"].value))
class FileSection(FieldSet):
COMPRESSION_TYPE = {
0: 'Not Compressed',
1: 'Standard Compression',
}
def __init__(self, *args, **kw):
FieldSet.__init__(self, *args, **kw)
self._size = self["size"].value * 8
section_type = self["type"].value
if section_type in (EFI_SECTION_DXE_DEPEX, EFI_SECTION_PEI_DEPEX):
# These sections can sometimes be longer than what their size
# claims! It's so nice to have such detailed specs and not follow
# them ...
if self.stream.readBytes(self.absolute_address +
self._size, 1) == '\0':
self._size = self._size + 16
def createFields(self):
# Header
yield UInt24(self, "size")
yield Enum(UInt8(self, "type"), EFI_SECTION_TYPE)
section_type = self["type"].value
if section_type == EFI_SECTION_COMPRESSION:
yield UInt32(self, "uncomp_len")
yield Enum(UInt8(self, "comp_type"), self.COMPRESSION_TYPE)
elif section_type == EFI_SECTION_FREEFORM_SUBTYPE_GUID:
yield GUID(self, "sub_type_guid")
elif section_type == EFI_SECTION_GUID_DEFINED:
yield GUID(self, "section_definition_guid")
yield UInt16(self, "data_offset")
yield UInt16(self, "attributes")
elif section_type == EFI_SECTION_USER_INTERFACE:
yield CString(self, "file_name", charset="UTF-16-LE")
elif section_type == EFI_SECTION_VERSION:
yield UInt16(self, "build_number")
yield CString(self, "version", charset="UTF-16-LE")
# Content
content_size = (self.size - self.current_size) // 8
if content_size == 0:
return
if section_type == EFI_SECTION_COMPRESSION:
compression_type = self["comp_type"].value
if compression_type == 1:
while not self.eof:
yield RawBytes(self, "compressed_content", content_size)
else:
while not self.eof:
yield FileSection(self, "section[]")
elif section_type == EFI_SECTION_FIRMWARE_VOLUME_IMAGE:
yield FirmwareVolume(self, "firmware_volume")
else:
yield RawBytes(self, "content", content_size,
EFI_SECTION_TYPE.get(self["type"].value,
"Unknown Section Type"))
def createDescription(self):
return EFI_SECTION_TYPE.get(self["type"].value,
"Unknown Section Type")
class File(FieldSet):
def __init__(self, *args, **kw):
FieldSet.__init__(self, *args, **kw)
self._size = self["size"].value * 8
def createFields(self):
# Header
yield GUID(self, "name")
yield UInt16(self, "integrity_check")
yield Enum(UInt8(self, "type"), EFI_FV_FILETYPE)
yield UInt8(self, "attributes")
yield UInt24(self, "size")
yield UInt8(self, "state")
# Content
while not self.eof:
yield FileSection(self, "section[]")
def createDescription(self):
return "%s: %s containing %d section(s)" % (
self["name"].value,
self["type"].display,
len(self.array("section")))
class FirmwareVolume(FieldSet):
def __init__(self, *args, **kw):
FieldSet.__init__(self, *args, **kw)
if not self._size:
self._size = self["volume_len"].value * 8
def createFields(self):
# Header
yield NullBytes(self, "zero_vector", 16)
yield GUID(self, "fs_guid")
yield UInt64(self, "volume_len")
yield String(self, "signature", 4)
yield UInt32(self, "attributes")
yield UInt16(self, "header_len")
yield UInt16(self, "checksum")
yield UInt16(self, "ext_header_offset")
yield UInt8(self, "reserved")
yield UInt8(self, "revision")
while True:
bm = BlockMap(self, "block_map[]")
yield bm
if bm['num_blocks'].value == 0 and bm['len'].value == 0:
break
# TODO must handle extended header
# Content
while not self.eof:
padding = paddingSize(self.current_size // 8, 8)
if padding:
yield PaddingBytes(self, "padding[]", padding)
yield File(self, "file[]")
def createDescription(self):
return "Firmware Volume containing %d file(s)" % len(self.array("file"))
class PIFVFile(Parser):
endian = LITTLE_ENDIAN
MAGIC = '_FVH'
PARSER_TAGS = {
"id": "pifv",
"category": "program",
"file_ext": ("bin", ""),
"min_size": 64*8, # smallest possible header
"magic_regex": (("\0{16}.{24}%s" % MAGIC, 0), ),
"description": "EFI Platform Initialization Firmware Volume",
}
def validate(self):
if self.stream.readBytes(40*8, 4) != self.MAGIC:
return "Invalid magic number"
if self.stream.readBytes(0, 16) != "\0"*16:
return "Invalid zero vector"
return True
def createFields(self):
while not self.eof:
yield FirmwareVolume(self, "firmware_volume[]")
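A minimal usage sketch, not part of the original module: it assumes the
vendored hachoir layout used by the imports above, and "firmware.fd" is a
hypothetical input path.
if __name__ == "__main__":
    from lib.hachoir_core.stream import FileInputStream
    stream = FileInputStream(u"firmware.fd")  # hypothetical firmware dump
    parser = PIFVFile(stream)
    for volume in parser.array("firmware_volume"):
        print volume.description  # e.g. "Firmware Volume containing N file(s)"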
|
gpl-3.0
|
danielkza/dnf
|
dnf/yum/packages.py
|
15
|
3808
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# Copyright 2004 Duke University
# Written by Seth Vidal <skvidal at phy.duke.edu>
"""
Classes and functions dealing with rpm package representations.
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from . import misc
import re
import fnmatch
def buildPkgRefDict(pkgs, casematch=True):
"""take a list of pkg objects and return a dict the contains all the possible
naming conventions for them eg: for (name,i386,0,1,1)
dict[name] = (name, i386, 0, 1, 1)
dict[name.i386] = (name, i386, 0, 1, 1)
dict[name-1-1.i386] = (name, i386, 0, 1, 1)
dict[name-1] = (name, i386, 0, 1, 1)
dict[name-1-1] = (name, i386, 0, 1, 1)
dict[0:name-1-1.i386] = (name, i386, 0, 1, 1)
dict[name-0:1-1.i386] = (name, i386, 0, 1, 1)
"""
pkgdict = {}
for pkg in pkgs:
(n, a, e, v, r) = pkg.pkgtup
if not casematch:
n = n.lower()
a = a.lower()
e = e.lower()
v = v.lower()
r = r.lower()
name = n
nameArch = '%s.%s' % (n, a)
nameVerRelArch = '%s-%s-%s.%s' % (n, v, r, a)
nameVer = '%s-%s' % (n, v)
nameVerRel = '%s-%s-%s' % (n, v, r)
envra = '%s:%s-%s-%s.%s' % (e, n, v, r, a)
nevra = '%s-%s:%s-%s.%s' % (n, e, v, r, a)
for item in [name, nameArch, nameVerRelArch, nameVer, nameVerRel, envra, nevra]:
if item not in pkgdict:
pkgdict[item] = []
pkgdict[item].append(pkg)
return pkgdict
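# A hedged sketch of the keys this produces for one hypothetical package
# tuple; FakePkg stands in for a real package object and provides only the
# pkgtup attribute read above:
#
#     class FakePkg(object):
#         def __init__(self, pkgtup):
#             self.pkgtup = pkgtup
#
#     refs = buildPkgRefDict([FakePkg(('name', 'i386', '0', '1', '1'))])
#     sorted(refs)
#     # ['0:name-1-1.i386', 'name', 'name-0:1-1.i386', 'name-1',
#     #  'name-1-1', 'name-1-1.i386', 'name.i386']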
def parsePackages(pkgs, usercommands, casematch=0):
"""matches up the user request versus a pkg list:
for installs/updates available pkgs should be the 'others list'
for removes it should be the installed list of pkgs
takes an optional casematch option to determine if case should be matched
exactly. Defaults to not matching."""
pkgdict = buildPkgRefDict(pkgs, bool(casematch))
exactmatch = set()
matched = set()
unmatched = set()
for command in usercommands:
if not casematch:
command = command.lower()
if command in pkgdict:
exactmatch.update(pkgdict[command])
del pkgdict[command]
else:
# anything we couldn't find a match for
# could mean it's not there, could mean it's a wildcard
if misc.re_glob(command):
trylist = pkgdict.keys()
# command and pkgdict are already lowered if not casematch
# so case sensitive is always fine
restring = fnmatch.translate(command)
regex = re.compile(restring)
foundit = 0
for item in trylist:
if regex.match(item):
matched.update(pkgdict[item])
del pkgdict[item]
foundit = 1
if not foundit:
unmatched.add(command)
else:
unmatched.add(command)
return exactmatch, matched, unmatched
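A hedged usage sketch of parsePackages(); FakePkg is a hypothetical stand-in
providing only the pkgtup attribute the code above reads.
if __name__ == '__main__':
    class FakePkg(object):
        def __init__(self, pkgtup):
            self.pkgtup = pkgtup
    exact, matched, unmatched = parsePackages(
        [FakePkg(('name', 'i386', '0', '1', '1'))],
        ['name-1-1', 'na*', 'missing'])
    # 'name-1-1' hits the ref dict directly -> exact; 'na*' glob-matches the
    # remaining keys -> matched; 'missing' matches nothing -> unmatched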
|
gpl-2.0
|
kristjankorjus/Replicating-DeepMind
|
src/ale/ale.py
|
6
|
4582
|
"""
ALE class launches the ALE game and manages the communication with it
"""
import os
import numpy as np
from preprocessor import Preprocessor
import traceback
import random
class ALE:
actions = [np.uint8(0), np.uint8(1), np.uint8(3), np.uint8(4), np.uint8(11), np.uint8(12)]
current_points = 0
next_screen = ""
game_over = False
skip_frames = None
display_screen = "true"
game_ROM = None
fin = ""
fout = ""
preprocessor = None
def __init__(self, display_screen, skip_frames, game_ROM):
"""
Initialize ALE class. Creates the FIFO pipes, launches ./ale and does the "handshake" phase of communication
@param display_screen: bool, whether to show the game on screen or not
@param skip_frames: int, number of frames to skip in the game emulator
@param game_ROM: location of the game binary to launch with ./ale
"""
self.display_screen = display_screen
self.skip_frames = skip_frames
self.game_ROM = game_ROM
#: create FIFO pipes
os.system("mkfifo ale_fifo_out")
os.system("mkfifo ale_fifo_in")
#: launch ALE with appropriate commands in the background
command='./../libraries/ale/ale -max_num_episodes 0 -game_controller fifo_named -disable_colour_averaging true -run_length_encoding false -frame_skip '+str(self.skip_frames)+' -display_screen '+self.display_screen+" "+self.game_ROM+" &"
os.system(command)
#: open communication with pipes
self.fin = open('ale_fifo_out')
self.fout = open('ale_fifo_in', 'w')
input = self.fin.readline()[:-1]
size = input.split("-") # saves the image sizes (160*210) for breakout
#: first thing we send to ALE is the output options - we want to get only image data
# and episode info (hence the zeros)
self.fout.write("1,0,0,1\n")
self.fout.flush() # send the lines written to pipe
#: initialize the variables that we will start receiving from ./ale
self.next_image = []
self.game_over = True
self.current_points = 0
#: initialise preprocessor
self.preprocessor = Preprocessor()
def new_game(self):
"""
Start a new game when all lives are lost.
"""
#: read from ALE: game screen + episode info
self.next_image, episode_info = self.fin.readline()[:-2].split(":")
self.game_over = bool(int(episode_info.split(",")[0]))
self.current_points = int(episode_info.split(",")[1])
#: send the first command
# first command has to be 1,0 or 1,1, because the game starts when you press "fire!",
self.fout.write("1,0\n")
self.fout.flush()
self.fin.readline()
#: preprocess the image and add the image to memory D using a special add function
#self.memory.add_first(self.preprocessor.process(self.next_image))
return self.preprocessor.process(self.next_image)
def end_game(self):
"""
When all lives are lost, end_game adds the last frame to memory and resets the system
"""
#: tell the memory that we lost
# self.memory.add_last() # this will be done in Main.py
#: send reset command to ALE
self.fout.write("45,45\n")
self.fout.flush()
self.game_over = False # just in case, but new_game should do it anyway
def move(self, action_index):
"""
Sends an action to ALE and reads the response
@param action_index: int, the index of the chosen action in the list of available actions
"""
#: Convert index to action
action = self.actions[action_index]
#: Generate a random number for the action of player B
action_b = random.choice(range(255))
#: Write the actions to the pipe and send them to ALE
self.fout.write(str(action)+","+str(action_b)+"\n")
#print "sent action to ALE: ", str(action)+",0"
self.fout.flush()
#: Read from ALE
line = self.fin.readline()
try:
self.next_image, episode_info = line[:-2].split(":")
#print "got correct info from ALE: image + ", episode_info
except:
print "got an error in reading stuff from ALE"
traceback.print_exc()
print line
exit()
self.game_over = bool(int(episode_info.split(",")[0]))
self.current_points = int(episode_info.split(",")[1])
return self.current_points, self.preprocessor.process(self.next_image)
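A minimal driver sketch (the ROM path is hypothetical); it assumes the ./ale
binary and the FIFO setup created in __init__ are available on this machine.
if __name__ == "__main__":
    ale = ALE(display_screen="false", skip_frames=4,
              game_ROM="../libraries/ale/roms/breakout.bin")
    frame = ale.new_game()
    while not ale.game_over:
        # pick a random action index; move() returns (points, processed frame)
        points, frame = ale.move(random.randrange(len(ale.actions)))
    ale.end_game()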
|
gpl-3.0
|
jitendra29/servo
|
tests/wpt/css-tests/tools/wptserve/tests/functional/base.py
|
293
|
1831
|
import base64
import logging
import os
import unittest
import urllib
import urllib2
import urlparse
import wptserve
logging.basicConfig()
here = os.path.split(__file__)[0]
doc_root = os.path.join(here, "docroot")
class Request(urllib2.Request):
def __init__(self, *args, **kwargs):
urllib2.Request.__init__(self, *args, **kwargs)
self.method = "GET"
def get_method(self):
return self.method
def add_data(self, data):
if hasattr(data, "iteritems"):
data = urllib.urlencode(data)
print data
self.add_header("Content-Length", str(len(data)))
urllib2.Request.add_data(self, data)
class TestUsingServer(unittest.TestCase):
def setUp(self):
self.server = wptserve.server.WebTestHttpd(host="localhost",
port=0,
use_ssl=False,
certificate=None,
doc_root=doc_root)
self.server.start(False)
def tearDown(self):
self.server.stop()
def abs_url(self, path, query=None):
return urlparse.urlunsplit(("http", "%s:%i" % (self.server.host, self.server.port), path, query, None))
def request(self, path, query=None, method="GET", headers=None, body=None, auth=None):
req = Request(self.abs_url(path, query))
req.method = method
if headers is None:
headers = {}
for name, value in headers.iteritems():
req.add_header(name, value)
if body is not None:
req.add_data(body)
if auth is not None:
req.add_header("Authorization", "Basic %s" % base64.encodestring('%s:%s' % auth))
return urllib2.urlopen(req)
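A hedged sketch of a concrete test built on the helper above; the
"/document.txt" path is assumed to exist under the docroot directory.
class TestExample(TestUsingServer):
    def test_get(self):
        resp = self.request("/document.txt")
        self.assertEqual(200, resp.getcode())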
|
mpl-2.0
|
jshufelt/volatility
|
volatility/plugins/mac/bash.py
|
12
|
6355
|
# Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: [email protected]
@organization:
"""
import struct, string
import volatility.obj as obj
import volatility.debug as debug
import volatility.addrspace as addrspace
import volatility.plugins.mac.common as mac_common
import volatility.plugins.mac.pstasks as mac_tasks
from volatility.renderers import TreeGrid
bash_vtypes = {
'bash32_hist_entry': [ 0xc, {
'line': [0x0, ['pointer', ['String', dict(length = 1024)]]],
'timestamp': [0x4, ['pointer', ['String', dict(length = 1024)]]],
'data': [0x8, ['pointer', ['void']]],
}],
'bash64_hist_entry': [ 24, {
'line': [0, ['pointer', ['String', dict(length = 1024)]]],
'timestamp': [8, ['pointer', ['String', dict(length = 1024)]]],
'data': [16, ['pointer', ['void']]],
}],
}
class _mac_hist_entry(obj.CType):
"""A class for history entries"""
def is_valid(self):
line_addr = self.line_ptr()
time_addr = self.time_ptr()
if (not obj.CType.is_valid(self) or
not self.obj_vm.is_valid_address(line_addr) or
not self.obj_vm.is_valid_address(time_addr)):
return False
ts = self.obj_vm.read(time_addr, 256)
if not ts:
return False
idx = ts.find("\x00")
if idx != -1:
ts = ts[:idx]
# At this point in time, the epoch integer size will
# never be less than 10 characters, and the stamp is
# always preceded by a pound/hash character.
if len(ts) < 10 or str(ts)[0] != "#":
return False
# The final check is to make sure the entire string
# is composed of numbers. Try to convert to an int.
try:
int(str(ts)[1:])
except ValueError:
return False
return True
def line(self):
line_addr = self.line_ptr()
buf = self.obj_vm.read(line_addr, 256)
if buf:
idx = buf.find("\x00")
if idx != -1:
buf = buf[:idx]
ret = "".join([c for c in buf if c in string.printable])
else:
ret = ""
return ret
@property
def time_as_integer(self):
# Get the string and remove the leading "#" from the timestamp
time_addr = self.time_ptr()
ts = self.obj_vm.read(time_addr, 256)
ts = ts[1:]
idx = ts.find("\x00")
if idx != -1:
ts = ts[:idx]
# Convert the string into an integer (number of seconds)
return int(ts)
def time_object(self):
nsecs = self.time_as_integer
# Build a timestamp object from the integer
time_val = struct.pack("<I", nsecs)
time_buf = addrspace.BufferAddressSpace(self.obj_vm.get_config(), data = time_val)
time_obj = obj.Object("UnixTimeStamp", offset = 0, vm = time_buf, is_utc = True)
return time_obj
def line_ptr(self):
addr = self.m("line").obj_offset
return self.read_ptr(addr)
def time_ptr(self):
addr = self.m("timestamp").obj_offset
return self.read_ptr(addr)
class bash64_hist_entry(_mac_hist_entry):
def read_ptr(self, addr):
addr = self.obj_vm.read(addr, 8)
addr = struct.unpack("<Q", addr)[0]
return addr
class bash32_hist_entry(_mac_hist_entry):
def read_ptr(self, addr):
addr = self.obj_vm.read(addr, 4)
addr = struct.unpack("<I", addr)[0]
return addr
class MacBashTypes(obj.ProfileModification):
conditions = {"os" : lambda x : x in ["mac"]}
def modification(self, profile):
profile.vtypes.update(bash_vtypes)
profile.object_classes.update({"bash32_hist_entry": bash32_hist_entry, "bash64_hist_entry": bash64_hist_entry})
class mac_bash(mac_tasks.mac_tasks):
"""Recover bash history from bash process memory"""
def __init__(self, config, *args, **kwargs):
mac_tasks.mac_tasks.__init__(self, config, *args, **kwargs)
self._config.add_option('SCAN_ALL', short_option = 'A', default = False, help = 'scan all processes, not just those named bash', action = 'store_true')
def unified_output(self, data):
return TreeGrid([("Pid", int),
("Name", str),
("Command Time", str),
("Command", str),
], self.generator(data))
def generator(self, data):
for task in data:
if not (self._config.SCAN_ALL or str(task.p_comm) == "bash"):
continue
for hist_entry in task.bash_history_entries():
yield (0, [
int(task.p_pid),
str(task.p_comm),
str(hist_entry.time_object()),
str(hist_entry.line()),
])
def render_text(self, outfd, data):
self.table_header(outfd, [("Pid", "8"),
("Name", "20"),
("Command Time", "30"),
("Command", ""),])
for task in data:
if not (self._config.SCAN_ALL or str(task.p_comm) == "bash"):
continue
for hist_entry in task.bash_history_entries():
self.table_row(outfd, task.p_pid, task.p_comm,
hist_entry.time_object(),
hist_entry.line())
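A hedged usage note: volatility registers plugins under their class name, so
this one would be invoked roughly as follows (image path and profile name are
hypothetical):
# $ python vol.py -f mac_memory.dmp --profile=MacMountainLion_10_8_3_AMDx64 mac_bash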
|
gpl-2.0
|
yongtang/tensorflow
|
tensorflow/python/keras/saving/saved_model/json_utils.py
|
6
|
4656
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utils for creating and loading the Layer metadata for SavedModel.
These are required to retain the original format of the build input shape, since
layers and models may have different build behaviors depending on whether the shape
is a list, tuple, or TensorShape. For example, Network.build() will create
separate inputs if the given input_shape is a list, and will create a single
input if the given shape is a tuple.
"""
import collections
import enum
import json
import numpy as np
import wrapt
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import type_spec
class Encoder(json.JSONEncoder):
"""JSON encoder and decoder that handles TensorShapes and tuples."""
def default(self, obj): # pylint: disable=method-hidden
"""Encodes objects for types that aren't handled by the default encoder."""
if isinstance(obj, tensor_shape.TensorShape):
items = obj.as_list() if obj.rank is not None else None
return {'class_name': 'TensorShape', 'items': items}
return get_json_type(obj)
def encode(self, obj):
return super(Encoder, self).encode(_encode_tuple(obj))
def _encode_tuple(x):
if isinstance(x, tuple):
return {'class_name': '__tuple__',
'items': tuple(_encode_tuple(i) for i in x)}
elif isinstance(x, list):
return [_encode_tuple(i) for i in x]
elif isinstance(x, dict):
return {key: _encode_tuple(value) for key, value in x.items()}
else:
return x
def decode(json_string):
return json.loads(json_string, object_hook=_decode_helper)
def _decode_helper(obj):
"""A decoding helper that is TF-object aware."""
if isinstance(obj, dict) and 'class_name' in obj:
if obj['class_name'] == 'TensorShape':
return tensor_shape.TensorShape(obj['items'])
elif obj['class_name'] == 'TypeSpec':
return type_spec.lookup(obj['type_spec'])._deserialize( # pylint: disable=protected-access
_decode_helper(obj['serialized']))
elif obj['class_name'] == '__tuple__':
return tuple(_decode_helper(i) for i in obj['items'])
elif obj['class_name'] == '__ellipsis__':
return Ellipsis
return obj
def get_json_type(obj):
"""Serializes any object to a JSON-serializable structure.
Args:
obj: the object to serialize
Returns:
JSON-serializable structure representing `obj`.
Raises:
TypeError: if `obj` cannot be serialized.
"""
# if obj is a serializable Keras class instance
# e.g. optimizer, layer
if hasattr(obj, 'get_config'):
return {'class_name': obj.__class__.__name__, 'config': obj.get_config()}
# if obj is any numpy type
if type(obj).__module__ == np.__name__:
if isinstance(obj, np.ndarray):
return obj.tolist()
else:
return obj.item()
# misc functions (e.g. loss function)
if callable(obj):
return obj.__name__
# if obj is a python 'type'
if type(obj).__name__ == type.__name__:
return obj.__name__
if isinstance(obj, tensor_shape.Dimension):
return obj.value
if isinstance(obj, tensor_shape.TensorShape):
return obj.as_list()
if isinstance(obj, dtypes.DType):
return obj.name
if isinstance(obj, collections.abc.Mapping):
return dict(obj)
if obj is Ellipsis:
return {'class_name': '__ellipsis__'}
if isinstance(obj, wrapt.ObjectProxy):
return obj.__wrapped__
if isinstance(obj, type_spec.TypeSpec):
try:
type_spec_name = type_spec.get_name(type(obj))
return {'class_name': 'TypeSpec', 'type_spec': type_spec_name,
'serialized': obj._serialize()} # pylint: disable=protected-access
except ValueError:
raise ValueError('Unable to serialize {} to JSON, because the TypeSpec '
'class {} has not been registered.'
.format(obj, type(obj)))
if isinstance(obj, enum.Enum):
return obj.value
raise TypeError('Not JSON Serializable:', obj)
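A minimal round-trip sketch of the Encoder/decode pair defined above:
if __name__ == '__main__':
    blob = json.dumps(
        {'shape': tensor_shape.TensorShape([None, 32]), 'pair': (1, 2)},
        cls=Encoder)
    restored = decode(blob)
    assert restored['pair'] == (1, 2)
    assert restored['shape'].as_list() == [None, 32]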
|
apache-2.0
|
mozilla/firefox-flicks
|
vendor-local/lib/python/billiard/forking.py
|
1
|
20104
|
#
# Module for starting a process object using os.fork() or CreateProcess()
#
# multiprocessing/forking.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
from __future__ import absolute_import
import os
import sys
import signal
import warnings
from ._ext import Connection, PipeConnection, win32
from pickle import load, HIGHEST_PROTOCOL
from billiard import util, process
__all__ = ['Popen', 'assert_spawning', 'exit',
'duplicate', 'close', 'ForkingPickler']
try:
WindowsError = WindowsError # noqa
except NameError:
class WindowsError(Exception): pass # noqa
W_OLD_DJANGO_LAYOUT = """\
Will add directory %r to path! This is necessary to accommodate \
pre-Django 1.4 layouts using setup_environ.
You can skip this warning by adding a DJANGO_SETTINGS_MODULE=settings \
environment variable.
"""
#
# Choose whether to do a fork or spawn (fork+exec) on Unix.
# This affects how some shared resources should be created.
#
_forking_is_enabled = sys.platform != 'win32'
#
# Check that the current thread is spawning a child process
#
def assert_spawning(self):
if not Popen.thread_is_spawning():
raise RuntimeError(
'%s objects should only be shared between processes'
' through inheritance' % type(self).__name__
)
#
# Try making some callable types picklable
#
from pickle import Pickler
if sys.version_info[0] == 3:
from copyreg import dispatch_table
class ForkingPickler(Pickler):
_extra_reducers = {}
def __init__(self, *args, **kwargs):
Pickler.__init__(self, *args, **kwargs)
self.dispatch_table = dispatch_table.copy()
self.dispatch_table.update(self._extra_reducers)
@classmethod
def register(cls, type, reduce):
cls._extra_reducers[type] = reduce
def _reduce_method(m):
if m.__self__ is None:
return getattr, (m.__class__, m.__func__.__name__)
else:
return getattr, (m.__self__, m.__func__.__name__)
class _C:
def f(self):
pass
ForkingPickler.register(type(_C().f), _reduce_method)
else:
class ForkingPickler(Pickler): # noqa
dispatch = Pickler.dispatch.copy()
@classmethod
def register(cls, type, reduce):
def dispatcher(self, obj):
rv = reduce(obj)
self.save_reduce(obj=obj, *rv)
cls.dispatch[type] = dispatcher
def _reduce_method(m): # noqa
if m.im_self is None:
return getattr, (m.im_class, m.im_func.func_name)
else:
return getattr, (m.im_self, m.im_func.func_name)
ForkingPickler.register(type(ForkingPickler.save), _reduce_method)
def _reduce_method_descriptor(m):
return getattr, (m.__objclass__, m.__name__)
ForkingPickler.register(type(list.append), _reduce_method_descriptor)
ForkingPickler.register(type(int.__add__), _reduce_method_descriptor)
try:
from functools import partial
except ImportError:
pass
else:
def _reduce_partial(p):
return _rebuild_partial, (p.func, p.args, p.keywords or {})
def _rebuild_partial(func, args, keywords):
return partial(func, *args, **keywords)
ForkingPickler.register(partial, _reduce_partial)
def dump(obj, file, protocol=None):
ForkingPickler(file, protocol).dump(obj)
#
# Make (Pipe)Connection picklable
#
def reduce_connection(conn):
# XXX check not necessary since only registered with ForkingPickler
if not Popen.thread_is_spawning():
raise RuntimeError(
'By default %s objects can only be shared between processes\n'
'using inheritance' % type(conn).__name__
)
return type(conn), (Popen.duplicate_for_child(conn.fileno()),
conn.readable, conn.writable)
ForkingPickler.register(Connection, reduce_connection)
if PipeConnection:
ForkingPickler.register(PipeConnection, reduce_connection)
#
# Unix
#
if sys.platform != 'win32':
import thread
import select
WINEXE = False
WINSERVICE = False
exit = os._exit
duplicate = os.dup
close = os.close
_select = util._eintr_retry(select.select)
#
# We define a Popen class similar to the one from subprocess, but
# whose constructor takes a process object as its argument.
#
class Popen(object):
_tls = thread._local()
def __init__(self, process_obj):
_Django_old_layout_hack__save()
sys.stdout.flush()
sys.stderr.flush()
self.returncode = None
r, w = os.pipe()
self.sentinel = r
if _forking_is_enabled:
self.pid = os.fork()
if self.pid == 0:
os.close(r)
if 'random' in sys.modules:
import random
random.seed()
code = process_obj._bootstrap()
os._exit(code)
else:
from_parent_fd, to_child_fd = os.pipe()
cmd = get_command_line() + [str(from_parent_fd)]
self.pid = os.fork()
if self.pid == 0:
os.close(r)
os.close(to_child_fd)
os.execv(sys.executable, cmd)
# send information to child
prep_data = get_preparation_data(process_obj._name)
os.close(from_parent_fd)
to_child = os.fdopen(to_child_fd, 'wb')
Popen._tls.process_handle = self.pid
try:
dump(prep_data, to_child, HIGHEST_PROTOCOL)
dump(process_obj, to_child, HIGHEST_PROTOCOL)
finally:
del(Popen._tls.process_handle)
to_child.close()
# `w` will be closed when the child exits, at which point `r`
# will become ready for reading (using e.g. select()).
os.close(w)
util.Finalize(self, os.close, (r,))
def poll(self, flag=os.WNOHANG):
if self.returncode is None:
try:
pid, sts = os.waitpid(self.pid, flag)
except os.error:
# Child process not yet created. See #1731717
# e.errno == errno.ECHILD == 10
return None
if pid == self.pid:
if os.WIFSIGNALED(sts):
self.returncode = -os.WTERMSIG(sts)
else:
assert os.WIFEXITED(sts)
self.returncode = os.WEXITSTATUS(sts)
return self.returncode
def wait(self, timeout=None):
if self.returncode is None:
if timeout is not None:
r = _select([self.sentinel], [], [], timeout)[0]
if not r:
return None
# This shouldn't block if select() returned successfully.
return self.poll(os.WNOHANG if timeout == 0.0 else 0)
return self.returncode
def terminate(self):
if self.returncode is None:
try:
os.kill(self.pid, signal.SIGTERM)
except OSError:
if self.wait(timeout=0.1) is None:
raise
@staticmethod
def thread_is_spawning():
if _forking_is_enabled:
return False
else:
return getattr(Popen._tls, 'process_handle', None) is not None
@staticmethod
def duplicate_for_child(handle):
return handle
#
# Windows
#
else:
import thread
import msvcrt
import _subprocess
#
#
#
TERMINATE = 0x10000
WINEXE = (sys.platform == 'win32' and getattr(sys, 'frozen', False))
WINSERVICE = sys.executable.lower().endswith("pythonservice.exe")
exit = win32.ExitProcess
close = win32.CloseHandle
#
#
#
def duplicate(handle, target_process=None, inheritable=False):
if target_process is None:
target_process = _subprocess.GetCurrentProcess()
return _subprocess.DuplicateHandle(
_subprocess.GetCurrentProcess(), handle, target_process,
0, inheritable, _subprocess.DUPLICATE_SAME_ACCESS
).Detach()
#
# We define a Popen class similar to the one from subprocess, but
# whose constructor takes a process object as its argument.
#
class Popen(object):
'''
Start a subprocess to run the code of a process object
'''
_tls = thread._local()
def __init__(self, process_obj):
_Django_old_layout_hack__save()
# create pipe for communication with child
rfd, wfd = os.pipe()
# get handle for read end of the pipe and make it inheritable
rhandle = duplicate(msvcrt.get_osfhandle(rfd), inheritable=True)
os.close(rfd)
# start process
cmd = get_command_line() + [rhandle]
cmd = ' '.join('"%s"' % x for x in cmd)
hp, ht, pid, tid = _subprocess.CreateProcess(
_python_exe, cmd, None, None, 1, 0, None, None, None
)
ht.Close()
close(rhandle)
# set attributes of self
self.pid = pid
self.returncode = None
self._handle = hp
self.sentinel = int(hp)
# send information to child
prep_data = get_preparation_data(process_obj._name)
to_child = os.fdopen(wfd, 'wb')
Popen._tls.process_handle = int(hp)
try:
dump(prep_data, to_child, HIGHEST_PROTOCOL)
dump(process_obj, to_child, HIGHEST_PROTOCOL)
finally:
del Popen._tls.process_handle
to_child.close()
@staticmethod
def thread_is_spawning():
return getattr(Popen._tls, 'process_handle', None) is not None
@staticmethod
def duplicate_for_child(handle):
return duplicate(handle, Popen._tls.process_handle)
def wait(self, timeout=None):
if self.returncode is None:
if timeout is None:
msecs = _subprocess.INFINITE
else:
msecs = max(0, int(timeout * 1000 + 0.5))
res = _subprocess.WaitForSingleObject(int(self._handle), msecs)
if res == _subprocess.WAIT_OBJECT_0:
code = _subprocess.GetExitCodeProcess(self._handle)
if code == TERMINATE:
code = -signal.SIGTERM
self.returncode = code
return self.returncode
def poll(self):
return self.wait(timeout=0)
def terminate(self):
if self.returncode is None:
try:
_subprocess.TerminateProcess(int(self._handle), TERMINATE)
except WindowsError:
if self.wait(timeout=0.1) is None:
raise
#
#
#
if WINSERVICE:
_python_exe = os.path.join(sys.exec_prefix, 'python.exe')
else:
_python_exe = sys.executable
def set_executable(exe):
global _python_exe
_python_exe = exe
def is_forking(argv):
'''
Return whether commandline indicates we are forking
'''
if len(argv) >= 2 and argv[1] == '--billiard-fork':
assert len(argv) == 3
os.environ["FORKED_BY_MULTIPROCESSING"] = "1"
return True
else:
return False
def freeze_support():
'''
Run code for process object if this is not the main process
'''
if is_forking(sys.argv):
main()
sys.exit()
def get_command_line():
'''
Returns prefix of command line used for spawning a child process
'''
if process.current_process()._identity == () and is_forking(sys.argv):
raise RuntimeError('''
Attempt to start a new process before the current process
has finished its bootstrapping phase.
This probably means that you have forgotten to use the proper
idiom in the main module:
if __name__ == '__main__':
freeze_support()
...
The "freeze_support()" line can be omitted if the program
is not going to be frozen to produce a Windows executable.''')
if getattr(sys, 'frozen', False):
return [sys.executable, '--billiard-fork']
else:
prog = 'from billiard.forking import main; main()'
return [_python_exe, '-c', prog, '--billiard-fork']
def _Django_old_layout_hack__save():
if 'DJANGO_PROJECT_DIR' not in os.environ:
try:
settings_name = os.environ['DJANGO_SETTINGS_MODULE']
except KeyError:
return # not using Django.
conf_settings = sys.modules.get('django.conf.settings')
configured = conf_settings and conf_settings.configured
try:
project_name, _ = settings_name.split('.', 1)
except ValueError:
return # not modified by setup_environ
project = __import__(project_name)
try:
project_dir = os.path.normpath(_module_parent_dir(project))
except AttributeError:
return # dynamically generated module (no __file__)
if configured:
warnings.warn(UserWarning(
W_OLD_DJANGO_LAYOUT % os.path.realpath(project_dir)
))
os.environ['DJANGO_PROJECT_DIR'] = project_dir
def _Django_old_layout_hack__load():
try:
sys.path.append(os.environ['DJANGO_PROJECT_DIR'])
except KeyError:
pass
def _module_parent_dir(mod):
dir, filename = os.path.split(_module_dir(mod))
if dir == os.curdir or not dir:
dir = os.getcwd()
return dir
def _module_dir(mod):
if '__init__.py' in mod.__file__:
return os.path.dirname(mod.__file__)
return mod.__file__
def main():
'''
Run code specified by data received over pipe
'''
global _forking_is_enabled
_Django_old_layout_hack__load()
assert is_forking(sys.argv)
_forking_is_enabled = False
handle = int(sys.argv[-1])
if sys.platform == 'win32':
fd = msvcrt.open_osfhandle(handle, os.O_RDONLY)
else:
fd = handle
from_parent = os.fdopen(fd, 'rb')
process.current_process()._inheriting = True
preparation_data = load(from_parent)
prepare(preparation_data)
# Huge hack to make logging before Process.run work.
try:
os.environ["MP_MAIN_FILE"] = sys.modules["__main__"].__file__
except KeyError:
pass
loglevel = os.environ.get("_MP_FORK_LOGLEVEL_")
logfile = os.environ.get("_MP_FORK_LOGFILE_") or None
format = os.environ.get("_MP_FORK_LOGFORMAT_")
if loglevel:
from billiard import util
import logging
logger = util.get_logger()
logger.setLevel(int(loglevel))
if not logger.handlers:
logger._rudimentary_setup = True
logfile = logfile or sys.__stderr__
if hasattr(logfile, "write"):
handler = logging.StreamHandler(logfile)
else:
handler = logging.FileHandler(logfile)
formatter = logging.Formatter(
format or util.DEFAULT_LOGGING_FORMAT)
handler.setFormatter(formatter)
logger.addHandler(handler)
self = load(from_parent)
process.current_process()._inheriting = False
from_parent.close()
exitcode = self._bootstrap()
exit(exitcode)
def get_preparation_data(name):
'''
Return info about parent needed by child to unpickle process object
'''
from billiard.util import _logger, _log_to_stderr
d = dict(
name=name,
sys_path=sys.path,
sys_argv=sys.argv,
log_to_stderr=_log_to_stderr,
orig_dir=process.ORIGINAL_DIR,
authkey=process.current_process().authkey,
)
if _logger is not None:
d['log_level'] = _logger.getEffectiveLevel()
if not WINEXE and not WINSERVICE:
main_path = getattr(sys.modules['__main__'], '__file__', None)
if not main_path and sys.argv[0] not in ('', '-c'):
main_path = sys.argv[0]
if main_path is not None:
if not os.path.isabs(main_path) and \
process.ORIGINAL_DIR is not None:
main_path = os.path.join(process.ORIGINAL_DIR, main_path)
d['main_path'] = os.path.normpath(main_path)
return d
#
# Make (Pipe)Connection picklable
#
def reduce_connection(conn):
if not Popen.thread_is_spawning():
raise RuntimeError(
'By default %s objects can only be shared between processes\n'
'using inheritance' % type(conn).__name__
)
return type(conn), (Popen.duplicate_for_child(conn.fileno()),
conn.readable, conn.writable)
ForkingPickler.register(Connection, reduce_connection)
ForkingPickler.register(PipeConnection, reduce_connection)
#
# Prepare current process
#
old_main_modules = []
def prepare(data):
'''
Try to get current process ready to unpickle process object
'''
old_main_modules.append(sys.modules['__main__'])
if 'name' in data:
process.current_process().name = data['name']
if 'authkey' in data:
process.current_process()._authkey = data['authkey']
if 'log_to_stderr' in data and data['log_to_stderr']:
util.log_to_stderr()
if 'log_level' in data:
util.get_logger().setLevel(data['log_level'])
if 'sys_path' in data:
sys.path = data['sys_path']
if 'sys_argv' in data:
sys.argv = data['sys_argv']
if 'dir' in data:
os.chdir(data['dir'])
if 'orig_dir' in data:
process.ORIGINAL_DIR = data['orig_dir']
if 'main_path' in data:
main_path = data['main_path']
main_name = os.path.splitext(os.path.basename(main_path))[0]
if main_name == '__init__':
main_name = os.path.basename(os.path.dirname(main_path))
if main_name == '__main__':
main_module = sys.modules['__main__']
main_module.__file__ = main_path
elif main_name != 'ipython':
# Main modules not actually called __main__.py may
# contain additional code that should still be executed
import imp
if main_path is None:
dirs = None
elif os.path.basename(main_path).startswith('__init__.py'):
dirs = [os.path.dirname(os.path.dirname(main_path))]
else:
dirs = [os.path.dirname(main_path)]
assert main_name not in sys.modules, main_name
file, path_name, etc = imp.find_module(main_name, dirs)
try:
# We would like to do "imp.load_module('__main__', ...)"
# here. However, that would cause 'if __name__ ==
# "__main__"' clauses to be executed.
main_module = imp.load_module(
'__parents_main__', file, path_name, etc
)
finally:
if file:
file.close()
sys.modules['__main__'] = main_module
main_module.__name__ = '__main__'
# Try to make the potentially picklable objects in
# sys.modules['__main__'] realize they are in the main
# module -- somewhat ugly.
for obj in main_module.__dict__.values():
try:
if obj.__module__ == '__parents_main__':
obj.__module__ = '__main__'
except Exception:
pass
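A hedged, standalone sketch of the main-module idiom that the error message
in get_command_line() asks for; 'work' is a placeholder target function.
from billiard import Process, freeze_support

def work():
    pass

if __name__ == '__main__':
    freeze_support()  # no-op unless this process was launched via --billiard-fork
    p = Process(target=work)
    p.start()
    p.join()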
|
bsd-3-clause
|
M4sse/chromium.src
|
third_party/cython/src/Cython/Compiler/Interpreter.py
|
99
|
2063
|
"""
This module deals with interpreting the parse tree as Python
would have done, in the compiler.
For now this only covers parse tree to value conversion of
compile-time values.
"""
from Nodes import *
from ExprNodes import *
from Errors import CompileError
class EmptyScope(object):
def lookup(self, name):
return None
empty_scope = EmptyScope()
def interpret_compiletime_options(optlist, optdict, type_env=None, type_args=()):
"""
Tries to interpret a list of compile time option nodes.
The result will be a tuple (optlist, optdict) but where
all expression nodes have been interpreted. The result is
in the form of tuples (value, pos).
optlist is a list of nodes, while optdict is a DictNode (the
result optdict is a dict)
If type_env is set, all type nodes will be analysed and the resulting
type set. Otherwise only interpretable ExprNodes
are allowed; other nodes raise errors.
A CompileError will be raised if there are problems.
"""
def interpret(node, ix):
if ix in type_args:
if type_env:
type = node.analyse_as_type(type_env)
if not type:
raise CompileError(node.pos, "Invalid type.")
return (type, node.pos)
else:
raise CompileError(node.pos, "Type not allowed here.")
else:
if (sys.version_info[0] >=3 and
isinstance(node, StringNode) and
node.unicode_value is not None):
return (node.unicode_value, node.pos)
return (node.compile_time_value(empty_scope), node.pos)
if optlist:
optlist = [interpret(x, ix) for ix, x in enumerate(optlist)]
if optdict:
assert isinstance(optdict, DictNode)
new_optdict = {}
for item in optdict.key_value_pairs:
new_key, dummy = interpret(item.key, None)
new_optdict[new_key] = interpret(item.value, item.key.value)
optdict = new_optdict
return (optlist, optdict)  # not new_optdict: that name is unset when optdict is empty
|
bsd-3-clause
|
silly-wacky-3-town-toon/SOURCE-COD
|
Panda3D-1.10.0/python/Lib/urllib.py
|
37
|
58056
|
"""Open an arbitrary URL.
See the following document for more info on URLs:
"Names and Addresses, URIs, URLs, URNs, URCs", at
http://www.w3.org/pub/WWW/Addressing/Overview.html
See also the HTTP spec (from which the error codes are derived):
"HTTP - Hypertext Transfer Protocol", at
http://www.w3.org/pub/WWW/Protocols/
Related standards and specs:
- RFC1808: the "relative URL" spec. (authoritative status)
- RFC1738 - the "URL standard". (authoritative status)
- RFC1630 - the "URI spec". (informational status)
The object returned by URLopener().open(file) will differ per
protocol. All you know is that it has methods read(), readline(),
readlines(), fileno(), close() and info(). The read*(), fileno()
and close() methods work like those of open files.
The info() method returns a mimetools.Message object which can be
used to query various info about the object, if available.
(mimetools.Message objects are queried with the getheader() method.)
"""
import string
import socket
import os
import time
import sys
import base64
import re
from urlparse import urljoin as basejoin
__all__ = ["urlopen", "URLopener", "FancyURLopener", "urlretrieve",
"urlcleanup", "quote", "quote_plus", "unquote", "unquote_plus",
"urlencode", "url2pathname", "pathname2url", "splittag",
"localhost", "thishost", "ftperrors", "basejoin", "unwrap",
"splittype", "splithost", "splituser", "splitpasswd", "splitport",
"splitnport", "splitquery", "splitattr", "splitvalue",
"getproxies"]
__version__ = '1.17' # XXX This version is not always updated :-(
MAXFTPCACHE = 10 # Trim the ftp cache beyond this size
# Helper for non-unix systems
if os.name == 'nt':
from nturl2path import url2pathname, pathname2url
elif os.name == 'riscos':
from rourl2path import url2pathname, pathname2url
else:
def url2pathname(pathname):
"""OS-specific conversion from a relative URL of the 'file' scheme
to a file system path; not recommended for general use."""
return unquote(pathname)
def pathname2url(pathname):
"""OS-specific conversion from a file system path to a relative URL
of the 'file' scheme; not recommended for general use."""
return quote(pathname)
# This really consists of two pieces:
# (1) a class which handles opening of all sorts of URLs
# (plus assorted utilities etc.)
# (2) a set of functions for parsing URLs
# XXX Should these be separated out into different modules?
# Shortcut for basic usage
_urlopener = None
def urlopen(url, data=None, proxies=None):
"""Create a file-like object for the specified URL to read from."""
from warnings import warnpy3k
warnpy3k("urllib.urlopen() has been removed in Python 3.0 in "
"favor of urllib2.urlopen()", stacklevel=2)
global _urlopener
if proxies is not None:
opener = FancyURLopener(proxies=proxies)
elif not _urlopener:
opener = FancyURLopener()
_urlopener = opener
else:
opener = _urlopener
if data is None:
return opener.open(url)
else:
return opener.open(url, data)
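# A hedged usage sketch (the URL is hypothetical); note the warnpy3k
# deprecation warning above fires on every call:
#
#     f = urlopen('http://www.example.com/')
#     try:
#         data = f.read()
#     finally:
#         f.close()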
def urlretrieve(url, filename=None, reporthook=None, data=None):
global _urlopener
if not _urlopener:
_urlopener = FancyURLopener()
return _urlopener.retrieve(url, filename, reporthook, data)
def urlcleanup():
if _urlopener:
_urlopener.cleanup()
_safe_quoters.clear()
ftpcache.clear()
# check for SSL
try:
import ssl
except:
_have_ssl = False
else:
_have_ssl = True
# exception raised when downloaded size does not match content-length
class ContentTooShortError(IOError):
def __init__(self, message, content):
IOError.__init__(self, message)
self.content = content
ftpcache = {}
class URLopener:
"""Class to open URLs.
This is a class rather than just a subroutine because we may need
more than one set of global protocol-specific options.
Note -- this is a base class for those who don't want the
automatic handling of errors type 302 (relocated) and 401
(authorization needed)."""
__tempfiles = None
version = "Python-urllib/%s" % __version__
# Constructor
def __init__(self, proxies=None, **x509):
if proxies is None:
proxies = getproxies()
assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
self.proxies = proxies
self.key_file = x509.get('key_file')
self.cert_file = x509.get('cert_file')
self.addheaders = [('User-Agent', self.version)]
self.__tempfiles = []
self.__unlink = os.unlink # See cleanup()
self.tempcache = None
# Undocumented feature: if you assign {} to tempcache,
# it is used to cache files retrieved with
# self.retrieve(). This is not enabled by default
# since it does not work for changing documents (and I
# haven't got the logic to check expiration headers
# yet).
self.ftpcache = ftpcache
# Undocumented feature: you can use a different
# ftp cache by assigning to the .ftpcache member;
# in case you want logically independent URL openers
# XXX This is not threadsafe. Bah.
def __del__(self):
self.close()
def close(self):
self.cleanup()
def cleanup(self):
# This code sometimes runs when the rest of this module
# has already been deleted, so it can't use any globals
# or import anything.
if self.__tempfiles:
for file in self.__tempfiles:
try:
self.__unlink(file)
except OSError:
pass
del self.__tempfiles[:]
if self.tempcache:
self.tempcache.clear()
def addheader(self, *args):
"""Add a header to be used by the HTTP interface only
e.g. u.addheader('Accept', 'sound/basic')"""
self.addheaders.append(args)
# External interface
def open(self, fullurl, data=None):
"""Use URLopener().open(file) instead of open(file, 'r')."""
fullurl = unwrap(toBytes(fullurl))
# percent-encode the url, fixing lame server errors, e.g. spaces
# within url paths.
fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
if self.tempcache and fullurl in self.tempcache:
filename, headers = self.tempcache[fullurl]
fp = open(filename, 'rb')
return addinfourl(fp, headers, fullurl)
urltype, url = splittype(fullurl)
if not urltype:
urltype = 'file'
if urltype in self.proxies:
proxy = self.proxies[urltype]
urltype, proxyhost = splittype(proxy)
host, selector = splithost(proxyhost)
url = (host, fullurl) # Signal special case to open_*()
else:
proxy = None
name = 'open_' + urltype
self.type = urltype
name = name.replace('-', '_')
if not hasattr(self, name):
if proxy:
return self.open_unknown_proxy(proxy, fullurl, data)
else:
return self.open_unknown(fullurl, data)
try:
if data is None:
return getattr(self, name)(url)
else:
return getattr(self, name)(url, data)
except socket.error, msg:
raise IOError, ('socket error', msg), sys.exc_info()[2]
def open_unknown(self, fullurl, data=None):
"""Overridable interface to open unknown URL type."""
type, url = splittype(fullurl)
raise IOError, ('url error', 'unknown url type', type)
def open_unknown_proxy(self, proxy, fullurl, data=None):
"""Overridable interface to open unknown URL type."""
type, url = splittype(fullurl)
raise IOError, ('url error', 'invalid proxy for %s' % type, proxy)
# External interface
def retrieve(self, url, filename=None, reporthook=None, data=None):
"""retrieve(url) returns (filename, headers) for a local object
or (tempfilename, headers) for a remote object."""
url = unwrap(toBytes(url))
if self.tempcache and url in self.tempcache:
return self.tempcache[url]
type, url1 = splittype(url)
if filename is None and (not type or type == 'file'):
try:
fp = self.open_local_file(url1)
hdrs = fp.info()
fp.close()
return url2pathname(splithost(url1)[1]), hdrs
except IOError:
pass
fp = self.open(url, data)
try:
headers = fp.info()
if filename:
tfp = open(filename, 'wb')
else:
import tempfile
garbage, path = splittype(url)
garbage, path = splithost(path or "")
path, garbage = splitquery(path or "")
path, garbage = splitattr(path or "")
suffix = os.path.splitext(path)[1]
(fd, filename) = tempfile.mkstemp(suffix)
self.__tempfiles.append(filename)
tfp = os.fdopen(fd, 'wb')
try:
result = filename, headers
if self.tempcache is not None:
self.tempcache[url] = result
bs = 1024*8
size = -1
read = 0
blocknum = 0
if "content-length" in headers:
size = int(headers["Content-Length"])
if reporthook:
reporthook(blocknum, bs, size)
while 1:
block = fp.read(bs)
if block == "":
break
read += len(block)
tfp.write(block)
blocknum += 1
if reporthook:
reporthook(blocknum, bs, size)
finally:
tfp.close()
finally:
fp.close()
# raise exception if actual size does not match content-length header
if size >= 0 and read < size:
raise ContentTooShortError("retrieval incomplete: got only %i out "
"of %i bytes" % (read, size), result)
return result
# Each method named open_<type> knows how to open that type of URL
def open_http(self, url, data=None):
"""Use HTTP protocol."""
import httplib
user_passwd = None
proxy_passwd= None
if isinstance(url, str):
host, selector = splithost(url)
if host:
user_passwd, host = splituser(host)
host = unquote(host)
realhost = host
else:
host, selector = url
# check whether the proxy contains authorization information
proxy_passwd, host = splituser(host)
# now we proceed with the url we want to obtain
urltype, rest = splittype(selector)
url = rest
user_passwd = None
if urltype.lower() != 'http':
realhost = None
else:
realhost, rest = splithost(rest)
if realhost:
user_passwd, realhost = splituser(realhost)
if user_passwd:
selector = "%s://%s%s" % (urltype, realhost, rest)
if proxy_bypass(realhost):
host = realhost
#print "proxy via http:", host, selector
if not host: raise IOError, ('http error', 'no host given')
if proxy_passwd:
proxy_passwd = unquote(proxy_passwd)
proxy_auth = base64.b64encode(proxy_passwd).strip()
else:
proxy_auth = None
if user_passwd:
user_passwd = unquote(user_passwd)
auth = base64.b64encode(user_passwd).strip()
else:
auth = None
h = httplib.HTTP(host)
if data is not None:
h.putrequest('POST', selector)
h.putheader('Content-Type', 'application/x-www-form-urlencoded')
h.putheader('Content-Length', '%d' % len(data))
else:
h.putrequest('GET', selector)
if proxy_auth: h.putheader('Proxy-Authorization', 'Basic %s' % proxy_auth)
if auth: h.putheader('Authorization', 'Basic %s' % auth)
if realhost: h.putheader('Host', realhost)
for args in self.addheaders: h.putheader(*args)
h.endheaders(data)
errcode, errmsg, headers = h.getreply()
fp = h.getfile()
if errcode == -1:
if fp: fp.close()
# something went wrong with the HTTP status line
raise IOError, ('http protocol error', 0,
'got a bad status line', None)
# According to RFC 2616, "2xx" code indicates that the client's
# request was successfully received, understood, and accepted.
if (200 <= errcode < 300):
return addinfourl(fp, headers, "http:" + url, errcode)
else:
if data is None:
return self.http_error(url, fp, errcode, errmsg, headers)
else:
return self.http_error(url, fp, errcode, errmsg, headers, data)
def http_error(self, url, fp, errcode, errmsg, headers, data=None):
"""Handle http errors.
Derived class can override this, or provide specific handlers
named http_error_DDD where DDD is the 3-digit error code."""
# First check if there's a specific handler for this error
name = 'http_error_%d' % errcode
if hasattr(self, name):
method = getattr(self, name)
if data is None:
result = method(url, fp, errcode, errmsg, headers)
else:
result = method(url, fp, errcode, errmsg, headers, data)
if result: return result
return self.http_error_default(url, fp, errcode, errmsg, headers)
def http_error_default(self, url, fp, errcode, errmsg, headers):
"""Default error handler: close the connection and raise IOError."""
fp.close()
raise IOError, ('http error', errcode, errmsg, headers)
if _have_ssl:
def open_https(self, url, data=None):
"""Use HTTPS protocol."""
import httplib
user_passwd = None
proxy_passwd = None
if isinstance(url, str):
host, selector = splithost(url)
if host:
user_passwd, host = splituser(host)
host = unquote(host)
realhost = host
else:
host, selector = url
# here, we determine whether the proxy contains authorization information
proxy_passwd, host = splituser(host)
urltype, rest = splittype(selector)
url = rest
user_passwd = None
if urltype.lower() != 'https':
realhost = None
else:
realhost, rest = splithost(rest)
if realhost:
user_passwd, realhost = splituser(realhost)
if user_passwd:
selector = "%s://%s%s" % (urltype, realhost, rest)
#print "proxy via https:", host, selector
if not host: raise IOError, ('https error', 'no host given')
if proxy_passwd:
proxy_passwd = unquote(proxy_passwd)
proxy_auth = base64.b64encode(proxy_passwd).strip()
else:
proxy_auth = None
if user_passwd:
user_passwd = unquote(user_passwd)
auth = base64.b64encode(user_passwd).strip()
else:
auth = None
h = httplib.HTTPS(host, 0,
key_file=self.key_file,
cert_file=self.cert_file)
if data is not None:
h.putrequest('POST', selector)
h.putheader('Content-Type',
'application/x-www-form-urlencoded')
h.putheader('Content-Length', '%d' % len(data))
else:
h.putrequest('GET', selector)
if proxy_auth: h.putheader('Proxy-Authorization', 'Basic %s' % proxy_auth)
if auth: h.putheader('Authorization', 'Basic %s' % auth)
if realhost: h.putheader('Host', realhost)
for args in self.addheaders: h.putheader(*args)
h.endheaders(data)
errcode, errmsg, headers = h.getreply()
fp = h.getfile()
if errcode == -1:
if fp: fp.close()
# something went wrong with the HTTP status line
raise IOError, ('http protocol error', 0,
'got a bad status line', None)
# According to RFC 2616, "2xx" code indicates that the client's
# request was successfully received, understood, and accepted.
if (200 <= errcode < 300):
return addinfourl(fp, headers, "https:" + url, errcode)
else:
if data is None:
return self.http_error(url, fp, errcode, errmsg, headers)
else:
return self.http_error(url, fp, errcode, errmsg, headers,
data)
def open_file(self, url):
"""Use local file or FTP depending on form of URL."""
if not isinstance(url, str):
raise IOError, ('file error', 'proxy support for file protocol currently not implemented')
if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/':
return self.open_ftp(url)
else:
return self.open_local_file(url)
def open_local_file(self, url):
"""Use local file."""
import mimetypes, mimetools, email.utils
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
host, file = splithost(url)
localname = url2pathname(file)
try:
stats = os.stat(localname)
except OSError, e:
raise IOError(e.errno, e.strerror, e.filename)
size = stats.st_size
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
mtype = mimetypes.guess_type(url)[0]
headers = mimetools.Message(StringIO(
'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
(mtype or 'text/plain', size, modified)))
if not host:
urlfile = file
if file[:1] == '/':
urlfile = 'file://' + file
elif file[:2] == './':
raise ValueError("local file url may start with / or file:. Unknown url of type: %s" % url)
return addinfourl(open(localname, 'rb'),
headers, urlfile)
host, port = splitport(host)
if not port \
and socket.gethostbyname(host) in (localhost(), thishost()):
urlfile = file
if file[:1] == '/':
urlfile = 'file://' + file
return addinfourl(open(localname, 'rb'),
headers, urlfile)
raise IOError, ('local file error', 'not on local host')
def open_ftp(self, url):
"""Use FTP protocol."""
if not isinstance(url, str):
raise IOError, ('ftp error', 'proxy support for ftp protocol currently not implemented')
import mimetypes, mimetools
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
host, path = splithost(url)
if not host: raise IOError, ('ftp error', 'no host given')
host, port = splitport(host)
user, host = splituser(host)
if user: user, passwd = splitpasswd(user)
else: passwd = None
host = unquote(host)
user = user or ''
passwd = passwd or ''
host = socket.gethostbyname(host)
if not port:
import ftplib
port = ftplib.FTP_PORT
else:
port = int(port)
path, attrs = splitattr(path)
path = unquote(path)
dirs = path.split('/')
dirs, file = dirs[:-1], dirs[-1]
if dirs and not dirs[0]: dirs = dirs[1:]
if dirs and not dirs[0]: dirs[0] = '/'
key = user, host, port, '/'.join(dirs)
# XXX thread unsafe!
if len(self.ftpcache) > MAXFTPCACHE:
# Prune the cache, rather arbitrarily
for k in self.ftpcache.keys():
if k != key:
v = self.ftpcache[k]
del self.ftpcache[k]
v.close()
try:
if not key in self.ftpcache:
self.ftpcache[key] = \
ftpwrapper(user, passwd, host, port, dirs)
if not file: type = 'D'
else: type = 'I'
for attr in attrs:
attr, value = splitvalue(attr)
if attr.lower() == 'type' and \
value in ('a', 'A', 'i', 'I', 'd', 'D'):
type = value.upper()
(fp, retrlen) = self.ftpcache[key].retrfile(file, type)
mtype = mimetypes.guess_type("ftp:" + url)[0]
headers = ""
if mtype:
headers += "Content-Type: %s\n" % mtype
if retrlen is not None and retrlen >= 0:
headers += "Content-Length: %d\n" % retrlen
headers = mimetools.Message(StringIO(headers))
return addinfourl(fp, headers, "ftp:" + url)
except ftperrors(), msg:
raise IOError, ('ftp error', msg), sys.exc_info()[2]
def open_data(self, url, data=None):
"""Use "data" URL."""
if not isinstance(url, str):
raise IOError, ('data error', 'proxy support for data protocol currently not implemented')
# ignore POSTed data
#
# syntax of data URLs:
# dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
# mediatype := [ type "/" subtype ] *( ";" parameter )
# data := *urlchar
# parameter := attribute "=" value
import mimetools
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
[type, data] = url.split(',', 1)
except ValueError:
raise IOError, ('data error', 'bad data URL')
if not type:
type = 'text/plain;charset=US-ASCII'
semi = type.rfind(';')
if semi >= 0 and '=' not in type[semi:]:
encoding = type[semi+1:]
type = type[:semi]
else:
encoding = ''
msg = []
msg.append('Date: %s' % time.strftime('%a, %d %b %Y %H:%M:%S GMT',
time.gmtime(time.time())))
msg.append('Content-type: %s' % type)
if encoding == 'base64':
data = base64.decodestring(data)
else:
data = unquote(data)
msg.append('Content-Length: %d' % len(data))
msg.append('')
msg.append(data)
msg = '\n'.join(msg)
f = StringIO(msg)
headers = mimetools.Message(f, 0)
#f.fileno = None # needed for addinfourl
return addinfourl(f, headers, url)
class FancyURLopener(URLopener):
"""Derived class with handlers for errors we can handle (perhaps)."""
def __init__(self, *args, **kwargs):
URLopener.__init__(self, *args, **kwargs)
self.auth_cache = {}
self.tries = 0
self.maxtries = 10
def http_error_default(self, url, fp, errcode, errmsg, headers):
"""Default error handling -- don't raise an exception."""
return addinfourl(fp, headers, "http:" + url, errcode)
def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 302 -- relocated (temporarily)."""
self.tries += 1
if self.maxtries and self.tries >= self.maxtries:
if hasattr(self, "http_error_500"):
meth = self.http_error_500
else:
meth = self.http_error_default
self.tries = 0
return meth(url, fp, 500,
"Internal Server Error: Redirect Recursion", headers)
result = self.redirect_internal(url, fp, errcode, errmsg, headers,
data)
self.tries = 0
return result
def redirect_internal(self, url, fp, errcode, errmsg, headers, data):
if 'location' in headers:
newurl = headers['location']
elif 'uri' in headers:
newurl = headers['uri']
else:
return
fp.close()
# In case the server sent a relative URL, join with original:
newurl = basejoin(self.type + ":" + url, newurl)
# For security reasons we do not allow redirects to protocols
# other than HTTP, HTTPS or FTP.
newurl_lower = newurl.lower()
if not (newurl_lower.startswith('http://') or
newurl_lower.startswith('https://') or
newurl_lower.startswith('ftp://')):
raise IOError('redirect error', errcode,
errmsg + " - Redirection to url '%s' is not allowed" %
newurl,
headers)
return self.open(newurl)
def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 301 -- also relocated (permanently)."""
return self.http_error_302(url, fp, errcode, errmsg, headers, data)
def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 303 -- also relocated (essentially identical to 302)."""
return self.http_error_302(url, fp, errcode, errmsg, headers, data)
def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 307 -- relocated, but turn POST into error."""
if data is None:
return self.http_error_302(url, fp, errcode, errmsg, headers, data)
else:
return self.http_error_default(url, fp, errcode, errmsg, headers)
def http_error_401(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 401 -- authentication required.
This function supports Basic authentication only."""
if not 'www-authenticate' in headers:
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
stuff = headers['www-authenticate']
import re
match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
if not match:
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
scheme, realm = match.groups()
if scheme.lower() != 'basic':
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
name = 'retry_' + self.type + '_basic_auth'
if data is None:
return getattr(self,name)(url, realm)
else:
return getattr(self,name)(url, realm, data)
def http_error_407(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 407 -- proxy authentication required.
This function supports Basic authentication only."""
if not 'proxy-authenticate' in headers:
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
stuff = headers['proxy-authenticate']
import re
match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
if not match:
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
scheme, realm = match.groups()
if scheme.lower() != 'basic':
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
name = 'retry_proxy_' + self.type + '_basic_auth'
if data is None:
return getattr(self,name)(url, realm)
else:
return getattr(self,name)(url, realm, data)
def retry_proxy_http_basic_auth(self, url, realm, data=None):
host, selector = splithost(url)
newurl = 'http://' + host + selector
proxy = self.proxies['http']
urltype, proxyhost = splittype(proxy)
proxyhost, proxyselector = splithost(proxyhost)
i = proxyhost.find('@') + 1
proxyhost = proxyhost[i:]
user, passwd = self.get_user_passwd(proxyhost, realm, i)
if not (user or passwd): return None
proxyhost = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + proxyhost
self.proxies['http'] = 'http://' + proxyhost + proxyselector
if data is None:
return self.open(newurl)
else:
return self.open(newurl, data)
def retry_proxy_https_basic_auth(self, url, realm, data=None):
host, selector = splithost(url)
newurl = 'https://' + host + selector
proxy = self.proxies['https']
urltype, proxyhost = splittype(proxy)
proxyhost, proxyselector = splithost(proxyhost)
i = proxyhost.find('@') + 1
proxyhost = proxyhost[i:]
user, passwd = self.get_user_passwd(proxyhost, realm, i)
if not (user or passwd): return None
proxyhost = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + proxyhost
self.proxies['https'] = 'https://' + proxyhost + proxyselector
if data is None:
return self.open(newurl)
else:
return self.open(newurl, data)
def retry_http_basic_auth(self, url, realm, data=None):
host, selector = splithost(url)
i = host.find('@') + 1
host = host[i:]
user, passwd = self.get_user_passwd(host, realm, i)
if not (user or passwd): return None
host = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + host
newurl = 'http://' + host + selector
if data is None:
return self.open(newurl)
else:
return self.open(newurl, data)
def retry_https_basic_auth(self, url, realm, data=None):
host, selector = splithost(url)
i = host.find('@') + 1
host = host[i:]
user, passwd = self.get_user_passwd(host, realm, i)
if not (user or passwd): return None
host = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + host
newurl = 'https://' + host + selector
if data is None:
return self.open(newurl)
else:
return self.open(newurl, data)
def get_user_passwd(self, host, realm, clear_cache=0):
key = realm + '@' + host.lower()
if key in self.auth_cache:
if clear_cache:
del self.auth_cache[key]
else:
return self.auth_cache[key]
user, passwd = self.prompt_user_passwd(host, realm)
if user or passwd: self.auth_cache[key] = (user, passwd)
return user, passwd
def prompt_user_passwd(self, host, realm):
"""Override this in a GUI environment!"""
import getpass
try:
user = raw_input("Enter username for %s at %s: " % (realm,
host))
passwd = getpass.getpass("Enter password for %s in %s at %s: " %
(user, realm, host))
return user, passwd
except KeyboardInterrupt:
print
return None, None
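# A minimal sketch (not part of the original module) of overriding the
# terminal prompt for non-interactive use; the credential mapping below is a
# hypothetical stand-in for a real credential store.
class _NonInteractiveOpener(FancyURLopener):
    _credentials = {}  # hypothetical: {(host, realm): (user, passwd)}
    def prompt_user_passwd(self, host, realm):
        # Returning (None, None) makes the retry_* helpers give up cleanly.
        return self._credentials.get((host, realm), (None, None))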
# Utility functions
_localhost = None
def localhost():
"""Return the IP address of the magic hostname 'localhost'."""
global _localhost
if _localhost is None:
_localhost = socket.gethostbyname('localhost')
return _localhost
_thishost = None
def thishost():
"""Return the IP address of the current host."""
global _thishost
if _thishost is None:
_thishost = socket.gethostbyname(socket.gethostname())
return _thishost
_ftperrors = None
def ftperrors():
"""Return the set of errors raised by the FTP class."""
global _ftperrors
if _ftperrors is None:
import ftplib
_ftperrors = ftplib.all_errors
return _ftperrors
_noheaders = None
def noheaders():
"""Return an empty mimetools.Message object."""
global _noheaders
if _noheaders is None:
import mimetools
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
_noheaders = mimetools.Message(StringIO(), 0)
_noheaders.fp.close() # Recycle file descriptor
return _noheaders
# Utility classes
class ftpwrapper:
"""Class used by open_ftp() for cache of open FTP connections."""
def __init__(self, user, passwd, host, port, dirs,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
persistent=True):
self.user = user
self.passwd = passwd
self.host = host
self.port = port
self.dirs = dirs
self.timeout = timeout
self.refcount = 0
self.keepalive = persistent
self.init()
def init(self):
import ftplib
self.busy = 0
self.ftp = ftplib.FTP()
self.ftp.connect(self.host, self.port, self.timeout)
self.ftp.login(self.user, self.passwd)
for dir in self.dirs:
self.ftp.cwd(dir)
def retrfile(self, file, type):
import ftplib
self.endtransfer()
if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
else: cmd = 'TYPE ' + type; isdir = 0
try:
self.ftp.voidcmd(cmd)
except ftplib.all_errors:
self.init()
self.ftp.voidcmd(cmd)
conn = None
if file and not isdir:
# Try to retrieve as a file
try:
cmd = 'RETR ' + file
conn, retrlen = self.ftp.ntransfercmd(cmd)
except ftplib.error_perm, reason:
if str(reason)[:3] != '550':
raise IOError, ('ftp error', reason), sys.exc_info()[2]
if not conn:
# Set transfer mode to ASCII!
self.ftp.voidcmd('TYPE A')
# Try a directory listing. Verify that directory exists.
if file:
pwd = self.ftp.pwd()
try:
try:
self.ftp.cwd(file)
except ftplib.error_perm, reason:
raise IOError, ('ftp error', reason), sys.exc_info()[2]
finally:
self.ftp.cwd(pwd)
cmd = 'LIST ' + file
else:
cmd = 'LIST'
conn, retrlen = self.ftp.ntransfercmd(cmd)
self.busy = 1
ftpobj = addclosehook(conn.makefile('rb'), self.file_close)
self.refcount += 1
conn.close()
# Pass back both a suitably decorated object and a retrieval length
return (ftpobj, retrlen)
def endtransfer(self):
if not self.busy:
return
self.busy = 0
try:
self.ftp.voidresp()
except ftperrors():
pass
def close(self):
self.keepalive = False
if self.refcount <= 0:
self.real_close()
def file_close(self):
self.endtransfer()
self.refcount -= 1
if self.refcount <= 0 and not self.keepalive:
self.real_close()
def real_close(self):
self.endtransfer()
try:
self.ftp.close()
except ftperrors():
pass
class addbase:
"""Base class for addinfo and addclosehook."""
def __init__(self, fp):
self.fp = fp
self.read = self.fp.read
self.readline = self.fp.readline
if hasattr(self.fp, "readlines"): self.readlines = self.fp.readlines
if hasattr(self.fp, "fileno"):
self.fileno = self.fp.fileno
else:
self.fileno = lambda: None
if hasattr(self.fp, "__iter__"):
self.__iter__ = self.fp.__iter__
if hasattr(self.fp, "next"):
self.next = self.fp.next
def __repr__(self):
return '<%s at %r whose fp = %r>' % (self.__class__.__name__,
id(self), self.fp)
def close(self):
self.read = None
self.readline = None
self.readlines = None
self.fileno = None
if self.fp: self.fp.close()
self.fp = None
class addclosehook(addbase):
"""Class to add a close hook to an open file."""
def __init__(self, fp, closehook, *hookargs):
addbase.__init__(self, fp)
self.closehook = closehook
self.hookargs = hookargs
def close(self):
if self.closehook:
self.closehook(*self.hookargs)
self.closehook = None
self.hookargs = None
addbase.close(self)
class addinfo(addbase):
"""class to add an info() method to an open file."""
def __init__(self, fp, headers):
addbase.__init__(self, fp)
self.headers = headers
def info(self):
return self.headers
class addinfourl(addbase):
"""class to add info() and geturl() methods to an open file."""
def __init__(self, fp, headers, url, code=None):
addbase.__init__(self, fp)
self.headers = headers
self.url = url
self.code = code
def info(self):
return self.headers
def getcode(self):
return self.code
def geturl(self):
return self.url
# Utilities to parse URLs (most of these return None for missing parts):
# unwrap('<URL:type://host/path>') --> 'type://host/path'
# splittype('type:opaquestring') --> 'type', 'opaquestring'
# splithost('//host[:port]/path') --> 'host[:port]', '/path'
# splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'
# splitpasswd('user:passwd') -> 'user', 'passwd'
# splitport('host:port') --> 'host', 'port'
# splitquery('/path?query') --> '/path', 'query'
# splittag('/path#tag') --> '/path', 'tag'
# splitattr('/path;attr1=value1;attr2=value2;...') ->
# '/path', ['attr1=value1', 'attr2=value2', ...]
# splitvalue('attr=value') --> 'attr', 'value'
# unquote('abc%20def') -> 'abc def'
# quote('abc def') -> 'abc%20def')
try:
unicode
except NameError:
def _is_unicode(x):
return 0
else:
def _is_unicode(x):
return isinstance(x, unicode)
def toBytes(url):
"""toBytes(u"URL") --> 'URL'."""
# Most URL schemes require ASCII. If that changes, the conversion
# can be relaxed
if _is_unicode(url):
try:
url = url.encode("ASCII")
except UnicodeError:
raise UnicodeError("URL " + repr(url) +
" contains non-ASCII characters")
return url
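def _demo_toBytes():
    # Illustrative sketch (not part of the original module): ASCII URLs pass
    # through unchanged, while non-ASCII characters raise UnicodeError.
    assert toBytes(u'http://example.com/') == 'http://example.com/'
    raised = False
    try:
        toBytes(u'http://example.com/\xe9')
    except UnicodeError:
        raised = True
    assert raised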
def unwrap(url):
"""unwrap('<URL:type://host/path>') --> 'type://host/path'."""
url = url.strip()
if url[:1] == '<' and url[-1:] == '>':
url = url[1:-1].strip()
if url[:4] == 'URL:': url = url[4:].strip()
return url
_typeprog = None
def splittype(url):
"""splittype('type:opaquestring') --> 'type', 'opaquestring'."""
global _typeprog
if _typeprog is None:
import re
_typeprog = re.compile('^([^/:]+):')
match = _typeprog.match(url)
if match:
scheme = match.group(1)
return scheme.lower(), url[len(scheme) + 1:]
return None, url
_hostprog = None
def splithost(url):
"""splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
global _hostprog
if _hostprog is None:
import re
_hostprog = re.compile('^//([^/?]*)(.*)$')
match = _hostprog.match(url)
if match:
host_port = match.group(1)
path = match.group(2)
if path and not path.startswith('/'):
path = '/' + path
return host_port, path
return None, url
_userprog = None
def splituser(host):
"""splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
global _userprog
if _userprog is None:
import re
_userprog = re.compile('^(.*)@(.*)$')
match = _userprog.match(host)
if match: return match.group(1, 2)
return None, host
_passwdprog = None
def splitpasswd(user):
"""splitpasswd('user:passwd') -> 'user', 'passwd'."""
global _passwdprog
if _passwdprog is None:
import re
_passwdprog = re.compile('^([^:]*):(.*)$',re.S)
match = _passwdprog.match(user)
if match: return match.group(1, 2)
return user, None
# splittag('/path#tag') --> '/path', 'tag'
_portprog = None
def splitport(host):
"""splitport('host:port') --> 'host', 'port'."""
global _portprog
if _portprog is None:
import re
_portprog = re.compile('^(.*):([0-9]+)$')
match = _portprog.match(host)
if match: return match.group(1, 2)
return host, None
_nportprog = None
def splitnport(host, defport=-1):
"""Split host and port, returning numeric port.
Return given default port if no ':' found; defaults to -1.
Return the numerical port if a valid number is found after ':'.
Return None if ':' is present but not followed by a valid number."""
global _nportprog
if _nportprog is None:
import re
_nportprog = re.compile('^(.*):(.*)$')
match = _nportprog.match(host)
if match:
host, port = match.group(1, 2)
try:
if not port: raise ValueError, "no digits"
nport = int(port)
except ValueError:
nport = None
return host, nport
return host, defport
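def _demo_splitnport():
    # Illustrative sketch (not part of the original module) of the numeric
    # port handling documented above.
    assert splitnport('www.example.com:8080') == ('www.example.com', 8080)
    assert splitnport('www.example.com') == ('www.example.com', -1)
    assert splitnport('www.example.com:foo') == ('www.example.com', None)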
_queryprog = None
def splitquery(url):
"""splitquery('/path?query') --> '/path', 'query'."""
global _queryprog
if _queryprog is None:
import re
_queryprog = re.compile('^(.*)\?([^?]*)$')
match = _queryprog.match(url)
if match: return match.group(1, 2)
return url, None
_tagprog = None
def splittag(url):
"""splittag('/path#tag') --> '/path', 'tag'."""
global _tagprog
if _tagprog is None:
import re
_tagprog = re.compile('^(.*)#([^#]*)$')
match = _tagprog.match(url)
if match: return match.group(1, 2)
return url, None
def splitattr(url):
"""splitattr('/path;attr1=value1;attr2=value2;...') ->
'/path', ['attr1=value1', 'attr2=value2', ...]."""
words = url.split(';')
return words[0], words[1:]
_valueprog = None
def splitvalue(attr):
"""splitvalue('attr=value') --> 'attr', 'value'."""
global _valueprog
if _valueprog is None:
import re
_valueprog = re.compile('^([^=]*)=(.*)$')
match = _valueprog.match(attr)
if match: return match.group(1, 2)
return attr, None
# urlparse contains a duplicate of this method to avoid a circular import. If
# you update this method, also update the copy in urlparse. This code
# duplication does not exist in Python3.
_hexdig = '0123456789ABCDEFabcdef'
_hextochr = dict((a + b, chr(int(a + b, 16)))
for a in _hexdig for b in _hexdig)
_asciire = re.compile('([\x00-\x7f]+)')
def unquote(s):
"""unquote('abc%20def') -> 'abc def'."""
if _is_unicode(s):
if '%' not in s:
return s
bits = _asciire.split(s)
res = [bits[0]]
append = res.append
for i in range(1, len(bits), 2):
append(unquote(str(bits[i])).decode('latin1'))
append(bits[i + 1])
return ''.join(res)
bits = s.split('%')
# fastpath
if len(bits) == 1:
return s
res = [bits[0]]
append = res.append
for item in bits[1:]:
try:
append(_hextochr[item[:2]])
append(item[2:])
except KeyError:
append('%')
append(item)
return ''.join(res)
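def _demo_unquote():
    # Illustrative sketch (not part of the original module): valid escapes are
    # decoded, while malformed ones pass through unchanged instead of raising.
    assert unquote('abc%20def') == 'abc def'
    assert unquote('100%zz') == '100%zz'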
def unquote_plus(s):
"""unquote('%7e/abc+def') -> '~/abc def'"""
s = s.replace('+', ' ')
return unquote(s)
always_safe = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'abcdefghijklmnopqrstuvwxyz'
'0123456789' '_.-')
_safe_map = {}
for i, c in zip(xrange(256), str(bytearray(xrange(256)))):
_safe_map[c] = c if (i < 128 and c in always_safe) else '%{:02X}'.format(i)
_safe_quoters = {}
def quote(s, safe='/'):
"""quote('abc def') -> 'abc%20def'
Each part of a URL, e.g. the path info, the query, etc., has a
different set of reserved characters that must be quoted.
RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists
the following reserved characters.
reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" |
"$" | ","
Each of these characters is reserved in some component of a URL,
but not necessarily in all of them.
By default, the quote function is intended for quoting the path
section of a URL. Thus, it will not encode '/'. This character
is reserved, but in typical usage the quote function is being
called on a path where the existing slash characters are used as
reserved characters.
"""
# fastpath
if not s:
if s is None:
raise TypeError('None object cannot be quoted')
return s
cachekey = (safe, always_safe)
try:
(quoter, safe) = _safe_quoters[cachekey]
except KeyError:
safe_map = _safe_map.copy()
safe_map.update([(c, c) for c in safe])
quoter = safe_map.__getitem__
safe = always_safe + safe
_safe_quoters[cachekey] = (quoter, safe)
if not s.rstrip(safe):
return s
return ''.join(map(quoter, s))
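def _demo_quote():
    # Illustrative sketch (not part of the original module): '/' is exempt by
    # default (path quoting); the safe argument widens or narrows that set.
    assert quote('abc def') == 'abc%20def'
    assert quote('/~user/file name') == '/%7Euser/file%20name'
    assert quote('a/b', safe='') == 'a%2Fb'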
def quote_plus(s, safe=''):
"""Quote the query fragment of a URL; replacing ' ' with '+'"""
if ' ' in s:
s = quote(s, safe + ' ')
return s.replace(' ', '+')
return quote(s, safe)
def urlencode(query, doseq=0):
"""Encode a sequence of two-element tuples or dictionary into a URL query string.
If any values in the query arg are sequences and doseq is true, each
sequence element is converted to a separate parameter.
If the query arg is a sequence of two-element tuples, the order of the
parameters in the output will match the order of parameters in the
input.
"""
if hasattr(query,"items"):
# mapping objects
query = query.items()
else:
# it's a bother at times that strings and string-like objects are
# sequences...
try:
# non-sequence items should not work with len()
# non-empty strings will fail this
if len(query) and not isinstance(query[0], tuple):
raise TypeError
# zero-length sequences of all types will get here and succeed,
# but that's a minor nit - since the original implementation
# allowed empty dicts that type of behavior probably should be
# preserved for consistency
except TypeError:
ty,va,tb = sys.exc_info()
raise TypeError, "not a valid non-string sequence or mapping object", tb
l = []
if not doseq:
# preserve old behavior
for k, v in query:
k = quote_plus(str(k))
v = quote_plus(str(v))
l.append(k + '=' + v)
else:
for k, v in query:
k = quote_plus(str(k))
if isinstance(v, str):
v = quote_plus(v)
l.append(k + '=' + v)
elif _is_unicode(v):
# is there a reasonable way to convert to ASCII?
# encode generates a string, but "replace" or "ignore"
# lose information and "strict" can raise UnicodeError
v = quote_plus(v.encode("ASCII","replace"))
l.append(k + '=' + v)
else:
try:
# is this a sufficient test for sequence-ness?
len(v)
except TypeError:
# not a sequence
v = quote_plus(str(v))
l.append(k + '=' + v)
else:
# loop over the sequence
for elt in v:
l.append(k + '=' + quote_plus(str(elt)))
return '&'.join(l)
# Proxy handling
def getproxies_environment():
"""Return a dictionary of scheme -> proxy server URL mappings.
Scan the environment for variables named <scheme>_proxy;
this seems to be the standard convention. If you need a
different way, you can pass a proxies dictionary to the
[Fancy]URLopener constructor.
"""
proxies = {}
for name, value in os.environ.items():
name = name.lower()
if value and name[-6:] == '_proxy':
proxies[name[:-6]] = value
return proxies
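def _demo_getproxies_environment():
    # Illustrative sketch (not part of the original module) of the
    # <scheme>_proxy convention, using a hypothetical proxy URL.
    old = os.environ.get('http_proxy')
    os.environ['http_proxy'] = 'http://proxy.example.com:3128'
    try:
        assert getproxies_environment().get('http') == 'http://proxy.example.com:3128'
    finally:
        if old is None:
            del os.environ['http_proxy']
        else:
            os.environ['http_proxy'] = old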
def proxy_bypass_environment(host):
"""Test if proxies should not be used for a particular host.
Checks the environment for a variable named no_proxy, which should
be a list of DNS suffixes separated by commas, or '*' for all hosts.
"""
no_proxy = os.environ.get('no_proxy', '') or os.environ.get('NO_PROXY', '')
# '*' is special case for always bypass
if no_proxy == '*':
return 1
# strip port off host
hostonly, port = splitport(host)
# check if the host ends with any of the DNS suffixes
no_proxy_list = [proxy.strip() for proxy in no_proxy.split(',')]
for name in no_proxy_list:
if name and (hostonly.endswith(name) or host.endswith(name)):
return 1
# otherwise, don't bypass
return 0
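def _demo_proxy_bypass_environment():
    # Illustrative sketch (not part of the original module): hosts matching a
    # no_proxy suffix bypass the proxy; unrelated hosts do not.
    old = os.environ.get('no_proxy')
    os.environ['no_proxy'] = 'localhost,.example.com'
    try:
        assert proxy_bypass_environment('www.example.com:8080') == 1
        assert proxy_bypass_environment('example.org') == 0
    finally:
        if old is None:
            del os.environ['no_proxy']
        else:
            os.environ['no_proxy'] = old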
if sys.platform == 'darwin':
from _scproxy import _get_proxy_settings, _get_proxies
def proxy_bypass_macosx_sysconf(host):
"""
Return True iff this host shouldn't be accessed using a proxy.
This function uses the MacOSX framework SystemConfiguration
to fetch the proxy information.
"""
import re
import socket
from fnmatch import fnmatch
hostonly, port = splitport(host)
def ip2num(ipAddr):
parts = ipAddr.split('.')
parts = map(int, parts)
if len(parts) != 4:
parts = (parts + [0, 0, 0, 0])[:4]
return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3]
proxy_settings = _get_proxy_settings()
# Check for simple host names:
if '.' not in host:
if proxy_settings['exclude_simple']:
return True
hostIP = None
for value in proxy_settings.get('exceptions', ()):
# Items in the list are strings like these: *.local, 169.254/16
if not value: continue
m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
if m is not None:
if hostIP is None:
try:
hostIP = socket.gethostbyname(hostonly)
hostIP = ip2num(hostIP)
except socket.error:
continue
base = ip2num(m.group(1))
mask = m.group(2)
if mask is None:
mask = 8 * (m.group(1).count('.') + 1)
else:
mask = int(mask[1:])
mask = 32 - mask
if (hostIP >> mask) == (base >> mask):
return True
elif fnmatch(host, value):
return True
return False
def getproxies_macosx_sysconf():
"""Return a dictionary of scheme -> proxy server URL mappings.
This function uses the MacOSX framework SystemConfiguration
to fetch the proxy information.
"""
return _get_proxies()
def proxy_bypass(host):
if getproxies_environment():
return proxy_bypass_environment(host)
else:
return proxy_bypass_macosx_sysconf(host)
def getproxies():
return getproxies_environment() or getproxies_macosx_sysconf()
elif os.name == 'nt':
def getproxies_registry():
"""Return a dictionary of scheme -> proxy server URL mappings.
Win32 uses the registry to store proxies.
"""
proxies = {}
try:
import _winreg
except ImportError:
# Std module, so should be around - but you never know!
return proxies
try:
internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
proxyEnable = _winreg.QueryValueEx(internetSettings,
'ProxyEnable')[0]
if proxyEnable:
# Returned as Unicode but problems if not converted to ASCII
proxyServer = str(_winreg.QueryValueEx(internetSettings,
'ProxyServer')[0])
if '=' in proxyServer:
# Per-protocol settings
for p in proxyServer.split(';'):
protocol, address = p.split('=', 1)
# See if address has a type:// prefix
import re
if not re.match('^([^/:]+)://', address):
address = '%s://%s' % (protocol, address)
proxies[protocol] = address
else:
# Use one setting for all protocols
if proxyServer[:5] == 'http:':
proxies['http'] = proxyServer
else:
proxies['http'] = 'http://%s' % proxyServer
proxies['https'] = 'https://%s' % proxyServer
proxies['ftp'] = 'ftp://%s' % proxyServer
internetSettings.Close()
except (WindowsError, ValueError, TypeError):
# Either registry key not found etc, or the value in an
# unexpected format.
# proxies already set up to be empty so nothing to do
pass
return proxies
def getproxies():
"""Return a dictionary of scheme -> proxy server URL mappings.
Returns settings gathered from the environment, if specified,
or the registry.
"""
return getproxies_environment() or getproxies_registry()
def proxy_bypass_registry(host):
try:
import _winreg
import re
except ImportError:
# Std modules, so should be around - but you never know!
return 0
try:
internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
proxyEnable = _winreg.QueryValueEx(internetSettings,
'ProxyEnable')[0]
proxyOverride = str(_winreg.QueryValueEx(internetSettings,
'ProxyOverride')[0])
# ^^^^ Returned as Unicode but problems if not converted to ASCII
except WindowsError:
return 0
if not proxyEnable or not proxyOverride:
return 0
# try to make a host list from name and IP address.
rawHost, port = splitport(host)
host = [rawHost]
try:
addr = socket.gethostbyname(rawHost)
if addr != rawHost:
host.append(addr)
except socket.error:
pass
try:
fqdn = socket.getfqdn(rawHost)
if fqdn != rawHost:
host.append(fqdn)
except socket.error:
pass
# make a check value list from the registry entry: replace the
# '<local>' string by the localhost entry and the corresponding
# canonical entry.
proxyOverride = proxyOverride.split(';')
# now check if we match one of the registry values.
for test in proxyOverride:
if test == '<local>':
if '.' not in rawHost:
return 1
test = test.replace(".", r"\.") # mask dots
test = test.replace("*", r".*") # change glob sequence
test = test.replace("?", r".") # change glob char
for val in host:
# print "%s <--> %s" %( test, val )
if re.match(test, val, re.I):
return 1
return 0
def proxy_bypass(host):
"""Return a dictionary of scheme -> proxy server URL mappings.
Returns settings gathered from the environment, if specified,
or the registry.
"""
if getproxies_environment():
return proxy_bypass_environment(host)
else:
return proxy_bypass_registry(host)
else:
# By default use environment variables
getproxies = getproxies_environment
proxy_bypass = proxy_bypass_environment
# Test and time quote() and unquote()
def test1():
s = ''
for i in range(256): s = s + chr(i)
s = s*4
t0 = time.time()
qs = quote(s)
uqs = unquote(qs)
t1 = time.time()
if uqs != s:
print 'Wrong!'
print repr(s)
print repr(qs)
print repr(uqs)
print round(t1 - t0, 3), 'sec'
def reporthook(blocknum, blocksize, totalsize):
# Report during remote transfers
print "Block number: %d, Block size: %d, Total size: %d" % (
blocknum, blocksize, totalsize)
|
apache-2.0
|
chrisjaquet/FreeCAD
|
src/Mod/Ship/shipUtils/Locale.py
|
38
|
2210
|
#***************************************************************************
#* *
#* Copyright (c) 2011, 2016 *
#* Jose Luis Cercos Pita <[email protected]> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
from PySide import QtCore
def toString(valueStr):
"""Natural extension of QtCore.QLocale.toString method, in this case
conveniently transforming a value string"""
dec_sep = QtCore.QLocale.system().decimalPoint()
return valueStr.replace(".", dec_sep)
def fromString(valueStr):
"""Natural extension of QtCore.QLocale.toFloat method, in this case
conveniently transforming a value string"""
grp_sep = QtCore.QLocale.system().groupSeparator()
return valueStr.replace(grp_sep, "")
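# Illustrative behaviour (a sketch, not part of the original module), assuming
# a locale whose decimal point is ',' and group separator is '.':
#   toString("3.14")      -> "3,14"
#   fromString("1.234,5") -> "1234,5"   (group separators stripped)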
|
lgpl-2.1
|
tarzan0820/odoo
|
addons/mrp_repair/mrp_repair.py
|
148
|
36935
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from datetime import datetime
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp
class mrp_repair(osv.osv):
_name = 'mrp.repair'
_inherit = 'mail.thread'
_description = 'Repair Order'
def _amount_untaxed(self, cr, uid, ids, field_name, arg, context=None):
""" Calculates untaxed amount.
@param self: The object pointer
@param cr: Database cursor
@param uid: The current user ID for security checks
@param ids: List of selected IDs
@param field_name: Name of field.
@param arg: Argument
@param context: A standard dictionary for contextual values
@return: Dictionary of values.
"""
res = {}
cur_obj = self.pool.get('res.currency')
for repair in self.browse(cr, uid, ids, context=context):
res[repair.id] = 0.0
for line in repair.operations:
res[repair.id] += line.price_subtotal
for line in repair.fees_lines:
res[repair.id] += line.price_subtotal
cur = repair.pricelist_id.currency_id
res[repair.id] = cur_obj.round(cr, uid, cur, res[repair.id])
return res
def _amount_tax(self, cr, uid, ids, field_name, arg, context=None):
""" Calculates taxed amount.
@param field_name: Name of field.
@param arg: Argument
@return: Dictionary of values.
"""
res = {}
#return {}.fromkeys(ids, 0)
cur_obj = self.pool.get('res.currency')
tax_obj = self.pool.get('account.tax')
for repair in self.browse(cr, uid, ids, context=context):
val = 0.0
cur = repair.pricelist_id.currency_id
for line in repair.operations:
# manage prices with tax included: use compute_all instead of compute
if line.to_invoice:
tax_calculate = tax_obj.compute_all(cr, uid, line.tax_id, line.price_unit, line.product_uom_qty, line.product_id, repair.partner_id)
for c in tax_calculate['taxes']:
val += c['amount']
for line in repair.fees_lines:
if line.to_invoice:
tax_calculate = tax_obj.compute_all(cr, uid, line.tax_id, line.price_unit, line.product_uom_qty, line.product_id, repair.partner_id)
for c in tax_calculate['taxes']:
val += c['amount']
res[repair.id] = cur_obj.round(cr, uid, cur, val)
return res
def _amount_total(self, cr, uid, ids, field_name, arg, context=None):
""" Calculates total amount.
@param field_name: Name of field.
@param arg: Argument
@return: Dictionary of values.
"""
res = {}
untax = self._amount_untaxed(cr, uid, ids, field_name, arg, context=context)
tax = self._amount_tax(cr, uid, ids, field_name, arg, context=context)
cur_obj = self.pool.get('res.currency')
for id in ids:
repair = self.browse(cr, uid, id, context=context)
cur = repair.pricelist_id.currency_id
res[id] = cur_obj.round(cr, uid, cur, untax.get(id, 0.0) + tax.get(id, 0.0))
return res
def _get_default_address(self, cr, uid, ids, field_name, arg, context=None):
res = {}
partner_obj = self.pool.get('res.partner')
for data in self.browse(cr, uid, ids, context=context):
adr_id = False
if data.partner_id:
adr_id = partner_obj.address_get(cr, uid, [data.partner_id.id], ['default'])['default']
res[data.id] = adr_id
return res
def _get_lines(self, cr, uid, ids, context=None):
return self.pool['mrp.repair'].search(cr, uid, [('operations', 'in', ids)], context=context)
def _get_fee_lines(self, cr, uid, ids, context=None):
return self.pool['mrp.repair'].search(cr, uid, [('fees_lines', 'in', ids)], context=context)
_columns = {
'name': fields.char('Repair Reference', required=True, states={'confirmed': [('readonly', True)]}, copy=False),
'product_id': fields.many2one('product.product', string='Product to Repair', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'product_qty': fields.float('Product Quantity', digits_compute=dp.get_precision('Product Unit of Measure'),
required=True, readonly=True, states={'draft': [('readonly', False)]}),
'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'partner_id': fields.many2one('res.partner', 'Partner', select=True, help='Choose partner for whom the order will be invoiced and delivered.', states={'confirmed': [('readonly', True)]}),
'address_id': fields.many2one('res.partner', 'Delivery Address', domain="[('parent_id','=',partner_id)]", states={'confirmed': [('readonly', True)]}),
'default_address_id': fields.function(_get_default_address, type="many2one", relation="res.partner"),
'state': fields.selection([
('draft', 'Quotation'),
('cancel', 'Cancelled'),
('confirmed', 'Confirmed'),
('under_repair', 'Under Repair'),
('ready', 'Ready to Repair'),
('2binvoiced', 'To be Invoiced'),
('invoice_except', 'Invoice Exception'),
('done', 'Repaired')
], 'Status', readonly=True, track_visibility='onchange', copy=False,
help=' * The \'Draft\' status is used when a user is encoding a new and unconfirmed repair order. \
\n* The \'Confirmed\' status is used when a user confirms the repair order. \
\n* The \'Ready to Repair\' status is used to start repairing; the user can start repairing only after the repair order is confirmed. \
\n* The \'To be Invoiced\' status is used to generate the invoice before or after the repair is done. \
\n* The \'Done\' status is set when the repair is completed.\
\n* The \'Cancelled\' status is used when the user cancels the repair order.'),
'location_id': fields.many2one('stock.location', 'Current Location', select=True, required=True, readonly=True, states={'draft': [('readonly', False)], 'confirmed': [('readonly', True)]}),
'location_dest_id': fields.many2one('stock.location', 'Delivery Location', readonly=True, required=True, states={'draft': [('readonly', False)], 'confirmed': [('readonly', True)]}),
'lot_id': fields.many2one('stock.production.lot', 'Repaired Lot', domain="[('product_id','=', product_id)]", help="Products repaired are all belonging to this lot", oldname="prodlot_id"),
'guarantee_limit': fields.date('Warranty Expiration', states={'confirmed': [('readonly', True)]}),
'operations': fields.one2many('mrp.repair.line', 'repair_id', 'Operation Lines', readonly=True, states={'draft': [('readonly', False)]}, copy=True),
'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', help='Pricelist of the selected partner.'),
'partner_invoice_id': fields.many2one('res.partner', 'Invoicing Address'),
'invoice_method': fields.selection([
("none", "No Invoice"),
("b4repair", "Before Repair"),
("after_repair", "After Repair")
], "Invoice Method",
select=True, required=True, states={'draft': [('readonly', False)]}, readonly=True, help='Selecting \'Before Repair\' or \'After Repair\' will allow you to generate invoice before or after the repair is done respectively. \'No invoice\' means you don\'t want to generate invoice for this repair order.'),
'invoice_id': fields.many2one('account.invoice', 'Invoice', readonly=True, track_visibility="onchange", copy=False),
'move_id': fields.many2one('stock.move', 'Move', readonly=True, help="Move created by the repair order", track_visibility="onchange", copy=False),
'fees_lines': fields.one2many('mrp.repair.fee', 'repair_id', 'Fees', readonly=True, states={'draft': [('readonly', False)]}, copy=True),
'internal_notes': fields.text('Internal Notes'),
'quotation_notes': fields.text('Quotation Notes'),
'company_id': fields.many2one('res.company', 'Company'),
'invoiced': fields.boolean('Invoiced', readonly=True, copy=False),
'repaired': fields.boolean('Repaired', readonly=True, copy=False),
'amount_untaxed': fields.function(_amount_untaxed, string='Untaxed Amount',
store={
'mrp.repair': (lambda self, cr, uid, ids, c={}: ids, ['operations', 'fees_lines'], 10),
'mrp.repair.line': (_get_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
'mrp.repair.fee': (_get_fee_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
}),
'amount_tax': fields.function(_amount_tax, string='Taxes',
store={
'mrp.repair': (lambda self, cr, uid, ids, c={}: ids, ['operations', 'fees_lines'], 10),
'mrp.repair.line': (_get_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
'mrp.repair.fee': (_get_fee_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
}),
'amount_total': fields.function(_amount_total, string='Total',
store={
'mrp.repair': (lambda self, cr, uid, ids, c={}: ids, ['operations', 'fees_lines'], 10),
'mrp.repair.line': (_get_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
'mrp.repair.fee': (_get_fee_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
}),
}
def _default_stock_location(self, cr, uid, context=None):
try:
warehouse = self.pool.get('ir.model.data').get_object(cr, uid, 'stock', 'warehouse0')
return warehouse.lot_stock_id.id
except:
return False
_defaults = {
'state': lambda *a: 'draft',
'name': lambda obj, cr, uid, context: obj.pool.get('ir.sequence').get(cr, uid, 'mrp.repair'),
'invoice_method': lambda *a: 'none',
'company_id': lambda self, cr, uid, context: self.pool.get('res.company')._company_default_get(cr, uid, 'mrp.repair', context=context),
'pricelist_id': lambda self, cr, uid, context: self.pool.get('product.pricelist').search(cr, uid, [('type', '=', 'sale')])[0],
'product_qty': 1.0,
'location_id': _default_stock_location,
}
_sql_constraints = [
('name', 'unique (name)', 'The name of the Repair Order must be unique!'),
]
def onchange_product_id(self, cr, uid, ids, product_id=None):
""" On change of product sets some values.
@param product_id: Changed product
@return: Dictionary of values.
"""
product = False
if product_id:
product = self.pool.get("product.product").browse(cr, uid, product_id)
return {'value': {
'guarantee_limit': False,
'lot_id': False,
'product_uom': product and product.uom_id.id or False,
}
}
def onchange_product_uom(self, cr, uid, ids, product_id, product_uom, context=None):
res = {'value': {}}
if not product_uom or not product_id:
return res
product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
uom = self.pool.get('product.uom').browse(cr, uid, product_uom, context=context)
if uom.category_id.id != product.uom_id.category_id.id:
res['warning'] = {'title': _('Warning'), 'message': _('The Product Unit of Measure you chose has a different category than in the product form.')}
res['value'].update({'product_uom': product.uom_id.id})
return res
def onchange_location_id(self, cr, uid, ids, location_id=None):
""" On change of location
"""
return {'value': {'location_dest_id': location_id}}
def button_dummy(self, cr, uid, ids, context=None):
return True
def onchange_partner_id(self, cr, uid, ids, part, address_id):
""" On change of partner sets the values of partner address,
partner invoice address and pricelist.
@param part: Changed id of partner.
@param address_id: Address id from current record.
@return: Dictionary of values.
"""
part_obj = self.pool.get('res.partner')
pricelist_obj = self.pool.get('product.pricelist')
if not part:
return {'value': {
'address_id': False,
'partner_invoice_id': False,
'pricelist_id': pricelist_obj.search(cr, uid, [('type', '=', 'sale')])[0]
}
}
addr = part_obj.address_get(cr, uid, [part], ['delivery', 'invoice', 'default'])
partner = part_obj.browse(cr, uid, part)
pricelist = partner.property_product_pricelist and partner.property_product_pricelist.id or False
return {'value': {
'address_id': addr['delivery'] or addr['default'],
'partner_invoice_id': addr['invoice'],
'pricelist_id': pricelist
}
}
def action_cancel_draft(self, cr, uid, ids, *args):
""" Cancels repair order when it is in 'Draft' state.
@param *args: Arguments
@return: True
"""
if not len(ids):
return False
mrp_line_obj = self.pool.get('mrp.repair.line')
for repair in self.browse(cr, uid, ids):
mrp_line_obj.write(cr, uid, [l.id for l in repair.operations], {'state': 'draft'})
self.write(cr, uid, ids, {'state': 'draft'})
return self.create_workflow(cr, uid, ids)
def action_confirm(self, cr, uid, ids, *args):
""" Repair order state is set to 'To be invoiced' when invoice method
is 'Before repair' else state becomes 'Confirmed'.
@param *arg: Arguments
@return: True
"""
mrp_line_obj = self.pool.get('mrp.repair.line')
for o in self.browse(cr, uid, ids):
if (o.invoice_method == 'b4repair'):
self.write(cr, uid, [o.id], {'state': '2binvoiced'})
else:
self.write(cr, uid, [o.id], {'state': 'confirmed'})
for line in o.operations:
if line.product_id.track_production and not line.lot_id:
raise osv.except_osv(_('Warning!'), _("Serial number is required for operation line with product '%s'") % (line.product_id.name))
mrp_line_obj.write(cr, uid, [l.id for l in o.operations], {'state': 'confirmed'})
return True
def action_cancel(self, cr, uid, ids, context=None):
""" Cancels repair order.
@return: True
"""
mrp_line_obj = self.pool.get('mrp.repair.line')
for repair in self.browse(cr, uid, ids, context=context):
if not repair.invoiced:
mrp_line_obj.write(cr, uid, [l.id for l in repair.operations], {'state': 'cancel'}, context=context)
else:
raise osv.except_osv(_('Warning!'), _('Repair order is already invoiced.'))
return self.write(cr, uid, ids, {'state': 'cancel'})
def wkf_invoice_create(self, cr, uid, ids, *args):
self.action_invoice_create(cr, uid, ids)
return True
def action_invoice_create(self, cr, uid, ids, group=False, context=None):
""" Creates invoice(s) for repair order.
@param group: It is set to true when group invoice is to be generated.
@return: Invoice Ids.
"""
res = {}
invoices_group = {}
inv_line_obj = self.pool.get('account.invoice.line')
inv_obj = self.pool.get('account.invoice')
repair_line_obj = self.pool.get('mrp.repair.line')
repair_fee_obj = self.pool.get('mrp.repair.fee')
for repair in self.browse(cr, uid, ids, context=context):
res[repair.id] = False
if repair.state in ('draft', 'cancel') or repair.invoice_id:
continue
if not (repair.partner_id.id and repair.partner_invoice_id.id):
raise osv.except_osv(_('No partner!'), _('You have to select a Partner Invoice Address in the repair form!'))
comment = repair.quotation_notes
if (repair.invoice_method != 'none'):
if group and repair.partner_invoice_id.id in invoices_group:
inv_id = invoices_group[repair.partner_invoice_id.id]
invoice = inv_obj.browse(cr, uid, inv_id)
invoice_vals = {
'name': invoice.name + ', ' + repair.name,
'origin': invoice.origin + ', ' + repair.name,
'comment': (comment and (invoice.comment and invoice.comment + "\n" + comment or comment)) or (invoice.comment and invoice.comment or ''),
}
inv_obj.write(cr, uid, [inv_id], invoice_vals, context=context)
else:
if not repair.partner_id.property_account_receivable:
raise osv.except_osv(_('Error!'), _('No account defined for partner "%s".') % repair.partner_id.name)
account_id = repair.partner_id.property_account_receivable.id
inv = {
'name': repair.name,
'origin': repair.name,
'type': 'out_invoice',
'account_id': account_id,
'partner_id': repair.partner_invoice_id.id or repair.partner_id.id,
'currency_id': repair.pricelist_id.currency_id.id,
'comment': repair.quotation_notes,
'fiscal_position': repair.partner_id.property_account_position.id
}
inv_id = inv_obj.create(cr, uid, inv)
invoices_group[repair.partner_invoice_id.id] = inv_id
self.write(cr, uid, repair.id, {'invoiced': True, 'invoice_id': inv_id})
for operation in repair.operations:
if operation.to_invoice:
if group:
name = repair.name + '-' + operation.name
else:
name = operation.name
if operation.product_id.property_account_income:
account_id = operation.product_id.property_account_income.id
elif operation.product_id.categ_id.property_account_income_categ:
account_id = operation.product_id.categ_id.property_account_income_categ.id
else:
raise osv.except_osv(_('Error!'), _('No account defined for product "%s".') % operation.product_id.name)
invoice_line_id = inv_line_obj.create(cr, uid, {
'invoice_id': inv_id,
'name': name,
'origin': repair.name,
'account_id': account_id,
'quantity': operation.product_uom_qty,
'invoice_line_tax_id': [(6, 0, [x.id for x in operation.tax_id])],
'uos_id': operation.product_uom.id,
'price_unit': operation.price_unit,
'price_subtotal': operation.product_uom_qty * operation.price_unit,
'product_id': operation.product_id and operation.product_id.id or False
})
repair_line_obj.write(cr, uid, [operation.id], {'invoiced': True, 'invoice_line_id': invoice_line_id})
for fee in repair.fees_lines:
if fee.to_invoice:
if group:
name = repair.name + '-' + fee.name
else:
name = fee.name
if not fee.product_id:
raise osv.except_osv(_('Warning!'), _('No product defined on Fees!'))
if fee.product_id.property_account_income:
account_id = fee.product_id.property_account_income.id
elif fee.product_id.categ_id.property_account_income_categ:
account_id = fee.product_id.categ_id.property_account_income_categ.id
else:
raise osv.except_osv(_('Error!'), _('No account defined for product "%s".') % fee.product_id.name)
invoice_fee_id = inv_line_obj.create(cr, uid, {
'invoice_id': inv_id,
'name': name,
'origin': repair.name,
'account_id': account_id,
'quantity': fee.product_uom_qty,
'invoice_line_tax_id': [(6, 0, [x.id for x in fee.tax_id])],
'uos_id': fee.product_uom.id,
'product_id': fee.product_id and fee.product_id.id or False,
'price_unit': fee.price_unit,
'price_subtotal': fee.product_uom_qty * fee.price_unit
})
repair_fee_obj.write(cr, uid, [fee.id], {'invoiced': True, 'invoice_line_id': invoice_fee_id})
inv_obj.button_reset_taxes(cr, uid, inv_id, context=context)
res[repair.id] = inv_id
return res
def action_repair_ready(self, cr, uid, ids, context=None):
""" Writes repair order state to 'Ready'
@return: True
"""
for repair in self.browse(cr, uid, ids, context=context):
self.pool.get('mrp.repair.line').write(cr, uid, [l.id for
l in repair.operations], {'state': 'confirmed'}, context=context)
self.write(cr, uid, [repair.id], {'state': 'ready'})
return True
def action_repair_start(self, cr, uid, ids, context=None):
""" Writes repair order state to 'Under Repair'
@return: True
"""
repair_line = self.pool.get('mrp.repair.line')
for repair in self.browse(cr, uid, ids, context=context):
repair_line.write(cr, uid, [l.id for
l in repair.operations], {'state': 'confirmed'}, context=context)
repair.write({'state': 'under_repair'})
return True
def action_repair_end(self, cr, uid, ids, context=None):
""" Writes repair order state to 'To be invoiced' if invoice method is
After repair else state is set to 'Ready'.
@return: True
"""
for order in self.browse(cr, uid, ids, context=context):
val = {}
val['repaired'] = True
if (not order.invoiced and order.invoice_method == 'after_repair'):
val['state'] = '2binvoiced'
elif (not order.invoiced and order.invoice_method == 'b4repair'):
val['state'] = 'ready'
else:
pass
self.write(cr, uid, [order.id], val)
return True
def wkf_repair_done(self, cr, uid, ids, *args):
self.action_repair_done(cr, uid, ids)
return True
def action_repair_done(self, cr, uid, ids, context=None):
""" Creates stock move for operation and stock move for final product of repair order.
@return: Move ids of final products
"""
res = {}
move_obj = self.pool.get('stock.move')
repair_line_obj = self.pool.get('mrp.repair.line')
for repair in self.browse(cr, uid, ids, context=context):
move_ids = []
for move in repair.operations:
move_id = move_obj.create(cr, uid, {
'name': move.name,
'product_id': move.product_id.id,
'restrict_lot_id': move.lot_id.id,
'product_uom_qty': move.product_uom_qty,
'product_uom': move.product_uom.id,
'partner_id': repair.address_id and repair.address_id.id or False,
'location_id': move.location_id.id,
'location_dest_id': move.location_dest_id.id,
})
move_ids.append(move_id)
repair_line_obj.write(cr, uid, [move.id], {'move_id': move_id, 'state': 'done'}, context=context)
move_id = move_obj.create(cr, uid, {
'name': repair.name,
'product_id': repair.product_id.id,
'product_uom': repair.product_uom.id or repair.product_id.uom_id.id,
'product_uom_qty': repair.product_qty,
'partner_id': repair.address_id and repair.address_id.id or False,
'location_id': repair.location_id.id,
'location_dest_id': repair.location_dest_id.id,
'restrict_lot_id': repair.lot_id.id,
})
move_ids.append(move_id)
move_obj.action_done(cr, uid, move_ids, context=context)
self.write(cr, uid, [repair.id], {'state': 'done', 'move_id': move_id}, context=context)
res[repair.id] = move_id
return res
class ProductChangeMixin(object):
def product_id_change(self, cr, uid, ids, pricelist, product, uom=False,
product_uom_qty=0, partner_id=False, guarantee_limit=False, context=None):
""" On change of product it sets product quantity, tax account, name,
uom of product, unit price and price subtotal.
@param pricelist: Pricelist of current record.
@param product: Changed id of product.
@param uom: UoM of current record.
@param product_uom_qty: Quantity of current record.
@param partner_id: Partner of current record.
@param guarantee_limit: Guarantee limit of current record.
@return: Dictionary of values and warning message.
"""
result = {}
warning = {}
ctx = context and context.copy() or {}
ctx['uom'] = uom
if not product_uom_qty:
product_uom_qty = 1
result['product_uom_qty'] = product_uom_qty
if product:
product_obj = self.pool.get('product.product').browse(cr, uid, product, context=ctx)
if partner_id:
partner = self.pool.get('res.partner').browse(cr, uid, partner_id)
result['tax_id'] = self.pool.get('account.fiscal.position').map_tax(cr, uid, partner.property_account_position, product_obj.taxes_id, context=ctx)
result['name'] = product_obj.display_name
result['product_uom'] = product_obj.uom_id and product_obj.uom_id.id or False
if not pricelist:
warning = {
'title': _('No Pricelist!'),
'message':
_('You have to select a pricelist in the Repair form !\n'
'Please set one before choosing a product.')
}
else:
price = self.pool.get('product.pricelist').price_get(cr, uid, [pricelist],
product, product_uom_qty, partner_id, context=ctx)[pricelist]
if price is False:
warning = {
'title': _('No valid pricelist line found !'),
'message':
_("Couldn't find a pricelist line matching this product and quantity.\n"
"You have to change either the product, the quantity or the pricelist.")
}
else:
result.update({'price_unit': price, 'price_subtotal': price * product_uom_qty})
return {'value': result, 'warning': warning}
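# A minimal sketch (hypothetical cursor and ids, not part of this module) of
# calling the mixin hook directly, e.g. from a test:
#   res = line_obj.product_id_change(cr, uid, [], pricelist_id, product_id,
#                                    uom=False, product_uom_qty=2,
#                                    partner_id=partner_id)
#   res['value']['price_subtotal']   # pricelist unit price * 2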
class mrp_repair_line(osv.osv, ProductChangeMixin):
_name = 'mrp.repair.line'
_description = 'Repair Line'
def _amount_line(self, cr, uid, ids, field_name, arg, context=None):
""" Calculates amount.
@param field_name: Name of field.
@param arg: Argument
@return: Dictionary of values.
"""
res = {}
tax_obj = self.pool.get('account.tax')
cur_obj = self.pool.get('res.currency')
for line in self.browse(cr, uid, ids, context=context):
if line.to_invoice:
taxes = tax_obj.compute_all(cr, uid, line.tax_id, line.price_unit, line.product_uom_qty, line.product_id, line.repair_id.partner_id)
cur = line.repair_id.pricelist_id.currency_id
res[line.id] = cur_obj.round(cr, uid, cur, taxes['total'])
else:
res[line.id] = 0
return res
_columns = {
'name': fields.char('Description', required=True),
'repair_id': fields.many2one('mrp.repair', 'Repair Order Reference', ondelete='cascade', select=True),
'type': fields.selection([('add', 'Add'), ('remove', 'Remove')], 'Type', required=True),
'to_invoice': fields.boolean('To Invoice'),
'product_id': fields.many2one('product.product', 'Product', required=True),
'invoiced': fields.boolean('Invoiced', readonly=True, copy=False),
'price_unit': fields.float('Unit Price', required=True, digits_compute=dp.get_precision('Product Price')),
'price_subtotal': fields.function(_amount_line, string='Subtotal', digits_compute=dp.get_precision('Account')),
'tax_id': fields.many2many('account.tax', 'repair_operation_line_tax', 'repair_operation_line_id', 'tax_id', 'Taxes'),
'product_uom_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
'invoice_line_id': fields.many2one('account.invoice.line', 'Invoice Line', readonly=True, copy=False),
'location_id': fields.many2one('stock.location', 'Source Location', required=True, select=True),
'location_dest_id': fields.many2one('stock.location', 'Dest. Location', required=True, select=True),
'move_id': fields.many2one('stock.move', 'Inventory Move', readonly=True, copy=False),
'lot_id': fields.many2one('stock.production.lot', 'Lot'),
'state': fields.selection([
('draft', 'Draft'),
('confirmed', 'Confirmed'),
('done', 'Done'),
('cancel', 'Cancelled')], 'Status', required=True, readonly=True, copy=False,
help=' * The \'Draft\' status is set automatically when the repair order is in draft status. \
\n* The \'Confirmed\' status is set automatically when the repair order is confirmed. \
\n* The \'Done\' status is set automatically when the repair order is completed.\
\n* The \'Cancelled\' status is set automatically when the user cancels the repair order.'),
}
_defaults = {
'state': lambda *a: 'draft',
'product_uom_qty': lambda *a: 1,
}
def onchange_operation_type(self, cr, uid, ids, type, guarantee_limit, company_id=False, context=None):
""" On change of operation type it sets source location, destination location
and to invoice field.
@param product: Changed operation type.
@param guarantee_limit: Guarantee limit of current record.
@return: Dictionary of values.
"""
if not type:
return {'value': {
'location_id': False,
'location_dest_id': False
}}
location_obj = self.pool.get('stock.location')
warehouse_obj = self.pool.get('stock.warehouse')
location_id = location_obj.search(cr, uid, [('usage', '=', 'production')], context=context)
location_id = location_id and location_id[0] or False
if type == 'add':
# TOCHECK: Find stock location for user's company warehouse or
# repair order's company's warehouse (company_id field is added in fix of lp:831583)
args = company_id and [('company_id', '=', company_id)] or []
warehouse_ids = warehouse_obj.search(cr, uid, args, context=context)
stock_id = False
if warehouse_ids:
stock_id = warehouse_obj.browse(cr, uid, warehouse_ids[0], context=context).lot_stock_id.id
to_invoice = (guarantee_limit and datetime.strptime(guarantee_limit, '%Y-%m-%d') < datetime.now())
return {'value': {
'to_invoice': to_invoice,
'location_id': stock_id,
'location_dest_id': location_id
}}
scrap_location_ids = location_obj.search(cr, uid, [('scrap_location', '=', True)], context=context)
return {'value': {
'to_invoice': False,
'location_id': location_id,
'location_dest_id': scrap_location_ids and scrap_location_ids[0] or False,
}}
class mrp_repair_fee(osv.osv, ProductChangeMixin):
_name = 'mrp.repair.fee'
_description = 'Repair Fees Line'
def _amount_line(self, cr, uid, ids, field_name, arg, context=None):
""" Calculates amount.
@param field_name: Name of field.
@param arg: Argument
@return: Dictionary of values.
"""
res = {}
tax_obj = self.pool.get('account.tax')
cur_obj = self.pool.get('res.currency')
for line in self.browse(cr, uid, ids, context=context):
if line.to_invoice:
taxes = tax_obj.compute_all(cr, uid, line.tax_id, line.price_unit, line.product_uom_qty, line.product_id, line.repair_id.partner_id)
cur = line.repair_id.pricelist_id.currency_id
res[line.id] = cur_obj.round(cr, uid, cur, taxes['total'])
else:
res[line.id] = 0
return res
_columns = {
'repair_id': fields.many2one('mrp.repair', 'Repair Order Reference', required=True, ondelete='cascade', select=True),
'name': fields.char('Description', select=True, required=True),
'product_id': fields.many2one('product.product', 'Product'),
'product_uom_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
'price_unit': fields.float('Unit Price', required=True),
'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
'price_subtotal': fields.function(_amount_line, string='Subtotal', digits_compute=dp.get_precision('Account')),
'tax_id': fields.many2many('account.tax', 'repair_fee_line_tax', 'repair_fee_line_id', 'tax_id', 'Taxes'),
'invoice_line_id': fields.many2one('account.invoice.line', 'Invoice Line', readonly=True, copy=False),
'to_invoice': fields.boolean('To Invoice'),
'invoiced': fields.boolean('Invoiced', readonly=True, copy=False),
}
_defaults = {
'to_invoice': lambda *a: True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
sergmelikyan/murano
|
murano/utils.py
|
1
|
2779
|
# Copyright (c) 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
from webob import exc
from murano.db import models
from murano.db.services import sessions
from murano.db import session as db_session
from murano.openstack.common.gettextutils import _ # noqa
from murano.openstack.common import log as logging
LOG = logging.getLogger(__name__)
def verify_env(func):
@functools.wraps(func)
def __inner(self, request, environment_id, *args, **kwargs):
unit = db_session.get_session()
environment = unit.query(models.Environment).get(environment_id)
if environment is None:
LOG.info(_("Environment with id '{0}'"
" not found").format(environment_id))
raise exc.HTTPNotFound()
if hasattr(request, 'context'):
if environment.tenant_id != request.context.tenant:
LOG.info(_('User is not authorized to access'
                           " this tenant's resources"))
raise exc.HTTPUnauthorized()
return func(self, request, environment_id, *args, **kwargs)
return __inner
def verify_session(func):
@functools.wraps(func)
def __inner(self, request, *args, **kwargs):
if hasattr(request, 'context') and not request.context.session:
LOG.info(_('Session is required for this call'))
raise exc.HTTPForbidden()
session_id = request.context.session
unit = db_session.get_session()
session = unit.query(models.Session).get(session_id)
if session is None:
LOG.info(_('Session <SessionId {0}> '
'is not found').format(session_id))
raise exc.HTTPForbidden()
if not sessions.SessionServices.validate(session):
LOG.info(_('Session <SessionId {0}> '
'is invalid').format(session_id))
raise exc.HTTPForbidden()
if session.state == sessions.SessionState.DEPLOYING:
LOG.info(_('Session <SessionId {0}> is already in '
'deployment state').format(session_id))
raise exc.HTTPForbidden()
return func(self, request, *args, **kwargs)
return __inner
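# --- Editor's illustrative sketch (not part of murano) ----------------------
# A hedged example of how the decorators above are typically stacked on a
# controller action; `_ExampleController` and `show` are hypothetical names,
# not murano's real API surface.
class _ExampleController(object):
    @verify_env
    def show(self, request, environment_id):
        # Reached only after verify_env confirmed that the environment exists
        # and belongs to the caller's tenant; otherwise a 404/401 was raised.
        return {'id': environment_id}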
|
apache-2.0
|
alexryndin/ambari
|
ambari-client/python-client/src/examples/main.py
|
4
|
4761
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import logging
from ambari_client.ambari_api import AmbariClient
def main():
    path = os.getcwd()
print path
sys.path.append(path)
    logging.basicConfig(filename="ambari_api.log", level=logging.DEBUG, filemode="w")
logging.info("Program started")
client = AmbariClient("localhost", 8080, "admin", "admin", version=1)
print client.version
print client.host_url
print"\n"
######################################
# High level
######################################
all_clusters = client.get_all_clusters()
print all_clusters.to_json_dict()
print all_clusters
all_hosts = client.get_all_hosts()
print all_hosts
print all_hosts.to_json_dict()
print"\n"
######################################
# going into a specific cluster
######################################
cluster = client.get_cluster('test46')
print cluster
print cluster.to_json_dict()
print"\n"
clusters_hosts = cluster.get_all_hosts()
print clusters_hosts.to_json_dict()
print clusters_hosts
print"\n"
host1 = cluster.get_host('r01wn01')
print host1
print host1.clusterRef.cluster_name
print host1.to_json_dict()
print"\n"
print "==================== host components ====================\n"
host1_comp = host1.get_host_components()
print host1_comp
print host1_comp.to_json_dict()
print"\n"
nn = host1.get_host_component("NAMENODE")
print nn
print nn.to_json_dict()
print nn.clusterRef.cluster_name
metric_json = nn.get_metrics()
print metric_json["metrics"]["cpu"]
print"\n"
serviceList = cluster.get_all_services()
print serviceList
print serviceList.to_json_dict()
print"\n"
ganglia = cluster.get_service("GANGLIA")
print ganglia
print ganglia.to_json_dict()
print"\n"
print "==================== service components ====================\n"
ganglia_comps = ganglia.get_service_components()
print ganglia_comps
print ganglia_comps.to_json_dict()
print"\n"
ganglia_comp1 = ganglia.get_service_component('GANGLIA_MONITOR')
print ganglia_comp1
print ganglia_comp1.to_json_dict()
print ganglia_comp1.clusterRef.cluster_name
print"\n"
mr = cluster.get_service("MAPREDUCE")
print mr
print mr.to_json_dict()
print"\n"
mr_comp1 = mr.get_service_component('TASKTRACKER')
print mr_comp1
print mr_comp1.to_json_dict()
print mr_comp1.clusterRef.cluster_name
metric_json = mr_comp1.get_metrics()
print metric_json["metrics"]["cpu"]
print"\n"
######################################
# configurations
######################################
hdfs_config = cluster.get_hdfs_site_config()
print hdfs_config
print hdfs_config.properties
global_config = cluster.get_global_config()
core_config = cluster.get_core_site_config()
mapred_config = cluster.get_mapred_site_config()
print global_config
print core_config
print mapred_config
print global_config.clusterRef.cluster_name
print core_config.clusterRef.cluster_name
print mapred_config.clusterRef.cluster_name
hdfs_config.properties["dfs.replication.max"] = 51
#print hdfs_config.properties
hdfs_config1 = cluster.update_hdfs_site_config(hdfs_config)
print hdfs_config1.properties
######################################
# create cluster
######################################
# ganglia.stop()
# ganglia.start()
########################################################################
#
# The "main" entry
#
########################################################################
if __name__ == '__main__':
main()
########################################################################
|
apache-2.0
|
bastianh/zulip
|
zerver/lib/logging_util.py
|
124
|
1529
|
from __future__ import absolute_import
import logging
from datetime import datetime, timedelta
# Adapted from http://djangosnippets.org/snippets/2242/ by user s29 (October 25, 2010)
class _RateLimitFilter(object):
last_error = datetime.min
def filter(self, record):
from django.conf import settings
from django.core.cache import cache
# Track duplicate errors
duplicate = False
rate = getattr(settings, '%s_LIMIT' % self.__class__.__name__.upper(),
600) # seconds
if rate > 0:
# Test if the cache works
try:
cache.set('RLF_TEST_KEY', 1, 1)
use_cache = cache.get('RLF_TEST_KEY') == 1
            except Exception:
use_cache = False
if use_cache:
key = self.__class__.__name__.upper()
duplicate = cache.get(key) == 1
cache.set(key, 1, rate)
else:
min_date = datetime.now() - timedelta(seconds=rate)
duplicate = (self.last_error >= min_date)
if not duplicate:
self.last_error = datetime.now()
return not duplicate
class ZulipLimiter(_RateLimitFilter):
pass
class EmailLimiter(_RateLimitFilter):
pass
class ReturnTrue(logging.Filter):
def filter(self, record):
return True
class RequireReallyDeployed(logging.Filter):
def filter(self, record):
from django.conf import settings
return settings.PRODUCTION
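# --- Editor's illustrative sketch (not Zulip's actual config) ---------------
# A hedged example of wiring ZulipLimiter into Django's LOGGING setting via
# dictConfig's '()' factory key; the handler and logger names are made up.
EXAMPLE_LOGGING = {
    'version': 1,
    'filters': {
        'ratelimit': {'()': 'zerver.lib.logging_util.ZulipLimiter'},
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'filters': ['ratelimit'],  # drop duplicate errors inside the rate window
        },
    },
    'loggers': {
        '': {'handlers': ['console'], 'level': 'ERROR'},
    },
}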
|
apache-2.0
|
BlackBox-Kernel/blackbox_sprout_lp
|
tools/perf/tests/attr.py
|
3174
|
9441
|
#! /usr/bin/python
import os
import sys
import glob
import optparse
import tempfile
import logging
import shutil
import ConfigParser
class Fail(Exception):
def __init__(self, test, msg):
self.msg = msg
self.test = test
def getMsg(self):
return '\'%s\' - %s' % (self.test.path, self.msg)
class Unsup(Exception):
def __init__(self, test):
self.test = test
def getMsg(self):
return '\'%s\'' % self.test.path
class Event(dict):
terms = [
'cpu',
'flags',
'type',
'size',
'config',
'sample_period',
'sample_type',
'read_format',
'disabled',
'inherit',
'pinned',
'exclusive',
'exclude_user',
'exclude_kernel',
'exclude_hv',
'exclude_idle',
'mmap',
'comm',
'freq',
'inherit_stat',
'enable_on_exec',
'task',
'watermark',
'precise_ip',
'mmap_data',
'sample_id_all',
'exclude_host',
'exclude_guest',
'exclude_callchain_kernel',
'exclude_callchain_user',
'wakeup_events',
'bp_type',
'config1',
'config2',
'branch_sample_type',
'sample_regs_user',
'sample_stack_user',
]
def add(self, data):
for key, val in data:
log.debug(" %s = %s" % (key, val))
self[key] = val
def __init__(self, name, data, base):
log.debug(" Event %s" % name);
self.name = name;
self.group = ''
self.add(base)
self.add(data)
def compare_data(self, a, b):
# Allow multiple values in assignment separated by '|'
a_list = a.split('|')
b_list = b.split('|')
for a_item in a_list:
for b_item in b_list:
if (a_item == b_item):
return True
elif (a_item == '*') or (b_item == '*'):
return True
return False
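    # Editor's note (illustrative): compare_data('0|1', '1') -> True, since an
    # assignment may list alternatives separated by '|'; either side may also
    # use the '*' wildcard, e.g. compare_data('*', '0x13') -> True.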
def equal(self, other):
for t in Event.terms:
log.debug(" [%s] %s %s" % (t, self[t], other[t]));
if not self.has_key(t) or not other.has_key(t):
return False
if not self.compare_data(self[t], other[t]):
return False
return True
def diff(self, other):
for t in Event.terms:
if not self.has_key(t) or not other.has_key(t):
continue
if not self.compare_data(self[t], other[t]):
log.warning("expected %s=%s, got %s" % (t, self[t], other[t]))
# Test file description needs to have following sections:
# [config]
# - just single instance in file
# - needs to specify:
# 'command' - perf command name
# 'args' - special command arguments
# 'ret' - expected command return value (0 by default)
#
# [eventX:base]
# - one or multiple instances in file
# - expected values assignments
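#
# A minimal illustration (editor's sketch, not a file shipped with perf):
#
#   [config]
#   command = record
#   args    = kill >/dev/null 2>&1
#   ret     = 1
#
#   [event:base-record]
#   sample_period = 4000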
class Test(object):
def __init__(self, path, options):
parser = ConfigParser.SafeConfigParser()
parser.read(path)
log.warning("running '%s'" % path)
self.path = path
self.test_dir = options.test_dir
self.perf = options.perf
self.command = parser.get('config', 'command')
self.args = parser.get('config', 'args')
try:
self.ret = parser.get('config', 'ret')
except:
self.ret = 0
self.expect = {}
self.result = {}
log.debug(" loading expected events");
self.load_events(path, self.expect)
def is_event(self, name):
if name.find("event") == -1:
return False
else:
return True
def load_events(self, path, events):
parser_event = ConfigParser.SafeConfigParser()
parser_event.read(path)
        # The event record section header contains the word 'event',
        # optionally followed by ':', which allows loading the 'parent
        # event' first as a base
for section in filter(self.is_event, parser_event.sections()):
            parser_items = parser_event.items(section)
base_items = {}
# Read parent event if there's any
if (':' in section):
base = section[section.index(':') + 1:]
parser_base = ConfigParser.SafeConfigParser()
parser_base.read(self.test_dir + '/' + base)
base_items = parser_base.items('event')
e = Event(section, parser_items, base_items)
events[section] = e
def run_cmd(self, tempdir):
cmd = "PERF_TEST_ATTR=%s %s %s -o %s/perf.data %s" % (tempdir,
self.perf, self.command, tempdir, self.args)
ret = os.WEXITSTATUS(os.system(cmd))
log.info(" '%s' ret %d " % (cmd, ret))
if ret != int(self.ret):
raise Unsup(self)
def compare(self, expect, result):
match = {}
log.debug(" compare");
# For each expected event find all matching
# events in result. Fail if there's not any.
for exp_name, exp_event in expect.items():
exp_list = []
log.debug(" matching [%s]" % exp_name)
for res_name, res_event in result.items():
log.debug(" to [%s]" % res_name)
if (exp_event.equal(res_event)):
exp_list.append(res_name)
log.debug(" ->OK")
else:
log.debug(" ->FAIL");
log.debug(" match: [%s] matches %s" % (exp_name, str(exp_list)))
            # we did not find any matching event - fail
            if not exp_list:
                exp_event.diff(res_event)
                raise Fail(self, 'match failure')
match[exp_name] = exp_list
# For each defined group in the expected events
# check we match the same group in the result.
for exp_name, exp_event in expect.items():
group = exp_event.group
if (group == ''):
continue
for res_name in match[exp_name]:
res_group = result[res_name].group
if res_group not in match[group]:
raise Fail(self, 'group failure')
log.debug(" group: [%s] matches group leader %s" %
(exp_name, str(match[group])))
log.debug(" matched")
def resolve_groups(self, events):
for name, event in events.items():
            group_fd = event['group_fd']
            if group_fd == '-1':
                continue
            for iname, ievent in events.items():
                if ievent['fd'] == group_fd:
                    event.group = iname
                    log.debug('[%s] has group leader [%s]' % (name, iname))
                    break
def run(self):
        tempdir = tempfile.mkdtemp()
        try:
            # run the test script
            self.run_cmd(tempdir)
            # load the result events produced by the test run
            log.debug(" loading result events")
            for f in glob.glob(tempdir + '/event*'):
                self.load_events(f, self.result)
            # resolve group_fd to event names
            self.resolve_groups(self.expect)
            self.resolve_groups(self.result)
# do the expectation - results matching - both ways
self.compare(self.expect, self.result)
self.compare(self.result, self.expect)
finally:
# cleanup
shutil.rmtree(tempdir)
def run_tests(options):
for f in glob.glob(options.test_dir + '/' + options.test):
try:
Test(f, options).run()
except Unsup, obj:
log.warning("unsupp %s" % obj.getMsg())
def setup_log(verbose):
global log
level = logging.CRITICAL
if verbose == 1:
level = logging.WARNING
if verbose == 2:
level = logging.INFO
if verbose >= 3:
level = logging.DEBUG
log = logging.getLogger('test')
log.setLevel(level)
ch = logging.StreamHandler()
ch.setLevel(level)
formatter = logging.Formatter('%(message)s')
ch.setFormatter(formatter)
log.addHandler(ch)
USAGE = '''%s [OPTIONS]
-d dir # tests dir
-p path # perf binary
-t test # single test
-v # verbose level
''' % sys.argv[0]
def main():
parser = optparse.OptionParser(usage=USAGE)
parser.add_option("-t", "--test",
action="store", type="string", dest="test")
parser.add_option("-d", "--test-dir",
action="store", type="string", dest="test_dir")
parser.add_option("-p", "--perf",
action="store", type="string", dest="perf")
parser.add_option("-v", "--verbose",
action="count", dest="verbose")
options, args = parser.parse_args()
if args:
parser.error('FAILED wrong arguments %s' % ' '.join(args))
return -1
setup_log(options.verbose)
if not options.test_dir:
print 'FAILED no -d option specified'
sys.exit(-1)
if not options.test:
options.test = 'test*'
try:
run_tests(options)
except Fail, obj:
print "FAILED %s" % obj.getMsg();
sys.exit(-1)
sys.exit(0)
if __name__ == '__main__':
main()
|
gpl-2.0
|
elimence/edx-platform
|
lms/djangoapps/certificates/migrations/0002_auto__add_field_generatedcertificate_download_url.py
|
188
|
6807
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'GeneratedCertificate.download_url'
db.add_column('certificates_generatedcertificate', 'download_url',
self.gf('django.db.models.fields.CharField')(max_length=128, null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'GeneratedCertificate.download_url'
db.delete_column('certificates_generatedcertificate', 'download_url')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'certificates.generatedcertificate': {
'Meta': {'object_name': 'GeneratedCertificate'},
'certificate_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'download_url': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['certificates']
|
agpl-3.0
|
diorcety/intellij-community
|
python/lib/Lib/site-packages/django/conf/locale/pl/formats.py
|
78
|
1286
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j E Y'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = 'j E Y H:i:s'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd-m-Y'
SHORT_DATETIME_FORMAT = 'd-m-Y H:i:s'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
'%Y-%m-%d', '%y-%m-%d', # '2006-10-25', '06-10-25'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
)
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = ' '
NUMBER_GROUPING = 3
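# Editor's note (illustrative): with the input formats above, a Django
# forms.DateField accepts both '25.10.2006' and '2006-10-25' and parses
# either one to datetime.date(2006, 10, 25).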
|
apache-2.0
|
pyprism/Diary
|
hiren/settings.py
|
1
|
4794
|
"""
Django settings for hiren project.
Generated by 'django-admin startproject' using Django 1.9.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
import json
import datetime
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
try:
with open(BASE_DIR + '/' + 'config.local.json') as f:
JSON_DATA = json.load(f)
except FileNotFoundError:
with open(BASE_DIR + '/' + 'config.json') as f:
JSON_DATA = json.load(f)
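# Editor's note: the keys read from JSON_DATA below imply a config file of
# roughly this shape (illustrative values only):
# {
#     "secret_key": "change-me",
#     "db_name": "hiren",
#     "db_user": "postgres",
#     "db_password": "secret"
# }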
SECRET_KEY = os.environ.get('SECRET_KEY', JSON_DATA['secret_key'])
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = os.environ.get('DEBUG', False)
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework_swagger',
'diary',
'webpack_loader',
'taggit',
'taggit_serializer',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'hiren.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'hiren.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
if 'TRAVIS' in os.environ:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'travisci',
'USER': 'postgres',
'PASSWORD': '',
'HOST': 'localhost',
'PORT': '',
}
}
else:
DATABASES = {
'default': {
'NAME': JSON_DATA['db_name'],
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'USER': JSON_DATA['db_user'],
'PASSWORD': JSON_DATA['db_password'],
'HOST': 'localhost',
'PORT': '',
'CONN_MAX_AGE': 600,
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Dhaka'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_FINDERS = (
"django.contrib.staticfiles.finders.FileSystemFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder"
)
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer'
),
'DEFAULT_PARSER_CLASSES': (
'rest_framework.parsers.JSONParser',
'rest_framework.parsers.FormParser',
'rest_framework.parsers.MultiPartParser'
),
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.BasicAuthentication',
'rest_framework_jwt.authentication.JSONWebTokenAuthentication',
),
}
# JWT auth conf
JWT_AUTH = {
'JWT_EXPIRATION_DELTA': datetime.timedelta(seconds=3000),
}
# webpack loader conf
WEBPACK_LOADER = {
'DEFAULT': {
'CACHE': not DEBUG,
'BUNDLE_DIR_NAME': 'js/bundles/', # must end with slash
'STATS_FILE': os.path.join(BASE_DIR, 'webpack-stats.json'),
'POLL_INTERVAL': 0.1,
'IGNORE': ['.+\.hot-update.js', '.+\.map']
}
}
|
mit
|
parinporecha/backend_gtgonline
|
GTG/gtk/__init__.py
|
1
|
1400
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2012 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
""" Configuration for browser, it contains path to .glade files """
import os
class ViewConfig:
current_rep = os.path.dirname(os.path.abspath(__file__))
DELETE_GLADE_FILE = os.path.join(current_rep, "deletion.glade")
PREFERENCES_GLADE_FILE = os.path.join(current_rep, "preferences.glade")
PLUGINS_GLADE_FILE = os.path.join(current_rep, "plugins.glade")
BACKENDS_GLADE_FILE = os.path.join(current_rep, "backends_dialog.glade")
|
gpl-3.0
|
seanrivera/rust
|
src/etc/debugger_pretty_printers_common.py
|
16
|
12327
|
# Copyright 2015 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
"""
This module provides an abstraction layer over common Rust pretty printing
functionality needed by both GDB and LLDB.
"""
import re
# Type codes that indicate the kind of type as it appears in DWARF debug
# information. This code alone is not sufficient to determine the Rust type.
# For example structs, tuples, fat pointers, or enum variants will all have
# DWARF_TYPE_CODE_STRUCT.
DWARF_TYPE_CODE_STRUCT = 1
DWARF_TYPE_CODE_UNION = 2
DWARF_TYPE_CODE_PTR = 3
DWARF_TYPE_CODE_ARRAY = 4
DWARF_TYPE_CODE_ENUM = 5
# These constants specify the most specific kind of type that could be
# determined for a given value.
TYPE_KIND_UNKNOWN = -1
TYPE_KIND_EMPTY = 0
TYPE_KIND_SLICE = 1
TYPE_KIND_REGULAR_STRUCT = 2
TYPE_KIND_TUPLE = 3
TYPE_KIND_TUPLE_STRUCT = 4
TYPE_KIND_CSTYLE_VARIANT = 5
TYPE_KIND_TUPLE_VARIANT = 6
TYPE_KIND_STRUCT_VARIANT = 7
TYPE_KIND_STR_SLICE = 8
TYPE_KIND_STD_VEC = 9
TYPE_KIND_STD_STRING = 10
TYPE_KIND_REGULAR_ENUM = 11
TYPE_KIND_COMPRESSED_ENUM = 12
TYPE_KIND_SINGLETON_ENUM = 13
TYPE_KIND_CSTYLE_ENUM = 14
TYPE_KIND_PTR = 15
TYPE_KIND_FIXED_SIZE_VEC = 16
ENCODED_ENUM_PREFIX = "RUST$ENCODED$ENUM$"
ENUM_DISR_FIELD_NAME = "RUST$ENUM$DISR"
# Slice related constants
SLICE_FIELD_NAME_DATA_PTR = "data_ptr"
SLICE_FIELD_NAME_LENGTH = "length"
SLICE_FIELD_NAMES = [SLICE_FIELD_NAME_DATA_PTR, SLICE_FIELD_NAME_LENGTH]
# std::Vec<> related constants
STD_VEC_FIELD_NAME_DATA_PTR = "ptr"
STD_VEC_FIELD_NAME_LENGTH = "len"
STD_VEC_FIELD_NAME_CAPACITY = "cap"
STD_VEC_FIELD_NAMES = [STD_VEC_FIELD_NAME_DATA_PTR,
STD_VEC_FIELD_NAME_LENGTH,
STD_VEC_FIELD_NAME_CAPACITY]
# std::String related constants
STD_STRING_FIELD_NAMES = ["vec"]
class Type(object):
"""
This class provides a common interface for type-oriented operations.
Sub-classes are supposed to wrap a debugger-specific type-object and
provide implementations for the abstract methods in this class.
"""
def __init__(self):
self.__type_kind = None
def get_unqualified_type_name(self):
"""
Implementations of this method should return the unqualified name of the
type-object they are wrapping. Some examples:
'int' -> 'int'
'std::vec::Vec<std::string::String>' -> 'Vec<std::string::String>'
'&std::option::Option<std::string::String>' -> '&std::option::Option<std::string::String>'
As you can see, type arguments stay fully qualified.
"""
raise NotImplementedError("Override this method")
def get_dwarf_type_kind(self):
"""
Implementations of this method should return the correct
DWARF_TYPE_CODE_* value for the wrapped type-object.
"""
raise NotImplementedError("Override this method")
def get_fields(self):
"""
Implementations of this method should return a list of field-objects of
this type. For Rust-enums (i.e. with DWARF_TYPE_CODE_UNION) these field-
objects represent the variants of the enum. Field-objects must have a
`name` attribute that gives their name as specified in DWARF.
"""
assert ((self.get_dwarf_type_kind() == DWARF_TYPE_CODE_STRUCT) or
(self.get_dwarf_type_kind() == DWARF_TYPE_CODE_UNION))
raise NotImplementedError("Override this method")
def get_wrapped_value(self):
"""
Returns the debugger-specific type-object wrapped by this object. This
is sometimes needed for doing things like pointer-arithmetic in GDB.
"""
raise NotImplementedError("Override this method")
def get_type_kind(self):
"""This method returns the TYPE_KIND_* value for this type-object."""
if self.__type_kind is None:
dwarf_type_code = self.get_dwarf_type_kind()
if dwarf_type_code == DWARF_TYPE_CODE_STRUCT:
self.__type_kind = self.__classify_struct()
elif dwarf_type_code == DWARF_TYPE_CODE_UNION:
self.__type_kind = self.__classify_union()
elif dwarf_type_code == DWARF_TYPE_CODE_PTR:
self.__type_kind = TYPE_KIND_PTR
elif dwarf_type_code == DWARF_TYPE_CODE_ARRAY:
self.__type_kind = TYPE_KIND_FIXED_SIZE_VEC
else:
self.__type_kind = TYPE_KIND_UNKNOWN
return self.__type_kind
def __classify_struct(self):
assert self.get_dwarf_type_kind() == DWARF_TYPE_CODE_STRUCT
unqualified_type_name = self.get_unqualified_type_name()
# STR SLICE
if unqualified_type_name == "&str":
return TYPE_KIND_STR_SLICE
# REGULAR SLICE
if (unqualified_type_name.startswith("&[") and
unqualified_type_name.endswith("]") and
self.__conforms_to_field_layout(SLICE_FIELD_NAMES)):
return TYPE_KIND_SLICE
fields = self.get_fields()
field_count = len(fields)
# EMPTY STRUCT
if field_count == 0:
return TYPE_KIND_EMPTY
# STD VEC
if (unqualified_type_name.startswith("Vec<") and
self.__conforms_to_field_layout(STD_VEC_FIELD_NAMES)):
return TYPE_KIND_STD_VEC
# STD STRING
if (unqualified_type_name.startswith("String") and
self.__conforms_to_field_layout(STD_STRING_FIELD_NAMES)):
return TYPE_KIND_STD_STRING
# ENUM VARIANTS
if fields[0].name == ENUM_DISR_FIELD_NAME:
if field_count == 1:
return TYPE_KIND_CSTYLE_VARIANT
elif self.__all_fields_conform_to_tuple_field_naming(1):
return TYPE_KIND_TUPLE_VARIANT
else:
return TYPE_KIND_STRUCT_VARIANT
# TUPLE
if self.__all_fields_conform_to_tuple_field_naming(0):
if unqualified_type_name.startswith("("):
return TYPE_KIND_TUPLE
else:
return TYPE_KIND_TUPLE_STRUCT
# REGULAR STRUCT
return TYPE_KIND_REGULAR_STRUCT
def __classify_union(self):
assert self.get_dwarf_type_kind() == DWARF_TYPE_CODE_UNION
union_members = self.get_fields()
union_member_count = len(union_members)
if union_member_count == 0:
return TYPE_KIND_EMPTY
elif union_member_count == 1:
first_variant_name = union_members[0].name
if first_variant_name is None:
return TYPE_KIND_SINGLETON_ENUM
else:
assert first_variant_name.startswith(ENCODED_ENUM_PREFIX)
return TYPE_KIND_COMPRESSED_ENUM
else:
return TYPE_KIND_REGULAR_ENUM
def __conforms_to_field_layout(self, expected_fields):
actual_fields = self.get_fields()
actual_field_count = len(actual_fields)
if actual_field_count != len(expected_fields):
return False
for i in range(0, actual_field_count):
if actual_fields[i].name != expected_fields[i]:
return False
return True
def __all_fields_conform_to_tuple_field_naming(self, start_index):
fields = self.get_fields()
field_count = len(fields)
for i in range(start_index, field_count):
field_name = fields[i].name
if (field_name is None) or (re.match(r"__\d+$", field_name) is None):
return False
return True
class Value(object):
"""
This class provides a common interface for value-oriented operations.
Sub-classes are supposed to wrap a debugger-specific value-object and
provide implementations for the abstract methods in this class.
"""
def __init__(self, ty):
self.type = ty
def get_child_at_index(self, index):
"""Returns the value of the field, array element or variant at the given index"""
raise NotImplementedError("Override this method")
def as_integer(self):
"""
Try to convert the wrapped value into a Python integer. This should
always succeed for values that are pointers or actual integers.
"""
raise NotImplementedError("Override this method")
def get_wrapped_value(self):
"""
Returns the debugger-specific value-object wrapped by this object. This
is sometimes needed for doing things like pointer-arithmetic in GDB.
"""
raise NotImplementedError("Override this method")
class EncodedEnumInfo(object):
"""
This class provides facilities for handling enum values with compressed
encoding where a non-null field in one variant doubles as the discriminant.
"""
def __init__(self, enum_val):
assert enum_val.type.get_type_kind() == TYPE_KIND_COMPRESSED_ENUM
variant_name = enum_val.type.get_fields()[0].name
last_separator_index = variant_name.rfind("$")
start_index = len(ENCODED_ENUM_PREFIX)
indices_substring = variant_name[start_index:last_separator_index].split("$")
self.__enum_val = enum_val
self.__disr_field_indices = [int(index) for index in indices_substring]
self.__null_variant_name = variant_name[last_separator_index + 1:]
def is_null_variant(self):
ty = self.__enum_val.type
sole_variant_val = self.__enum_val.get_child_at_index(0)
discriminant_val = sole_variant_val
for disr_field_index in self.__disr_field_indices:
discriminant_val = discriminant_val.get_child_at_index(disr_field_index)
# If the discriminant field is a fat pointer we have to consider the
# first word as the true discriminant
if discriminant_val.type.get_dwarf_type_kind() == DWARF_TYPE_CODE_STRUCT:
discriminant_val = discriminant_val.get_child_at_index(0)
return discriminant_val.as_integer() == 0
def get_non_null_variant_val(self):
return self.__enum_val.get_child_at_index(0)
def get_null_variant_name(self):
return self.__null_variant_name
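# Editor's illustrative example: for a type like Option<&u32> the compiler can
# emit a single variant named "RUST$ENCODED$ENUM$0$None", which the class
# above decodes as: descend into field 0, and if the resulting pointer word is
# zero the value is the "None" variant.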
def get_discriminant_value_as_integer(enum_val):
assert enum_val.type.get_dwarf_type_kind() == DWARF_TYPE_CODE_UNION
# we can take any variant here because the discriminant has to be the same
# for all of them.
variant_val = enum_val.get_child_at_index(0)
disr_val = variant_val.get_child_at_index(0)
return disr_val.as_integer()
def extract_length_ptr_and_cap_from_std_vec(vec_val):
assert vec_val.type.get_type_kind() == TYPE_KIND_STD_VEC
length_field_index = STD_VEC_FIELD_NAMES.index(STD_VEC_FIELD_NAME_LENGTH)
ptr_field_index = STD_VEC_FIELD_NAMES.index(STD_VEC_FIELD_NAME_DATA_PTR)
cap_field_index = STD_VEC_FIELD_NAMES.index(STD_VEC_FIELD_NAME_CAPACITY)
length = vec_val.get_child_at_index(length_field_index).as_integer()
vec_ptr_val = vec_val.get_child_at_index(ptr_field_index)
capacity = vec_val.get_child_at_index(cap_field_index).as_integer()
unique_ptr_val = vec_ptr_val.get_child_at_index(0)
data_ptr = unique_ptr_val.get_child_at_index(0)
assert data_ptr.type.get_dwarf_type_kind() == DWARF_TYPE_CODE_PTR
return (length, data_ptr, capacity)
def extract_length_and_ptr_from_slice(slice_val):
assert (slice_val.type.get_type_kind() == TYPE_KIND_SLICE or
slice_val.type.get_type_kind() == TYPE_KIND_STR_SLICE)
length_field_index = SLICE_FIELD_NAMES.index(SLICE_FIELD_NAME_LENGTH)
ptr_field_index = SLICE_FIELD_NAMES.index(SLICE_FIELD_NAME_DATA_PTR)
length = slice_val.get_child_at_index(length_field_index).as_integer()
data_ptr = slice_val.get_child_at_index(ptr_field_index)
assert data_ptr.type.get_dwarf_type_kind() == DWARF_TYPE_CODE_PTR
return (length, data_ptr)
|
apache-2.0
|
alanjw/GreenOpenERP-Win-X86
|
openerp/addons/hr_recruitment/report/__init__.py
|
442
|
1107
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_recruitment_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
ilayn/scipy
|
scipy/sparse/csr.py
|
12
|
11609
|
"""Compressed Sparse Row matrix format"""
__docformat__ = "restructuredtext en"
__all__ = ['csr_matrix', 'isspmatrix_csr']
import numpy as np
from .base import spmatrix
from ._sparsetools import (csr_tocsc, csr_tobsr, csr_count_blocks,
get_csr_submatrix)
from .sputils import upcast, get_index_dtype
from .compressed import _cs_matrix
class csr_matrix(_cs_matrix):
"""
Compressed Sparse Row matrix
This can be instantiated in several ways:
csr_matrix(D)
with a dense matrix or rank-2 ndarray D
csr_matrix(S)
with another sparse matrix S (equivalent to S.tocsr())
csr_matrix((M, N), [dtype])
to construct an empty matrix with shape (M, N)
dtype is optional, defaulting to dtype='d'.
csr_matrix((data, (row_ind, col_ind)), [shape=(M, N)])
where ``data``, ``row_ind`` and ``col_ind`` satisfy the
relationship ``a[row_ind[k], col_ind[k]] = data[k]``.
csr_matrix((data, indices, indptr), [shape=(M, N)])
is the standard CSR representation where the column indices for
row i are stored in ``indices[indptr[i]:indptr[i+1]]`` and their
corresponding values are stored in ``data[indptr[i]:indptr[i+1]]``.
If the shape parameter is not supplied, the matrix dimensions
are inferred from the index arrays.
Attributes
----------
dtype : dtype
Data type of the matrix
shape : 2-tuple
Shape of the matrix
ndim : int
Number of dimensions (this is always 2)
nnz
Number of stored values, including explicit zeros
data
CSR format data array of the matrix
indices
CSR format index array of the matrix
indptr
CSR format index pointer array of the matrix
has_sorted_indices
Whether indices are sorted
Notes
-----
Sparse matrices can be used in arithmetic operations: they support
addition, subtraction, multiplication, division, and matrix power.
Advantages of the CSR format
- efficient arithmetic operations CSR + CSR, CSR * CSR, etc.
- efficient row slicing
- fast matrix vector products
Disadvantages of the CSR format
- slow column slicing operations (consider CSC)
- changes to the sparsity structure are expensive (consider LIL or DOK)
Examples
--------
>>> import numpy as np
>>> from scipy.sparse import csr_matrix
>>> csr_matrix((3, 4), dtype=np.int8).toarray()
array([[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]], dtype=int8)
>>> row = np.array([0, 0, 1, 2, 2, 2])
>>> col = np.array([0, 2, 2, 0, 1, 2])
>>> data = np.array([1, 2, 3, 4, 5, 6])
>>> csr_matrix((data, (row, col)), shape=(3, 3)).toarray()
array([[1, 0, 2],
[0, 0, 3],
[4, 5, 6]])
>>> indptr = np.array([0, 2, 3, 6])
>>> indices = np.array([0, 2, 2, 0, 1, 2])
>>> data = np.array([1, 2, 3, 4, 5, 6])
>>> csr_matrix((data, indices, indptr), shape=(3, 3)).toarray()
array([[1, 0, 2],
[0, 0, 3],
[4, 5, 6]])
Duplicate entries are summed together:
>>> row = np.array([0, 1, 2, 0])
>>> col = np.array([0, 1, 1, 0])
>>> data = np.array([1, 2, 4, 8])
>>> csr_matrix((data, (row, col)), shape=(3, 3)).toarray()
array([[9, 0, 0],
[0, 2, 0],
[0, 4, 0]])
As an example of how to construct a CSR matrix incrementally,
the following snippet builds a term-document matrix from texts:
>>> docs = [["hello", "world", "hello"], ["goodbye", "cruel", "world"]]
>>> indptr = [0]
>>> indices = []
>>> data = []
>>> vocabulary = {}
>>> for d in docs:
... for term in d:
... index = vocabulary.setdefault(term, len(vocabulary))
... indices.append(index)
... data.append(1)
... indptr.append(len(indices))
...
>>> csr_matrix((data, indices, indptr), dtype=int).toarray()
array([[2, 1, 0, 0],
[0, 1, 1, 1]])
"""
format = 'csr'
def transpose(self, axes=None, copy=False):
if axes is not None:
raise ValueError(("Sparse matrices do not support "
"an 'axes' parameter because swapping "
"dimensions is the only logical permutation."))
M, N = self.shape
from .csc import csc_matrix
return csc_matrix((self.data, self.indices,
self.indptr), shape=(N, M), copy=copy)
transpose.__doc__ = spmatrix.transpose.__doc__
def tolil(self, copy=False):
from .lil import lil_matrix
        lil = lil_matrix(self.shape, dtype=self.dtype)
        self.sum_duplicates()
        ptr, ind, dat = self.indptr, self.indices, self.data
rows, data = lil.rows, lil.data
for n in range(self.shape[0]):
start = ptr[n]
end = ptr[n+1]
rows[n] = ind[start:end].tolist()
data[n] = dat[start:end].tolist()
return lil
tolil.__doc__ = spmatrix.tolil.__doc__
def tocsr(self, copy=False):
if copy:
return self.copy()
else:
return self
tocsr.__doc__ = spmatrix.tocsr.__doc__
def tocsc(self, copy=False):
idx_dtype = get_index_dtype((self.indptr, self.indices),
maxval=max(self.nnz, self.shape[0]))
indptr = np.empty(self.shape[1] + 1, dtype=idx_dtype)
indices = np.empty(self.nnz, dtype=idx_dtype)
data = np.empty(self.nnz, dtype=upcast(self.dtype))
csr_tocsc(self.shape[0], self.shape[1],
self.indptr.astype(idx_dtype),
self.indices.astype(idx_dtype),
self.data,
indptr,
indices,
data)
from .csc import csc_matrix
A = csc_matrix((data, indices, indptr), shape=self.shape)
A.has_sorted_indices = True
return A
tocsc.__doc__ = spmatrix.tocsc.__doc__
def tobsr(self, blocksize=None, copy=True):
from .bsr import bsr_matrix
if blocksize is None:
from .spfuncs import estimate_blocksize
return self.tobsr(blocksize=estimate_blocksize(self))
        elif blocksize == (1, 1):
            arg1 = (self.data.reshape(-1, 1, 1), self.indices, self.indptr)
            return bsr_matrix(arg1, shape=self.shape, copy=copy)
else:
            R, C = blocksize
            M, N = self.shape
            if R < 1 or C < 1 or M % R != 0 or N % C != 0:
                raise ValueError('invalid blocksize %s' % blocksize)
            blks = csr_count_blocks(M, N, R, C, self.indptr, self.indices)
idx_dtype = get_index_dtype((self.indptr, self.indices),
maxval=max(N//C, blks))
indptr = np.empty(M//R+1, dtype=idx_dtype)
indices = np.empty(blks, dtype=idx_dtype)
data = np.zeros((blks,R,C), dtype=self.dtype)
csr_tobsr(M, N, R, C,
self.indptr.astype(idx_dtype),
self.indices.astype(idx_dtype),
self.data,
indptr, indices, data.ravel())
            return bsr_matrix((data, indices, indptr), shape=self.shape)
tobsr.__doc__ = spmatrix.tobsr.__doc__
# these functions are used by the parent class (_cs_matrix)
# to remove redundancy between csc_matrix and csr_matrix
def _swap(self, x):
"""swap the members of x if this is a column-oriented matrix
"""
return x
def __iter__(self):
indptr = np.zeros(2, dtype=self.indptr.dtype)
shape = (1, self.shape[1])
i0 = 0
for i1 in self.indptr[1:]:
indptr[1] = i1 - i0
indices = self.indices[i0:i1]
data = self.data[i0:i1]
yield csr_matrix((data, indices, indptr), shape=shape, copy=True)
i0 = i1
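    # Editor's note (illustrative): iteration yields one (1 x N) CSR row
    # matrix per row, e.g. [r.toarray() for r in csr_matrix(np.eye(2))].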
def getrow(self, i):
"""Returns a copy of row i of the matrix, as a (1 x n)
CSR matrix (row vector).
"""
M, N = self.shape
i = int(i)
if i < 0:
i += M
if i < 0 or i >= M:
raise IndexError('index (%d) out of range' % i)
indptr, indices, data = get_csr_submatrix(
M, N, self.indptr, self.indices, self.data, i, i + 1, 0, N)
return csr_matrix((data, indices, indptr), shape=(1, N),
dtype=self.dtype, copy=False)
def getcol(self, i):
"""Returns a copy of column i of the matrix, as a (m x 1)
CSR matrix (column vector).
"""
M, N = self.shape
i = int(i)
if i < 0:
i += N
if i < 0 or i >= N:
raise IndexError('index (%d) out of range' % i)
indptr, indices, data = get_csr_submatrix(
M, N, self.indptr, self.indices, self.data, 0, M, i, i + 1)
return csr_matrix((data, indices, indptr), shape=(M, 1),
dtype=self.dtype, copy=False)
def _get_intXarray(self, row, col):
return self.getrow(row)._minor_index_fancy(col)
def _get_intXslice(self, row, col):
if col.step in (1, None):
return self._get_submatrix(row, col, copy=True)
# TODO: uncomment this once it's faster:
# return self.getrow(row)._minor_slice(col)
M, N = self.shape
start, stop, stride = col.indices(N)
ii, jj = self.indptr[row:row+2]
row_indices = self.indices[ii:jj]
row_data = self.data[ii:jj]
if stride > 0:
ind = (row_indices >= start) & (row_indices < stop)
else:
ind = (row_indices <= start) & (row_indices > stop)
if abs(stride) > 1:
ind &= (row_indices - start) % stride == 0
row_indices = (row_indices[ind] - start) // stride
row_data = row_data[ind]
row_indptr = np.array([0, len(row_indices)])
if stride < 0:
row_data = row_data[::-1]
row_indices = abs(row_indices[::-1])
shape = (1, int(np.ceil(float(stop - start) / stride)))
return csr_matrix((row_data, row_indices, row_indptr), shape=shape,
dtype=self.dtype, copy=False)
def _get_sliceXint(self, row, col):
if row.step in (1, None):
return self._get_submatrix(row, col, copy=True)
return self._major_slice(row)._get_submatrix(minor=col)
def _get_sliceXarray(self, row, col):
return self._major_slice(row)._minor_index_fancy(col)
def _get_arrayXint(self, row, col):
return self._major_index_fancy(row)._get_submatrix(minor=col)
def _get_arrayXslice(self, row, col):
if col.step not in (1, None):
col = np.arange(*col.indices(self.shape[1]))
return self._get_arrayXarray(row, col)
return self._major_index_fancy(row)._get_submatrix(minor=col)
def isspmatrix_csr(x):
"""Is x of csr_matrix type?
Parameters
----------
x
object to check for being a csr matrix
Returns
-------
bool
True if x is a csr matrix, False otherwise
Examples
--------
>>> from scipy.sparse import csr_matrix, isspmatrix_csr
>>> isspmatrix_csr(csr_matrix([[5]]))
True
>>> from scipy.sparse import csc_matrix, csr_matrix, isspmatrix_csc
>>> isspmatrix_csr(csc_matrix([[5]]))
False
"""
return isinstance(x, csr_matrix)
|
bsd-3-clause
|
chirilo/mozillians
|
vendor-local/lib/python/celery/tests/test_events/test_events_state.py
|
14
|
10736
|
from __future__ import absolute_import
from time import time
from itertools import count
from celery import states
from celery.events import Event
from celery.events.state import State, Worker, Task, HEARTBEAT_EXPIRE
from celery.utils import uuid
from celery.tests.utils import Case
class replay(object):
def __init__(self, state):
self.state = state
self.rewind()
self.setup()
def setup(self):
pass
def __iter__(self):
return self
def __next__(self):
try:
self.state.event(self.events[self.position()])
except IndexError:
raise StopIteration()
next = __next__
def rewind(self):
self.position = count(0).next
return self
def play(self):
for _ in self:
pass
class ev_worker_online_offline(replay):
def setup(self):
self.events = [
Event("worker-online", hostname="utest1"),
Event("worker-offline", hostname="utest1"),
]
class ev_worker_heartbeats(replay):
def setup(self):
self.events = [
Event("worker-heartbeat", hostname="utest1",
timestamp=time() - HEARTBEAT_EXPIRE * 2),
Event("worker-heartbeat", hostname="utest1"),
]
class ev_task_states(replay):
def setup(self):
tid = self.tid = uuid()
self.events = [
Event("task-received", uuid=tid, name="task1",
args="(2, 2)", kwargs="{'foo': 'bar'}",
retries=0, eta=None, hostname="utest1"),
Event("task-started", uuid=tid, hostname="utest1"),
Event("task-revoked", uuid=tid, hostname="utest1"),
Event("task-retried", uuid=tid, exception="KeyError('bar')",
traceback="line 2 at main", hostname="utest1"),
Event("task-failed", uuid=tid, exception="KeyError('foo')",
traceback="line 1 at main", hostname="utest1"),
Event("task-succeeded", uuid=tid, result="4",
runtime=0.1234, hostname="utest1"),
]
class ev_snapshot(replay):
def setup(self):
self.events = [
Event("worker-online", hostname="utest1"),
Event("worker-online", hostname="utest2"),
Event("worker-online", hostname="utest3"),
]
for i in range(20):
worker = not i % 2 and "utest2" or "utest1"
type = not i % 2 and "task2" or "task1"
self.events.append(Event("task-received", name=type,
uuid=uuid(), hostname=worker))
class test_Worker(Case):
def test_survives_missing_timestamp(self):
worker = Worker(hostname="foo")
worker.on_heartbeat(timestamp=None)
self.assertEqual(worker.heartbeats, [])
def test_repr(self):
self.assertTrue(repr(Worker(hostname="foo")))
class test_Task(Case):
def test_info(self):
task = Task(uuid="abcdefg",
name="tasks.add",
args="(2, 2)",
kwargs="{}",
retries=2,
result=42,
eta=1,
runtime=0.0001,
expires=1,
exception=1,
received=time() - 10,
started=time() - 8,
succeeded=time())
self.assertEqual(sorted(list(task._info_fields)),
sorted(task.info().keys()))
self.assertEqual(sorted(list(task._info_fields + ("received", ))),
sorted(task.info(extra=("received", ))))
self.assertEqual(sorted(["args", "kwargs"]),
sorted(task.info(["args", "kwargs"]).keys()))
def test_ready(self):
task = Task(uuid="abcdefg",
name="tasks.add")
task.on_received(timestamp=time())
self.assertFalse(task.ready)
task.on_succeeded(timestamp=time())
self.assertTrue(task.ready)
def test_sent(self):
task = Task(uuid="abcdefg",
name="tasks.add")
task.on_sent(timestamp=time())
self.assertEqual(task.state, states.PENDING)
def test_merge(self):
task = Task()
task.on_failed(timestamp=time())
task.on_started(timestamp=time())
task.on_received(timestamp=time(), name="tasks.add", args=(2, 2))
self.assertEqual(task.state, states.FAILURE)
self.assertEqual(task.name, "tasks.add")
self.assertTupleEqual(task.args, (2, 2))
task.on_retried(timestamp=time())
self.assertEqual(task.state, states.RETRY)
def test_repr(self):
self.assertTrue(repr(Task(uuid="xxx", name="tasks.add")))
class test_State(Case):
def test_repr(self):
self.assertTrue(repr(State()))
def test_worker_online_offline(self):
r = ev_worker_online_offline(State())
r.next()
self.assertTrue(r.state.alive_workers())
self.assertTrue(r.state.workers["utest1"].alive)
r.play()
self.assertFalse(r.state.alive_workers())
self.assertFalse(r.state.workers["utest1"].alive)
def test_worker_heartbeat_expire(self):
r = ev_worker_heartbeats(State())
r.next()
self.assertFalse(r.state.alive_workers())
self.assertFalse(r.state.workers["utest1"].alive)
r.play()
self.assertTrue(r.state.alive_workers())
self.assertTrue(r.state.workers["utest1"].alive)
def test_task_states(self):
r = ev_task_states(State())
# RECEIVED
r.next()
self.assertTrue(r.tid in r.state.tasks)
task = r.state.tasks[r.tid]
self.assertEqual(task.state, states.RECEIVED)
self.assertTrue(task.received)
self.assertEqual(task.timestamp, task.received)
self.assertEqual(task.worker.hostname, "utest1")
# STARTED
r.next()
self.assertTrue(r.state.workers["utest1"].alive,
"any task event adds worker heartbeat")
self.assertEqual(task.state, states.STARTED)
self.assertTrue(task.started)
self.assertEqual(task.timestamp, task.started)
self.assertEqual(task.worker.hostname, "utest1")
# REVOKED
r.next()
self.assertEqual(task.state, states.REVOKED)
self.assertTrue(task.revoked)
self.assertEqual(task.timestamp, task.revoked)
self.assertEqual(task.worker.hostname, "utest1")
# RETRY
r.next()
self.assertEqual(task.state, states.RETRY)
self.assertTrue(task.retried)
self.assertEqual(task.timestamp, task.retried)
self.assertEqual(task.worker.hostname, "utest1")
self.assertEqual(task.exception, "KeyError('bar')")
self.assertEqual(task.traceback, "line 2 at main")
# FAILURE
r.next()
self.assertEqual(task.state, states.FAILURE)
self.assertTrue(task.failed)
self.assertEqual(task.timestamp, task.failed)
self.assertEqual(task.worker.hostname, "utest1")
self.assertEqual(task.exception, "KeyError('foo')")
self.assertEqual(task.traceback, "line 1 at main")
# SUCCESS
r.next()
self.assertEqual(task.state, states.SUCCESS)
self.assertTrue(task.succeeded)
self.assertEqual(task.timestamp, task.succeeded)
self.assertEqual(task.worker.hostname, "utest1")
self.assertEqual(task.result, "4")
self.assertEqual(task.runtime, 0.1234)
def assertStateEmpty(self, state):
self.assertFalse(state.tasks)
self.assertFalse(state.workers)
self.assertFalse(state.event_count)
self.assertFalse(state.task_count)
def assertState(self, state):
self.assertTrue(state.tasks)
self.assertTrue(state.workers)
self.assertTrue(state.event_count)
self.assertTrue(state.task_count)
def test_freeze_while(self):
s = State()
r = ev_snapshot(s)
r.play()
def work():
pass
s.freeze_while(work, clear_after=True)
self.assertFalse(s.event_count)
s2 = State()
r = ev_snapshot(s2)
r.play()
s2.freeze_while(work, clear_after=False)
self.assertTrue(s2.event_count)
def test_clear_tasks(self):
s = State()
r = ev_snapshot(s)
r.play()
self.assertTrue(s.tasks)
s.clear_tasks(ready=False)
self.assertFalse(s.tasks)
def test_clear(self):
r = ev_snapshot(State())
r.play()
self.assertTrue(r.state.event_count)
self.assertTrue(r.state.workers)
self.assertTrue(r.state.tasks)
self.assertTrue(r.state.task_count)
r.state.clear()
self.assertFalse(r.state.event_count)
self.assertFalse(r.state.workers)
self.assertTrue(r.state.tasks)
self.assertFalse(r.state.task_count)
r.state.clear(False)
self.assertFalse(r.state.tasks)
def test_task_types(self):
r = ev_snapshot(State())
r.play()
self.assertEqual(sorted(r.state.task_types()), ["task1", "task2"])
def test_tasks_by_timestamp(self):
r = ev_snapshot(State())
r.play()
self.assertEqual(len(r.state.tasks_by_timestamp()), 20)
def test_tasks_by_type(self):
r = ev_snapshot(State())
r.play()
self.assertEqual(len(r.state.tasks_by_type("task1")), 10)
self.assertEqual(len(r.state.tasks_by_type("task2")), 10)
def test_alive_workers(self):
r = ev_snapshot(State())
r.play()
self.assertEqual(len(r.state.alive_workers()), 3)
def test_tasks_by_worker(self):
r = ev_snapshot(State())
r.play()
self.assertEqual(len(r.state.tasks_by_worker("utest1")), 10)
self.assertEqual(len(r.state.tasks_by_worker("utest2")), 10)
def test_survives_unknown_worker_event(self):
s = State()
s.worker_event("worker-unknown-event-xxx", {"foo": "bar"})
s.worker_event("worker-unknown-event-xxx", {"hostname": "xxx",
"foo": "bar"})
def test_survives_unknown_task_event(self):
s = State()
s.task_event("task-unknown-event-xxx", {"foo": "bar",
"uuid": "x",
"hostname": "y"})
def test_callback(self):
scratch = {}
def callback(state, event):
scratch["recv"] = True
s = State(callback=callback)
s.event({"type": "worker-online"})
self.assertTrue(scratch.get("recv"))
|
bsd-3-clause
|
Slezhuk/ansible
|
lib/ansible/modules/remote_management/ipmi/ipmi_power.py
|
69
|
3949
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ipmi_power
short_description: Power management for machine
description:
- Use this module for power management
version_added: "2.2"
options:
name:
description:
- Hostname or ip address of the BMC.
required: true
port:
description:
- Remote RMCP port.
required: false
default: 623
user:
description:
- Username to use to connect to the BMC.
required: true
password:
description:
- Password to connect to the BMC.
required: true
default: null
state:
description:
- Whether to ensure that the machine is in the desired state.
required: true
choices:
- on -- Request system turn on
- off -- Request system turn off without waiting for OS to shutdown
- shutdown -- Have system request OS proper shutdown
- reset -- Request system reset without waiting for OS
- boot -- If system is off, then 'on', else 'reset'
timeout:
description:
- Maximum number of seconds before interrupt request.
required: false
default: 300
requirements:
- "python >= 2.6"
- pyghmi
author: "Bulat Gaifullin ([email protected])"
'''
RETURN = '''
powerstate:
description: The current power state of the machine.
returned: success
type: string
sample: on
'''
EXAMPLES = '''
# Ensure machine is powered on.
- ipmi_power:
name: test.testdomain.com
user: admin
password: password
state: on
'''
try:
from pyghmi.ipmi import command
except ImportError:
command = None
from ansible.module_utils.basic import *
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True),
port=dict(default=623, type='int'),
state=dict(required=True, choices=['on', 'off', 'shutdown', 'reset', 'boot']),
user=dict(required=True, no_log=True),
password=dict(required=True, no_log=True),
timeout=dict(default=300, type='int'),
),
supports_check_mode=True,
)
if command is None:
module.fail_json(msg='the python pyghmi module is required')
name = module.params['name']
port = module.params['port']
user = module.params['user']
password = module.params['password']
state = module.params['state']
timeout = module.params['timeout']
# --- run command ---
try:
ipmi_cmd = command.Command(
bmc=name, userid=user, password=password, port=port
)
module.debug('ipmi instantiated - name: "%s"' % name)
current = ipmi_cmd.get_power()
if current['powerstate'] != state:
response = {'powerstate': state} if module.check_mode else ipmi_cmd.set_power(state, wait=timeout)
changed = True
else:
response = current
changed = False
if 'error' in response:
module.fail_json(msg=response['error'])
module.exit_json(changed=changed, **response)
except Exception as e:
module.fail_json(msg=str(e))
if __name__ == '__main__':
main()
|
gpl-3.0
|
omegamoon/rockchip-rk3188-mk908
|
tools/perf/scripts/python/failed-syscalls-by-pid.py
|
11180
|
2058
|
# failed system call counts, by pid
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s failed-syscalls-by-pid.py [comm|pid]\n";
for_comm = None
for_pid = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
try:
for_pid = int(sys.argv[1])
    except ValueError:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_error_totals()
def raw_syscalls__sys_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, ret):
if (for_comm and common_comm != for_comm) or \
(for_pid and common_pid != for_pid ):
return
if ret < 0:
try:
syscalls[common_comm][common_pid][id][ret] += 1
except TypeError:
syscalls[common_comm][common_pid][id][ret] = 1
def print_error_totals():
if for_comm is not None:
print "\nsyscall errors for %s:\n\n" % (for_comm),
else:
print "\nsyscall errors:\n\n",
print "%-30s %10s\n" % ("comm [pid]", "count"),
print "%-30s %10s\n" % ("------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id in id_keys:
print " syscall: %-16s\n" % syscall_name(id),
ret_keys = syscalls[comm][pid][id].keys()
for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True):
print " err = %-20s %10d\n" % (strerror(ret), val),
|
gpl-2.0
|
asser/django
|
tests/gis_tests/gis_migrations/test_operations.py
|
284
|
7957
|
from __future__ import unicode_literals
from django.contrib.gis.db.models import fields
from django.core.exceptions import ImproperlyConfigured
from django.db import connection, migrations, models
from django.db.migrations.migration import Migration
from django.db.migrations.state import ProjectState
from django.test import (
TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature,
)
from ..utils import mysql
if connection.features.gis_enabled:
try:
GeometryColumns = connection.ops.geometry_columns()
HAS_GEOMETRY_COLUMNS = True
except NotImplementedError:
HAS_GEOMETRY_COLUMNS = False
@skipUnlessDBFeature('gis_enabled')
class OperationTests(TransactionTestCase):
available_apps = ['gis_tests.gis_migrations']
def tearDown(self):
# Delete table after testing
if hasattr(self, 'current_state'):
self.apply_operations('gis', self.current_state, [migrations.DeleteModel('Neighborhood')])
super(OperationTests, self).tearDown()
def get_table_description(self, table):
with connection.cursor() as cursor:
return connection.introspection.get_table_description(cursor, table)
def assertColumnExists(self, table, column):
self.assertIn(column, [c.name for c in self.get_table_description(table)])
def assertColumnNotExists(self, table, column):
self.assertNotIn(column, [c.name for c in self.get_table_description(table)])
def apply_operations(self, app_label, project_state, operations):
migration = Migration('name', app_label)
migration.operations = operations
with connection.schema_editor() as editor:
return migration.apply(project_state, editor)
def set_up_test_model(self, force_raster_creation=False):
test_fields = [
('id', models.AutoField(primary_key=True)),
('name', models.CharField(max_length=100, unique=True)),
('geom', fields.MultiPolygonField(srid=4326))
]
if connection.features.supports_raster or force_raster_creation:
test_fields += [('rast', fields.RasterField(srid=4326))]
operations = [migrations.CreateModel('Neighborhood', test_fields)]
return self.apply_operations('gis', ProjectState(), operations)
def assertGeometryColumnsCount(self, expected_count):
table_name = 'gis_neighborhood'
if connection.features.uppercases_column_names:
table_name = table_name.upper()
self.assertEqual(
GeometryColumns.objects.filter(**{
GeometryColumns.table_name_col(): table_name,
}).count(),
expected_count
)
def assertSpatialIndexExists(self, table, column):
with connection.cursor() as cursor:
indexes = connection.introspection.get_indexes(cursor, table)
self.assertIn(column, indexes)
def alter_gis_model(self, migration_class, model_name, field_name,
blank=False, field_class=None):
project_state = self.set_up_test_model()
self.current_state = project_state
args = [model_name, field_name]
if field_class:
args.append(field_class(srid=4326, blank=blank))
operation = migration_class(*args)
new_state = project_state.clone()
operation.state_forwards('gis', new_state)
with connection.schema_editor() as editor:
operation.database_forwards('gis', editor, project_state, new_state)
self.current_state = new_state
def test_add_geom_field(self):
"""
Test the AddField operation with a geometry-enabled column.
"""
self.alter_gis_model(migrations.AddField, 'Neighborhood',
'path', False, fields.LineStringField)
self.assertColumnExists('gis_neighborhood', 'path')
# Test GeometryColumns when available
if HAS_GEOMETRY_COLUMNS:
self.assertGeometryColumnsCount(2)
# Test spatial indices when available
if self.has_spatial_indexes:
self.assertSpatialIndexExists('gis_neighborhood', 'path')
@skipUnlessDBFeature('supports_raster')
def test_add_raster_field(self):
"""
Test the AddField operation with a raster-enabled column.
"""
self.alter_gis_model(migrations.AddField, 'Neighborhood',
'heatmap', False, fields.RasterField)
self.assertColumnExists('gis_neighborhood', 'heatmap')
# Test spatial indices when available
if self.has_spatial_indexes:
self.assertSpatialIndexExists('gis_neighborhood', 'heatmap')
@skipIfDBFeature('supports_raster')
def test_create_raster_model_on_db_without_raster_support(self):
"""
Test creating a model with a raster field on a db without raster support.
"""
msg = 'Raster fields require backends with raster support.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.set_up_test_model(True)
@skipIfDBFeature('supports_raster')
def test_add_raster_field_on_db_without_raster_support(self):
"""
Test adding a raster field on a db without raster support.
"""
msg = 'Raster fields require backends with raster support.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.alter_gis_model(
migrations.AddField, 'Neighborhood', 'heatmap',
False, fields.RasterField
)
def test_add_blank_geom_field(self):
"""
Should be able to add a GeometryField with blank=True.
"""
self.alter_gis_model(migrations.AddField, 'Neighborhood',
'path', True, fields.LineStringField)
self.assertColumnExists('gis_neighborhood', 'path')
# Test GeometryColumns when available
if HAS_GEOMETRY_COLUMNS:
self.assertGeometryColumnsCount(2)
# Test spatial indices when available
if self.has_spatial_indexes:
self.assertSpatialIndexExists('gis_neighborhood', 'path')
@skipUnlessDBFeature('supports_raster')
def test_add_blank_raster_field(self):
"""
Should be able to add a RasterField with blank=True.
"""
self.alter_gis_model(migrations.AddField, 'Neighborhood',
'heatmap', True, fields.RasterField)
self.assertColumnExists('gis_neighborhood', 'heatmap')
# Test spatial indices when available
if self.has_spatial_indexes:
self.assertSpatialIndexExists('gis_neighborhood', 'heatmap')
def test_remove_geom_field(self):
"""
Test the RemoveField operation with a geometry-enabled column.
"""
self.alter_gis_model(migrations.RemoveField, 'Neighborhood', 'geom')
self.assertColumnNotExists('gis_neighborhood', 'geom')
# Test GeometryColumns when available
if HAS_GEOMETRY_COLUMNS:
self.assertGeometryColumnsCount(0)
@skipUnlessDBFeature('supports_raster')
def test_remove_raster_field(self):
"""
Test the RemoveField operation with a raster-enabled column.
"""
self.alter_gis_model(migrations.RemoveField, 'Neighborhood', 'rast')
self.assertColumnNotExists('gis_neighborhood', 'rast')
def test_create_model_spatial_index(self):
self.current_state = self.set_up_test_model()
if not self.has_spatial_indexes:
self.skipTest('No support for Spatial indexes')
self.assertSpatialIndexExists('gis_neighborhood', 'geom')
if connection.features.supports_raster:
self.assertSpatialIndexExists('gis_neighborhood', 'rast')
@property
def has_spatial_indexes(self):
if mysql:
with connection.cursor() as cursor:
return connection.introspection.supports_spatial_index(cursor, 'gis_neighborhood')
return True
|
bsd-3-clause
|
thebohemian/openembedded
|
lib/oe/packagedata.py
|
27
|
2978
|
import os
import bb.data
import codecs
def packaged(pkg, d):
return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK)
def read_pkgdatafile(fn):
pkgdata = {}
def decode(str):
c = codecs.getdecoder("string_escape")
return c(str)[0]
if os.access(fn, os.R_OK):
import re
f = file(fn, 'r')
lines = f.readlines()
f.close()
        r = re.compile(r"([^:]+):\s*(.*)")
for l in lines:
m = r.match(l)
if m:
pkgdata[m.group(1)] = decode(m.group(2))
return pkgdata
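# Hedged sketch (not part of the original module): read_pkgdatafile() expects
# plain "KEY: value" lines. The hypothetical helper below mirrors its parse
# loop on an in-memory sample and is never called by this module:
def _example_pkgdata_parse():
    import re
    sample_lines = ["PACKAGES: foo foo-dev\n", "PN: foo\n"]
    r = re.compile(r"([^:]+):\s*(.*)")
    parsed = {}
    for l in sample_lines:
        m = r.match(l)
        if m:
            parsed[m.group(1)] = m.group(2)
    return parsed  # -> {'PACKAGES': 'foo foo-dev', 'PN': 'foo'}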
def get_subpkgedata_fn(pkg, d):
archs = bb.data.expand("${PACKAGE_ARCHS}", d).split(" ")
archs.reverse()
pkgdata = bb.data.expand('${TMPDIR}/pkgdata/', d)
targetdir = bb.data.expand('${TARGET_VENDOR}-${TARGET_OS}/runtime/', d)
for arch in archs:
fn = pkgdata + arch + targetdir + pkg
if os.path.exists(fn):
return fn
return bb.data.expand('${PKGDATA_DIR}/runtime/%s' % pkg, d)
def has_subpkgdata(pkg, d):
return os.access(get_subpkgedata_fn(pkg, d), os.R_OK)
def read_subpkgdata(pkg, d):
return read_pkgdatafile(get_subpkgedata_fn(pkg, d))
def has_pkgdata(pn, d):
fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
return os.access(fn, os.R_OK)
def read_pkgdata(pn, d):
fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
return read_pkgdatafile(fn)
#
# Collapse FOO_pkg variables into FOO
#
def read_subpkgdata_dict(pkg, d):
ret = {}
subd = read_pkgdatafile(get_subpkgedata_fn(pkg, d))
for var in subd:
newvar = var.replace("_" + pkg, "")
ret[newvar] = subd[var]
return ret
def _pkgmap(d):
"""Return a dictionary mapping package to recipe name."""
target_os = d.getVar("TARGET_OS", True)
target_vendor = d.getVar("TARGET_VENDOR", True)
basedir = os.path.dirname(d.getVar("PKGDATA_DIR", True))
dirs = ("%s%s-%s" % (arch, target_vendor, target_os)
for arch in d.getVar("PACKAGE_ARCHS", True).split())
pkgmap = {}
for pkgdatadir in (os.path.join(basedir, sys) for sys in dirs):
try:
files = os.listdir(pkgdatadir)
except OSError:
continue
for pn in filter(lambda f: not os.path.isdir(os.path.join(pkgdatadir, f)), files):
try:
pkgdata = read_pkgdatafile(os.path.join(pkgdatadir, pn))
except OSError:
continue
for pkg in pkgdata["PACKAGES"].split():
pkgmap[pkg] = pn
return pkgmap
def pkgmap(d):
"""Return a dictionary mapping package to recipe name.
Cache the mapping in the metadata"""
pkgmap_data = d.getVar("__pkgmap_data", False)
if pkgmap_data is None:
pkgmap_data = _pkgmap(d)
d.setVar("__pkgmap_data", pkgmap_data)
return pkgmap_data
def recipename(pkg, d):
"""Return the recipe name for the given binary package name."""
return pkgmap(d).get(pkg)
|
mit
|
elena/django
|
tests/admin_changelist/test_date_hierarchy.py
|
24
|
3483
|
from datetime import datetime
from django.contrib.admin.options import IncorrectLookupParameters
from django.contrib.auth.models import User
from django.test import RequestFactory, TestCase
from django.utils.timezone import make_aware
from .admin import EventAdmin, site as custom_site
from .models import Event
class DateHierarchyTests(TestCase):
factory = RequestFactory()
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', email='[email protected]', password='xxx')
def assertDateParams(self, query, expected_from_date, expected_to_date):
query = {'date__%s' % field: val for field, val in query.items()}
request = self.factory.get('/', query)
request.user = self.superuser
changelist = EventAdmin(Event, custom_site).get_changelist_instance(request)
_, _, lookup_params, *_ = changelist.get_filters(request)
self.assertEqual(lookup_params['date__gte'], expected_from_date)
self.assertEqual(lookup_params['date__lt'], expected_to_date)
def test_bounded_params(self):
tests = (
({'year': 2017}, datetime(2017, 1, 1), datetime(2018, 1, 1)),
({'year': 2017, 'month': 2}, datetime(2017, 2, 1), datetime(2017, 3, 1)),
({'year': 2017, 'month': 12}, datetime(2017, 12, 1), datetime(2018, 1, 1)),
({'year': 2017, 'month': 12, 'day': 15}, datetime(2017, 12, 15), datetime(2017, 12, 16)),
({'year': 2017, 'month': 12, 'day': 31}, datetime(2017, 12, 31), datetime(2018, 1, 1)),
({'year': 2017, 'month': 2, 'day': 28}, datetime(2017, 2, 28), datetime(2017, 3, 1)),
)
for query, expected_from_date, expected_to_date in tests:
with self.subTest(query=query):
self.assertDateParams(query, expected_from_date, expected_to_date)
def test_bounded_params_with_time_zone(self):
with self.settings(USE_TZ=True, TIME_ZONE='Asia/Jerusalem'):
self.assertDateParams(
{'year': 2017, 'month': 2, 'day': 28},
make_aware(datetime(2017, 2, 28)),
make_aware(datetime(2017, 3, 1)),
)
def test_bounded_params_with_dst_time_zone(self):
tests = [
# Northern hemisphere.
('Asia/Jerusalem', 3),
('Asia/Jerusalem', 10),
# Southern hemisphere.
('Pacific/Chatham', 4),
('Pacific/Chatham', 9),
]
for time_zone, month in tests:
with self.subTest(time_zone=time_zone, month=month):
with self.settings(USE_TZ=True, TIME_ZONE=time_zone):
self.assertDateParams(
{'year': 2019, 'month': month},
make_aware(datetime(2019, month, 1)),
make_aware(datetime(2019, month + 1, 1)),
)
def test_invalid_params(self):
tests = (
{'year': 'x'},
{'year': 2017, 'month': 'x'},
{'year': 2017, 'month': 12, 'day': 'x'},
{'year': 2017, 'month': 13},
{'year': 2017, 'month': 12, 'day': 32},
{'year': 2017, 'month': 0},
{'year': 2017, 'month': 12, 'day': 0},
)
for invalid_query in tests:
with self.subTest(query=invalid_query), self.assertRaises(IncorrectLookupParameters):
self.assertDateParams(invalid_query, None, None)
|
bsd-3-clause
|
MQQiang/kbengine
|
kbe/src/lib/python/Lib/distutils/util.py
|
81
|
20665
|
"""distutils.util
Miscellaneous utility functions -- anything that doesn't fit into
one of the other *util.py modules.
"""
import os
import re
import importlib.util
import sys
import string
from distutils.errors import DistutilsPlatformError
from distutils.dep_util import newer
from distutils.spawn import spawn
from distutils import log
from distutils.errors import DistutilsByteCompileError
def get_platform ():
"""Return a string that identifies the current platform. This is used
mainly to distinguish platform-specific build directories and
platform-specific built distributions. Typically includes the OS name
and version and the architecture (as supplied by 'os.uname()'),
although the exact information included depends on the OS; eg. for IRIX
the architecture isn't particularly important (IRIX only runs on SGI
hardware), but for Linux the kernel version isn't particularly
important.
Examples of returned values:
linux-i586
linux-alpha (?)
solaris-2.6-sun4u
irix-5.3
irix64-6.2
Windows will return one of:
win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
win-ia64 (64bit Windows on Itanium)
win32 (all others - specifically, sys.platform is returned)
For other non-POSIX platforms, currently just returns 'sys.platform'.
"""
if os.name == 'nt':
# sniff sys.version for architecture.
prefix = " bit ("
i = sys.version.find(prefix)
if i == -1:
return sys.platform
j = sys.version.find(")", i)
look = sys.version[i+len(prefix):j].lower()
if look == 'amd64':
return 'win-amd64'
if look == 'itanium':
return 'win-ia64'
return sys.platform
# Set for cross builds explicitly
if "_PYTHON_HOST_PLATFORM" in os.environ:
return os.environ["_PYTHON_HOST_PLATFORM"]
if os.name != "posix" or not hasattr(os, 'uname'):
# XXX what about the architecture? NT is Intel or Alpha,
# Mac OS is M68k or PPC, etc.
return sys.platform
# Try to distinguish various flavours of Unix
(osname, host, release, version, machine) = os.uname()
# Convert the OS name to lowercase, remove '/' characters
# (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
osname = osname.lower().replace('/', '')
machine = machine.replace(' ', '_')
machine = machine.replace('/', '-')
if osname[:5] == "linux":
# At least on Linux/Intel, 'machine' is the processor --
# i386, etc.
# XXX what about Alpha, SPARC, etc?
return "%s-%s" % (osname, machine)
elif osname[:5] == "sunos":
if release[0] >= "5": # SunOS 5 == Solaris 2
osname = "solaris"
release = "%d.%s" % (int(release[0]) - 3, release[2:])
    # We can't use "platform.architecture()[0]" because of a
    # bootstrap problem. We use a dict to get an error
    # if something suspicious happens.
bitness = {2147483647:"32bit", 9223372036854775807:"64bit"}
machine += ".%s" % bitness[sys.maxsize]
# fall through to standard osname-release-machine representation
elif osname[:4] == "irix": # could be "irix64"!
return "%s-%s" % (osname, release)
elif osname[:3] == "aix":
return "%s-%s.%s" % (osname, version, release)
elif osname[:6] == "cygwin":
osname = "cygwin"
rel_re = re.compile (r'[\d.]+', re.ASCII)
m = rel_re.match(release)
if m:
release = m.group()
elif osname[:6] == "darwin":
import _osx_support, distutils.sysconfig
osname, release, machine = _osx_support.get_platform_osx(
distutils.sysconfig.get_config_vars(),
osname, release, machine)
return "%s-%s-%s" % (osname, release, machine)
# get_platform ()
def convert_path (pathname):
"""Return 'pathname' as a name that will work on the native filesystem,
i.e. split it on '/' and put it back together again using the current
directory separator. Needed because filenames in the setup script are
always supplied in Unix style, and have to be converted to the local
convention before we can actually use them in the filesystem. Raises
ValueError on non-Unix-ish systems if 'pathname' either starts or
ends with a slash.
"""
if os.sep == '/':
return pathname
if not pathname:
return pathname
if pathname[0] == '/':
raise ValueError("path '%s' cannot be absolute" % pathname)
if pathname[-1] == '/':
raise ValueError("path '%s' cannot end with '/'" % pathname)
paths = pathname.split('/')
while '.' in paths:
paths.remove('.')
if not paths:
return os.curdir
return os.path.join(*paths)
# convert_path ()
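# Hedged sketch (not in the original module), defined only for illustration:
def _example_convert_path():
    # On POSIX (os.sep == '/') the path is returned unchanged; elsewhere the
    # './' components are dropped and the parts rejoined with os.path.join().
    return convert_path('pkg/data/readme.txt')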
def change_root (new_root, pathname):
"""Return 'pathname' with 'new_root' prepended. If 'pathname' is
relative, this is equivalent to "os.path.join(new_root,pathname)".
Otherwise, it requires making 'pathname' relative and then joining the
two, which is tricky on DOS/Windows and Mac OS.
"""
if os.name == 'posix':
if not os.path.isabs(pathname):
return os.path.join(new_root, pathname)
else:
return os.path.join(new_root, pathname[1:])
elif os.name == 'nt':
(drive, path) = os.path.splitdrive(pathname)
if path[0] == '\\':
path = path[1:]
return os.path.join(new_root, path)
else:
raise DistutilsPlatformError("nothing known about platform '%s'" % os.name)
_environ_checked = 0
def check_environ ():
"""Ensure that 'os.environ' has all the environment variables we
guarantee that users can use in config files, command-line options,
etc. Currently this includes:
HOME - user's home directory (Unix only)
PLAT - description of the current platform, including hardware
and OS (see 'get_platform()')
"""
global _environ_checked
if _environ_checked:
return
if os.name == 'posix' and 'HOME' not in os.environ:
import pwd
os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]
if 'PLAT' not in os.environ:
os.environ['PLAT'] = get_platform()
_environ_checked = 1
def subst_vars (s, local_vars):
"""Perform shell/Perl-style variable substitution on 'string'. Every
occurrence of '$' followed by a name is considered a variable, and
variable is substituted by the value found in the 'local_vars'
dictionary, or in 'os.environ' if it's not in 'local_vars'.
'os.environ' is first checked/augmented to guarantee that it contains
certain values: see 'check_environ()'. Raise ValueError for any
variables not found in either 'local_vars' or 'os.environ'.
"""
check_environ()
def _subst (match, local_vars=local_vars):
var_name = match.group(1)
if var_name in local_vars:
return str(local_vars[var_name])
else:
return os.environ[var_name]
try:
return re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s)
except KeyError as var:
raise ValueError("invalid variable '$%s'" % var)
# subst_vars ()
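# Hedged sketch (not in the original module), defined only for illustration:
def _example_subst_vars():
    local_vars = {'plat': 'linux-x86_64', 'py': '3.4'}
    # $plat and $py resolve from local_vars; unknown names fall back to
    # os.environ, and a miss in both raises ValueError.
    return subst_vars('build/lib.$plat-$py', local_vars)  # 'build/lib.linux-x86_64-3.4'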
def grok_environment_error (exc, prefix="error: "):
# Function kept for backward compatibility.
# Used to try clever things with EnvironmentErrors,
# but nowadays str(exception) produces good messages.
return prefix + str(exc)
# Needed by 'split_quoted()'
_wordchars_re = _squote_re = _dquote_re = None
def _init_regex():
global _wordchars_re, _squote_re, _dquote_re
_wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
_squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
_dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
def split_quoted (s):
"""Split a string up according to Unix shell-like rules for quotes and
backslashes. In short: words are delimited by spaces, as long as those
spaces are not escaped by a backslash, or inside a quoted string.
Single and double quotes are equivalent, and the quote characters can
be backslash-escaped. The backslash is stripped from any two-character
escape sequence, leaving only the escaped character. The quote
characters are stripped from any quoted string. Returns a list of
words.
"""
# This is a nice algorithm for splitting up a single string, since it
# doesn't require character-by-character examination. It was a little
# bit of a brain-bender to get it working right, though...
if _wordchars_re is None: _init_regex()
s = s.strip()
words = []
pos = 0
while s:
m = _wordchars_re.match(s, pos)
end = m.end()
if end == len(s):
words.append(s[:end])
break
if s[end] in string.whitespace: # unescaped, unquoted whitespace: now
words.append(s[:end]) # we definitely have a word delimiter
s = s[end:].lstrip()
pos = 0
elif s[end] == '\\': # preserve whatever is being escaped;
# will become part of the current word
s = s[:end] + s[end+1:]
pos = end+1
else:
if s[end] == "'": # slurp singly-quoted string
m = _squote_re.match(s, end)
elif s[end] == '"': # slurp doubly-quoted string
m = _dquote_re.match(s, end)
else:
raise RuntimeError("this can't happen (bad char '%c')" % s[end])
if m is None:
raise ValueError("bad string (mismatched %s quotes?)" % s[end])
(beg, end) = m.span()
s = s[:beg] + s[beg+1:end-1] + s[end:]
pos = m.end() - 2
if pos >= len(s):
words.append(s)
break
return words
# split_quoted ()
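# Hedged sketch (not in the original module), defined only for illustration:
def _example_split_quoted():
    # Quoted substrings keep their internal spaces and lose the quote chars.
    return split_quoted('gcc -DNAME="my value" "-I/opt/my include"')
    # -> ['gcc', '-DNAME=my value', '-I/opt/my include']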
def execute (func, args, msg=None, verbose=0, dry_run=0):
"""Perform some action that affects the outside world (eg. by
writing to the filesystem). Such actions are special because they
are disabled by the 'dry_run' flag. This method takes care of all
that bureaucracy for you; all you have to do is supply the
function to call and an argument tuple for it (to embody the
"external action" being performed), and an optional message to
print.
"""
if msg is None:
msg = "%s%r" % (func.__name__, args)
if msg[-2:] == ',)': # correct for singleton tuple
msg = msg[0:-2] + ')'
log.info(msg)
if not dry_run:
func(*args)
def strtobool (val):
"""Convert a string representation of truth to true (1) or false (0).
True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
'val' is anything else.
"""
val = val.lower()
if val in ('y', 'yes', 't', 'true', 'on', '1'):
return 1
elif val in ('n', 'no', 'f', 'false', 'off', '0'):
return 0
else:
raise ValueError("invalid truth value %r" % (val,))
def byte_compile (py_files,
optimize=0, force=0,
prefix=None, base_dir=None,
verbose=1, dry_run=0,
direct=None):
"""Byte-compile a collection of Python source files to either .pyc
or .pyo files in a __pycache__ subdirectory. 'py_files' is a list
of files to compile; any files that don't end in ".py" are silently
skipped. 'optimize' must be one of the following:
0 - don't optimize (generate .pyc)
1 - normal optimization (like "python -O")
2 - extra optimization (like "python -OO")
If 'force' is true, all files are recompiled regardless of
timestamps.
The source filename encoded in each bytecode file defaults to the
filenames listed in 'py_files'; you can modify these with 'prefix' and
'basedir'. 'prefix' is a string that will be stripped off of each
source filename, and 'base_dir' is a directory name that will be
prepended (after 'prefix' is stripped). You can supply either or both
(or neither) of 'prefix' and 'base_dir', as you wish.
If 'dry_run' is true, doesn't actually do anything that would
affect the filesystem.
Byte-compilation is either done directly in this interpreter process
with the standard py_compile module, or indirectly by writing a
temporary script and executing it. Normally, you should let
'byte_compile()' figure out to use direct compilation or not (see
the source for details). The 'direct' flag is used by the script
generated in indirect mode; unless you know what you're doing, leave
it set to None.
"""
# nothing is done if sys.dont_write_bytecode is True
if sys.dont_write_bytecode:
raise DistutilsByteCompileError('byte-compiling is disabled.')
# First, if the caller didn't force us into direct or indirect mode,
# figure out which mode we should be in. We take a conservative
# approach: choose direct mode *only* if the current interpreter is
# in debug mode and optimize is 0. If we're not in debug mode (-O
# or -OO), we don't know which level of optimization this
# interpreter is running with, so we can't do direct
# byte-compilation and be certain that it's the right thing. Thus,
# always compile indirectly if the current interpreter is in either
# optimize mode, or if either optimization level was requested by
# the caller.
if direct is None:
direct = (__debug__ and optimize == 0)
# "Indirect" byte-compilation: write a temporary script and then
# run it with the appropriate flags.
if not direct:
try:
from tempfile import mkstemp
(script_fd, script_name) = mkstemp(".py")
except ImportError:
from tempfile import mktemp
(script_fd, script_name) = None, mktemp(".py")
log.info("writing byte-compilation script '%s'", script_name)
if not dry_run:
if script_fd is not None:
script = os.fdopen(script_fd, "w")
else:
script = open(script_name, "w")
script.write("""\
from distutils.util import byte_compile
files = [
""")
# XXX would be nice to write absolute filenames, just for
# safety's sake (script should be more robust in the face of
# chdir'ing before running it). But this requires abspath'ing
# 'prefix' as well, and that breaks the hack in build_lib's
# 'byte_compile()' method that carefully tacks on a trailing
# slash (os.sep really) to make sure the prefix here is "just
# right". This whole prefix business is rather delicate -- the
# problem is that it's really a directory, but I'm treating it
# as a dumb string, so trailing slashes and so forth matter.
#py_files = map(os.path.abspath, py_files)
#if prefix:
# prefix = os.path.abspath(prefix)
script.write(",\n".join(map(repr, py_files)) + "]\n")
script.write("""
byte_compile(files, optimize=%r, force=%r,
prefix=%r, base_dir=%r,
verbose=%r, dry_run=0,
direct=1)
""" % (optimize, force, prefix, base_dir, verbose))
script.close()
cmd = [sys.executable, script_name]
if optimize == 1:
cmd.insert(1, "-O")
elif optimize == 2:
cmd.insert(1, "-OO")
spawn(cmd, dry_run=dry_run)
execute(os.remove, (script_name,), "removing %s" % script_name,
dry_run=dry_run)
# "Direct" byte-compilation: use the py_compile module to compile
# right here, right now. Note that the script generated in indirect
# mode simply calls 'byte_compile()' in direct mode, a weird sort of
# cross-process recursion. Hey, it works!
else:
from py_compile import compile
for file in py_files:
if file[-3:] != ".py":
# This lets us be lazy and not filter filenames in
# the "install_lib" command.
continue
# Terminology from the py_compile module:
# cfile - byte-compiled file
# dfile - purported source filename (same as 'file' by default)
if optimize >= 0:
cfile = importlib.util.cache_from_source(
file, debug_override=not optimize)
else:
cfile = importlib.util.cache_from_source(file)
dfile = file
if prefix:
if file[:len(prefix)] != prefix:
raise ValueError("invalid prefix: filename %r doesn't start with %r"
% (file, prefix))
dfile = dfile[len(prefix):]
if base_dir:
dfile = os.path.join(base_dir, dfile)
cfile_base = os.path.basename(cfile)
if direct:
if force or newer(file, cfile):
log.info("byte-compiling %s to %s", file, cfile_base)
if not dry_run:
compile(file, cfile, dfile)
else:
log.debug("skipping byte-compilation of %s to %s",
file, cfile_base)
# byte_compile ()
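# Hedged sketch (not in the original module); the file names below are
# hypothetical and the function is defined only for illustration:
def _example_byte_compile():
    byte_compile(['build/lib/pkg/mod.py'],
                 optimize=0,
                 force=1,
                 prefix='build/lib/',  # stripped from the recorded source name
                 dry_run=1)            # log the steps without writing anything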
def rfc822_escape (header):
"""Return a version of the string escaped for inclusion in an
    RFC-822 header, by ensuring there are 8 spaces after each newline.
"""
lines = header.split('\n')
sep = '\n' + 8 * ' '
return sep.join(lines)
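# Hedged sketch (not in the original module), defined only for illustration:
def _example_rfc822_escape():
    # Continuation lines are folded with an 8-space indent:
    return rfc822_escape('line one\nline two')  # 'line one\n        line two'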
# 2to3 support
def run_2to3(files, fixer_names=None, options=None, explicit=None):
"""Invoke 2to3 on a list of Python files.
The files should all come from the build area, as the
modification is done in-place. To reduce the build time,
only files modified since the last invocation of this
function should be passed in the files argument."""
if not files:
return
# Make this class local, to delay import of 2to3
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
class DistutilsRefactoringTool(RefactoringTool):
def log_error(self, msg, *args, **kw):
log.error(msg, *args)
def log_message(self, msg, *args):
log.info(msg, *args)
def log_debug(self, msg, *args):
log.debug(msg, *args)
if fixer_names is None:
fixer_names = get_fixers_from_package('lib2to3.fixes')
r = DistutilsRefactoringTool(fixer_names, options=options)
r.refactor(files, write=True)
def copydir_run_2to3(src, dest, template=None, fixer_names=None,
options=None, explicit=None):
"""Recursively copy a directory, only copying new and changed files,
running run_2to3 over all newly copied Python modules afterward.
If you give a template string, it's parsed like a MANIFEST.in.
"""
from distutils.dir_util import mkpath
from distutils.file_util import copy_file
from distutils.filelist import FileList
filelist = FileList()
curdir = os.getcwd()
os.chdir(src)
try:
filelist.findall()
finally:
os.chdir(curdir)
filelist.files[:] = filelist.allfiles
if template:
for line in template.splitlines():
line = line.strip()
if not line: continue
filelist.process_template_line(line)
copied = []
for filename in filelist.files:
outname = os.path.join(dest, filename)
mkpath(os.path.dirname(outname))
res = copy_file(os.path.join(src, filename), outname, update=1)
if res[1]: copied.append(outname)
run_2to3([fn for fn in copied if fn.lower().endswith('.py')],
fixer_names=fixer_names, options=options, explicit=explicit)
return copied
class Mixin2to3:
'''Mixin class for commands that run 2to3.
To configure 2to3, setup scripts may either change
the class variables, or inherit from individual commands
to override how 2to3 is invoked.'''
# provide list of fixers to run;
# defaults to all from lib2to3.fixers
fixer_names = None
# options dictionary
options = None
# list of fixers to invoke even though they are marked as explicit
explicit = None
def run_2to3(self, files):
return run_2to3(files, self.fixer_names, self.options, self.explicit)
|
lgpl-3.0
|
beni55/djangolint
|
project/lint/views.py
|
2
|
2264
|
from django.http import HttpResponse
from django.shortcuts import render, get_object_or_404
from django.utils import simplejson as json
from django.views.decorators.http import require_POST
from .forms import ReportForm
from .models import Report, STAGES
from .tasks import process_report
def index(request):
report = None
report_pk = request.session.get('report_pk')
if report_pk is not None:
try:
report = Report.objects.get(pk=report_pk)
except Report.DoesNotExist:
pass
return render(request, 'lint/form.html', {'report': report})
@require_POST
def create(request):
form = ReportForm(data=request.POST)
report_pk = request.session.get('report_pk')
try:
report = Report.objects.get(pk=report_pk)
except Report.DoesNotExist:
report = None
if not (report is None or report.stage == 'done' or report.error):
data = {'status': 'error', 'error': 'You are already in the queue'}
elif form.is_valid():
report = form.save()
request.session['report_pk'] = report.pk
process_report.delay(report.pk)
data = {'status': 'ok', 'url': report.get_absolute_url()}
else:
data = {'status': 'error', 'error': 'Invalid URL'}
return HttpResponse(json.dumps(data), mimetype='application/json')
def get_status(request):
pk = request.session.get('report_pk')
if pk is not None:
result = ['waiting', 'waiting', 'waiting', 'waiting']
report = get_object_or_404(Report, pk=pk)
stage = report.stage
stage_index = STAGES.index(stage)
for status in range(stage_index):
result[status] = 'done'
if stage != 'done':
result[stage_index] = 'working'
if report.error:
result[stage_index] = 'error'
data = {'queue': result[0], 'cloning': result[1],
'parsing': result[2], 'analyzing': result[3]}
return HttpResponse(json.dumps(data), mimetype='application/json')
return HttpResponse()
def results(request, hash):
qs = Report.objects.filter(stage='done')
qs = qs.exclude(error='')
report = get_object_or_404(qs, hash=hash)
return render(request, 'lint/results.html', {'report': report})
|
isc
|
lovelylain/pyctp
|
example/pyctp2/trader/trade_command.py
|
7
|
7748
|
#-*- coding:utf-8 -*-
"""
Trade commands that are independent of the underlying API layer.
"""
import logging
from ..common.base import Nope
from .position import POSITION_APPROVE_STATUS
LOWEST_PRIORITY = 9000000
class Command(object):
def __init__(self,priority=LOWEST_PRIORITY):
self._priority = priority
@property
def priority(self):
return self._priority
    def __lt__(self,other): # ordering is by priority value
return self._priority < other.priority
def __gt__(self,other):
return self._priority > other.priority
class NormalCommand(Command):
    # performs no priority range check
pass
class SysCommand(Command):
def __init__(self,priority):
assert 100 < priority < 300,str(priority)
Command.__init__(self,priority)
class LoginCommand(SysCommand):
pass
class SettlementQueryCommand(SysCommand):
pass
class SettlementConfirmCommand(SysCommand):
pass
# the priority values are not hard-coded in each __init__; keeping them together here makes them easy to compare
# commands that take no arguments can simply be instantiated up-front like this
LOGIN_COMMAND = LoginCommand(priority=101)
SETTLEMENT_QUERY_COMMAND = SettlementQueryCommand(priority=102)
SETTLEMENT_CONFIRM_COMMAND = SettlementConfirmCommand(priority=103)
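# Hedged sketch (not in the original source): because Command orders by
# priority via __lt__, commands can sit directly in a heap. The helper below
# is hypothetical and defined only for illustration:
def _example_priority_queue():
    import heapq
    queue = []
    for cmd in (SETTLEMENT_CONFIRM_COMMAND, LOGIN_COMMAND,
                SETTLEMENT_QUERY_COMMAND):
        heapq.heappush(queue, cmd)
    # pops the lowest priority value first: LOGIN(101), QUERY(102), CONFIRM(103)
    return [heapq.heappop(queue) for _ in range(len(queue))]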
class TradeCommand(Command):
def __init__(self,priority):
assert 1000 < priority < 10000
Command.__init__(self,priority)
class QueryCommand(Command):
def __init__(self,priority):
assert 10000 < priority < 20000
Command.__init__(self,priority)
class TrivialCommand(Command):
def __init__(self,priority):
assert priority > 20000
Command.__init__(self,priority)
class QueryAccountCommand(QueryCommand):
def __init__(self,stamp):
Command.__init__(self,10001)
self.stamp = stamp
class QueryInstrumentCommand(QueryCommand):
def __init__(self,instrument_id):
Command.__init__(self,10101)
self.instrument_id = instrument_id
class QueryInstrumentMarginRateCommand(QueryCommand):
def __init__(self,instrument_id):
Command.__init__(self,10102)
self.instrument_id = instrument_id
class QueryDepthMarketDataCommand(QueryCommand):
def __init__(self,instrument_id):
Command.__init__(self,10100)
self.instrument_id = instrument_id
class QueryPositionCommand(QueryCommand):
def __init__(self,instrument_id):
Command.__init__(self,10103)
self.instrument_id = instrument_id
class QueryPositionDetailCommand(QueryCommand):
def __init__(self,instrument_id):
Command.__init__(self,10105)
self.instrument_id = instrument_id
class ORDER_EXECUTE_MODE:
CONCURRENT = 0
SEQUENT = 1
class Instruction(TradeCommand):
'''
    A constituent element of a PositionCommand.
'''
def __init__(self,order,position,priority):
"""
holder: OpenCommand/CloseCommand
"""
TradeCommand.__init__(self,priority)
self._done = False
self._order = order
self._position = position
        self.order._trigger_time = 0 # time of the tick that triggered the command; if time-driven, it must be the time of the latest tick. Used when the order is re-executed after a timing error
        self._last_update_time = 0 # last update time
@property
def order(self):
return self._order
@property
def position(self):
return self._position
@property
def contract_name(self):
return self._order.contract_name
@property
def contract(self):
return self._order.contract
@property
def planned(self):
return self._order.planned
@property
def approved(self):
return self._order.approved
@property
def done(self):
return self._done
@property
def trigger_time(self):
return self._order.trigger_time
@trigger_time.setter
    def trigger_time(self,ctime): # time of the tick that triggered the command; if time-driven, it must be the time of the latest tick.
self._order.trigger_time = ctime
@property
def last_update_time(self):
return self._last_update_time
@last_update_time.setter
def last_update_time(self,ctime):
self._last_update_time = ctime
def set_callback(self,on_approved=Nope,on_reject=Nope,on_done=Nope,on_progress=Nope):
self._on_approved = on_approved
self._on_done = on_done
self._on_progress = on_progress
self._on_reject = on_reject
def execute(self,trader):
#print("Instr:",self.contract_name,self.order.approved)
        if self._order.approved - self._order.accomplished2 > 0:
#print("Instr put:",self.contract_name,self.order.approved)
trader.put_command(self)
def on_approved(self,astate=POSITION_APPROVE_STATUS.APPROVED):
"""
        By the time this is called, the caller has already set order.approved.
"""
self._on_approved(self,astate)
def on_progress(self,volume,price):
self._order.on_progress(volume,price)
self._on_progress(self,volume,price)
def on_done(self,volume_traded):
#print("INSTR:ON_DONE",self.order.contract_name)
self._done = True
self._order.on_done(volume_traded)
self._on_done(self,volume_traded)
def on_cancelled(self,volume_traded):
"""
        The Open/CloseCommand was cancelled before completing.
"""
self.on_done(volume_traded)
def on_accept(self,trade_info):
"""
        For Open/Close a not-accepted case should never occur, so no hook is needed here.
"""
self._order.on_accept(trade_info)
def on_reject(self):
"""
        A reject should never occur, and there is no obvious way to handle one.
"""
self._order.on_reject()
self._on_reject(self)
def on_error(self):
"""
        For Open/Close an error should never occur, so no hook is needed here.
        For Cancel this is the normal case.
"""
logging.info("执行错误: %s",type(self))
self._on_reject(self)
class OpenInstruction(Instruction):
def __init__(self,order,position):
Instruction.__init__(self,order,position,1002)
class CloseInstruction(Instruction):
def __init__(self,order,position):
Instruction.__init__(self,order,position,1001)
class CancelInstruction(TradeCommand):
"""
    Hooking a Receiver here serves no real purpose.
    The effect of a Cancel is reflected through the RtnOrder of the command it cancels.
"""
def __init__(self,source):
TradeCommand.__init__(self,1003)
self._source = source
@property
def order(self):
return self._source.order
@property
def contract_name(self):
return self._source.order.contract_name
@property
def instrument_id(self):
return self._source.order.contract_name
def execute(self,trader):
#trader.put_command(self)
if not self._source.order.done:
trader.put_command(self)
    def on_approved(self,astate=POSITION_APPROVE_STATUS.APPROVED): pass # never called
    def on_progress(self,volume,price): pass # never called
    def on_accept(self,trade_info): pass # never called
    def on_reject(self): pass # never called
    def on_done(self,volume_traded): pass # never called
    def on_cancelled(self,volume_traded): pass # never called
def on_error(self):
"""
        For Open/Close an error should never occur, so no hook is needed here.
        For Cancel this is the normal case.
"""
logging.info("仅当Cancel命令时,到此处正常(表示原指令已全部成交): %s",type(self))
|
mit
|
jeffjirsa/cassandra
|
pylib/cqlshlib/tracing.py
|
11
|
3475
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime, timedelta
import time
from cassandra.query import QueryTrace, TraceUnavailable
from cqlshlib.displaying import MAGENTA
from cqlshlib.formatting import CqlType
def print_trace_session(shell, session, session_id, partial_session=False):
"""
Lookup a trace by session and trace session ID, then print it.
"""
trace = QueryTrace(session_id, session)
try:
wait_for_complete = not partial_session
trace.populate(wait_for_complete=wait_for_complete)
except TraceUnavailable:
shell.printerr("Session %s wasn't found." % session_id)
else:
print_trace(shell, trace)
def print_trace(shell, trace):
"""
Print an already populated cassandra.query.QueryTrace instance.
"""
rows = make_trace_rows(trace)
if not rows:
shell.printerr("No rows for session %s found." % (trace.trace_id,))
return
names = ['activity', 'timestamp', 'source', 'source_elapsed', 'client']
formatted_names = list(map(shell.myformat_colname, names))
formatted_values = [list(map(shell.myformat_value, row)) for row in rows]
shell.writeresult('')
shell.writeresult('Tracing session: ', color=MAGENTA, newline=False)
shell.writeresult(trace.trace_id)
shell.writeresult('')
shell.print_formatted_result(formatted_names, formatted_values)
shell.writeresult('')
def make_trace_rows(trace):
if not trace.events:
return []
rows = [[trace.request_type, str(datetime_from_utc_to_local(trace.started_at)), trace.coordinator, 0, trace.client]]
# append main rows (from events table).
for event in trace.events:
rows.append(["%s [%s]" % (event.description, event.thread_name),
str(datetime_from_utc_to_local(event.datetime)),
event.source,
total_micro_seconds(event.source_elapsed),
trace.client])
# append footer row (from sessions table).
if trace.duration:
finished_at = (datetime_from_utc_to_local(trace.started_at) + trace.duration)
rows.append(['Request complete', str(finished_at), trace.coordinator, total_micro_seconds(trace.duration), trace.client])
else:
finished_at = trace.duration = "--"
return rows
def total_micro_seconds(td):
"""
Convert a timedelta into total microseconds
"""
return int((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6)) if td else "--"
def datetime_from_utc_to_local(utc_datetime):
now_timestamp = time.time()
offset = datetime.fromtimestamp(now_timestamp) - datetime.utcfromtimestamp(now_timestamp)
return utc_datetime + offset
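# Hedged sketch (not in the original module), defined only for illustration:
def _example_total_micro_seconds():
    # 1 day + 2 s + 500 us -> 86402000500 microseconds
    return total_micro_seconds(timedelta(days=1, seconds=2, microseconds=500))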
|
apache-2.0
|
gsobczyk/hamster
|
waflib/Tools/qt5.py
|
35
|
23825
|
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2018 (ita)
"""
This tool helps with finding Qt5 tools and libraries,
and also provides syntactic sugar for using Qt5 tools.
The following snippet illustrates the tool usage::
def options(opt):
opt.load('compiler_cxx qt5')
def configure(conf):
conf.load('compiler_cxx qt5')
def build(bld):
bld(
features = 'qt5 cxx cxxprogram',
uselib = 'QT5CORE QT5GUI QT5OPENGL QT5SVG',
source = 'main.cpp textures.qrc aboutDialog.ui',
target = 'window',
)
Here, the UI description and resource files will be processed
to generate code.
Usage
=====
Load the "qt5" tool.
You also need to edit your sources accordingly:
- the normal way of doing things is to have your C++ files
include the .moc file.
This is regarded as the best practice (and provides much faster
compilations).
It also implies that the include paths have been set properly.
- to have the include paths added automatically, use the following::
from waflib.TaskGen import feature, before_method, after_method
@feature('cxx')
@after_method('process_source')
@before_method('apply_incpaths')
def add_includes_paths(self):
incs = set(self.to_list(getattr(self, 'includes', '')))
for x in self.compiled_tasks:
incs.add(x.inputs[0].parent.path_from(self.path))
self.includes = sorted(incs)
Note: another tool provides Qt processing that does not require
.moc includes, see 'playground/slow_qt/'.
A few options (--qt{dir,bin,...}) and environment variables
(QT5_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
tool path selection, etc; please read the source for more info.
The detection uses pkg-config on Linux by default. To force static library detection use:
QT5_XCOMPILE=1 QT5_FORCE_STATIC=1 waf configure
"""
from __future__ import with_statement
try:
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
except ImportError:
has_xml = False
ContentHandler = object
else:
has_xml = True
import os, sys, re
from waflib.Tools import cxx
from waflib import Build, Task, Utils, Options, Errors, Context
from waflib.TaskGen import feature, after_method, extension, before_method
from waflib.Configure import conf
from waflib import Logs
MOC_H = ['.h', '.hpp', '.hxx', '.hh']
"""
File extensions associated to .moc files
"""
EXT_RCC = ['.qrc']
"""
File extension for the resource (.qrc) files
"""
EXT_UI = ['.ui']
"""
File extension for the user interface (.ui) files
"""
EXT_QT5 = ['.cpp', '.cc', '.cxx', '.C']
"""
File extensions of C++ files that may require a .moc processing
"""
class qxx(Task.classes['cxx']):
"""
Each C++ file can have zero or several .moc files to create.
They are known only when the files are scanned (preprocessor)
To avoid scanning the c++ files each time (parsing C/C++), the results
are retrieved from the task cache (bld.node_deps/bld.raw_deps).
The moc tasks are also created *dynamically* during the build.
"""
def __init__(self, *k, **kw):
Task.Task.__init__(self, *k, **kw)
self.moc_done = 0
def runnable_status(self):
"""
Compute the task signature to make sure the scanner was executed. Create the
moc tasks by using :py:meth:`waflib.Tools.qt5.qxx.add_moc_tasks` (if necessary),
then postpone the task execution (there is no need to recompute the task signature).
"""
if self.moc_done:
return Task.Task.runnable_status(self)
else:
for t in self.run_after:
if not t.hasrun:
return Task.ASK_LATER
self.add_moc_tasks()
return Task.Task.runnable_status(self)
def create_moc_task(self, h_node, m_node):
"""
If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
It is not possible to change the file names, but we can assume that the moc transformation will be identical,
and the moc tasks can be shared in a global cache.
"""
try:
moc_cache = self.generator.bld.moc_cache
except AttributeError:
moc_cache = self.generator.bld.moc_cache = {}
try:
return moc_cache[h_node]
except KeyError:
tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
tsk.set_inputs(h_node)
tsk.set_outputs(m_node)
tsk.env.append_unique('MOC_FLAGS', '-i')
if self.generator:
self.generator.tasks.append(tsk)
# direct injection in the build phase (safe because called from the main thread)
gen = self.generator.bld.producer
gen.outstanding.append(tsk)
gen.total += 1
return tsk
else:
# remove the signature, it must be recomputed with the moc task
delattr(self, 'cache_sig')
def add_moc_tasks(self):
"""
Creates moc tasks by looking in the list of file dependencies ``bld.raw_deps[self.uid()]``
"""
node = self.inputs[0]
bld = self.generator.bld
# skip on uninstall due to generated files
if bld.is_install == Build.UNINSTALL:
return
try:
# compute the signature once to know if there is a moc file to create
self.signature()
except KeyError:
# the moc file may be referenced somewhere else
pass
else:
# remove the signature, it must be recomputed with the moc task
delattr(self, 'cache_sig')
include_nodes = [node.parent] + self.generator.includes_nodes
moctasks = []
mocfiles = set()
for d in bld.raw_deps.get(self.uid(), []):
if not d.endswith('.moc'):
continue
# process that base.moc only once
if d in mocfiles:
continue
mocfiles.add(d)
# find the source associated with the moc file
h_node = None
base2 = d[:-4]
# foo.moc from foo.cpp
prefix = node.name[:node.name.rfind('.')]
if base2 == prefix:
h_node = node
else:
# this deviates from the standard
# if bar.cpp includes foo.moc, then assume it is from foo.h
for x in include_nodes:
for e in MOC_H:
h_node = x.find_node(base2 + e)
if h_node:
break
else:
continue
break
if h_node:
m_node = h_node.change_ext('.moc')
else:
raise Errors.WafError('No source found for %r which is a moc file' % d)
# create the moc task
task = self.create_moc_task(h_node, m_node)
moctasks.append(task)
# simple scheduler dependency: run the moc task before others
self.run_after.update(set(moctasks))
self.moc_done = 1
class trans_update(Task.Task):
"""Updates a .ts files from a list of C++ files"""
run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
color = 'BLUE'
class XMLHandler(ContentHandler):
"""
Parses ``.qrc`` files
"""
def __init__(self):
ContentHandler.__init__(self)
self.buf = []
self.files = []
def startElement(self, name, attrs):
if name == 'file':
self.buf = []
def endElement(self, name):
if name == 'file':
self.files.append(str(''.join(self.buf)))
def characters(self, cars):
self.buf.append(cars)
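# Hedged sketch (not part of waf), defined only for illustration: feeding a
# minimal .qrc document through XMLHandler collects the <file> entries.
def _example_parse_qrc():
    import io
    qrc = u'<RCC><qresource><file>img/logo.png</file></qresource></RCC>'
    parser = make_parser()
    handler = XMLHandler()
    parser.setContentHandler(handler)
    parser.parse(io.StringIO(qrc))
    return handler.files  # -> ['img/logo.png']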
@extension(*EXT_RCC)
def create_rcc_task(self, node):
"Creates rcc and cxx tasks for ``.qrc`` files"
rcnode = node.change_ext('_rc.%d.cpp' % self.idx)
self.create_task('rcc', node, rcnode)
cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
try:
self.compiled_tasks.append(cpptask)
except AttributeError:
self.compiled_tasks = [cpptask]
return cpptask
@extension(*EXT_UI)
def create_uic_task(self, node):
"Create uic tasks for user interface ``.ui`` definition files"
"""
If UIC file is used in more than one bld, we would have a conflict in parallel execution
It is not possible to change the file names (like .self.idx. as for objects) as they have
to be referenced by the source file, but we can assume that the transformation will be identical
and the tasks can be shared in a global cache.
"""
try:
uic_cache = self.bld.uic_cache
except AttributeError:
uic_cache = self.bld.uic_cache = {}
if node not in uic_cache:
uictask = uic_cache[node] = self.create_task('ui5', node)
uictask.outputs = [node.parent.find_or_declare(self.env.ui_PATTERN % node.name[:-3])]
@extension('.ts')
def add_lang(self, node):
"""Adds all the .ts file into ``self.lang``"""
self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
@feature('qt5')
@before_method('process_source')
def process_mocs(self):
"""
Processes MOC files included in headers::
def build(bld):
bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE', moc='foo.h')
The build will run moc on foo.h to create moc_foo.n.cpp. The number in the file name
is provided to avoid name clashes when the same headers are used by several targets.
"""
lst = self.to_nodes(getattr(self, 'moc', []))
self.source = self.to_list(getattr(self, 'source', []))
for x in lst:
prefix = x.name[:x.name.rfind('.')] # foo.h -> foo
moc_target = 'moc_%s.%d.cpp' % (prefix, self.idx)
moc_node = x.parent.find_or_declare(moc_target)
self.source.append(moc_node)
self.create_task('moc', x, moc_node)
@feature('qt5')
@after_method('apply_link')
def apply_qt5(self):
"""
Adds MOC_FLAGS which may be necessary for moc::
def build(bld):
bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE')
The additional parameters are:
:param lang: list of translation files (\\*.ts) to process
:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
:param update: whether to process the C++ files to update the \\*.ts files (use **waf --translate**)
:type update: bool
:param langname: if given, transform the \\*.ts files into a .qrc files to include in the binary file
:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
"""
if getattr(self, 'lang', None):
qmtasks = []
for x in self.to_list(self.lang):
if isinstance(x, str):
x = self.path.find_resource(x + '.ts')
qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.%d.qm' % self.idx)))
if getattr(self, 'update', None) and Options.options.trans_qt5:
cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
a.inputs[0] for a in self.tasks if a.inputs and a.inputs[0].name.endswith('.ui')]
for x in qmtasks:
self.create_task('trans_update', cxxnodes, x.inputs)
if getattr(self, 'langname', None):
qmnodes = [x.outputs[0] for x in qmtasks]
rcnode = self.langname
if isinstance(rcnode, str):
rcnode = self.path.find_or_declare(rcnode + ('.%d.qrc' % self.idx))
t = self.create_task('qm2rcc', qmnodes, rcnode)
k = create_rcc_task(self, t.outputs[0])
self.link_task.inputs.append(k.outputs[0])
lst = []
for flag in self.to_list(self.env.CXXFLAGS):
if len(flag) < 2:
continue
f = flag[0:2]
if f in ('-D', '-I', '/D', '/I'):
if (f[0] == '/'):
lst.append('-' + flag[1:])
else:
lst.append(flag)
self.env.append_value('MOC_FLAGS', lst)
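# Illustration of the flag filtering above (hypothetical flags): with
# CXXFLAGS = ['/DWIN32', '-I/usr/include/qt5', '-O2'], MOC_FLAGS receives
# ['-DWIN32', '-I/usr/include/qt5']: MSVC-style /D and /I switches are
# rewritten with a leading '-', and all other flags are discarded.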
@extension(*EXT_QT5)
def cxx_hook(self, node):
"""
Re-maps C++ file extensions to the :py:class:`waflib.Tools.qt5.qxx` task.
"""
return self.create_compiled_task('qxx', node)
class rcc(Task.Task):
"""
Processes ``.qrc`` files
"""
color = 'BLUE'
run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
ext_out = ['.h']
def rcname(self):
return os.path.splitext(self.inputs[0].name)[0]
def scan(self):
"""Parse the *.qrc* files"""
if not has_xml:
Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
return ([], [])
parser = make_parser()
curHandler = XMLHandler()
parser.setContentHandler(curHandler)
with open(self.inputs[0].abspath(), 'r') as f:
parser.parse(f)
nodes = []
names = []
root = self.inputs[0].parent
for x in curHandler.files:
nd = root.find_resource(x)
if nd:
nodes.append(nd)
else:
names.append(x)
return (nodes, names)
def quote_flag(self, x):
"""
Override Task.quote_flag. QT parses the argument files
differently than cl.exe and link.exe
:param x: flag
:type x: string
:return: quoted flag
:rtype: string
"""
return x
class moc(Task.Task):
"""
Creates ``.moc`` files
"""
color = 'BLUE'
run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
def quote_flag(self, x):
"""
Override Task.quote_flag. QT parses the argument files
differently than cl.exe and link.exe
:param x: flag
:type x: string
:return: quoted flag
:rtype: string
"""
return x
class ui5(Task.Task):
"""
Processes ``.ui`` files
"""
color = 'BLUE'
run_str = '${QT_UIC} ${SRC} -o ${TGT}'
ext_out = ['.h']
class ts2qm(Task.Task):
"""
Generates ``.qm`` files from ``.ts`` files
"""
color = 'BLUE'
run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
class qm2rcc(Task.Task):
"""
Generates ``.qrc`` files from ``.qm`` files
"""
color = 'BLUE'
after = 'ts2qm'
def run(self):
"""Create a qrc file including the inputs"""
txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
self.outputs[0].write(code)
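# Illustration (hypothetical input names): for inputs de.qm and fr.qm the
# generated file contains:
#   <!DOCTYPE RCC><RCC version="1.0">
#   <qresource>
#   <file>de.qm</file>
#   <file>fr.qm</file>
#   </qresource>
#   </RCC>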
def configure(self):
"""
Besides the configuration options, the environment variable QT5_ROOT may be used
to give the location of the qt5 libraries (absolute path).
The detection uses the program ``pkg-config`` through :py:func:`waflib.Tools.config_c.check_cfg`
"""
self.find_qt5_binaries()
self.set_qt5_libs_dir()
self.set_qt5_libs_to_check()
self.set_qt5_defines()
self.find_qt5_libraries()
self.add_qt5_rpath()
self.simplify_qt5_libs()
# warn about this during the configuration too
if not has_xml:
Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
if 'COMPILER_CXX' not in self.env:
self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?')
# Qt5 may be compiled with '-reduce-relocations', which requires dependent programs to use -fPIE or -fPIC
frag = '#include <QApplication>\nint main(int argc, char **argv) {return 0;}\n'
uses = 'QT5CORE QT5WIDGETS QT5GUI'
for flag in [[], '-fPIE', '-fPIC', '-std=c++11', ['-std=c++11', '-fPIE'], ['-std=c++11', '-fPIC']]:
msg = 'See if Qt files compile '
if flag:
msg += 'with %s' % flag
try:
self.check(features='qt5 cxx', use=uses, uselib_store='qt5', cxxflags=flag, fragment=frag, msg=msg)
except self.errors.ConfigurationError:
pass
else:
break
else:
self.fatal('Could not build a simple Qt application')
# FreeBSD does not add /usr/local/lib and the pkg-config files do not provide it either :-/
if Utils.unversioned_sys_platform() == 'freebsd':
frag = '#include <QApplication>\nint main(int argc, char **argv) { QApplication app(argc, argv); return NULL != (void*) (&app);}\n'
try:
self.check(features='qt5 cxx cxxprogram', use=uses, fragment=frag, msg='Can we link Qt programs on FreeBSD directly?')
except self.errors.ConfigurationError:
self.check(features='qt5 cxx cxxprogram', use=uses, uselib_store='qt5', libpath='/usr/local/lib', fragment=frag, msg='Is /usr/local/lib required?')
@conf
def find_qt5_binaries(self):
"""
Detects Qt programs such as qmake, moc, uic, lrelease
"""
env = self.env
opt = Options.options
qtdir = getattr(opt, 'qtdir', '')
qtbin = getattr(opt, 'qtbin', '')
paths = []
if qtdir:
qtbin = os.path.join(qtdir, 'bin')
# the qt directory has been given from QT5_ROOT - deduce the qt binary path
if not qtdir:
qtdir = self.environ.get('QT5_ROOT', '')
qtbin = self.environ.get('QT5_BIN') or os.path.join(qtdir, 'bin')
if qtbin:
paths = [qtbin]
# no qtdir, look in the path and in /usr/local/Trolltech
if not qtdir:
paths = self.environ.get('PATH', '').split(os.pathsep)
paths.extend(['/usr/share/qt5/bin', '/usr/local/lib/qt5/bin'])
try:
lst = Utils.listdir('/usr/local/Trolltech/')
except OSError:
pass
else:
if lst:
lst.sort()
lst.reverse()
# keep the highest version
qtdir = '/usr/local/Trolltech/%s/' % lst[0]
qtbin = os.path.join(qtdir, 'bin')
paths.append(qtbin)
# at the end, try to find qmake in the paths given
# keep the one with the highest version
cand = None
prev_ver = ['5', '0', '0']
for qmk in ('qmake-qt5', 'qmake5', 'qmake'):
try:
qmake = self.find_program(qmk, path_list=paths)
except self.errors.ConfigurationError:
pass
else:
try:
version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
except self.errors.WafError:
pass
else:
if version:
new_ver = version.split('.')
if new_ver > prev_ver:
cand = qmake
prev_ver = new_ver
# qmake could not be found easily, rely on qtchooser
if not cand:
try:
self.find_program('qtchooser')
except self.errors.ConfigurationError:
pass
else:
cmd = self.env.QTCHOOSER + ['-qt=5', '-run-tool=qmake']
try:
version = self.cmd_and_log(cmd + ['-query', 'QT_VERSION'])
except self.errors.WafError:
pass
else:
cand = cmd
if cand:
self.env.QMAKE = cand
else:
self.fatal('Could not find qmake for qt5')
self.env.QT_HOST_BINS = qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_HOST_BINS']).strip()
paths.insert(0, qtbin)
def find_bin(lst, var):
if var in env:
return
for f in lst:
try:
ret = self.find_program(f, path_list=paths)
except self.errors.ConfigurationError:
pass
else:
env[var]=ret
break
find_bin(['uic-qt5', 'uic'], 'QT_UIC')
if not env.QT_UIC:
self.fatal('cannot find the uic compiler for qt5')
self.start_msg('Checking for uic version')
uicver = self.cmd_and_log(env.QT_UIC + ['-version'], output=Context.BOTH)
uicver = ''.join(uicver).strip()
uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
self.end_msg(uicver)
if uicver.find(' 3.') != -1 or uicver.find(' 4.') != -1:
self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path')
find_bin(['moc-qt5', 'moc'], 'QT_MOC')
find_bin(['rcc-qt5', 'rcc'], 'QT_RCC')
find_bin(['lrelease-qt5', 'lrelease'], 'QT_LRELEASE')
find_bin(['lupdate-qt5', 'lupdate'], 'QT_LUPDATE')
env.UIC_ST = '%s -o %s'
env.MOC_ST = '-o'
env.ui_PATTERN = 'ui_%s.h'
env.QT_LRELEASE_FLAGS = ['-silent']
env.MOCCPPPATH_ST = '-I%s'
env.MOCDEFINES_ST = '-D%s'
@conf
def set_qt5_libs_dir(self):
env = self.env
qtlibs = getattr(Options.options, 'qtlibs', None) or self.environ.get('QT5_LIBDIR')
if not qtlibs:
try:
qtlibs = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
except Errors.WafError:
qtdir = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip()
qtlibs = os.path.join(qtdir, 'lib')
self.msg('Found the Qt5 libraries in', qtlibs)
env.QTLIBS = qtlibs
@conf
def find_single_qt5_lib(self, name, uselib, qtlibs, qtincludes, force_static):
env = self.env
if force_static:
exts = ('.a', '.lib')
prefix = 'STLIB'
else:
exts = ('.so', '.lib')
prefix = 'LIB'
def lib_names():
for x in exts:
for k in ('', '5') if Utils.is_win32 else ['']:
for p in ('lib', ''):
yield (p, name, k, x)
for tup in lib_names():
k = ''.join(tup)
path = os.path.join(qtlibs, k)
if os.path.exists(path):
if env.DEST_OS == 'win32':
libval = ''.join(tup[:-1])
else:
libval = name
env.append_unique(prefix + '_' + uselib, libval)
env.append_unique('%sPATH_%s' % (prefix, uselib), qtlibs)
env.append_unique('INCLUDES_' + uselib, qtincludes)
env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, name.replace('Qt5', 'Qt')))
return k
return False
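# Illustration of the search above: for name='Qt5Core' without force_static,
# the candidates tried are libQt5Core.so, Qt5Core.so, libQt5Core.lib and
# Qt5Core.lib; on win32 a '5'-suffixed variant such as Qt5Core5.lib is also tried.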
@conf
def find_qt5_libraries(self):
env = self.env
qtincludes = self.environ.get('QT5_INCLUDES') or self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
force_static = self.environ.get('QT5_FORCE_STATIC')
try:
if self.environ.get('QT5_XCOMPILE'):
self.fatal('QT5_XCOMPILE disables pkg-config detection')
self.check_cfg(atleast_pkgconfig_version='0.1')
except self.errors.ConfigurationError:
for i in self.qt5_vars:
uselib = i.upper()
if Utils.unversioned_sys_platform() == 'darwin':
# Since at least qt 4.7.3 each library locates in separate directory
fwk = i.replace('Qt5', 'Qt')
frameworkName = fwk + '.framework'
qtDynamicLib = os.path.join(env.QTLIBS, frameworkName, fwk)
if os.path.exists(qtDynamicLib):
env.append_unique('FRAMEWORK_' + uselib, fwk)
env.append_unique('FRAMEWORKPATH_' + uselib, env.QTLIBS)
self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
else:
self.msg('Checking for %s' % i, False, 'YELLOW')
env.append_unique('INCLUDES_' + uselib, os.path.join(env.QTLIBS, frameworkName, 'Headers'))
else:
ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, force_static)
if not force_static and not ret:
ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, True)
self.msg('Checking for %s' % i, ret, 'GREEN' if ret else 'YELLOW')
else:
path = '%s:%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib' % (
self.environ.get('PKG_CONFIG_PATH', ''), env.QTLIBS, env.QTLIBS)
for i in self.qt5_vars:
self.check_cfg(package=i, args='--cflags --libs', mandatory=False, force_static=force_static, pkg_config_path=path)
@conf
def simplify_qt5_libs(self):
"""
Since library paths make really long command-lines,
and since everything depends on qtcore, remove the qtcore ones from qtgui, etc
"""
env = self.env
def process_lib(vars_, coreval):
for d in vars_:
var = d.upper()
if var == 'QTCORE':
continue
value = env['LIBPATH_'+var]
if value:
core = env[coreval]
accu = []
for lib in value:
if lib in core:
continue
accu.append(lib)
env['LIBPATH_'+var] = accu
process_lib(self.qt5_vars, 'LIBPATH_QTCORE')
@conf
def add_qt5_rpath(self):
"""
Defines rpath entries for Qt libraries
"""
env = self.env
if getattr(Options.options, 'want_rpath', False):
def process_rpath(vars_, coreval):
for d in vars_:
var = d.upper()
value = env['LIBPATH_' + var]
if value:
core = env[coreval]
accu = []
for lib in value:
if var != 'QTCORE':
if lib in core:
continue
accu.append('-Wl,--rpath='+lib)
env['RPATH_' + var] = accu
process_rpath(self.qt5_vars, 'LIBPATH_QTCORE')
@conf
def set_qt5_libs_to_check(self):
self.qt5_vars = Utils.to_list(getattr(self, 'qt5_vars', []))
if not self.qt5_vars:
dirlst = Utils.listdir(self.env.QTLIBS)
pat = self.env.cxxshlib_PATTERN
if Utils.is_win32:
pat = pat.replace('.dll', '.lib')
if self.environ.get('QT5_FORCE_STATIC'):
pat = self.env.cxxstlib_PATTERN
if Utils.unversioned_sys_platform() == 'darwin':
pat = r"%s\.framework"
re_qt = re.compile(pat%'Qt5?(?P<name>.*)'+'$')
for x in dirlst:
m = re_qt.match(x)
if m:
self.qt5_vars.append("Qt5%s" % m.group('name'))
if not self.qt5_vars:
self.fatal('cannot find any Qt5 library (%r)' % self.env.QTLIBS)
qtextralibs = getattr(Options.options, 'qtextralibs', None)
if qtextralibs:
self.qt5_vars.extend(qtextralibs.split(','))
@conf
def set_qt5_defines(self):
if sys.platform != 'win32':
return
for x in self.qt5_vars:
y=x.replace('Qt5', 'Qt')[2:].upper()
self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
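# Illustration of the define derived above: for x == 'Qt5Widgets' this appends
# 'QT_WIDGETS_LIB' to DEFINES_QT5WIDGETS, matching the define qmake would set.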
def options(opt):
"""
Command-line options
"""
opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
for i in 'qtdir qtbin qtlibs'.split():
opt.add_option('--'+i, type='string', default='', dest=i)
opt.add_option('--translate', action='store_true', help='collect translation strings', dest='trans_qt5', default=False)
opt.add_option('--qtextralibs', type='string', default='', dest='qtextralibs', help='additional qt libraries on the system to add to default ones, comma separated')
|
gpl-3.0
|
pombreda/pydbgr
|
trepan/debugger.py
|
2
|
14215
|
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2010, 2013 Rocky Bernstein <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Debugger class and top-level debugger functions.
This module contains the `Debugger' class and some top-level routines
for creating and invoking a debugger. Most of this module serves as:
* a wrapper for `Debugger.core' routines,
* a place to define `Debugger' exceptions, and
* `Debugger' settings.
See also module `cli', which contains a command-line interface to debug
a Python script; `core', which contains the core debugging start/stop and
event-handling dispatcher; and `client.py', which is user- or client-side
code for connecting to a debugged program running under a debugger server.
"""
# Our local modules
from import_relative import import_relative
import_relative('processor', '.')
Mcore = import_relative('core', '.lib')
Mexcept = import_relative('exception', '.')
# Default settings used here
Mdefault = import_relative('lib.default', '.')
Muser = import_relative('interfaces.user', '.')
Mmisc = import_relative('misc', '.')
Msig = import_relative('sighandler', '.lib')
# Common Python packages
import sys, types
# External Egg packages
import tracer, tracefilter
debugger_obj = None
try:
from readline import get_line_buffer
except ImportError:
def get_line_buffer():
return None
pass
class Debugger:
# The following functions have to be defined before DEFAULT_INIT_OPTS which
# includes references to these.
# FIXME DRY run, run_exec, run_eval.
def run(self, cmd, start_opts=None, globals_=None, locals_=None):
""" Run debugger on string `cmd' using builtin function eval
and if that builtin exec. Arguments `globals_' and `locals_'
are the dictionaries to use for local and global variables. By
default, the value of globals is globals(), the current global
variables. If `locals_' is not given, it becomes a copy of
`globals_'.
Debugger.core.start settings are passed via optional
dictionary `start_opts'. Overall debugger settings are in
Debugger.settings which changed after an instance is created
. Also see `run_eval' if what you want to run is an
run_eval'able expression have that result returned and
`run_call' if you want to debug function run_call.
"""
if globals_ is None:
globals_ = globals()
if locals_ is None:
locals_ = globals_
if not isinstance(cmd, types.CodeType):
self.eval_string = cmd
cmd = cmd+'\n'
pass
retval = None
self.core.start(start_opts)
try:
retval = eval(cmd, globals_, locals_)
except SyntaxError:
try:
exec(cmd, globals_, locals_)
except Mexcept.DebuggerQuit:
pass
except Mexcept.DebuggerQuit:
pass
pass
except Mexcept.DebuggerQuit:
pass
self.core.stop()
return retval
def run_exec(self, cmd, start_opts=None, globals_=None, locals_=None):
""" Run debugger on string `cmd' which will executed via the
builtin function exec. Arguments `globals_' and `locals_' are
the dictionaries to use for local and global variables. By
default, the value of globals is globals(), the current global
variables. If `locals_' is not given, it becomes a copy of
`globals_'.
Debugger.core.start settings are passed via optional
dictionary `start_opts'. Overall debugger settings are in
Debugger.settings which changed after an instance is created
. Also see `run_eval' if what you want to run is an
run_eval'able expression have that result returned and
`run_call' if you want to debug function run_call.
"""
if globals_ is None:
globals_ = globals()
if locals_ is None:
locals_ = globals_
if not isinstance(cmd, types.CodeType):
cmd = cmd+'\n'
pass
self.core.start(start_opts)
try:
exec(cmd, globals_, locals_)
except Mexcept.DebuggerQuit:
pass
self.core.stop()
return
def run_call(self, func, start_opts=None, *args, **kwds):
""" Run debugger on function call: `func(*args, **kwds)'
See also `run_eval' if what you want to run is an eval'able
expression have that result returned and `run' if you want to
debug a statment via exec.
"""
res = None
self.core.start(opts=start_opts)
try:
res = func(*args, **kwds)
except Mexcept.DebuggerQuit:
pass
self.core.stop()
return res
def run_eval(self, expr, start_opts=None, globals_=None, locals_=None):
""" Run debugger on string `expr' which will executed via the
built-in Python function: eval; `globals_' and `locals_' are
the dictionaries to use for local and global variables. If
`globals' is not given, __main__.__dict__ (the current global
variables) is used. If `locals_' is not given, it becomes a
copy of `globals_'.
See also `run_call' if what you to debug a function call and
`run' if you want to debug general Python statements.
"""
if globals_ is None:
globals_ = globals()
if locals_ is None:
locals_ = globals_
if not isinstance(expr, types.CodeType):
self.eval_string = expr
expr = expr+'\n'
pass
retval = None
self.core.start(start_opts)
try:
retval = eval(expr, globals_, locals_)
except Mexcept.DebuggerQuit:
pass
self.core.stop()
return retval
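# Usage sketch (mirrors the demo at the bottom of this file):
#   d = Debugger()
#   d.run_eval('1+2')   # starts the debugger, evaluates, returns 3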
def run_script(self, filename, start_opts=None, globals_=None,
locals_=None):
""" Run debugger on Python script `filename'. The script may
inspect sys.argv for command arguments. `globals_' and
`locals_' are the dictionaries to use for local and global
variables. If `globals' is not given, globals() (the current
global variables) is used. If `locals_' is not given, it
becomes a copy of `globals_'.
True is returned if the program terminated normally and False
if the debugger initiated a quit or the program did not normally
terminate.
See also `run_call' if you want to debug a function call,
`run_eval' if you want to debug an expression, and `run' if you
want to debug general Python statements not inside a file.
"""
self.mainpyfile = self.core.canonic(filename)
# Start with fresh empty copy of globals and locals and tell the script
# that it's being run as __main__ to avoid scripts being able to access
# the pydb.py namespace.
if globals_ is None:
import __main__
globals_ = {"__name__" : "__main__",
"__file__" : self.mainpyfile,
"__builtins__" : __builtins__
}
if locals_ is None:
locals_ = globals_
self.core.start(start_opts)
retval = False
self.core.execution_status = 'Running'
try:
exec(compile(open(self.mainpyfile).read(), self.mainpyfile, 'exec'), globals_, locals_)
retval = True
except SyntaxError:
print(sys.exc_info()[1])
retval = False
pass
except IOError:
print(sys.exc_info()[1])
except Mexcept.DebuggerQuit:
retval = False
pass
except Mexcept.DebuggerRestart:
self.core.execution_status = 'Restart requested'
raise Mexcept.DebuggerRestart
self.core.stop(options={'remove': True})
return retval
def restart_argv(self):
'''Return an array that would be execv-ed to restart the program'''
return self.orig_sys_argv or self.program_sys_argv
# Note: has to come after functions listed in ignore_filter.
DEFAULT_INIT_OPTS = {
# What routines will we not trace into?
'ignore_filter': tracefilter.TraceFilter(
[tracer.start, tracer.stop,
run, run_call, run_eval, run_exec, run_script]),
# sys.argv when not None contains sys.argv *before* debugger
# command processing. So sys.argv contains debugger options as
# well as debugged-program options. These options are used to
# do a "hard" or execv() restart.
# program_sys_argv is set by option save_sys_argv and contains
# sys.argv that we see now which may have debugger options
# stripped, or it may be that we were not called from a
# debugger front end but from inside the running
# program. These options are suitable for a "soft" or
# exception-handling DebuggerRestart kind of restart.
'orig_sys_argv' : None,
'save_sys_argv' : True,
# How is I/O for this debugger handled?
'activate' : False,
'interface' : None,
'input' : None,
'output' : None,
'processor' : None,
# Setting contains lots of debugger settings - a whole file
# full of them!
'settings' : Mdefault.DEBUGGER_SETTINGS,
'start_opts' : Mdefault.START_OPTS,
'step_ignore' : 0,
}
def __init__(self, opts=None):
"""Create a debugger object. But depending on the value of
key 'start' inside hash 'opts', we may or may not initially
start debugging.
See also Debugger.start and Debugger.stop.
"""
self.mainpyfile = None
self.thread = None
self.eval_string = None
get_option = lambda key: Mmisc.option_set(opts, key,
self.DEFAULT_INIT_OPTS)
completer = lambda text, state: self.complete(text, state)
# set the instance variables that come directly from options.
for opt in ['settings', 'orig_sys_argv']:
setattr(self, opt, get_option(opt))
pass
core_opts = {}
for opt in ('ignore_filter', 'proc_opts', 'processor', 'step_ignore',
'processor',):
core_opts[opt] = get_option(opt)
pass
# How is I/O for this debugger handled? This should
# be set before calling DebuggerCore.
interface_opts={'complete': completer}
# FIXME when I pass in opts=opts things break
interface = (get_option('interface') or
Muser.UserInterface(opts=interface_opts))
self.intf = [interface]
inp = get_option('input')
if inp:
self.intf[-1].input = inp
pass
out = get_option('output')
if out:
self.intf[-1].output = out
pass
self.core = Mcore.DebuggerCore(self, core_opts)
self.core.add_ignore(self.core.stop)
# When set True, we'll also suspend our debug-hook tracing.
# This gives us a way to prevent or allow self debugging.
self.core.trace_hook_suspend = False
if get_option('save_sys_argv'):
# Save the debugged program's sys.argv? We do this so that
# when the debugged script munges these, we have a good
# copy to use for an exec restart
self.program_sys_argv = list(sys.argv)
else:
self.program_sys_argv = None
pass
self.sigmgr = Msig.SignalManager(self)
# Were we requested to activate immediately?
if get_option('activate'):
self.core.start(get_option('start_opts'))
pass
return
def complete(self, last_token, state):
if hasattr(self.core.processor, 'completer'):
line = get_line_buffer() or last_token
results = self.core.processor.completer(line, state)
return results[state]
else:
return [None]
pass
# Demo it
if __name__=='__main__':
def foo():
y = 2
for i in range(2):
print("%d %d" % (i, y) )
pass
return 3
import debugger
d = debugger.Debugger()
d.settings['trace'] = True
d.settings['printset'] = tracer.ALL_EVENTS
d.core.step_ignore = -1
print('Issuing: run_eval("1+2")')
print(d.run_eval('1+2'))
print('Issuing: run_exec("x=1; y=2")')
d.run_exec('x=1; y=2')
print('Issuing: run("3*4")')
print(d.run('3*4'))
print('Issuing: run("x=3; y=4")')
d.run('x=3; y=4')
print('Issuing: run_call(foo)')
d.run_call(foo)
if len(sys.argv) > 1:
while True:
try:
print('started')
d.core.step_ignore = 0
d.core.start()
x = 4
x = foo()
for i in range(2):
print("%d" % (i+1)*10)
pass
d.core.stop()
def square(x): return x*x
print('calling: run_call(square,2)')
d.run_call(square, 2)
except Mexcept.DebuggerQuit:
print("That's all Folks!...")
break
except Mexcept.DebuggerRestart:
print('Restarting...')
pass
pass
pass
pass
|
gpl-3.0
|
cisco-openstack/neutron
|
neutron/plugins/cisco/network_plugin.py
|
32
|
6921
|
# Copyright 2012 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from oslo_utils import importutils
import webob.exc as wexc
from neutron.api import extensions as neutron_extensions
from neutron.api.v2 import base
from neutron.db import db_base_plugin_v2
from neutron.plugins.cisco.common import cisco_exceptions as cexc
from neutron.plugins.cisco.common import config
from neutron.plugins.cisco.db import network_db_v2 as cdb
from neutron.plugins.cisco import extensions
LOG = logging.getLogger(__name__)
class PluginV2(db_base_plugin_v2.NeutronDbPluginV2):
"""Meta-Plugin with v2 API support for multiple sub-plugins."""
_supported_extension_aliases = ["credential", "Cisco qos"]
_methods_to_delegate = ['create_network',
'delete_network', 'update_network', 'get_network',
'get_networks',
'create_port', 'delete_port',
'update_port', 'get_port', 'get_ports',
'create_subnet',
'delete_subnet', 'update_subnet',
'get_subnet', 'get_subnets', ]
CISCO_FAULT_MAP = {
cexc.CredentialAlreadyExists: wexc.HTTPBadRequest,
cexc.CredentialNameNotFound: wexc.HTTPNotFound,
cexc.CredentialNotFound: wexc.HTTPNotFound,
cexc.NetworkSegmentIDNotFound: wexc.HTTPNotFound,
cexc.NetworkVlanBindingAlreadyExists: wexc.HTTPBadRequest,
cexc.NexusComputeHostNotConfigured: wexc.HTTPNotFound,
cexc.NexusConfigFailed: wexc.HTTPBadRequest,
cexc.NexusConnectFailed: wexc.HTTPServiceUnavailable,
cexc.NexusPortBindingNotFound: wexc.HTTPNotFound,
cexc.NoMoreNics: wexc.HTTPBadRequest,
cexc.PortIdForNexusSvi: wexc.HTTPBadRequest,
cexc.PortVnicBindingAlreadyExists: wexc.HTTPBadRequest,
cexc.PortVnicNotFound: wexc.HTTPNotFound,
cexc.QosNameAlreadyExists: wexc.HTTPBadRequest,
cexc.QosNotFound: wexc.HTTPNotFound,
cexc.SubnetNotSpecified: wexc.HTTPBadRequest,
cexc.VlanIDNotAvailable: wexc.HTTPNotFound,
cexc.VlanIDNotFound: wexc.HTTPNotFound,
}
@property
def supported_extension_aliases(self):
if not hasattr(self, '_aliases'):
aliases = self._supported_extension_aliases[:]
if hasattr(self._model, "supported_extension_aliases"):
aliases.extend(self._model.supported_extension_aliases)
self._aliases = aliases
return self._aliases
def __init__(self):
"""Load the model class."""
self._model_name = config.CISCO.model_class
self._model = importutils.import_object(self._model_name)
native_bulk_attr_name = ("_%s__native_bulk_support"
% self._model.__class__.__name__)
self.__native_bulk_support = getattr(self._model,
native_bulk_attr_name, False)
neutron_extensions.append_api_extensions_path(extensions.__path__)
# Extend the fault map
self._extend_fault_map()
LOG.debug("Plugin initialization complete")
def __getattribute__(self, name):
"""Delegate core API calls to the model class.
Core API calls are delegated directly to the configured model class.
Note: Bulking calls will be handled by this class, and turned into
non-bulking calls to be considered for delegation.
"""
methods = object.__getattribute__(self, "_methods_to_delegate")
if name in methods:
return getattr(object.__getattribute__(self, "_model"),
name)
else:
return object.__getattribute__(self, name)
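# Illustration: 'get_networks' is in _methods_to_delegate, so
# plugin.get_networks(...) resolves to the configured model's method, while
# a name like '_extend_fault_map' falls through to PluginV2 itself.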
def __getattr__(self, name):
"""Delegate calls to the extensions.
This delegates the calls to the extensions explicitly implemented by
the model.
"""
if hasattr(self._model, name):
return getattr(self._model, name)
else:
# Must make sure we re-raise the error that led us here, since
# otherwise getattr() and even hasattr() don't work correctly.
raise AttributeError(
_("'%(model)s' object has no attribute '%(name)s'") %
{'model': self._model_name, 'name': name})
def _extend_fault_map(self):
"""Extend the Neutron Fault Map for Cisco exceptions.
Map exceptions which are specific to the Cisco Plugin
to standard HTTP exceptions.
"""
base.FAULT_MAP.update(self.CISCO_FAULT_MAP)
#
# Extension API implementation
#
def get_all_qoss(self, tenant_id):
"""Get all QoS levels."""
LOG.debug("get_all_qoss() called")
qoslist = cdb.get_all_qoss(tenant_id)
return qoslist
def get_qos_details(self, tenant_id, qos_id):
"""Get QoS Details."""
LOG.debug("get_qos_details() called")
return cdb.get_qos(tenant_id, qos_id)
def create_qos(self, tenant_id, qos_name, qos_desc):
"""Create a QoS level."""
LOG.debug("create_qos() called")
qos = cdb.add_qos(tenant_id, qos_name, str(qos_desc))
return qos
def delete_qos(self, tenant_id, qos_id):
"""Delete a QoS level."""
LOG.debug("delete_qos() called")
return cdb.remove_qos(tenant_id, qos_id)
def rename_qos(self, tenant_id, qos_id, new_name):
"""Rename QoS level."""
LOG.debug("rename_qos() called")
return cdb.update_qos(tenant_id, qos_id, new_name)
def get_all_credentials(self):
"""Get all credentials."""
LOG.debug("get_all_credentials() called")
credential_list = cdb.get_all_credentials()
return credential_list
def get_credential_details(self, credential_id):
"""Get a particular credential."""
LOG.debug("get_credential_details() called")
return cdb.get_credential(credential_id)
def rename_credential(self, credential_id, new_name, new_password):
"""Rename the particular credential resource."""
LOG.debug("rename_credential() called")
return cdb.update_credential(credential_id, new_name,
new_password=new_password)
|
apache-2.0
|
luogangyi/bcec-nova
|
nova/tests/compute/test_compute_xen.py
|
16
|
2602
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for expectations of behaviour from the Xen driver."""
from oslo.config import cfg
from nova.compute import power_state
from nova import context
from nova.objects import instance as instance_obj
from nova.openstack.common import importutils
from nova.tests import fake_instance
from nova.tests.virt.xenapi import stubs
from nova.virt.xenapi import vm_utils
CONF = cfg.CONF
CONF.import_opt('compute_manager', 'nova.service')
CONF.import_opt('compute_driver', 'nova.virt.driver')
class ComputeXenTestCase(stubs.XenAPITestBaseNoDB):
def setUp(self):
super(ComputeXenTestCase, self).setUp()
self.flags(compute_driver='xenapi.XenAPIDriver')
self.flags(connection_url='test_url',
connection_password='test_pass',
group='xenserver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.compute = importutils.import_object(CONF.compute_manager)
def test_sync_power_states_instance_not_found(self):
db_instance = fake_instance.fake_db_instance()
ctxt = context.get_admin_context()
instance_list = instance_obj._make_instance_list(ctxt,
instance_obj.InstanceList(), [db_instance], None)
instance = instance_list[0]
self.mox.StubOutWithMock(instance_obj.InstanceList, 'get_by_host')
self.mox.StubOutWithMock(self.compute.driver, 'get_num_instances')
self.mox.StubOutWithMock(vm_utils, 'lookup')
self.mox.StubOutWithMock(self.compute, '_sync_instance_power_state')
instance_obj.InstanceList.get_by_host(ctxt,
self.compute.host, use_slave=True).AndReturn(instance_list)
self.compute.driver.get_num_instances().AndReturn(1)
vm_utils.lookup(self.compute.driver._session, instance['name'],
False).AndReturn(None)
self.compute._sync_instance_power_state(ctxt, instance,
power_state.NOSTATE)
self.mox.ReplayAll()
self.compute._sync_power_states(ctxt)
|
apache-2.0
|
bgaultier/laboitepro
|
laboite/apps/weather/migrations/0001_initial.py
|
1
|
1855
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-11-21 20:41
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('boites', '__first__'),
]
operations = [
migrations.CreateModel(
name='AppWeather',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('enabled', models.BooleanField(default=True, help_text='Indique si cette app est activ\xe9e sur votre bo\xeete', verbose_name='App activ\xe9e ?')),
('created_date', models.DateTimeField(auto_now_add=True, verbose_name='Date de cr\xe9ation')),
('last_activity', models.DateTimeField(auto_now=True, verbose_name='Derni\xe8re activit\xe9')),
('city_name', models.CharField(default='Paris', help_text='Veuillez saisir la ville o\xf9 se trouve votre bo\xeete', max_length=64, verbose_name='Ville')),
('temperature_now', models.PositiveSmallIntegerField(null=True, verbose_name='Temp\xe9rature actuelle')),
('humidity_now', models.PositiveSmallIntegerField(null=True, verbose_name='Humidit\xe9 actuelle')),
('icon_now', models.PositiveSmallIntegerField(choices=[(0, 'Temps clair'), (1, 'Nuages'), (2, 'Pluie'), (3, 'Brouillard'), (4, 'Neige')], default=1, verbose_name='Ic\xf4ne')),
('boite', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='boites.Boite', verbose_name='Bo\xeete')),
],
options={
'verbose_name': 'Configuration : m\xe9t\xe9o',
'verbose_name_plural': 'Configurations : m\xe9t\xe9o',
},
),
]
|
agpl-3.0
|
aronasorman/content-curation
|
contentcuration/contentcuration/production_settings.py
|
2
|
1440
|
import os
from .settings import *
STORAGE_ROOT = "/contentworkshop_content/storage/"
DB_ROOT = "/contentworkshop_content/databases/"
STATIC_ROOT = "/contentworkshop_static/"
MEDIA_ROOT = STORAGE_ROOT
SITE_ID = int(os.getenv("SITE_ID"))
SESSION_ENGINE = "django.contrib.sessions.backends.db"
if os.getenv("USE_DATADOG"):
INSTALLED_APPS = (
"ddtrace.contrib.django",
) + INSTALLED_APPS
MIDDLEWARE_CLASSES = (
'ddtrace.contrib.django.TraceMiddleware',
) + MIDDLEWARE_CLASSES
DATADOG_TRACE = {
'DEFAULT_SERVICE': 'contentworkshop',
'AGENT_PORT': int(os.getenv("DATADOG_STATSD_PORT") or 8126),
'AGENT_HOSTNAME': os.getenv("DATADOG_STATSD_HOSTNAME"),
'TAGS': {'env': 'production'},
}
DATABASES = {
'default': {
'ENGINE':
'django.db.backends.postgresql_psycopg2',
'NAME': os.getenv("DB_CREDENTIALS_DBNAME"),
'USER': os.getenv("DB_CREDENTIALS_USER"),
'PASSWORD': os.getenv("DB_CREDENTIALS_PASSWORD"),
'HOST': os.getenv("DB_CREDENTIALS_HOST"),
'PORT': int(os.getenv("DB_CREDENTIALS_PORT")),
'CONN_MAX_AGE': 600,
},
'export_staging': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'export_staging.sqlite3')
}
}
# email settings
EMAIL_BACKEND = "postmark.backends.PostmarkBackend"
POSTMARK_API_KEY = os.getenv("EMAIL_CREDENTIALS_POSTMARK_API_KEY")
|
mit
|
ehirt/odoo
|
addons/mass_mailing/models/mail_thread.py
|
66
|
5189
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import logging
import re
from openerp.addons.mail.mail_message import decode
from openerp.addons.mail.mail_thread import decode_header
from openerp.osv import osv
_logger = logging.getLogger(__name__)
class MailThread(osv.AbstractModel):
""" Update MailThread to add the feature of bounced emails and replied emails
in message_process. """
_name = 'mail.thread'
_inherit = ['mail.thread']
def message_route_check_bounce(self, cr, uid, message, context=None):
""" Override to verify that the email_to is the bounce alias. If it is the
case, log the bounce, set the parent and related document as bounced and
return False to end the routing process. """
bounce_alias = self.pool['ir.config_parameter'].get_param(cr, uid, "mail.bounce.alias", context=context)
message_id = message.get('Message-Id')
email_from = decode_header(message, 'From')
email_to = decode_header(message, 'To')
# 0. Verify whether this is a bounced email (wrong destination,...) -> use it to collect data, such as dead leads
if bounce_alias and bounce_alias in email_to:
# Bounce regex
# Typical form of bounce is bounce_alias-128-crm.lead-34@domain
# group(1) = the mail ID; group(2) = the model (if any); group(3) = the record ID
bounce_re = re.compile(r"%s-(\d+)-?([\w.]+)?-?(\d+)?" % re.escape(bounce_alias), re.UNICODE)
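# Illustration (hypothetical address): for email_to
# '[email protected]' the match gives
# group(1) == '128' (mail id), group(2) == 'crm.lead' (model),
# group(3) == '34' (record id).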
bounce_match = bounce_re.search(email_to)
if bounce_match:
bounced_model, bounced_thread_id = None, False
bounced_mail_id = bounce_match.group(1)
stat_ids = self.pool['mail.mail.statistics'].set_bounced(cr, uid, mail_mail_ids=[bounced_mail_id], context=context)
for stat in self.pool['mail.mail.statistics'].browse(cr, uid, stat_ids, context=context):
bounced_model = stat.model
bounced_thread_id = stat.res_id
_logger.info('Routing mail from %s to %s with Message-Id %s: bounced mail from mail %s, model: %s, thread_id: %s',
email_from, email_to, message_id, bounced_mail_id, bounced_model, bounced_thread_id)
if bounced_model and bounced_model in self.pool and hasattr(self.pool[bounced_model], 'message_receive_bounce') and bounced_thread_id:
self.pool[bounced_model].message_receive_bounce(cr, uid, [bounced_thread_id], mail_id=bounced_mail_id, context=context)
return False
return True
def message_route(self, cr, uid, message, message_dict, model=None, thread_id=None,
custom_values=None, context=None):
if not self.message_route_check_bounce(cr, uid, message, context=context):
return []
return super(MailThread, self).message_route(cr, uid, message, message_dict, model, thread_id, custom_values, context)
def message_receive_bounce(self, cr, uid, ids, mail_id=None, context=None):
"""Called by ``message_process`` when a bounce email (such as Undelivered
Mail Returned to Sender) is received for an existing thread. The default
behavior is to check whether an integer ``message_bounce`` column exists.
If it does, its content is incremented. """
if 'message_bounce' in self._fields:
for obj in self.browse(cr, uid, ids, context=context):
self.write(cr, uid, [obj.id], {'message_bounce': obj.message_bounce + 1}, context=context)
def message_route_process(self, cr, uid, message, message_dict, routes, context=None):
""" Override to update the parent mail statistics. The parent is found
by using the References header of the incoming message and looking for
matching message_id in mail.mail.statistics. """
if message.get('References'):
message_ids = [x.strip() for x in decode(message['References']).split()]
self.pool['mail.mail.statistics'].set_replied(cr, uid, mail_message_ids=message_ids, context=context)
return super(MailThread, self).message_route_process(cr, uid, message, message_dict, routes, context=context)
|
agpl-3.0
|
SoftwareKing/zstack-dashboard
|
zstack_dashboard/static/templates/console/utils/websocket.py
|
9
|
34617
|
#!/usr/bin/env python
'''
Python WebSocket library with support for "wss://" encryption.
Copyright 2011 Joel Martin
Licensed under LGPL version 3 (see docs/LICENSE.LGPL-3)
Supports following protocol versions:
- http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-75
- http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76
- http://tools.ietf.org/html/draft-ietf-hybi-thewebsocketprotocol-10
You can make a cert/key with openssl using:
openssl req -new -x509 -days 365 -nodes -out self.pem -keyout self.pem
as taken from http://docs.python.org/dev/library/ssl.html#certificates
'''
import os, sys, time, errno, signal, socket, traceback, select
import array, struct
from base64 import b64encode, b64decode
# Imports that vary by python version
# python 3.0 differences
if sys.hexversion > 0x3000000:
b2s = lambda buf: buf.decode('latin_1')
s2b = lambda s: s.encode('latin_1')
s2a = lambda s: s
else:
b2s = lambda buf: buf # No-op
s2b = lambda s: s # No-op
s2a = lambda s: [ord(c) for c in s]
try: from io import StringIO
except: from cStringIO import StringIO
try: from http.server import SimpleHTTPRequestHandler
except: from SimpleHTTPServer import SimpleHTTPRequestHandler
# python 2.6 differences
try: from hashlib import md5, sha1
except: from md5 import md5; from sha import sha as sha1
# python 2.5 differences
try:
from struct import pack, unpack_from
except:
from struct import pack
def unpack_from(fmt, buf, offset=0):
slice = buffer(buf, offset, struct.calcsize(fmt))
return struct.unpack(fmt, slice)
# Degraded functionality if these imports are missing
for mod, sup in [('numpy', 'HyBi protocol'), ('ssl', 'TLS/SSL/wss'),
('multiprocessing', 'Multi-Processing'),
('resource', 'daemonizing')]:
try:
globals()[mod] = __import__(mod)
except ImportError:
globals()[mod] = None
print("WARNING: no '%s' module, %s is slower or disabled" % (
mod, sup))
if multiprocessing and sys.platform == 'win32':
# make sockets pickle-able/inheritable
import multiprocessing.reduction
class WebSocketServer(object):
"""
WebSockets server class.
Must be sub-classed with new_client method definition.
"""
buffer_size = 65536
server_handshake_hixie = """HTTP/1.1 101 Web Socket Protocol Handshake\r
Upgrade: WebSocket\r
Connection: Upgrade\r
%sWebSocket-Origin: %s\r
%sWebSocket-Location: %s://%s%s\r
"""
server_handshake_hybi = """HTTP/1.1 101 Switching Protocols\r
Upgrade: websocket\r
Connection: Upgrade\r
Sec-WebSocket-Accept: %s\r
"""
GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
policy_response = """<cross-domain-policy><allow-access-from domain="*" to-ports="*" /></cross-domain-policy>\n"""
# An exception before the WebSocket connection was established
class EClose(Exception):
pass
# An exception while the WebSocket client was connected
class CClose(Exception):
pass
def __init__(self, listen_host='', listen_port=None, source_is_ipv6=False,
verbose=False, cert='', key='', ssl_only=None,
daemon=False, record='', web='',
run_once=False, timeout=0, idle_timeout=0):
# settings
self.verbose = verbose
self.listen_host = listen_host
self.listen_port = listen_port
self.prefer_ipv6 = source_is_ipv6
self.ssl_only = ssl_only
self.daemon = daemon
self.run_once = run_once
self.timeout = timeout
self.idle_timeout = idle_timeout
self.launch_time = time.time()
self.ws_connection = False
self.handler_id = 1
# Make paths settings absolute
self.cert = os.path.abspath(cert)
self.key = self.web = self.record = ''
if key:
self.key = os.path.abspath(key)
if web:
self.web = os.path.abspath(web)
if record:
self.record = os.path.abspath(record)
if self.web:
os.chdir(self.web)
# Sanity checks
if not ssl and self.ssl_only:
raise Exception("No 'ssl' module and SSL-only specified")
if self.daemon and not resource:
raise Exception("Module 'resource' required to daemonize")
# Show configuration
print("WebSocket server settings:")
print(" - Listen on %s:%s" % (
self.listen_host, self.listen_port))
print(" - Flash security policy server")
if self.web:
print(" - Web server. Web root: %s" % self.web)
if ssl:
if os.path.exists(self.cert):
print(" - SSL/TLS support")
if self.ssl_only:
print(" - Deny non-SSL/TLS connections")
else:
print(" - No SSL/TLS support (no cert file)")
else:
print(" - No SSL/TLS support (no 'ssl' module)")
if self.daemon:
print(" - Backgrounding (daemon)")
if self.record:
print(" - Recording to '%s.*'" % self.record)
#
# WebSocketServer static methods
#
@staticmethod
def socket(host, port=None, connect=False, prefer_ipv6=False, unix_socket=None, use_ssl=False):
""" Resolve a host (and optional port) to an IPv4 or IPv6
address. Create a socket. Connect to it if connect is set,
otherwise bind to it and listen. Return the socket.
"""
flags = 0
if host == '':
host = None
if connect and not (port or unix_socket):
raise Exception("Connect mode requires a port")
if use_ssl and not ssl:
raise Exception("SSL socket requested but Python SSL module not loaded.");
if not connect and use_ssl:
raise Exception("SSL only supported in connect mode (for now)")
if not connect:
flags = flags | socket.AI_PASSIVE
if not unix_socket:
addrs = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM,
socket.IPPROTO_TCP, flags)
if not addrs:
raise Exception("Could not resolve host '%s'" % host)
addrs.sort(key=lambda x: x[0])
if prefer_ipv6:
addrs.reverse()
sock = socket.socket(addrs[0][0], addrs[0][1])
if connect:
sock.connect(addrs[0][4])
if use_ssl:
sock = ssl.wrap_socket(sock)
else:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(addrs[0][4])
sock.listen(100)
else:
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect(unix_socket)
return sock
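# Usage sketch (hypothetical host/port): a listening socket is obtained with
#   lsock = WebSocketServer.socket('localhost', 8080)
# and a client connection with
#   csock = WebSocketServer.socket('localhost', 8080, connect=True)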
@staticmethod
def daemonize(keepfd=None, chdir='/'):
os.umask(0)
if chdir:
os.chdir(chdir)
else:
os.chdir('/')
os.setgid(os.getgid()) # relinquish elevations
os.setuid(os.getuid()) # relinquish elevations
# Double fork to daemonize
if os.fork() > 0: os._exit(0) # Parent exits
os.setsid() # Obtain new process group
if os.fork() > 0: os._exit(0) # Parent exits
# Signal handling
def terminate(a,b): os._exit(0)
signal.signal(signal.SIGTERM, terminate)
signal.signal(signal.SIGINT, signal.SIG_IGN)
# Close open files
maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
if maxfd == resource.RLIM_INFINITY: maxfd = 256
for fd in reversed(range(maxfd)):
try:
if fd != keepfd:
os.close(fd)
except OSError:
_, exc, _ = sys.exc_info()
if exc.errno != errno.EBADF: raise
# Redirect I/O to /dev/null
os.dup2(os.open(os.devnull, os.O_RDWR), sys.stdin.fileno())
os.dup2(os.open(os.devnull, os.O_RDWR), sys.stdout.fileno())
os.dup2(os.open(os.devnull, os.O_RDWR), sys.stderr.fileno())
@staticmethod
def unmask(buf, hlen, plen):
pstart = hlen + 4
pend = pstart + plen
if numpy:
b = c = s2b('')
if plen >= 4:
mask = numpy.frombuffer(buf, dtype=numpy.dtype('<u4'),
offset=hlen, count=1)
data = numpy.frombuffer(buf, dtype=numpy.dtype('<u4'),
offset=pstart, count=int(plen / 4))
#b = numpy.bitwise_xor(data, mask).data
b = numpy.bitwise_xor(data, mask).tostring()
if plen % 4:
#print("Partial unmask")
mask = numpy.frombuffer(buf, dtype=numpy.dtype('B'),
offset=hlen, count=(plen % 4))
data = numpy.frombuffer(buf, dtype=numpy.dtype('B'),
offset=pend - (plen % 4),
count=(plen % 4))
c = numpy.bitwise_xor(data, mask).tostring()
return b + c
else:
# Slower fallback
mask = buf[hlen:hlen+4]
data = array.array('B')
mask = s2a(mask)
data.fromstring(buf[pstart:pend])
for i in range(len(data)):
data[i] ^= mask[i % 4]
return data.tostring()
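# Masking illustration (hypothetical bytes): each payload byte i is XORed
# with mask[i % 4], so payload b'\x01\x02\x03\x04\x05' under mask
# b'\xaa\xbb\xcc\xdd' unmasks to b'\xab\xb9\xcf\xd9\xaf'.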
@staticmethod
def encode_hybi(buf, opcode, base64=False):
""" Encode a HyBi style WebSocket frame.
Optional opcode:
0x0 - continuation
0x1 - text frame (base64 encode buf)
0x2 - binary frame (use raw buf)
0x8 - connection close
0x9 - ping
0xA - pong
"""
if base64:
buf = b64encode(buf)
b1 = 0x80 | (opcode & 0x0f) # FIN + opcode
payload_len = len(buf)
if payload_len <= 125:
header = pack('>BB', b1, payload_len)
elif payload_len > 125 and payload_len < 65536:
header = pack('>BBH', b1, 126, payload_len)
elif payload_len >= 65536:
header = pack('>BBQ', b1, 127, payload_len)
#print("Encoded: %s" % repr(header + buf))
return header + buf, len(header), 0
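# Header sizes produced by the branches above:
#   payload <= 125 bytes       -> 2-byte header (FIN/opcode byte + length byte)
#   126 <= payload < 65536     -> 4-byte header (marker 126 + 16-bit length)
#   payload >= 65536           -> 10-byte header (marker 127 + 64-bit length)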
@staticmethod
def decode_hybi(buf, base64=False):
""" Decode HyBi style WebSocket packets.
Returns:
{'fin' : 0_or_1,
'opcode' : number,
'masked' : boolean,
'hlen' : header_bytes_number,
'length' : payload_bytes_number,
'payload' : decoded_buffer,
'left' : bytes_left_number,
'close_code' : number,
'close_reason' : string}
"""
f = {'fin' : 0,
'opcode' : 0,
'masked' : False,
'hlen' : 2,
'length' : 0,
'payload' : None,
'left' : 0,
'close_code' : 1000,
'close_reason' : ''}
blen = len(buf)
f['left'] = blen
if blen < f['hlen']:
return f # Incomplete frame header
b1, b2 = unpack_from(">BB", buf)
f['opcode'] = b1 & 0x0f
f['fin'] = (b1 & 0x80) >> 7
f['masked'] = (b2 & 0x80) >> 7
f['length'] = b2 & 0x7f
if f['length'] == 126:
f['hlen'] = 4
if blen < f['hlen']:
return f # Incomplete frame header
(f['length'],) = unpack_from('>xxH', buf)
elif f['length'] == 127:
f['hlen'] = 10
if blen < f['hlen']:
return f # Incomplete frame header
(f['length'],) = unpack_from('>xxQ', buf)
full_len = f['hlen'] + f['masked'] * 4 + f['length']
if blen < full_len: # Incomplete frame
return f # Incomplete frame header
# Number of bytes that are part of the next frame(s)
f['left'] = blen - full_len
# Process 1 frame
if f['masked']:
# unmask payload
f['payload'] = WebSocketServer.unmask(buf, f['hlen'],
f['length'])
else:
print("Unmasked frame: %s" % repr(buf))
f['payload'] = buf[(f['hlen'] + f['masked'] * 4):full_len]
if base64 and f['opcode'] in [1, 2]:
try:
f['payload'] = b64decode(f['payload'])
except:
print("Exception while b64decoding buffer: %s" %
repr(buf))
raise
if f['opcode'] == 0x08:
if f['length'] >= 2:
f['close_code'] = unpack_from(">H", f['payload'])[0]
if f['length'] > 3:
f['close_reason'] = f['payload'][2:]
return f
@staticmethod
def encode_hixie(buf):
return s2b("\x00" + b2s(b64encode(buf)) + "\xff"), 1, 1
@staticmethod
def decode_hixie(buf):
end = buf.find(s2b('\xff'))
return {'payload': b64decode(buf[1:end]),
'hlen': 1,
'masked': False,
'length': end - 1,
'left': len(buf) - (end + 1)}
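# Hixie framing illustration: frames are '\x00' + base64(payload) + '\xff',
# so a frame carrying b'hi' arrives as b'\x00aGk=\xff' and decodes back to b'hi'.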
@staticmethod
def gen_md5(keys):
""" Generate hash value for WebSockets hixie-76. """
key1 = keys['Sec-WebSocket-Key1']
key2 = keys['Sec-WebSocket-Key2']
key3 = keys['key3']
spaces1 = key1.count(" ")
spaces2 = key2.count(" ")
num1 = int("".join([c for c in key1 if c.isdigit()])) / spaces1
num2 = int("".join([c for c in key2 if c.isdigit()])) / spaces2
return b2s(md5(pack('>II8s',
int(num1), int(num2), key3)).digest())
#
# WebSocketServer logging/output functions
#
def traffic(self, token="."):
""" Show traffic flow in verbose mode. """
if self.verbose and not self.daemon:
sys.stdout.write(token)
sys.stdout.flush()
def msg(self, msg):
""" Output message with handler_id prefix. """
if not self.daemon:
print("% 3d: %s" % (self.handler_id, msg))
def vmsg(self, msg):
""" Same as msg() but only if verbose. """
if self.verbose:
self.msg(msg)
#
# Main WebSocketServer methods
#
def send_frames(self, bufs=None):
""" Encode and send WebSocket frames. Any frames already
queued will be sent first. If buf is not set then only queued
frames will be sent. Returns the number of pending frames that
could not be fully sent. If returned pending frames is greater
than 0, then the caller should call again when the socket is
ready. """
tdelta = int(time.time()*1000) - self.start_time
if bufs:
for buf in bufs:
if self.version.startswith("hybi"):
if self.base64:
encbuf, lenhead, lentail = self.encode_hybi(
buf, opcode=1, base64=True)
else:
encbuf, lenhead, lentail = self.encode_hybi(
buf, opcode=2, base64=False)
else:
encbuf, lenhead, lentail = self.encode_hixie(buf)
if self.rec:
self.rec.write("%s,\n" %
repr("{%s{" % tdelta
+ encbuf[lenhead:len(encbuf)-lentail]))
self.send_parts.append(encbuf)
while self.send_parts:
# Send pending frames
buf = self.send_parts.pop(0)
sent = self.client.send(buf)
if sent == len(buf):
self.traffic("<")
else:
self.traffic("<.")
self.send_parts.insert(0, buf[sent:])
break
return len(self.send_parts)
def recv_frames(self):
""" Receive and decode WebSocket frames.
Returns:
(bufs_list, closed_string)
"""
closed = False
bufs = []
tdelta = int(time.time()*1000) - self.start_time
buf = self.client.recv(self.buffer_size)
if len(buf) == 0:
closed = {'code': 1000, 'reason': "Client closed abruptly"}
return bufs, closed
if self.recv_part:
# Add partially received frames to current read buffer
buf = self.recv_part + buf
self.recv_part = None
while buf:
if self.version.startswith("hybi"):
frame = self.decode_hybi(buf, base64=self.base64)
#print("Received buf: %s, frame: %s" % (repr(buf), frame))
if frame['payload'] is None:
# Incomplete/partial frame
self.traffic("}.")
if frame['left'] > 0:
self.recv_part = buf[-frame['left']:]
break
else:
if frame['opcode'] == 0x8: # connection close
closed = {'code': frame['close_code'],
'reason': frame['close_reason']}
break
else:
if buf[0:2] == s2b('\xff\x00'):
closed = {'code': 1000,
'reason': "Client sent orderly close frame"}
break
elif buf[0:2] == s2b('\x00\xff'):
buf = buf[2:]
continue # No-op
elif buf.count(s2b('\xff')) == 0:
# Partial frame
self.traffic("}.")
self.recv_part = buf
break
frame = self.decode_hixie(buf)
self.traffic("}")
if self.rec:
start = frame['hlen']
end = frame['hlen'] + frame['length']
if frame['masked']:
recbuf = WebSocketServer.unmask(buf, frame['hlen'],
frame['length'])
else:
recbuf = buf[frame['hlen']:frame['hlen'] +
frame['length']]
self.rec.write("%s,\n" %
repr("}%s}" % tdelta + recbuf))
bufs.append(frame['payload'])
if frame['left']:
buf = buf[-frame['left']:]
else:
buf = ''
return bufs, closed
def send_close(self, code=1000, reason=''):
""" Send a WebSocket orderly close frame. """
if self.version.startswith("hybi"):
msg = pack(">H%ds" % len(reason), code, reason)
buf, h, t = self.encode_hybi(msg, opcode=0x08, base64=False)
self.client.send(buf)
elif self.version == "hixie-76":
buf = s2b('\xff\x00')
self.client.send(buf)
# No orderly close for 75
def do_websocket_handshake(self, headers, path):
h = self.headers = headers
self.path = path
prot = 'WebSocket-Protocol'
protocols = h.get('Sec-'+prot, h.get(prot, '')).split(',')
ver = h.get('Sec-WebSocket-Version')
if ver:
# HyBi/IETF version of the protocol
# HyBi-07 report version 7
# HyBi-08 - HyBi-12 report version 8
# HyBi-13 reports version 13
if ver in ['7', '8', '13']:
self.version = "hybi-%02d" % int(ver)
else:
raise self.EClose('Unsupported protocol version %s' % ver)
key = h['Sec-WebSocket-Key']
# Choose binary if client supports it
if 'binary' in protocols:
self.base64 = False
elif 'base64' in protocols:
self.base64 = True
else:
raise self.EClose("Client must support 'binary' or 'base64' protocol")
# Generate the hash value for the accept header
accept = b64encode(sha1(s2b(key + self.GUID)).digest())
response = self.server_handshake_hybi % b2s(accept)
if self.base64:
response += "Sec-WebSocket-Protocol: base64\r\n"
else:
response += "Sec-WebSocket-Protocol: binary\r\n"
response += "\r\n"
else:
# Hixie version of the protocol (75 or 76)
if h.get('key3'):
trailer = self.gen_md5(h)
pre = "Sec-"
self.version = "hixie-76"
else:
trailer = ""
pre = ""
self.version = "hixie-75"
# We only support base64 in Hixie era
self.base64 = True
response = self.server_handshake_hixie % (pre,
h['Origin'], pre, self.scheme, h['Host'], path)
if 'base64' in protocols:
response += "%sWebSocket-Protocol: base64\r\n" % pre
else:
self.msg("Warning: client does not report 'base64' protocol support")
response += "\r\n" + trailer
return response
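# Illustrative HyBi response built above (accept value from the RFC 6455
# example key 'dGhlIHNhbXBsZSBub25jZQ=='):
#   HTTP/1.1 101 Switching Protocols
#   Upgrade: websocket
#   Connection: Upgrade
#   Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=
#   Sec-WebSocket-Protocol: base64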
def do_handshake(self, sock, address):
"""
do_handshake does the following:
- Peek at the first few bytes from the socket.
- If the connection is a Flash policy request then answer it,
close the socket and return.
- If the connection is an HTTPS/SSL/TLS connection then SSL
wrap the socket.
- Read from the (possibly wrapped) socket.
- If we have received an HTTP GET request and the webserver
functionality is enabled, answer it, close the socket and
return.
- Assume we have a WebSockets connection, parse the client
handshake data.
- Send a WebSockets handshake server response.
- Return the socket for this WebSocket client.
"""
stype = ""
ready = select.select([sock], [], [], 3)[0]
if not ready:
raise self.EClose("ignoring socket not ready")
# Peek, but do not read the data so that we have an opportunity
# to SSL wrap the socket first
handshake = sock.recv(1024, socket.MSG_PEEK)
#self.msg("Handshake [%s]" % handshake)
if handshake == "":
raise self.EClose("ignoring empty handshake")
elif handshake.startswith(s2b("<policy-file-request/>")):
# Answer Flash policy request
handshake = sock.recv(1024)
sock.send(s2b(self.policy_response))
raise self.EClose("Sending flash policy response")
elif handshake[0] in ("\x16", "\x80", 22, 128):
# SSL wrap the connection
if not ssl:
raise self.EClose("SSL connection but no 'ssl' module")
if not os.path.exists(self.cert):
raise self.EClose("SSL connection but '%s' not found"
% self.cert)
retsock = None
try:
retsock = ssl.wrap_socket(
sock,
server_side=True,
certfile=self.cert,
keyfile=self.key)
except ssl.SSLError:
_, x, _ = sys.exc_info()
if x.args[0] == ssl.SSL_ERROR_EOF:
if len(x.args) > 1:
raise self.EClose(x.args[1])
else:
raise self.EClose("Got SSL_ERROR_EOF")
else:
raise
self.scheme = "wss"
stype = "SSL/TLS (wss://)"
elif self.ssl_only:
raise self.EClose("non-SSL connection received but disallowed")
else:
retsock = sock
self.scheme = "ws"
stype = "Plain non-SSL (ws://)"
wsh = WSRequestHandler(retsock, address, not self.web)
if wsh.last_code == 101:
# Continue on to handle WebSocket upgrade
pass
elif wsh.last_code == 405:
raise self.EClose("Normal web request received but disallowed")
elif wsh.last_code < 200 or wsh.last_code >= 300:
raise self.EClose(wsh.last_message)
elif self.verbose:
raise self.EClose(wsh.last_message)
else:
raise self.EClose("")
response = self.do_websocket_handshake(wsh.headers, wsh.path)
self.msg("%s: %s WebSocket connection" % (address[0], stype))
self.msg("%s: Version %s, base64: '%s'" % (address[0],
self.version, self.base64))
if self.path != '/':
self.msg("%s: Path: '%s'" % (address[0], self.path))
# Send server WebSockets handshake response
#self.msg("sending response [%s]" % response)
retsock.send(s2b(response))
# Return the WebSockets socket which may be SSL wrapped
return retsock
#
# Events that can/should be overridden in sub-classes
#
def started(self):
""" Called after WebSockets startup """
self.vmsg("WebSockets server started")
def poll(self):
""" Run periodically while waiting for connections. """
#self.vmsg("Running poll()")
pass
def fallback_SIGCHLD(self, sig, stack):
# Reap zombies when using os.fork() (python 2.4)
self.vmsg("Got SIGCHLD, reaping zombies")
try:
result = os.waitpid(-1, os.WNOHANG)
while result[0]:
self.vmsg("Reaped child process %s" % result[0])
result = os.waitpid(-1, os.WNOHANG)
except OSError:
pass
def do_SIGINT(self, sig, stack):
self.msg("Got SIGINT, exiting")
sys.exit(0)
def top_new_client(self, startsock, address):
""" Do something with a WebSockets client connection. """
# Initialize per client settings
self.send_parts = []
self.recv_part = None
self.base64 = False
self.rec = None
self.start_time = int(time.time()*1000)
# handler process
try:
try:
self.client = self.do_handshake(startsock, address)
if self.record:
# Record raw frame data as JavaScript array
fname = "%s.%s" % (self.record,
self.handler_id)
self.msg("opening record file: %s" % fname)
self.rec = open(fname, 'w+')
encoding = "binary"
if self.base64: encoding = "base64"
self.rec.write("var VNC_frame_encoding = '%s';\n"
% encoding)
self.rec.write("var VNC_frame_data = [\n")
self.ws_connection = True
self.new_client()
except self.CClose:
# Close the client
_, exc, _ = sys.exc_info()
if self.client:
self.send_close(exc.args[0], exc.args[1])
except self.EClose:
_, exc, _ = sys.exc_info()
# Connection was not a WebSockets connection
if exc.args[0]:
self.msg("%s: %s" % (address[0], exc.args[0]))
except Exception:
_, exc, _ = sys.exc_info()
self.msg("handler exception: %s" % str(exc))
if self.verbose:
self.msg(traceback.format_exc())
finally:
if self.rec:
self.rec.write("'EOF'];\n")
self.rec.close()
if self.client and self.client != startsock:
# Close the SSL wrapped socket
# Original socket closed by caller
self.client.close()
def new_client(self):
""" Handle a WebSockets client connection. Must be overridden
in subclasses. """
raise Exception("WebSocketServer.new_client() must be overloaded")
def start_server(self):
"""
Daemonize if requested. Listen for connections. Run the
do_handshake() method for each connection. If the connection
is a WebSockets client then call the new_client() method (which
must be overridden) for each new client connection.
"""
lsock = self.socket(self.listen_host, self.listen_port, False, self.prefer_ipv6)
if self.daemon:
self.daemonize(keepfd=lsock.fileno(), chdir=self.web)
self.started() # Some things need to happen after daemonizing
# Allow override of SIGINT
signal.signal(signal.SIGINT, self.do_SIGINT)
if not multiprocessing:
# os.fork() (python 2.4) child reaper
signal.signal(signal.SIGCHLD, self.fallback_SIGCHLD)
last_active_time = self.launch_time
while True:
try:
try:
self.client = None
startsock = None
pid = err = 0
child_count = 0
if multiprocessing and self.idle_timeout:
child_count = len(multiprocessing.active_children())
time_elapsed = time.time() - self.launch_time
if self.timeout and time_elapsed > self.timeout:
self.msg('listener exit due to --timeout %s'
% self.timeout)
break
if self.idle_timeout:
idle_time = 0
if child_count == 0:
idle_time = time.time() - last_active_time
else:
idle_time = 0
last_active_time = time.time()
if idle_time > self.idle_timeout and child_count == 0:
self.msg('listener exit due to --idle-timeout %s'
% self.idle_timeout)
break
try:
self.poll()
ready = select.select([lsock], [], [], 1)[0]
if lsock in ready:
startsock, address = lsock.accept()
else:
continue
except Exception:
_, exc, _ = sys.exc_info()
if hasattr(exc, 'errno'):
err = exc.errno
elif hasattr(exc, 'args'):
err = exc.args[0]
else:
err = exc[0]
if err == errno.EINTR:
self.vmsg("Ignoring interrupted syscall")
continue
else:
raise
if self.run_once:
# Run in same process if run_once
self.top_new_client(startsock, address)
if self.ws_connection:
self.msg('%s: exiting due to --run-once'
% address[0])
break
elif multiprocessing:
self.vmsg('%s: new handler Process' % address[0])
p = multiprocessing.Process(
target=self.top_new_client,
args=(startsock, address))
p.start()
# child will not return
else:
# python 2.4
self.vmsg('%s: forking handler' % address[0])
pid = os.fork()
if pid == 0:
# child handler process
self.top_new_client(startsock, address)
break # child process exits
# parent process
self.handler_id += 1
except KeyboardInterrupt:
_, exc, _ = sys.exc_info()
print("In KeyboardInterrupt")
pass
except SystemExit:
_, exc, _ = sys.exc_info()
print("In SystemExit")
break
except Exception:
_, exc, _ = sys.exc_info()
self.msg("handler exception: %s" % str(exc))
if self.verbose:
self.msg(traceback.format_exc())
finally:
if startsock:
startsock.close()
# Close listen port
self.vmsg("Closing socket listening at %s:%s"
% (self.listen_host, self.listen_port))
lsock.close()
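# Minimal usage sketch (hypothetical subclass, added for illustration;
# send_frames() is assumed to be the counterpart of recv_frames() defined
# elsewhere in this class):
#
#   class EchoServer(WebSocketServer):
#       def new_client(self):
#           while True:
#               bufs, closed = self.recv_frames()
#               if bufs:
#                   self.send_frames(bufs)
#               if closed:
#                   break
#
#   EchoServer(listen_port=6080).start_server()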
# HTTP handler with WebSocket upgrade support
class WSRequestHandler(SimpleHTTPRequestHandler):
def __init__(self, req, addr, only_upgrade=False):
self.only_upgrade = only_upgrade # only allow upgrades
SimpleHTTPRequestHandler.__init__(self, req, addr, object())
def do_GET(self):
if (self.headers.get('upgrade') and
self.headers.get('upgrade').lower() == 'websocket'):
if (self.headers.get('sec-websocket-key1') or
self.headers.get('websocket-key1')):
# For Hixie-76 read out the key hash
self.headers.__setitem__('key3', self.rfile.read(8))
# Just indicate that a WebSocket upgrade is needed
self.last_code = 101
self.last_message = "101 Switching Protocols"
elif self.only_upgrade:
# Normal web request responses are disabled
self.last_code = 405
self.last_message = "405 Method Not Allowed"
else:
SimpleHTTPRequestHandler.do_GET(self)
def send_response(self, code, message=None):
# Save the status code
self.last_code = code
SimpleHTTPRequestHandler.send_response(self, code, message)
def log_message(self, f, *args):
# Save instead of printing
self.last_message = f % args
|
apache-2.0
|
bealdav/OCB
|
addons/mail/wizard/__init__.py
|
438
|
1075
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import invite
import mail_compose_message
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
albertomurillo/ansible
|
lib/ansible/modules/network/cloudengine/ce_netstream_template.py
|
27
|
15739
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ce_netstream_template
version_added: "2.4"
short_description: Manages NetStream template configuration on HUAWEI CloudEngine switches.
description:
- Manages NetStream template configuration on HUAWEI CloudEngine switches.
author:
- wangdezhuang (@QijunPan)
options:
state:
description:
- Specify desired state of the resource.
default: present
choices: ['present', 'absent']
type:
description:
- Configure the type of netstream record.
required: true
choices: ['ip', 'vxlan']
record_name:
description:
- Configure the name of netstream record.
The value is a string of 1 to 32 case-insensitive characters.
match:
description:
- Configure flexible flow statistics template keywords.
choices: ['destination-address', 'destination-port', 'tos', 'protocol', 'source-address', 'source-port']
collect_counter:
description:
- Configure the number of packets and bytes that are included in the flexible flow statistics sent to NSC.
choices: ['bytes', 'packets']
collect_interface:
description:
- Configure the input or output interface that is included in the flexible flow statistics sent to NSC.
choices: ['input', 'output']
description:
description:
- Configure the description of netstream record.
The value is a string of 1 to 80 case-insensitive characters.
'''
EXAMPLES = '''
- name: netstream template module test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: Config ipv4 netstream record
ce_netstream_template:
state: present
type: ip
record_name: test
provider: "{{ cli }}"
- name: Undo ipv4 netstream record
ce_netstream_template:
state: absent
type: ip
record_name: test
provider: "{{ cli }}"
- name: Config ipv4 netstream record collect_counter
ce_netstream_template:
state: present
type: ip
record_name: test
collect_counter: bytes
provider: "{{ cli }}"
- name: Undo ipv4 netstream record collect_counter
ce_netstream_template:
state: absent
type: ip
record_name: test
collect_counter: bytes
provider: "{{ cli }}"
'''
RETURN = '''
changed:
description: check to see if a change was made on the device
returned: always
type: bool
sample: true
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"record_name": "test",
"type": "ip",
"state": "present"}
existing:
description: k/v pairs of existing aaa server
returned: always
type: dict
sample: {}
end_state:
description: k/v pairs of aaa params after module execution
returned: always
type: dict
sample: {"record_name": "test",
"type": "ip"}
updates:
description: command sent to the device
returned: always
type: list
sample: ["netstream record test ip"]
'''
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import get_config, load_config
from ansible.module_utils.network.cloudengine.ce import ce_argument_spec
class NetstreamTemplate(object):
""" Manages netstream template configuration """
def __init__(self, **kwargs):
""" Netstream template module init """
# module
argument_spec = kwargs["argument_spec"]
self.spec = argument_spec
self.module = AnsibleModule(argument_spec=self.spec, supports_check_mode=True)
# netstream config
self.netstream_cfg = None
# module args
self.state = self.module.params['state'] or None
self.type = self.module.params['type'] or None
self.record_name = self.module.params['record_name'] or None
self.match = self.module.params['match'] or None
self.collect_counter = self.module.params['collect_counter'] or None
self.collect_interface = self.module.params['collect_interface'] or None
self.description = self.module.params['description'] or None
# state
self.changed = False
self.updates_cmd = list()
self.results = dict()
self.proposed = dict()
self.existing = dict()
self.end_state = dict()
def cli_load_config(self, commands):
""" Cli load configuration """
if not self.module.check_mode:
load_config(self.module, commands)
def cli_get_netstream_config(self):
""" Cli get netstream configuration """
if self.type == "ip":
cmd = "netstream record %s ip" % self.record_name
else:
cmd = "netstream record %s vxlan inner-ip" % self.record_name
flags = list()
regular = "| section include %s" % cmd
flags.append(regular)
self.netstream_cfg = get_config(self.module, flags)
def check_args(self):
""" Check module args """
if not self.type or not self.record_name:
self.module.fail_json(
msg='Error: Please input type and record_name.')
if self.record_name:
if len(self.record_name) < 1 or len(self.record_name) > 32:
self.module.fail_json(
msg='Error: The length of record_name is out of range [1 - 32].')
if self.description:
if len(self.description) < 1 or len(self.description) > 80:
self.module.fail_json(
msg='Error: The length of description is out of range [1 - 80].')
def get_proposed(self):
""" Get module proposed """
self.proposed["state"] = self.state
if self.type:
self.proposed["type"] = self.type
if self.record_name:
self.proposed["record_name"] = self.record_name
if self.match:
self.proposed["match"] = self.match
if self.collect_counter:
self.proposed["collect_counter"] = self.collect_counter
if self.collect_interface:
self.proposed["collect_interface"] = self.collect_interface
if self.description:
self.proposed["description"] = self.description
def get_existing(self):
""" Get existing configuration """
self.cli_get_netstream_config()
if self.netstream_cfg:
self.existing["type"] = self.type
self.existing["record_name"] = self.record_name
if self.description:
tmp_value = re.findall(r'description (.*)', self.netstream_cfg)
if tmp_value:
self.existing["description"] = tmp_value[0]
if self.match:
if self.type == "ip":
tmp_value = re.findall(r'match ip (.*)', self.netstream_cfg)
else:
tmp_value = re.findall(r'match inner-ip (.*)', self.netstream_cfg)
if tmp_value:
self.existing["match"] = tmp_value
if self.collect_counter:
tmp_value = re.findall(r'collect counter (.*)', self.netstream_cfg)
if tmp_value:
self.existing["collect_counter"] = tmp_value
if self.collect_interface:
tmp_value = re.findall(r'collect interface (.*)', self.netstream_cfg)
if tmp_value:
self.existing["collect_interface"] = tmp_value
def get_end_state(self):
""" Get end state """
self.cli_get_netstream_config()
if self.netstream_cfg:
self.end_state["type"] = self.type
self.end_state["record_name"] = self.record_name
if self.description:
tmp_value = re.findall(r'description (.*)', self.netstream_cfg)
if tmp_value:
self.end_state["description"] = tmp_value[0]
if self.match:
if self.type == "ip":
tmp_value = re.findall(r'match ip (.*)', self.netstream_cfg)
else:
tmp_value = re.findall(r'match inner-ip (.*)', self.netstream_cfg)
if tmp_value:
self.end_state["match"] = tmp_value
if self.collect_counter:
tmp_value = re.findall(r'collect counter (.*)', self.netstream_cfg)
if tmp_value:
self.end_state["collect_counter"] = tmp_value
if self.collect_interface:
tmp_value = re.findall(r'collect interface (.*)', self.netstream_cfg)
if tmp_value:
self.end_state["collect_interface"] = tmp_value
def present_netstream(self):
""" Present netstream configuration """
cmds = list()
need_create_record = False
if self.type == "ip":
cmd = "netstream record %s ip" % self.record_name
else:
cmd = "netstream record %s vxlan inner-ip" % self.record_name
cmds.append(cmd)
if not self.netstream_cfg:
self.updates_cmd.append(cmd)
need_create_record = True
if self.description:
cmd = "description %s" % self.description
if not self.netstream_cfg or cmd not in self.netstream_cfg:
cmds.append(cmd)
self.updates_cmd.append(cmd)
if self.match:
if self.type == "ip":
cmd = "match ip %s" % self.match
cfg = "match ip"
else:
cmd = "match inner-ip %s" % self.match
cfg = "match inner-ip"
if not self.netstream_cfg or cfg not in self.netstream_cfg or self.match != self.existing["match"][0]:
cmds.append(cmd)
self.updates_cmd.append(cmd)
if self.collect_counter:
cmd = "collect counter %s" % self.collect_counter
if not self.netstream_cfg or cmd not in self.netstream_cfg:
cmds.append(cmd)
self.updates_cmd.append(cmd)
if self.collect_interface:
cmd = "collect interface %s" % self.collect_interface
if not self.netstream_cfg or cmd not in self.netstream_cfg:
cmds.append(cmd)
self.updates_cmd.append(cmd)
if not need_create_record and len(cmds) == 1:
if self.type == "ip":
cmd = "netstream record %s ip" % self.record_name
else:
cmd = "netstream record %s vxlan inner-ip" % self.record_name
cmds.remove(cmd)
if cmds:
self.cli_load_config(cmds)
self.changed = True
def absent_netstream(self):
""" Absent netstream configuration """
cmds = list()
absent_netstream_attr = False
if not self.netstream_cfg:
return
if self.description or self.match or self.collect_counter or self.collect_interface:
absent_netstream_attr = True
if absent_netstream_attr:
if self.type == "ip":
cmd = "netstream record %s ip" % self.record_name
else:
cmd = "netstream record %s vxlan inner-ip" % self.record_name
cmds.append(cmd)
if self.description:
cfg = "description %s" % self.description
if self.netstream_cfg and cfg in self.netstream_cfg:
cmd = "undo description %s" % self.description
cmds.append(cmd)
self.updates_cmd.append(cmd)
if self.match:
if self.type == "ip":
cfg = "match ip %s" % self.match
else:
cfg = "match inner-ip %s" % self.match
if self.netstream_cfg and cfg in self.netstream_cfg:
if self.type == "ip":
cmd = "undo match ip %s" % self.match
else:
cmd = "undo match inner-ip %s" % self.match
cmds.append(cmd)
self.updates_cmd.append(cmd)
if self.collect_counter:
cfg = "collect counter %s" % self.collect_counter
if self.netstream_cfg and cfg in self.netstream_cfg:
cmd = "undo collect counter %s" % self.collect_counter
cmds.append(cmd)
self.updates_cmd.append(cmd)
if self.collect_interface:
cfg = "collect interface %s" % self.collect_interface
if self.netstream_cfg and cfg in self.netstream_cfg:
cmd = "undo collect interface %s" % self.collect_interface
cmds.append(cmd)
self.updates_cmd.append(cmd)
if len(cmds) > 1:
self.cli_load_config(cmds)
self.changed = True
else:
if self.type == "ip":
cmd = "undo netstream record %s ip" % self.record_name
else:
cmd = "undo netstream record %s vxlan inner-ip" % self.record_name
cmds.append(cmd)
self.updates_cmd.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def work(self):
""" Work function """
self.check_args()
self.get_proposed()
self.get_existing()
if self.state == "present":
self.present_netstream()
else:
self.absent_netstream()
self.get_end_state()
self.results['changed'] = self.changed
self.results['proposed'] = self.proposed
self.results['existing'] = self.existing
self.results['end_state'] = self.end_state
self.results['updates'] = self.updates_cmd
self.module.exit_json(**self.results)
def main():
""" Module main """
argument_spec = dict(
state=dict(choices=['present', 'absent'], default='present'),
type=dict(choices=['ip', 'vxlan'], required=True),
record_name=dict(type='str'),
match=dict(choices=['destination-address', 'destination-port',
'tos', 'protocol', 'source-address', 'source-port']),
collect_counter=dict(choices=['bytes', 'packets']),
collect_interface=dict(choices=['input', 'output']),
description=dict(type='str')
)
argument_spec.update(ce_argument_spec)
module = NetstreamTemplate(argument_spec=argument_spec)
module.work()
if __name__ == '__main__':
main()
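# Worked illustration (derived from present_netstream() above, not from the
# module documentation): with state=present, type=ip, record_name=test and
# collect_counter=bytes on a device with no existing record, the module
# loads and reports:
#   updates == ["netstream record test ip", "collect counter bytes"]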
|
gpl-3.0
|
agentfog/qiime
|
qiime/sort.py
|
15
|
7369
|
#!/usr/bin/env python
# File created on 15 Feb 2011
from __future__ import division
import re
from operator import itemgetter
from numpy import array
from skbio.parse.sequences import parse_fasta
from qiime.parse import parse_mapping_file
__author__ = "Greg Caporaso"
__copyright__ = "Copyright 2011, The QIIME Project"
__credits__ = ["Greg Caporaso", "Rob Knight", "Daniel McDonald",
"Yoshiki Vazquez Baeza"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "Greg Caporaso"
__email__ = "[email protected]"
def _natsort_key(item, case_sensitivity=False):
"""Provides normalized version of item for sorting with digits.
From:
http://lists.canonical.org/pipermail/kragen-hacks/2005-October/000419.html
"""
item = str(item)
try:
chunks = re.split('(\d+(?:\.\d+)?)', item)
except TypeError:
# if item is a tuple or list (i.e., indexable, but not a string)
# work with the first element
chunks = re.split('(\d+(?:\.\d+)?)', item[0])
for ii in range(len(chunks)):
if chunks[ii] and chunks[ii][0] in '0123456789':
if '.' in chunks[ii]:
numtype = float
else:
numtype = int
# wrap in tuple with '0' to explicitly specify numbers come first
chunks[ii] = (0, numtype(chunks[ii]))
else:
chunks[ii] = (1, chunks[ii])
return (chunks, item)
def _natsort_key_case_insensitive(item):
"""Provides normalized version of item for sorting with digits.
From:
http://lists.canonical.org/pipermail/kragen-hacks/2005-October/000419.html
"""
# added the lower() call to allow for case-insensitive sorting
item = str(item).lower()
try:
chunks = re.split('(\d+(?:\.\d+)?)', item)
except TypeError:
# if item is a tuple or list (i.e., indexable, but not a string)
# work with the first element
chunks = re.split('(\d+(?:\.\d+)?)', item[0])
for ii in range(len(chunks)):
if chunks[ii] and chunks[ii][0] in '0123456789':
if '.' in chunks[ii]:
numtype = float
else:
numtype = int
# wrap in tuple with '0' to explicitly specify numbers come first
chunks[ii] = (0, numtype(chunks[ii]))
else:
chunks[ii] = (1, chunks[ii])
return (chunks, item)
def natsort(seq, case_sensitive=True):
"""Sort a sequence of text strings in a reasonable order.
From:
http://lists.canonical.org/pipermail/kragen-hacks/2005-October/000419.html
"""
if case_sensitive:
natsort_key = _natsort_key
else:
natsort_key = _natsort_key_case_insensitive
alist = list(seq)
alist.sort(key=natsort_key)
return alist
def natsort_case_insensitive(seq):
"""Sort a sequence of text strings in a reasonable order.
From:
http://lists.canonical.org/pipermail/kragen-hacks/2005-October/000419.html
"""
return natsort(seq, case_sensitive=False)
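# Doctest-style illustration (expected behavior of the key functions above;
# numeric chunks compare as numbers, not strings):
#   natsort(['sample10', 'sample2', 'sample1'])
#     -> ['sample1', 'sample2', 'sample10']
#   natsort_case_insensitive(['B2', 'a10', 'A1'])
#     -> ['A1', 'a10', 'B2']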
def sort_sample_ids_by_mapping_value(mapping_file, field, field_type_f=float):
""" Return list of sample ids sorted by ascending value from mapping file
"""
data, headers, comments = parse_mapping_file(mapping_file)
try:
column = headers.index(field)
except ValueError:
raise ValueError(
"Column (%s) not found in mapping file headers:\n %s" %
(field, ' '.join(headers)))
results = [(e[0], field_type_f(e[column])) for e in data]
results.sort(key=itemgetter(1))
return results
def sort_fasta_by_abundance(fasta_lines, fasta_out_f):
""" Sort seqs in fasta_line by abundance, write all seqs to fasta_out_f
Note that all sequences are written out, not just unique ones.
fasta_lines: input file handle (or similar object)
fasta_out_f: output file handle (or similar object)
** The current implementation works well for fairly large data sets,
(e.g., several combined 454 runs) but we may want to revisit if it
chokes on very large (e.g., Illumina) files. --Greg **
"""
seq_index = {}
count = 0
for seq_id, seq in parse_fasta(fasta_lines):
count += 1
try:
seq_index[seq].append(seq_id)
except KeyError:
seq_index[seq] = [seq_id]
seqs = []
for k, v in seq_index.items():
seqs.append((len(v), k, v))
del seq_index[k]
seqs.sort()
for count, seq, seq_ids in seqs[::-1]:
for seq_id in seq_ids:
fasta_out_f.write('>%s\n%s\n' % (seq_id, seq))
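# Usage sketch (hypothetical data; assumes parse_fasta accepts any iterable
# of lines and yields (seq_id, seq) pairs):
#   from io import StringIO
#   out = StringIO()
#   sort_fasta_by_abundance(StringIO('>a\nACGT\n>b\nACGT\n>c\nTT\n'), out)
#   # 'a' and 'b' (the doubly-abundant ACGT) are written before 'c'.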
def sort_otu_table_by_mapping_field(otu_table_data,
mapping_file_data,
sort_field,
sort_f=natsort):
""" sort otu table based on the value of sort_field for each sample
"""
mapping_data, header_data, comments = mapping_file_data
mapping_field_index = header_data.index(sort_field)
sorted_sample_ids = [(e[mapping_field_index], e[0]) for e in mapping_data]
sorted_sample_ids = sort_f(sorted_sample_ids)
sorted_sample_ids = [e[1] for e in sorted_sample_ids]
return sort_otu_table(otu_table_data, sorted_sample_ids)
def sort_otu_table(otu_table, sorted_sample_ids):
"""Sort an OTU table by sorted sample ids"""
# sanity check
sorted_sample_ids_set = set(sorted_sample_ids)
if set(otu_table.ids()) - sorted_sample_ids_set:
raise KeyError("Sample IDs present in OTU table but not sorted sample id list: " +
' '.join(list(set(otu_table.ids()) - set(sorted_sample_ids))))
if len(sorted_sample_ids_set) != len(sorted_sample_ids):
raise ValueError(
"Duplicate sample IDs are present in sorted sample id list.")
# only keep the sample ids that are in the table
safe_sorted_sample_ids = []
for k in sorted_sample_ids:
if otu_table.exists(k):
safe_sorted_sample_ids.append(k)
sorted_table = otu_table.sort_order(safe_sorted_sample_ids)
return sorted_table
def signed_natsort(data):
"""sort an iterable considering the cases where elements are signed
data: list of tuples (with two strings as elements) or strings. When a
string is provided, an attempt is made to cast it to a float; when a
tuple is provided, the first element is used to sort the list. If a
dict is provided, a sorted version of its keys is returned.
output: sorted version of data
The elements will be assumed to be real numbers, if that assumption fails,
then the elements will be sorted using a natural sorting algorithm.
"""
# list is empty, do nothing
if not data:
return data
# deal with non-[tuple, dict, list] types of data
if not all([isinstance(element, tuple) or isinstance(element, list) or
isinstance(element, dict) for element in data]):
try:
return sorted(data, key=float)
except ValueError:
return natsort(data)
# deal with tuples type of data, the first element can be a real number or
# a string, the second element is a string that won't be accounted
try:
return sorted(data, key=lambda tup: float(tup[0]))
except ValueError:
return natsort(data)
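# Illustration (expected behavior, following the branches above):
#   signed_natsort(['1', '-2', '0.5'])         -> ['-2', '0.5', '1']
#   signed_natsort([('-1', 'a'), ('2', 'b')])  -> [('-1', 'a'), ('2', 'b')]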
|
gpl-2.0
|
dfc/beets
|
test/test_fetchart.py
|
25
|
2004
|
# This file is part of beets.
# Copyright 2015, Thomas Scholtes.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
from __future__ import (division, absolute_import, print_function,
unicode_literals)
import os
from test._common import unittest
from test.helper import TestHelper
class FetchartCliTest(unittest.TestCase, TestHelper):
def setUp(self):
self.setup_beets()
self.load_plugins('fetchart')
self.config['fetchart']['cover_names'] = b'c\xc3\xb6ver.jpg'
self.config['art_filename'] = 'mycover'
self.album = self.add_album()
def tearDown(self):
self.unload_plugins()
self.teardown_beets()
def test_set_art_from_folder(self):
self.touch(b'c\xc3\xb6ver.jpg', dir=self.album.path, content='IMAGE')
self.run_command('fetchart')
cover_path = os.path.join(self.album.path, b'mycover.jpg')
self.album.load()
self.assertEqual(self.album['artpath'], cover_path)
with open(cover_path, 'r') as f:
self.assertEqual(f.read(), 'IMAGE')
def test_filesystem_does_not_pick_up_folder(self):
os.makedirs(os.path.join(self.album.path, b'mycover.jpg'))
self.run_command('fetchart')
self.album.load()
self.assertEqual(self.album['artpath'], None)
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == b'__main__':
unittest.main(defaultTest='suite')
|
mit
|
lebrice/SimpleParsing
|
test/test_decoding.py
|
1
|
3455
|
import json
import logging
import textwrap
from collections import OrderedDict
from dataclasses import dataclass, fields
from pathlib import Path
from test.conftest import silent
from test.testutils import *
from typing import Any, Dict, List, Mapping, Optional, Set, Tuple, Type, Union
import pytest
import yaml
from simple_parsing import field, mutable_field
from simple_parsing.helpers import (
Serializable,
YamlSerializable,
dict_field,
list_field,
)
from simple_parsing.helpers.serialization.decoding import (
get_decoding_fn,
register_decoding_fn,
)
def test_encode_something(simple_attribute):
some_type, passed_value, expected_value = simple_attribute
@dataclass
class SomeClass(Serializable):
d: Dict[str, some_type] = dict_field()
l: List[Tuple[some_type, some_type]] = list_field()
t: Dict[str, Optional[some_type]] = dict_field()
# w: Dict[str, Union[some_type, int, str, None, str, None]] = dict_field()
b = SomeClass()
b.d.update({"hey": expected_value})
b.l.append((expected_value, expected_value))
b.t.update({"hey": None, "hey2": expected_value})
# b.w.update({
# "hey": None,
# "hey2": "heyo",
# "hey3": 1,
# "hey4": expected_value,
# })
assert SomeClass.loads(b.dumps()) == b
def test_typevar_decoding(simple_attribute):
@dataclass
class Item(Serializable, decode_into_subclasses=True):
name: str = "chair"
price: float = 399
stock: int = 10
@dataclass
class DiscountedItem(Item):
discount_factor: float = 0.5
I = TypeVar("I", bound=Item)
@dataclass
class Container(Serializable, Generic[I]):
items: List[I] = list_field()
chair = Item()
cheap_chair = DiscountedItem(name="Cheap chair")
c = Container(items=[chair, cheap_chair])
assert Container.loads(c.dumps()) == c
some_type, passed_value, expected_value = simple_attribute
@dataclass
class SomeClass(Serializable):
d: Dict[str, some_type] = dict_field()
l: List[Tuple[some_type, some_type]] = list_field()
t: Dict[str, Optional[some_type]] = dict_field()
# w: Dict[str, Union[some_type, int, str, None, str, None]] = dict_field()
b = SomeClass()
b.d.update({"hey": expected_value})
b.l.append((expected_value, expected_value))
b.t.update({"hey": None, "hey2": expected_value})
# b.w.update({
# "hey": None,
# "hey2": "heyo",
# "hey3": 1,
# "hey4": expected_value,
# })
assert SomeClass.loads(b.dumps()) == b
def test_super_nesting():
@dataclass
class Complicated(Serializable):
x: List[
List[List[Dict[int, Tuple[int, float, str, List[float]]]]]
] = list_field()
c = Complicated()
c.x = [[[{0: (2, 1.23, "bob", [1.2, 1.3])}]]]
assert Complicated.loads(c.dumps()) == c
assert c.dumps() == '{"x": [[[{"0": [2, 1.23, "bob", [1.2, 1.3]]}]]]}'
@pytest.mark.parametrize("some_type, encoded_value, expected_value", [
# (Tuple[int, float], json.loads(json.dumps([1, 2])), (1, 2.0)),
(List[Tuple[int, float]], json.loads(json.dumps([[1, 2], [3, 4]])), [(1, 2.0), (3, 4.0)]),
])
def test_decode_tuple(some_type: Type, encoded_value: Any, expected_value: Any):
decoding_function = get_decoding_fn(some_type)
actual = decoding_function(encoded_value)
assert actual == expected_value
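# Hedged sketch (assumed API: register_decoding_fn(type, fn) registers a
# custom decoder that get_decoding_fn will return for that annotation):
#   class Currency:
#       def __init__(self, symbol: str):
#           self.symbol = symbol
#   register_decoding_fn(Currency, lambda raw: Currency(str(raw)))
#   assert get_decoding_fn(Currency)("$").symbol == "$"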
|
mit
|
andykimpe/chromium-test-npapi
|
base/android/jni_generator/jni_generator.py
|
1
|
53217
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts native methods from a Java file and generates the JNI bindings.
If you change this, please run and update the tests."""
import collections
import errno
import optparse
import os
import re
import string
from string import Template
import subprocess
import sys
import textwrap
import zipfile
class ParseError(Exception):
"""Exception thrown when we can't parse the input file."""
def __init__(self, description, *context_lines):
Exception.__init__(self)
self.description = description
self.context_lines = context_lines
def __str__(self):
context = '\n'.join(self.context_lines)
return '***\nERROR: %s\n\n%s\n***' % (self.description, context)
class Param(object):
"""Describes a param for a method, either java or native."""
def __init__(self, **kwargs):
self.datatype = kwargs['datatype']
self.name = kwargs['name']
class NativeMethod(object):
"""Describes a C/C++ method that is called by Java code"""
def __init__(self, **kwargs):
self.static = kwargs['static']
self.java_class_name = kwargs['java_class_name']
self.return_type = kwargs['return_type']
self.name = kwargs['name']
self.params = kwargs['params']
if self.params:
assert type(self.params) is list
assert type(self.params[0]) is Param
if (self.params and
self.params[0].datatype == kwargs.get('ptr_type', 'int') and
self.params[0].name.startswith('native')):
self.type = 'method'
self.p0_type = self.params[0].name[len('native'):]
if kwargs.get('native_class_name'):
self.p0_type = kwargs['native_class_name']
else:
self.type = 'function'
self.method_id_var_name = kwargs.get('method_id_var_name', None)
class CalledByNative(object):
"""Describes a java method exported to c/c++"""
def __init__(self, **kwargs):
self.system_class = kwargs['system_class']
self.unchecked = kwargs['unchecked']
self.static = kwargs['static']
self.java_class_name = kwargs['java_class_name']
self.return_type = kwargs['return_type']
self.name = kwargs['name']
self.params = kwargs['params']
self.method_id_var_name = kwargs.get('method_id_var_name', None)
self.signature = kwargs.get('signature')
self.is_constructor = kwargs.get('is_constructor', False)
self.env_call = GetEnvCall(self.is_constructor, self.static,
self.return_type)
self.static_cast = GetStaticCastForReturnType(self.return_type)
class ConstantField(object):
def __init__(self, **kwargs):
self.name = kwargs['name']
self.value = kwargs['value']
def JavaDataTypeToC(java_type):
"""Returns a C datatype for the given java type."""
java_pod_type_map = {
'int': 'jint',
'byte': 'jbyte',
'char': 'jchar',
'short': 'jshort',
'boolean': 'jboolean',
'long': 'jlong',
'double': 'jdouble',
'float': 'jfloat',
}
java_type_map = {
'void': 'void',
'String': 'jstring',
'java/lang/String': 'jstring',
'java/lang/Class': 'jclass',
}
if java_type in java_pod_type_map:
return java_pod_type_map[java_type]
elif java_type in java_type_map:
return java_type_map[java_type]
elif java_type.endswith('[]'):
if java_type[:-2] in java_pod_type_map:
return java_pod_type_map[java_type[:-2]] + 'Array'
return 'jobjectArray'
elif java_type.startswith('Class'):
# Checking just the start of the name, rather than a direct comparison,
# in order to handle generics.
return 'jclass'
else:
return 'jobject'
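# Illustration of the mapping above:
#   JavaDataTypeToC('int')      -> 'jint'
#   JavaDataTypeToC('int[]')    -> 'jintArray'
#   JavaDataTypeToC('String[]') -> 'jobjectArray'
#   JavaDataTypeToC('Foo')      -> 'jobject'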
def JavaDataTypeToCForCalledByNativeParam(java_type):
"""Returns a C datatype to be when calling from native."""
if java_type == 'int':
return 'JniIntWrapper'
else:
return JavaDataTypeToC(java_type)
def JavaReturnValueToC(java_type):
"""Returns a valid C return value for the given java type."""
java_pod_type_map = {
'int': '0',
'byte': '0',
'char': '0',
'short': '0',
'boolean': 'false',
'long': '0',
'double': '0',
'float': '0',
'void': ''
}
return java_pod_type_map.get(java_type, 'NULL')
class JniParams(object):
_imports = []
_fully_qualified_class = ''
_package = ''
_inner_classes = []
_remappings = []
_implicit_imports = []
@staticmethod
def SetFullyQualifiedClass(fully_qualified_class):
JniParams._fully_qualified_class = 'L' + fully_qualified_class
JniParams._package = '/'.join(fully_qualified_class.split('/')[:-1])
@staticmethod
def AddAdditionalImport(class_name):
assert class_name.endswith('.class')
raw_class_name = class_name[:-len('.class')]
if '.' in raw_class_name:
raise SyntaxError('%s cannot be used in @JNIAdditionalImport. '
'Only import unqualified outer classes.' % class_name)
new_import = 'L%s/%s' % (JniParams._package, raw_class_name)
if new_import in JniParams._imports:
raise SyntaxError('Do not use JNIAdditionalImport on an already '
'imported class: %s' % (new_import.replace('/', '.')))
JniParams._imports += [new_import]
@staticmethod
def ExtractImportsAndInnerClasses(contents):
if not JniParams._package:
raise RuntimeError('SetFullyQualifiedClass must be called before '
'ExtractImportsAndInnerClasses')
contents = contents.replace('\n', '')
re_import = re.compile(r'import.*?(?P<class>\S*?);')
for match in re.finditer(re_import, contents):
JniParams._imports += ['L' + match.group('class').replace('.', '/')]
re_inner = re.compile(r'(class|interface)\s+?(?P<name>\w+?)\W')
for match in re.finditer(re_inner, contents):
inner = match.group('name')
if not JniParams._fully_qualified_class.endswith(inner):
JniParams._inner_classes += [JniParams._fully_qualified_class + '$' +
inner]
re_additional_imports = re.compile(
r'@JNIAdditionalImport\(\s*{?(?P<class_names>.*?)}?\s*\)')
for match in re.finditer(re_additional_imports, contents):
for class_name in match.group('class_names').split(','):
JniParams.AddAdditionalImport(class_name.strip())
@staticmethod
def ParseJavaPSignature(signature_line):
prefix = 'Signature: '
return '"%s"' % signature_line[signature_line.index(prefix) + len(prefix):]
@staticmethod
def JavaToJni(param):
"""Converts a java param into a JNI signature type."""
pod_param_map = {
'int': 'I',
'boolean': 'Z',
'char': 'C',
'short': 'S',
'long': 'J',
'double': 'D',
'float': 'F',
'byte': 'B',
'void': 'V',
}
object_param_list = [
'Ljava/lang/Boolean',
'Ljava/lang/Integer',
'Ljava/lang/Long',
'Ljava/lang/Object',
'Ljava/lang/String',
'Ljava/lang/Class',
]
prefix = ''
# Array?
while param[-2:] == '[]':
prefix += '['
param = param[:-2]
# Generic?
if '<' in param:
param = param[:param.index('<')]
if param in pod_param_map:
return prefix + pod_param_map[param]
if '/' in param:
# Coming from javap, use the fully qualified param directly.
return prefix + 'L' + JniParams.RemapClassName(param) + ';'
for qualified_name in (object_param_list +
[JniParams._fully_qualified_class] +
JniParams._inner_classes):
if (qualified_name.endswith('/' + param) or
qualified_name.endswith('$' + param.replace('.', '$')) or
qualified_name == 'L' + param):
return prefix + JniParams.RemapClassName(qualified_name) + ';'
# Is it from an import? (e.g. referencing Class from import pkg.Class;
# note that referencing an inner class Inner from import pkg.Class.Inner
# is not supported).
for qualified_name in JniParams._imports:
if qualified_name.endswith('/' + param):
# Ensure it's not an inner class.
components = qualified_name.split('/')
if len(components) > 2 and components[-2][0].isupper():
raise SyntaxError('Inner class (%s) can not be imported '
'and used by JNI (%s). Please import the outer '
'class and use Outer.Inner instead.' %
(qualified_name, param))
return prefix + JniParams.RemapClassName(qualified_name) + ';'
# Is it an inner class from an outer class import? (e.g. referencing
# Class.Inner from import pkg.Class).
if '.' in param:
components = param.split('.')
outer = '/'.join(components[:-1])
inner = components[-1]
for qualified_name in JniParams._imports:
if qualified_name.endswith('/' + outer):
return (prefix + JniParams.RemapClassName(qualified_name) +
'$' + inner + ';')
raise SyntaxError('Inner class (%s) can not be '
'used directly by JNI. Please import the outer '
'class, probably:\n'
'import %s.%s;' %
(param, JniParams._package.replace('/', '.'),
outer.replace('/', '.')))
JniParams._CheckImplicitImports(param)
# Type not found, falling back to same package as this class.
return (prefix + 'L' +
JniParams.RemapClassName(JniParams._package + '/' + param) + ';')
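# Illustration (assumes no jarjar remappings are active):
#   JniParams.JavaToJni('int')      -> 'I'
#   JniParams.JavaToJni('String[]') -> '[Ljava/lang/String;'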
@staticmethod
def _CheckImplicitImports(param):
# Ensure implicit imports, such as java.lang.*, are not being treated
# as being in the same package.
if not JniParams._implicit_imports:
# This file was generated from android.jar and lists
# all classes that are implicitly imported.
with file(os.path.join(os.path.dirname(sys.argv[0]),
'android_jar.classes'), 'r') as f:
JniParams._implicit_imports = f.readlines()
for implicit_import in JniParams._implicit_imports:
implicit_import = implicit_import.strip().replace('.class', '')
implicit_import = implicit_import.replace('/', '.')
if implicit_import.endswith('.' + param):
raise SyntaxError('Ambiguous class (%s) can not be used directly '
'by JNI.\nPlease import it, probably:\n\n'
'import %s;' %
(param, implicit_import))
@staticmethod
def Signature(params, returns, wrap):
"""Returns the JNI signature for the given datatypes."""
items = ['(']
items += [JniParams.JavaToJni(param.datatype) for param in params]
items += [')']
items += [JniParams.JavaToJni(returns)]
if wrap:
return '\n' + '\n'.join(['"' + item + '"' for item in items])
else:
return '"' + ''.join(items) + '"'
@staticmethod
def Parse(params):
"""Parses the params into a list of Param objects."""
if not params:
return []
ret = []
for p in [p.strip() for p in params.split(',')]:
items = p.split(' ')
if 'final' in items:
items.remove('final')
param = Param(
datatype=items[0],
name=(items[1] if len(items) > 1 else 'p%s' % len(ret)),
)
ret += [param]
return ret
@staticmethod
def RemapClassName(class_name):
"""Remaps class names using the jarjar mapping table."""
for old, new in JniParams._remappings:
if old.endswith('**') and old[:-2] in class_name:
return class_name.replace(old[:-2], new, 1)
if '*' not in old and class_name.endswith(old):
return class_name.replace(old, new, 1)
return class_name
@staticmethod
def SetJarJarMappings(mappings):
"""Parse jarjar mappings from a string."""
JniParams._remappings = []
for line in mappings.splitlines():
rule = line.split()
if rule[0] != 'rule':
continue
_, src, dest = rule
src = src.replace('.', '/')
dest = dest.replace('.', '/')
if src.endswith('**'):
src_real_name = src[:-2]
else:
assert not '*' in src
src_real_name = src
if dest.endswith('@0'):
JniParams._remappings.append((src, dest[:-2] + src_real_name))
elif dest.endswith('@1'):
assert '**' in src
JniParams._remappings.append((src, dest[:-2]))
else:
assert not '@' in dest
JniParams._remappings.append((src, dest))
def ExtractJNINamespace(contents):
re_jni_namespace = re.compile('.*?@JNINamespace\("(.*?)"\)')
m = re.findall(re_jni_namespace, contents)
if not m:
return ''
return m[0]
def ExtractFullyQualifiedJavaClassName(java_file_name, contents):
re_package = re.compile('.*?package (.*?);')
matches = re.findall(re_package, contents)
if not matches:
raise SyntaxError('Unable to find "package" line in %s' % java_file_name)
return (matches[0].replace('.', '/') + '/' +
os.path.splitext(os.path.basename(java_file_name))[0])
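# Illustration (hypothetical file): for org/chromium/Foo.java containing
# 'package org.chromium;', this returns 'org/chromium/Foo'.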
def ExtractNatives(contents, ptr_type):
"""Returns a list of dict containing information about a native method."""
contents = contents.replace('\n', '')
natives = []
re_native = re.compile(r'(@NativeClassQualifiedName'
'\(\"(?P<native_class_name>.*?)\"\)\s+)?'
'(@NativeCall(\(\"(?P<java_class_name>.*?)\"\))\s+)?'
'(?P<qualifiers>\w+\s\w+|\w+|\s+)\s*native '
'(?P<return_type>\S*) '
'(?P<name>native\w+)\((?P<params>.*?)\);')
for match in re.finditer(re_native, contents):
native = NativeMethod(
static='static' in match.group('qualifiers'),
java_class_name=match.group('java_class_name'),
native_class_name=match.group('native_class_name'),
return_type=match.group('return_type'),
name=match.group('name').replace('native', ''),
params=JniParams.Parse(match.group('params')),
ptr_type=ptr_type)
natives += [native]
return natives
def GetStaticCastForReturnType(return_type):
type_map = { 'String' : 'jstring',
'java/lang/String' : 'jstring',
'boolean[]': 'jbooleanArray',
'byte[]': 'jbyteArray',
'char[]': 'jcharArray',
'short[]': 'jshortArray',
'int[]': 'jintArray',
'long[]': 'jlongArray',
'float[]': 'jfloatArray',
'double[]': 'jdoubleArray' }
ret = type_map.get(return_type, None)
if ret:
return ret
if return_type.endswith('[]'):
return 'jobjectArray'
return None
def GetEnvCall(is_constructor, is_static, return_type):
"""Maps the types availabe via env->Call__Method."""
if is_constructor:
return 'NewObject'
env_call_map = {'boolean': 'Boolean',
'byte': 'Byte',
'char': 'Char',
'short': 'Short',
'int': 'Int',
'long': 'Long',
'float': 'Float',
'void': 'Void',
'double': 'Double',
'Object': 'Object',
}
call = env_call_map.get(return_type, 'Object')
if is_static:
call = 'Static' + call
return 'Call' + call + 'Method'
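# Illustration (derived from the map above):
#   GetEnvCall(False, False, 'boolean') -> 'CallBooleanMethod'
#   GetEnvCall(False, True, 'int')      -> 'CallStaticIntMethod'
#   GetEnvCall(True, False, 'Foo')      -> 'NewObject'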
def GetMangledParam(datatype):
"""Returns a mangled identifier for the datatype."""
if len(datatype) <= 2:
return datatype.replace('[', 'A')
ret = ''
for i in range(1, len(datatype)):
c = datatype[i]
if c == '[':
ret += 'A'
elif c.isupper() or datatype[i - 1] in ['/', 'L']:
ret += c.upper()
return ret
def GetMangledMethodName(name, params, return_type):
"""Returns a mangled method name for the given signature.
The returned name can be used as a C identifier and will be unique for all
valid overloads of the same method.
Args:
name: string.
params: list of Param.
return_type: string.
Returns:
A mangled name.
"""
mangled_items = []
for datatype in [return_type] + [x.datatype for x in params]:
mangled_items += [GetMangledParam(JniParams.JavaToJni(datatype))]
mangled_name = name + '_'.join(mangled_items)
assert re.match(r'[0-9a-zA-Z_]+', mangled_name)
return mangled_name
def MangleCalledByNatives(called_by_natives):
"""Mangles all the overloads from the call_by_natives list."""
method_counts = collections.defaultdict(
lambda: collections.defaultdict(lambda: 0))
for called_by_native in called_by_natives:
java_class_name = called_by_native.java_class_name
name = called_by_native.name
method_counts[java_class_name][name] += 1
for called_by_native in called_by_natives:
java_class_name = called_by_native.java_class_name
method_name = called_by_native.name
method_id_var_name = method_name
if method_counts[java_class_name][method_name] > 1:
method_id_var_name = GetMangledMethodName(method_name,
called_by_native.params,
called_by_native.return_type)
called_by_native.method_id_var_name = method_id_var_name
return called_by_natives
# Regex to match the JNI return types that should be included in a
# ScopedJavaLocalRef.
RE_SCOPED_JNI_RETURN_TYPES = re.compile('jobject|jclass|jstring|.*Array')
# Regex to match a string like "@CalledByNative public void foo(int bar)".
RE_CALLED_BY_NATIVE = re.compile(
'@CalledByNative(?P<Unchecked>(Unchecked)*?)(?:\("(?P<annotation>.*)"\))?'
'\s+(?P<prefix>[\w ]*?)'
'\s*(?P<return_type>\S+?)'
'\s+(?P<name>\w+)'
'\s*\((?P<params>[^\)]*)\)')
def ExtractCalledByNatives(contents):
"""Parses all methods annotated with @CalledByNative.
Args:
contents: the contents of the java file.
Returns:
A list of CalledByNative objects describing the annotated methods.
TODO(bulach): return a CalledByNative object.
Raises:
ParseError: if unable to parse.
"""
called_by_natives = []
for match in re.finditer(RE_CALLED_BY_NATIVE, contents):
called_by_natives += [CalledByNative(
system_class=False,
unchecked='Unchecked' in match.group('Unchecked'),
static='static' in match.group('prefix'),
java_class_name=match.group('annotation') or '',
return_type=match.group('return_type'),
name=match.group('name'),
params=JniParams.Parse(match.group('params')))]
# Check for any @CalledByNative occurrences that weren't matched.
unmatched_lines = re.sub(RE_CALLED_BY_NATIVE, '', contents).split('\n')
for line1, line2 in zip(unmatched_lines, unmatched_lines[1:]):
if '@CalledByNative' in line1:
raise ParseError('could not parse @CalledByNative method signature',
line1, line2)
return MangleCalledByNatives(called_by_natives)
class JNIFromJavaP(object):
"""Uses 'javap' to parse a .class file and generate the JNI header file."""
def __init__(self, contents, options):
self.contents = contents
self.namespace = options.namespace
for line in contents:
class_name = re.match(
'.*?(public).*?(class|interface) (?P<class_name>\S+?)( |\Z)',
line)
if class_name:
self.fully_qualified_class = class_name.group('class_name')
break
self.fully_qualified_class = self.fully_qualified_class.replace('.', '/')
# Java 7's javap includes type parameters in output, like HashSet<T>. Strip
# away the <...> and use the raw class name that Java 6 would've given us.
self.fully_qualified_class = self.fully_qualified_class.split('<', 1)[0]
JniParams.SetFullyQualifiedClass(self.fully_qualified_class)
self.java_class_name = self.fully_qualified_class.split('/')[-1]
if not self.namespace:
self.namespace = 'JNI_' + self.java_class_name
re_method = re.compile('(?P<prefix>.*?)(?P<return_type>\S+?) (?P<name>\w+?)'
'\((?P<params>.*?)\)')
self.called_by_natives = []
for lineno, content in enumerate(contents[2:], 2):
match = re.match(re_method, content)
if not match:
continue
self.called_by_natives += [CalledByNative(
system_class=True,
unchecked=False,
static='static' in match.group('prefix'),
java_class_name='',
return_type=match.group('return_type').replace('.', '/'),
name=match.group('name'),
params=JniParams.Parse(match.group('params').replace('.', '/')),
signature=JniParams.ParseJavaPSignature(contents[lineno + 1]))]
re_constructor = re.compile('(.*?)public ' +
self.fully_qualified_class.replace('/', '.') +
'\((?P<params>.*?)\)')
for lineno, content in enumerate(contents[2:], 2):
match = re.match(re_constructor, content)
if not match:
continue
self.called_by_natives += [CalledByNative(
system_class=True,
unchecked=False,
static=False,
java_class_name='',
return_type=self.fully_qualified_class,
name='Constructor',
params=JniParams.Parse(match.group('params').replace('.', '/')),
signature=JniParams.ParseJavaPSignature(contents[lineno + 1]),
is_constructor=True)]
self.called_by_natives = MangleCalledByNatives(self.called_by_natives)
self.constant_fields = []
re_constant_field = re.compile('.*?public static final int (?P<name>.*?);')
re_constant_field_value = re.compile(
'.*?Constant(Value| value): int (?P<value>(-*[0-9]+)?)')
for lineno, content in enumerate(contents[2:], 2):
match = re.match(re_constant_field, content)
if not match:
continue
value = re.match(re_constant_field_value, contents[lineno + 2])
if not value:
value = re.match(re_constant_field_value, contents[lineno + 3])
if value:
self.constant_fields.append(
ConstantField(name=match.group('name'),
value=value.group('value')))
self.inl_header_file_generator = InlHeaderFileGenerator(
self.namespace, self.fully_qualified_class, [],
self.called_by_natives, self.constant_fields, options)
def GetContent(self):
return self.inl_header_file_generator.GetContent()
@staticmethod
def CreateFromClass(class_file, options):
class_name = os.path.splitext(os.path.basename(class_file))[0]
p = subprocess.Popen(args=[options.javap, '-c', '-verbose',
'-s', class_name],
cwd=os.path.dirname(class_file),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = p.communicate()
jni_from_javap = JNIFromJavaP(stdout.split('\n'), options)
return jni_from_javap
class JNIFromJavaSource(object):
"""Uses the given java source file to generate the JNI header file."""
# Match single line comments, multiline comments, character literals, and
# double-quoted strings.
_comment_remover_regex = re.compile(
r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',
re.DOTALL | re.MULTILINE)
def __init__(self, contents, fully_qualified_class, options):
contents = self._RemoveComments(contents)
JniParams.SetFullyQualifiedClass(fully_qualified_class)
JniParams.ExtractImportsAndInnerClasses(contents)
jni_namespace = ExtractJNINamespace(contents) or options.namespace
natives = ExtractNatives(contents, options.ptr_type)
called_by_natives = ExtractCalledByNatives(contents)
if len(natives) == 0 and len(called_by_natives) == 0:
raise SyntaxError('Unable to find any JNI methods for %s.' %
fully_qualified_class)
inl_header_file_generator = InlHeaderFileGenerator(
jni_namespace, fully_qualified_class, natives, called_by_natives,
[], options)
self.content = inl_header_file_generator.GetContent()
@classmethod
def _RemoveComments(cls, contents):
# We need to support both inline and block comments, and we need to handle
# strings that contain '//' or '/*'.
# TODO(bulach): This is a bit hacky. It would be cleaner to use a real Java
# parser. Maybe we could ditch JNIFromJavaSource and just always use
# JNIFromJavaP; or maybe we could rewrite this script in Java and use APT.
# http://code.google.com/p/chromium/issues/detail?id=138941
def replacer(match):
# Replace matches that are comments with nothing; return literals/strings
# unchanged.
s = match.group(0)
if s.startswith('/'):
return ''
else:
return s
return cls._comment_remover_regex.sub(replacer, contents)
def GetContent(self):
return self.content
@staticmethod
def CreateFromFile(java_file_name, options):
contents = file(java_file_name).read()
fully_qualified_class = ExtractFullyQualifiedJavaClassName(java_file_name,
contents)
return JNIFromJavaSource(contents, fully_qualified_class, options)
class InlHeaderFileGenerator(object):
"""Generates an inline header file for JNI integration."""
def __init__(self, namespace, fully_qualified_class, natives,
called_by_natives, constant_fields, options):
self.namespace = namespace
self.fully_qualified_class = fully_qualified_class
self.class_name = self.fully_qualified_class.split('/')[-1]
self.natives = natives
self.called_by_natives = called_by_natives
self.header_guard = fully_qualified_class.replace('/', '_') + '_JNI'
self.constant_fields = constant_fields
self.options = options
self.init_native = self.ExtractInitNative(options)
def ExtractInitNative(self, options):
for native in self.natives:
if options.jni_init_native_name == 'native' + native.name:
self.natives.remove(native)
return native
return None
def GetContent(self):
"""Returns the content of the JNI binding file."""
template = Template("""\
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file is autogenerated by
// ${SCRIPT_NAME}
// For
// ${FULLY_QUALIFIED_CLASS}
#ifndef ${HEADER_GUARD}
#define ${HEADER_GUARD}
#include <jni.h>
${INCLUDES}
#include "base/android/jni_int_wrapper.h"
// Step 1: forward declarations.
namespace {
$CLASS_PATH_DEFINITIONS
$METHOD_ID_DEFINITIONS
} // namespace
$OPEN_NAMESPACE
$FORWARD_DECLARATIONS
$CONSTANT_FIELDS
// Step 2: method stubs.
$METHOD_STUBS
// Step 3: RegisterNatives.
$JNI_NATIVE_METHODS
$REGISTER_NATIVES
$CLOSE_NAMESPACE
$JNI_REGISTER_NATIVES
#endif // ${HEADER_GUARD}
""")
values = {
'SCRIPT_NAME': self.options.script_name,
'FULLY_QUALIFIED_CLASS': self.fully_qualified_class,
'CLASS_PATH_DEFINITIONS': self.GetClassPathDefinitionsString(),
'METHOD_ID_DEFINITIONS': self.GetMethodIDDefinitionsString(),
'FORWARD_DECLARATIONS': self.GetForwardDeclarationsString(),
'CONSTANT_FIELDS': self.GetConstantFieldsString(),
'METHOD_STUBS': self.GetMethodStubsString(),
'OPEN_NAMESPACE': self.GetOpenNamespaceString(),
'JNI_NATIVE_METHODS': self.GetJNINativeMethodsString(),
'REGISTER_NATIVES': self.GetRegisterNativesString(),
'CLOSE_NAMESPACE': self.GetCloseNamespaceString(),
'HEADER_GUARD': self.header_guard,
'INCLUDES': self.GetIncludesString(),
'JNI_REGISTER_NATIVES': self.GetJNIRegisterNativesString()
}
return WrapOutput(template.substitute(values))
def GetClassPathDefinitionsString(self):
ret = []
ret += [self.GetClassPathDefinitions()]
return '\n'.join(ret)
def GetMethodIDDefinitionsString(self):
"""Returns the definition of method ids for the called by native methods."""
if not self.options.eager_called_by_natives:
return ''
template = Template("""\
jmethodID g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME} = NULL;""")
ret = []
for called_by_native in self.called_by_natives:
values = {
'JAVA_CLASS': called_by_native.java_class_name or self.class_name,
'METHOD_ID_VAR_NAME': called_by_native.method_id_var_name,
}
ret += [template.substitute(values)]
return '\n'.join(ret)
def GetForwardDeclarationsString(self):
ret = []
for native in self.natives:
if native.type != 'method':
ret += [self.GetForwardDeclaration(native)]
return '\n'.join(ret)
def GetConstantFieldsString(self):
if not self.constant_fields:
return ''
ret = ['enum Java_%s_constant_fields {' % self.class_name]
for c in self.constant_fields:
ret += [' %s = %s,' % (c.name, c.value)]
ret += ['};']
return '\n'.join(ret)
def GetMethodStubsString(self):
"""Returns the code corresponding to method stubs."""
ret = []
for native in self.natives:
if native.type == 'method':
ret += [self.GetNativeMethodStubString(native)]
if self.options.eager_called_by_natives:
ret += self.GetEagerCalledByNativeMethodStubs()
else:
ret += self.GetLazyCalledByNativeMethodStubs()
return '\n'.join(ret)
def GetLazyCalledByNativeMethodStubs(self):
return [self.GetLazyCalledByNativeMethodStub(called_by_native)
for called_by_native in self.called_by_natives]
def GetEagerCalledByNativeMethodStubs(self):
ret = []
if self.called_by_natives:
ret += ['namespace {']
for called_by_native in self.called_by_natives:
ret += [self.GetEagerCalledByNativeMethodStub(called_by_native)]
ret += ['} // namespace']
return ret
def GetIncludesString(self):
if not self.options.includes:
return ''
includes = self.options.includes.split(',')
return '\n'.join('#include "%s"' % x for x in includes)
def GetKMethodsString(self, clazz):
ret = []
for native in self.natives:
if (native.java_class_name == clazz or
(not native.java_class_name and clazz == self.class_name)):
ret += [self.GetKMethodArrayEntry(native)]
return '\n'.join(ret)
def SubstituteNativeMethods(self, template):
"""Substitutes JAVA_CLASS and KMETHODS in the provided template."""
ret = []
all_classes = self.GetUniqueClasses(self.natives)
all_classes[self.class_name] = self.fully_qualified_class
for clazz in all_classes:
kmethods = self.GetKMethodsString(clazz)
if kmethods:
values = {'JAVA_CLASS': clazz,
'KMETHODS': kmethods}
ret += [template.substitute(values)]
if not ret:
  return ''
return '\n' + '\n'.join(ret)
def GetJNINativeMethodsString(self):
"""Returns the implementation of the array of native methods."""
template = Template("""\
static const JNINativeMethod kMethods${JAVA_CLASS}[] = {
${KMETHODS}
};
""")
return self.SubstituteNativeMethods(template)
def GetRegisterCalledByNativesImplString(self):
"""Returns the code for registering the called by native methods."""
if not self.options.eager_called_by_natives:
return ''
template = Template("""\
g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME} = ${GET_METHOD_ID_IMPL}
if (g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME} == NULL) {
return false;
}
""")
ret = []
for called_by_native in self.called_by_natives:
values = {
'JAVA_CLASS': called_by_native.java_class_name or self.class_name,
'METHOD_ID_VAR_NAME': called_by_native.method_id_var_name,
'GET_METHOD_ID_IMPL': self.GetMethodIDImpl(called_by_native),
}
ret += [template.substitute(values)]
return '\n'.join(ret)
def GetRegisterNativesString(self):
"""Returns the code for RegisterNatives."""
template = Template("""\
${REGISTER_NATIVES_SIGNATURE} {
${CLASSES}
${NATIVES}
${CALLED_BY_NATIVES}
return true;
}
""")
signature = 'static bool RegisterNativesImpl(JNIEnv* env'
if self.init_native:
signature += ', jclass clazz)'
else:
signature += ')'
natives = self.GetRegisterNativesImplString()
called_by_natives = self.GetRegisterCalledByNativesImplString()
values = {'REGISTER_NATIVES_SIGNATURE': signature,
'CLASSES': self.GetFindClasses(),
'NATIVES': natives,
'CALLED_BY_NATIVES': called_by_natives,
}
return template.substitute(values)
def GetRegisterNativesImplString(self):
"""Returns the shared implementation for RegisterNatives."""
template = Template("""\
const int kMethods${JAVA_CLASS}Size = arraysize(kMethods${JAVA_CLASS});
if (env->RegisterNatives(g_${JAVA_CLASS}_clazz,
kMethods${JAVA_CLASS},
kMethods${JAVA_CLASS}Size) < 0) {
jni_generator::HandleRegistrationError(
env, g_${JAVA_CLASS}_clazz, __FILE__);
return false;
}
""")
return self.SubstituteNativeMethods(template)
def GetJNIRegisterNativesString(self):
"""Returns the implementation for the JNI registration of native methods."""
if not self.init_native:
return ''
template = Template("""\
extern "C" JNIEXPORT bool JNICALL
Java_${FULLY_QUALIFIED_CLASS}_${INIT_NATIVE_NAME}(JNIEnv* env, jclass clazz) {
return ${NAMESPACE}RegisterNativesImpl(env, clazz);
}
""")
fully_qualified_class = self.fully_qualified_class.replace('/', '_')
namespace = ''
if self.namespace:
namespace = self.namespace + '::'
values = {'FULLY_QUALIFIED_CLASS': fully_qualified_class,
'INIT_NATIVE_NAME': 'native' + self.init_native.name,
'NAMESPACE': namespace,
'REGISTER_NATIVES_IMPL': self.GetRegisterNativesImplString()
}
return template.substitute(values)
def GetOpenNamespaceString(self):
if self.namespace:
all_namespaces = ['namespace %s {' % ns
for ns in self.namespace.split('::')]
return '\n'.join(all_namespaces)
return ''
def GetCloseNamespaceString(self):
if self.namespace:
all_namespaces = ['} // namespace %s' % ns
for ns in self.namespace.split('::')]
all_namespaces.reverse()
return '\n'.join(all_namespaces) + '\n'
return ''
def GetJNIFirstParam(self, native):
ret = []
if native.type == 'method':
ret = ['jobject jcaller']
elif native.type == 'function':
if native.static:
ret = ['jclass jcaller']
else:
ret = ['jobject jcaller']
return ret
def GetParamsInDeclaration(self, native):
"""Returns the params for the stub declaration.
Args:
native: the native dictionary describing the method.
Returns:
A string containing the params.
"""
return ',\n '.join(self.GetJNIFirstParam(native) +
[JavaDataTypeToC(param.datatype) + ' ' +
param.name
for param in native.params])
def GetCalledByNativeParamsInDeclaration(self, called_by_native):
return ',\n '.join([
JavaDataTypeToCForCalledByNativeParam(param.datatype) + ' ' +
param.name
for param in called_by_native.params])
def GetForwardDeclaration(self, native):
template = Template("""
static ${RETURN} ${NAME}(JNIEnv* env, ${PARAMS});
""")
values = {'RETURN': JavaDataTypeToC(native.return_type),
'NAME': native.name,
'PARAMS': self.GetParamsInDeclaration(native)}
return template.substitute(values)
def GetNativeMethodStubString(self, native):
"""Returns stubs for native methods."""
template = Template("""\
static ${RETURN} ${NAME}(JNIEnv* env, ${PARAMS_IN_DECLARATION}) {
${P0_TYPE}* native = reinterpret_cast<${P0_TYPE}*>(${PARAM0_NAME});
CHECK_NATIVE_PTR(env, jcaller, native, "${NAME}"${OPTIONAL_ERROR_RETURN});
return native->${NAME}(${PARAMS_IN_CALL})${POST_CALL};
}
""")
params = []
if not self.options.pure_native_methods:
params = ['env', 'jcaller']
params_in_call = ', '.join(params + [p.name for p in native.params[1:]])
return_type = JavaDataTypeToC(native.return_type)
optional_error_return = JavaReturnValueToC(native.return_type)
if optional_error_return:
optional_error_return = ', ' + optional_error_return
post_call = ''
if re.match(RE_SCOPED_JNI_RETURN_TYPES, return_type):
post_call = '.Release()'
values = {
'RETURN': return_type,
'OPTIONAL_ERROR_RETURN': optional_error_return,
'NAME': native.name,
'PARAMS_IN_DECLARATION': self.GetParamsInDeclaration(native),
'PARAM0_NAME': native.params[0].name,
'P0_TYPE': native.p0_type,
'PARAMS_IN_CALL': params_in_call,
'POST_CALL': post_call
}
return template.substitute(values)
def GetArgument(self, param):
return ('as_jint(' + param.name + ')'
if param.datatype == 'int' else param.name)
def GetArgumentsInCall(self, params):
"""Return a string of arguments to call from native into Java"""
return [self.GetArgument(p) for p in params]
def GetCalledByNativeValues(self, called_by_native):
"""Fills in necessary values for the CalledByNative methods."""
if called_by_native.static or called_by_native.is_constructor:
first_param_in_declaration = ''
first_param_in_call = ('g_%s_clazz' %
(called_by_native.java_class_name or
self.class_name))
else:
first_param_in_declaration = ', jobject obj'
first_param_in_call = 'obj'
params_in_declaration = self.GetCalledByNativeParamsInDeclaration(
called_by_native)
if params_in_declaration:
params_in_declaration = ', ' + params_in_declaration
params_in_call = ', '.join(self.GetArgumentsInCall(called_by_native.params))
if params_in_call:
params_in_call = ', ' + params_in_call
pre_call = ''
post_call = ''
if called_by_native.static_cast:
pre_call = 'static_cast<%s>(' % called_by_native.static_cast
post_call = ')'
check_exception = ''
if not called_by_native.unchecked:
check_exception = 'jni_generator::CheckException(env);'
return_type = JavaDataTypeToC(called_by_native.return_type)
optional_error_return = JavaReturnValueToC(called_by_native.return_type)
if optional_error_return:
optional_error_return = ', ' + optional_error_return
return_declaration = ''
return_clause = ''
if return_type != 'void':
pre_call = ' ' + pre_call
return_declaration = return_type + ' ret ='
if re.match(RE_SCOPED_JNI_RETURN_TYPES, return_type):
return_type = 'base::android::ScopedJavaLocalRef<' + return_type + '>'
return_clause = 'return ' + return_type + '(env, ret);'
else:
return_clause = 'return ret;'
return {
'JAVA_CLASS': called_by_native.java_class_name or self.class_name,
'RETURN_TYPE': return_type,
'OPTIONAL_ERROR_RETURN': optional_error_return,
'RETURN_DECLARATION': return_declaration,
'RETURN_CLAUSE': return_clause,
'FIRST_PARAM_IN_DECLARATION': first_param_in_declaration,
'PARAMS_IN_DECLARATION': params_in_declaration,
'PRE_CALL': pre_call,
'POST_CALL': post_call,
'ENV_CALL': called_by_native.env_call,
'FIRST_PARAM_IN_CALL': first_param_in_call,
'PARAMS_IN_CALL': params_in_call,
'METHOD_ID_VAR_NAME': called_by_native.method_id_var_name,
'CHECK_EXCEPTION': check_exception,
'GET_METHOD_ID_IMPL': self.GetMethodIDImpl(called_by_native)
}
def GetEagerCalledByNativeMethodStub(self, called_by_native):
"""Returns the implementation of the called by native method."""
template = Template("""
static ${RETURN_TYPE} ${METHOD_ID_VAR_NAME}(\
JNIEnv* env${FIRST_PARAM_IN_DECLARATION}${PARAMS_IN_DECLARATION}) {
${RETURN_DECLARATION}${PRE_CALL}env->${ENV_CALL}(${FIRST_PARAM_IN_CALL},
g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME}${PARAMS_IN_CALL})${POST_CALL};
${RETURN_CLAUSE}
}""")
values = self.GetCalledByNativeValues(called_by_native)
return template.substitute(values)
def GetLazyCalledByNativeMethodStub(self, called_by_native):
"""Returns a string."""
function_signature_template = Template("""\
static ${RETURN_TYPE} Java_${JAVA_CLASS}_${METHOD_ID_VAR_NAME}(\
JNIEnv* env${FIRST_PARAM_IN_DECLARATION}${PARAMS_IN_DECLARATION})""")
function_header_template = Template("""\
${FUNCTION_SIGNATURE} {""")
function_header_with_unused_template = Template("""\
${FUNCTION_SIGNATURE} __attribute__ ((unused));
${FUNCTION_SIGNATURE} {""")
template = Template("""
static base::subtle::AtomicWord g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME} = 0;
${FUNCTION_HEADER}
/* Must call RegisterNativesImpl() */
CHECK_CLAZZ(env, ${FIRST_PARAM_IN_CALL},
g_${JAVA_CLASS}_clazz${OPTIONAL_ERROR_RETURN});
jmethodID method_id =
${GET_METHOD_ID_IMPL}
${RETURN_DECLARATION}
${PRE_CALL}env->${ENV_CALL}(${FIRST_PARAM_IN_CALL},
method_id${PARAMS_IN_CALL})${POST_CALL};
${CHECK_EXCEPTION}
${RETURN_CLAUSE}
}""")
values = self.GetCalledByNativeValues(called_by_native)
values['FUNCTION_SIGNATURE'] = (
function_signature_template.substitute(values))
if called_by_native.system_class:
values['FUNCTION_HEADER'] = (
function_header_with_unused_template.substitute(values))
else:
values['FUNCTION_HEADER'] = function_header_template.substitute(values)
return template.substitute(values)
def GetKMethodArrayEntry(self, native):
template = Template("""\
{ "native${NAME}", ${JNI_SIGNATURE}, reinterpret_cast<void*>(${NAME}) },""")
values = {'NAME': native.name,
'JNI_SIGNATURE': JniParams.Signature(native.params,
native.return_type,
True)}
return template.substitute(values)
def GetUniqueClasses(self, origin):
ret = {self.class_name: self.fully_qualified_class}
for entry in origin:
class_name = self.class_name
jni_class_path = self.fully_qualified_class
if entry.java_class_name:
class_name = entry.java_class_name
jni_class_path = self.fully_qualified_class + '$' + class_name
ret[class_name] = jni_class_path
return ret
def GetClassPathDefinitions(self):
"""Returns the ClassPath constants."""
ret = []
template = Template("""\
const char k${JAVA_CLASS}ClassPath[] = "${JNI_CLASS_PATH}";""")
native_classes = self.GetUniqueClasses(self.natives)
called_by_native_classes = self.GetUniqueClasses(self.called_by_natives)
all_classes = native_classes
all_classes.update(called_by_native_classes)
for clazz in all_classes:
values = {
'JAVA_CLASS': clazz,
'JNI_CLASS_PATH': JniParams.RemapClassName(all_classes[clazz]),
}
ret += [template.substitute(values)]
ret += ['']  # Blank line between the class-path constants and the jclass globals.
for clazz in called_by_native_classes:
template = Template("""\
// Leaking this jclass as we cannot use LazyInstance from some threads.
jclass g_${JAVA_CLASS}_clazz = NULL;""")
values = {
'JAVA_CLASS': clazz,
}
ret += [template.substitute(values)]
return '\n'.join(ret)
def GetFindClasses(self):
"""Returns the imlementation of FindClass for all known classes."""
if self.init_native:
template = Template("""\
g_${JAVA_CLASS}_clazz = static_cast<jclass>(env->NewWeakGlobalRef(clazz));""")
else:
template = Template("""\
g_${JAVA_CLASS}_clazz = reinterpret_cast<jclass>(env->NewGlobalRef(
base::android::GetClass(env, k${JAVA_CLASS}ClassPath).obj()));""")
ret = []
for clazz in self.GetUniqueClasses(self.called_by_natives):
values = {'JAVA_CLASS': clazz}
ret += [template.substitute(values)]
return '\n'.join(ret)
def GetMethodIDImpl(self, called_by_native):
"""Returns the implementation of GetMethodID."""
if self.options.eager_called_by_natives:
template = Template("""\
env->Get${STATIC_METHOD_PART}MethodID(
g_${JAVA_CLASS}_clazz,
"${JNI_NAME}", ${JNI_SIGNATURE});""")
else:
template = Template("""\
base::android::MethodID::LazyGet<
base::android::MethodID::TYPE_${STATIC}>(
env, g_${JAVA_CLASS}_clazz,
"${JNI_NAME}",
${JNI_SIGNATURE},
&g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME});
""")
jni_name = called_by_native.name
jni_return_type = called_by_native.return_type
if called_by_native.is_constructor:
jni_name = '<init>'
jni_return_type = 'void'
if called_by_native.signature:
signature = called_by_native.signature
else:
signature = JniParams.Signature(called_by_native.params,
jni_return_type,
True)
values = {
'JAVA_CLASS': called_by_native.java_class_name or self.class_name,
'JNI_NAME': jni_name,
'METHOD_ID_VAR_NAME': called_by_native.method_id_var_name,
'STATIC': 'STATIC' if called_by_native.static else 'INSTANCE',
'STATIC_METHOD_PART': 'Static' if called_by_native.static else '',
'JNI_SIGNATURE': signature,
}
return template.substitute(values)
def WrapOutput(output):
ret = []
for line in output.splitlines():
# Do not wrap lines under 80 characters or preprocessor directives.
if len(line) < 80 or line.lstrip()[:1] == '#':
stripped = line.rstrip()
if len(ret) == 0 or len(ret[-1]) or len(stripped):  # Collapse consecutive blank lines.
ret.append(stripped)
else:
first_line_indent = ' ' * (len(line) - len(line.lstrip()))
subsequent_indent = first_line_indent + ' ' * 4
if line.startswith('//'):
subsequent_indent = '//' + subsequent_indent
wrapper = textwrap.TextWrapper(width=80,
subsequent_indent=subsequent_indent,
break_long_words=False)
ret += [wrapped.rstrip() for wrapped in wrapper.wrap(line)]
ret += ['']
return '\n'.join(ret)
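# Illustrative only (an assumption about intent, not actual generator
# output): WrapOutput keeps generated lines within 80 columns, indenting
# continuation lines four extra spaces beyond the first line.
def _DemoWrapOutput():
  long_line = '  return ' + ' + '.join('arg%d' % i for i in range(20)) + ';'
  return WrapOutput(long_line)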
def ExtractJarInputFile(jar_file, input_file, out_dir):
"""Extracts input file from jar and returns the filename.
The input file is extracted to the same directory that the generated jni
headers will be placed in. This is passed as an argument to script.
Args:
jar_file: the jar file containing the input files to extract.
input_files: the list of files to extract from the jar file.
out_dir: the name of the directories to extract to.
Returns:
the name of extracted input file.
"""
jar_file = zipfile.ZipFile(jar_file)
out_dir = os.path.join(out_dir, os.path.dirname(input_file))
try:
os.makedirs(out_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
extracted_file_name = os.path.join(out_dir, os.path.basename(input_file))
with open(extracted_file_name, 'w') as outfile:
outfile.write(jar_file.read(input_file))
return extracted_file_name
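# Hypothetical usage, purely for illustration: the jar name, entry path and
# output directory below are invented. The entry is extracted under out_dir,
# preserving its package path, and the on-disk path is returned.
def _DemoExtractJarInputFile():
  return ExtractJarInputFile('example.jar', 'org/example/Foo.class',
                             'out/jni_headers')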
def GenerateJNIHeader(input_file, output_file, options):
try:
if os.path.splitext(input_file)[1] == '.class':
jni_from_javap = JNIFromJavaP.CreateFromClass(input_file, options)
content = jni_from_javap.GetContent()
else:
jni_from_java_source = JNIFromJavaSource.CreateFromFile(
input_file, options)
content = jni_from_java_source.GetContent()
except ParseError, e:
print e
sys.exit(1)
if output_file:
if not os.path.exists(os.path.dirname(os.path.abspath(output_file))):
os.makedirs(os.path.dirname(os.path.abspath(output_file)))
if options.optimize_generation and os.path.exists(output_file):
with open(output_file, 'r') as f:
existing_content = f.read()
if existing_content == content:
return
with open(output_file, 'w') as f:
f.write(content)
else:
print content
def GetScriptName():
script_components = os.path.abspath(sys.argv[0]).split(os.path.sep)
base_index = 0
for idx, value in enumerate(script_components):
if value == 'base' or value == 'third_party':
base_index = idx
break
return os.sep.join(script_components[base_index:])
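# Illustrative only (hypothetical path): if this script runs from
# /src/base/android/jni_generator/jni_generator.py, GetScriptName() returns
# 'base/android/jni_generator/jni_generator.py', i.e. the path relative to
# the nearest 'base' or 'third_party' component.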
def main(argv):
usage = """usage: %prog [OPTIONS]
This script parses the given Java source code, extracts the native method
declarations, and prints the header file to stdout (or writes it to a file).
See SampleForTests.java for more details.
"""
option_parser = optparse.OptionParser(usage=usage)
option_parser.add_option('-j', '--jar_file', dest='jar_file',
help='Extract the list of input files from'
' a specified jar file.'
' Uses javap to extract the methods from a'
' pre-compiled class. --input_file should point'
' to pre-compiled Java .class files.')
option_parser.add_option('-n', dest='namespace',
help='Uses as a namespace in the generated header '
'instead of the javap class name, or when there is '
'no JNINamespace annotation in the java source.')
option_parser.add_option('--input_file',
help='Single input file name. The output file name '
'will be derived from it. Must be used with '
'--output_dir.')
option_parser.add_option('--output_dir',
help='The output directory. Must be used with '
'--input_file.')
option_parser.add_option('--optimize_generation', type="int",
default=0, help='Whether we should optimize JNI '
'generation by not regenerating files if they have '
'not changed.')
option_parser.add_option('--jarjar',
help='Path to optional jarjar rules file.')
option_parser.add_option('--script_name', default=GetScriptName(),
help='The name of this script in the generated '
'header.')
option_parser.add_option('--includes',
help='The comma-separated list of header files to '
'include in the generated header.')
option_parser.add_option('--pure_native_methods',
action='store_true', dest='pure_native_methods',
help='When true, the native methods will be called '
'without any JNI-specific arguments.')
option_parser.add_option('--ptr_type', default='int',
type='choice', choices=['int', 'long'],
help='The type used to represent native pointers in '
'Java code. For 32-bit, use int; '
'for 64-bit, use long.')
option_parser.add_option('--jni_init_native_name', default='',
help='The name of the JNI registration method that '
'is used to initialize all native methods. If a '
'method with this name is not present in the Java '
'source file, setting this option is a no-op. When '
'a method with this name is found however, the '
'naming convention Java_<packageName>_<className> '
'will limit the initialization to only the '
'top-level class.')
option_parser.add_option('--eager_called_by_natives',
action='store_true', dest='eager_called_by_natives',
help='When true, the called-by-native methods will '
'be initialized in a non-atomic way.')
option_parser.add_option('--cpp', default='cpp',
help='The path to cpp command.')
option_parser.add_option('--javap', default='javap',
help='The path to javap command.')
options, args = option_parser.parse_args(argv)
if options.jar_file:
input_file = ExtractJarInputFile(options.jar_file, options.input_file,
options.output_dir)
elif options.input_file:
input_file = options.input_file
else:
option_parser.print_help()
print '\nError: Must specify --jar_file or --input_file.'
return 1
output_file = None
if options.output_dir:
root_name = os.path.splitext(os.path.basename(input_file))[0]
output_file = os.path.join(options.output_dir, root_name) + '_jni.h'
if options.jarjar:
with open(options.jarjar) as f:
JniParams.SetJarJarMappings(f.read())
GenerateJNIHeader(input_file, output_file, options)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
bsd-3-clause
|
VirtueSecurity/aws-extender
|
BappModules/docutils/parsers/rst/languages/cs.py
|
128
|
4857
|
# $Id: cs.py 7119 2011-09-02 13:00:23Z milde $
# Author: Marek Blaha <[email protected]>
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
Czech-language mappings for language-dependent features of
reStructuredText.
"""
__docformat__ = 'reStructuredText'
directives = {
# language-dependent: fixed
u'pozor': 'attention',
u'caution (translation required)': 'caution', # how to distinguish caution from warning?
u'code (translation required)': 'code',
u'nebezpe\u010D\u00ED': 'danger',
u'chyba': 'error',
u'rada': 'hint',
u'd\u016Fle\u017Eit\u00E9': 'important',
u'pozn\u00E1mka': 'note',
u'tip (translation required)': 'tip',
u'varov\u00E1n\u00ED': 'warning',
u'admonition (translation required)': 'admonition',
u'sidebar (translation required)': 'sidebar',
u't\u00E9ma': 'topic',
u'line-block (translation required)': 'line-block',
u'parsed-literal (translation required)': 'parsed-literal',
u'odd\u00EDl': 'rubric',
u'moto': 'epigraph',
u'highlights (translation required)': 'highlights',
u'pull-quote (translation required)': 'pull-quote',
u'compound (translation required)': 'compound',
u'container (translation required)': 'container',
#'questions': 'questions',
#'qa': 'questions',
#'faq': 'questions',
u'table (translation required)': 'table',
u'csv-table (translation required)': 'csv-table',
u'list-table (translation required)': 'list-table',
u'math (translation required)': 'math',
u'meta (translation required)': 'meta',
#'imagemap': 'imagemap',
u'image (translation required)': 'image', # obrazek ("image")
u'figure (translation required)': 'figure', # and here?
u'include (translation required)': 'include',
u'raw (translation required)': 'raw',
u'replace (translation required)': 'replace',
u'unicode (translation required)': 'unicode',
u'datum': 'date',
u't\u0159\u00EDda': 'class',
u'role (translation required)': 'role',
u'default-role (translation required)': 'default-role',
u'title (translation required)': 'title',
u'obsah': 'contents',
u'sectnum (translation required)': 'sectnum',
u'section-numbering (translation required)': 'sectnum',
u'header (translation required)': 'header',
u'footer (translation required)': 'footer',
#'footnotes': 'footnotes',
#'citations': 'citations',
u'target-notes (translation required)': 'target-notes',
u'restructuredtext-test-directive': 'restructuredtext-test-directive'}
"""Czech name to registered (in directives/__init__.py) directive name
mapping."""
roles = {
# language-dependent: fixed
u'abbreviation (translation required)': 'abbreviation',
u'ab (translation required)': 'abbreviation',
u'acronym (translation required)': 'acronym',
u'ac (translation required)': 'acronym',
u'code (translation required)': 'code',
u'index (translation required)': 'index',
u'i (translation required)': 'index',
u'subscript (translation required)': 'subscript',
u'sub (translation required)': 'subscript',
u'superscript (translation required)': 'superscript',
u'sup (translation required)': 'superscript',
u'title-reference (translation required)': 'title-reference',
u'title (translation required)': 'title-reference',
u't (translation required)': 'title-reference',
u'pep-reference (translation required)': 'pep-reference',
u'pep (translation required)': 'pep-reference',
u'rfc-reference (translation required)': 'rfc-reference',
u'rfc (translation required)': 'rfc-reference',
u'emphasis (translation required)': 'emphasis',
u'strong (translation required)': 'strong',
u'literal (translation required)': 'literal',
u'math (translation required)': 'math',
u'named-reference (translation required)': 'named-reference',
u'anonymous-reference (translation required)': 'anonymous-reference',
u'footnote-reference (translation required)': 'footnote-reference',
u'citation-reference (translation required)': 'citation-reference',
u'substitution-reference (translation required)': 'substitution-reference',
u'target (translation required)': 'target',
u'uri-reference (translation required)': 'uri-reference',
u'uri (translation required)': 'uri-reference',
u'url (translation required)': 'uri-reference',
u'raw (translation required)': 'raw',}
"""Mapping of Czech role names to canonical role names for interpreted text.
"""
|
mit
|
MaDKaTZe/phantomjs
|
src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/commands/queues_unittest.py
|
115
|
26224
|
# Copyright (C) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import StringIO
from webkitpy.common.checkout.scm import CheckoutNeedsUpdate
from webkitpy.common.checkout.scm.scm_mock import MockSCM
from webkitpy.common.net.bugzilla import Attachment
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.layout_tests.models import test_results
from webkitpy.layout_tests.models import test_failures
from webkitpy.thirdparty.mock import Mock
from webkitpy.tool.commands.commandtest import CommandsTest
from webkitpy.tool.commands.queues import *
from webkitpy.tool.commands.queuestest import QueuesTest
from webkitpy.tool.commands.stepsequence import StepSequence
from webkitpy.common.net.statusserver_mock import MockStatusServer
from webkitpy.tool.mocktool import MockTool, MockOptions
class TestCommitQueue(CommitQueue):
def __init__(self, tool=None):
CommitQueue.__init__(self)
if tool:
self.bind_to_tool(tool)
self._options = MockOptions(confirm=False, parent_command="commit-queue", port=None)
def begin_work_queue(self):
output_capture = OutputCapture()
output_capture.capture_output()
CommitQueue.begin_work_queue(self)
output_capture.restore_output()
class TestQueue(AbstractPatchQueue):
name = "test-queue"
class TestReviewQueue(AbstractReviewQueue):
name = "test-review-queue"
class TestFeederQueue(FeederQueue):
_sleep_duration = 0
class AbstractQueueTest(CommandsTest):
def test_log_directory(self):
self.assertEqual(TestQueue()._log_directory(), os.path.join("..", "test-queue-logs"))
def _assert_run_webkit_patch(self, run_args, port=None):
queue = TestQueue()
tool = MockTool()
tool.status_server.bot_id = "gort"
tool.executive = Mock()
queue.bind_to_tool(tool)
queue._options = Mock()
queue._options.port = port
queue.run_webkit_patch(run_args)
expected_run_args = ["echo", "--status-host=example.com", "--bot-id=gort"]
if port:
expected_run_args.append("--port=%s" % port)
expected_run_args.extend(run_args)
tool.executive.run_command.assert_called_with(expected_run_args, cwd='/mock-checkout')
def test_run_webkit_patch(self):
self._assert_run_webkit_patch([1])
self._assert_run_webkit_patch(["one", 2])
self._assert_run_webkit_patch([1], port="mockport")
def test_iteration_count(self):
queue = TestQueue()
queue._options = Mock()
queue._options.iterations = 3
self.assertTrue(queue.should_continue_work_queue())
self.assertTrue(queue.should_continue_work_queue())
self.assertTrue(queue.should_continue_work_queue())
self.assertFalse(queue.should_continue_work_queue())
def test_no_iteration_count(self):
queue = TestQueue()
queue._options = Mock()
self.assertTrue(queue.should_continue_work_queue())
self.assertTrue(queue.should_continue_work_queue())
self.assertTrue(queue.should_continue_work_queue())
self.assertTrue(queue.should_continue_work_queue())
def _assert_log_message(self, script_error, log_message):
failure_log = AbstractQueue._log_from_script_error_for_upload(script_error, output_limit=10)
self.assertEqual(failure_log.read(), log_message)
def test_log_from_script_error_for_upload(self):
self._assert_log_message(ScriptError("test"), "test")
unicode_tor = u"WebKit \u2661 Tor Arne Vestb\u00F8!"
utf8_tor = unicode_tor.encode("utf-8")
self._assert_log_message(ScriptError(unicode_tor), utf8_tor)
script_error = ScriptError(unicode_tor, output=unicode_tor)
expected_output = "%s\nLast %s characters of output:\n%s" % (utf8_tor, 10, utf8_tor[-10:])
self._assert_log_message(script_error, expected_output)
class FeederQueueTest(QueuesTest):
def test_feeder_queue(self):
queue = TestFeederQueue()
tool = MockTool(log_executive=True)
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("feeder-queue"),
"process_work_item": """Warning, attachment 10001 on bug 50000 has invalid committer ([email protected])
Warning, attachment 10001 on bug 50000 has invalid committer ([email protected])
MOCK setting flag 'commit-queue' to '-' on attachment '10001' with comment 'Rejecting attachment 10001 from commit-queue.\n\[email protected] does not have committer permissions according to http://trac.webkit.org/browser/trunk/Tools/Scripts/webkitpy/common/config/committers.py.
- If you do not have committer rights please read http://webkit.org/coding/contributing.html for instructions on how to use bugzilla flags.
- If you have committer rights please correct the error in Tools/Scripts/webkitpy/common/config/committers.py by adding yourself to the file (no review needed). The commit-queue restarts itself every 2 hours. After restart the commit-queue will correctly respect your committer rights.'
MOCK: update_work_items: commit-queue [10005, 10000]
Feeding commit-queue items [10005, 10000]
Feeding EWS (1 r? patch, 1 new)
MOCK: submit_to_ews: 10002
""",
"handle_unexpected_error": "Mock error message\n",
}
self.assert_queue_outputs(queue, tool=tool, expected_logs=expected_logs)
class AbstractPatchQueueTest(CommandsTest):
def test_next_patch(self):
queue = AbstractPatchQueue()
tool = MockTool()
queue.bind_to_tool(tool)
queue._options = Mock()
queue._options.port = None
self.assertIsNone(queue._next_patch())
tool.status_server = MockStatusServer(work_items=[2, 10000, 10001])
expected_stdout = "MOCK: fetch_attachment: 2 is not a known attachment id\n" # A mock-only message to prevent us from making mistakes.
expected_logs = "MOCK: release_work_item: None 2\n"
patch = OutputCapture().assert_outputs(self, queue._next_patch, expected_stdout=expected_stdout, expected_logs=expected_logs)
# The patch.id() == 2 is ignored because it doesn't exist.
self.assertEqual(patch.id(), 10000)
self.assertEqual(queue._next_patch().id(), 10001)
self.assertEqual(queue._next_patch(), None) # When the queue is empty
class PatchProcessingQueueTest(CommandsTest):
def test_upload_results_archive_for_patch(self):
queue = PatchProcessingQueue()
queue.name = "mock-queue"
tool = MockTool()
queue.bind_to_tool(tool)
queue._options = Mock()
queue._options.port = None
patch = queue._tool.bugs.fetch_attachment(10001)
expected_logs = """MOCK add_attachment_to_bug: bug_id=50000, description=Archive of layout-test-results from bot for mac-snowleopard filename=layout-test-results.zip mimetype=None
-- Begin comment --
The attached test failures were seen while running run-webkit-tests on the mock-queue.
Port: mac-snowleopard Platform: MockPlatform 1.0
-- End comment --
"""
OutputCapture().assert_outputs(self, queue._upload_results_archive_for_patch, [patch, Mock()], expected_logs=expected_logs)
class NeedsUpdateSequence(StepSequence):
def _run(self, tool, options, state):
raise CheckoutNeedsUpdate([], 1, "", None)
class AlwaysCommitQueueTool(object):
def __init__(self):
self.status_server = MockStatusServer()
def command_by_name(self, name):
return CommitQueue
class SecondThoughtsCommitQueue(TestCommitQueue):
def __init__(self, tool=None):
self._reject_patch = False
TestCommitQueue.__init__(self, tool)
def run_command(self, command):
# We want to reject the patch after the first validation,
# so wait to reject it until after some other command has run.
self._reject_patch = True
return CommitQueue.run_command(self, command)
def refetch_patch(self, patch):
if not self._reject_patch:
return self._tool.bugs.fetch_attachment(patch.id())
attachment_dictionary = {
"id": patch.id(),
"bug_id": patch.bug_id(),
"name": "Rejected",
"is_obsolete": True,
"is_patch": False,
"review": "-",
"reviewer_email": "[email protected]",
"commit-queue": "-",
"committer_email": "[email protected]",
"attacher_email": "Contributer1",
}
return Attachment(attachment_dictionary, None)
class CommitQueueTest(QueuesTest):
def _mock_test_result(self, testname):
return test_results.TestResult(testname, [test_failures.FailureTextMismatch()])
def test_commit_queue(self):
tool = MockTool()
tool.filesystem.write_text_file('/tmp/layout-test-results/full_results.json', '') # Otherwise the commit-queue will hit a KeyError trying to read the results from the MockFileSystem.
tool.filesystem.write_text_file('/tmp/layout-test-results/webkit_unit_tests_output.xml', '')
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("commit-queue"),
"process_work_item": """Running: webkit-patch --status-host=example.com clean --port=mac
MOCK: update_status: commit-queue Cleaned working directory
Running: webkit-patch --status-host=example.com update --port=mac
MOCK: update_status: commit-queue Updated working directory
Running: webkit-patch --status-host=example.com apply-attachment --no-update --non-interactive 10000 --port=mac
MOCK: update_status: commit-queue Applied patch
Running: webkit-patch --status-host=example.com validate-changelog --check-oops --non-interactive 10000 --port=mac
MOCK: update_status: commit-queue ChangeLog validated
Running: webkit-patch --status-host=example.com build --no-clean --no-update --build-style=release --port=mac
MOCK: update_status: commit-queue Built patch
Running: webkit-patch --status-host=example.com build-and-test --no-clean --no-update --test --non-interactive --port=mac
MOCK: update_status: commit-queue Passed tests
Running: webkit-patch --status-host=example.com land-attachment --force-clean --non-interactive --parent-command=commit-queue 10000 --port=mac
MOCK: update_status: commit-queue Landed patch
MOCK: update_status: commit-queue Pass
MOCK: release_work_item: commit-queue 10000
""",
"handle_script_error": "ScriptError error message\n\nMOCK output\n",
"handle_unexpected_error": "MOCK setting flag 'commit-queue' to '-' on attachment '10000' with comment 'Rejecting attachment 10000 from commit-queue.\n\nMock error message'\n",
}
self.assert_queue_outputs(CommitQueue(), tool=tool, expected_logs=expected_logs)
def test_commit_queue_failure(self):
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("commit-queue"),
"process_work_item": """MOCK: update_status: commit-queue Cleaned working directory
MOCK: update_status: commit-queue Updated working directory
MOCK: update_status: commit-queue Patch does not apply
MOCK setting flag 'commit-queue' to '-' on attachment '10000' with comment 'Rejecting attachment 10000 from commit-queue.\n\nMOCK script error
Full output: http://dummy_url'
MOCK: update_status: commit-queue Fail
MOCK: release_work_item: commit-queue 10000
""",
"handle_script_error": "ScriptError error message\n\nMOCK output\n",
"handle_unexpected_error": "MOCK setting flag 'commit-queue' to '-' on attachment '10000' with comment 'Rejecting attachment 10000 from commit-queue.\n\nMock error message'\n",
}
queue = CommitQueue()
def mock_run_webkit_patch(command):
if command[0] == 'clean' or command[0] == 'update':
# We want cleaning to succeed so we can error out on a step
# that causes the commit-queue to reject the patch.
return
raise ScriptError('MOCK script error')
queue.run_webkit_patch = mock_run_webkit_patch
self.assert_queue_outputs(queue, expected_logs=expected_logs)
def test_commit_queue_failure_with_failing_tests(self):
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("commit-queue"),
"process_work_item": """MOCK: update_status: commit-queue Cleaned working directory
MOCK: update_status: commit-queue Updated working directory
MOCK: update_status: commit-queue Patch does not apply
MOCK setting flag 'commit-queue' to '-' on attachment '10000' with comment 'Rejecting attachment 10000 from commit-queue.\n\nNew failing tests:
mock_test_name.html
another_test_name.html
Full output: http://dummy_url'
MOCK: update_status: commit-queue Fail
MOCK: release_work_item: commit-queue 10000
""",
"handle_script_error": "ScriptError error message\n\nMOCK output\n",
"handle_unexpected_error": "MOCK setting flag 'commit-queue' to '-' on attachment '10000' with comment 'Rejecting attachment 10000 from commit-queue.\n\nMock error message'\n",
}
queue = CommitQueue()
def mock_run_webkit_patch(command):
if command[0] == 'clean' or command[0] == 'update':
# We want cleaning to succeed so we can error out on a step
# that causes the commit-queue to reject the patch.
return
queue._expected_failures.unexpected_failures_observed = lambda results: ["mock_test_name.html", "another_test_name.html"]
raise ScriptError('MOCK script error')
queue.run_webkit_patch = mock_run_webkit_patch
self.assert_queue_outputs(queue, expected_logs=expected_logs)
def test_rollout(self):
tool = MockTool()
tool.filesystem.write_text_file('/tmp/layout-test-results/full_results.json', '') # Otherwise the commit-queue will hit a KeyError trying to read the results from the MockFileSystem.
tool.filesystem.write_text_file('/tmp/layout-test-results/webkit_unit_tests_output.xml', '')
tool.buildbot.light_tree_on_fire()
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("commit-queue"),
"process_work_item": """Running: webkit-patch --status-host=example.com clean --port=%(port)s
MOCK: update_status: commit-queue Cleaned working directory
Running: webkit-patch --status-host=example.com update --port=%(port)s
MOCK: update_status: commit-queue Updated working directory
Running: webkit-patch --status-host=example.com apply-attachment --no-update --non-interactive 10000 --port=%(port)s
MOCK: update_status: commit-queue Applied patch
Running: webkit-patch --status-host=example.com validate-changelog --check-oops --non-interactive 10000 --port=%(port)s
MOCK: update_status: commit-queue ChangeLog validated
Running: webkit-patch --status-host=example.com build --no-clean --no-update --build-style=release --port=%(port)s
MOCK: update_status: commit-queue Built patch
Running: webkit-patch --status-host=example.com build-and-test --no-clean --no-update --test --non-interactive --port=%(port)s
MOCK: update_status: commit-queue Passed tests
Running: webkit-patch --status-host=example.com land-attachment --force-clean --non-interactive --parent-command=commit-queue 10000 --port=%(port)s
MOCK: update_status: commit-queue Landed patch
MOCK: update_status: commit-queue Pass
MOCK: release_work_item: commit-queue 10000
""" % {"port": "mac"},
"handle_script_error": "ScriptError error message\n\nMOCK output\n",
"handle_unexpected_error": "MOCK setting flag 'commit-queue' to '-' on attachment '10000' with comment 'Rejecting attachment 10000 from commit-queue.\n\nMock error message'\n",
}
self.assert_queue_outputs(CommitQueue(), tool=tool, expected_logs=expected_logs)
def test_rollout_lands(self):
tool = MockTool()
tool.buildbot.light_tree_on_fire()
rollout_patch = tool.bugs.fetch_attachment(10005) # _patch6, a rollout patch.
assert(rollout_patch.is_rollout())
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("commit-queue"),
"process_work_item": """Running: webkit-patch --status-host=example.com clean --port=%(port)s
MOCK: update_status: commit-queue Cleaned working directory
Running: webkit-patch --status-host=example.com update --port=%(port)s
MOCK: update_status: commit-queue Updated working directory
Running: webkit-patch --status-host=example.com apply-attachment --no-update --non-interactive 10005 --port=%(port)s
MOCK: update_status: commit-queue Applied patch
Running: webkit-patch --status-host=example.com validate-changelog --check-oops --non-interactive 10005 --port=%(port)s
MOCK: update_status: commit-queue ChangeLog validated
Running: webkit-patch --status-host=example.com land-attachment --force-clean --non-interactive --parent-command=commit-queue 10005 --port=%(port)s
MOCK: update_status: commit-queue Landed patch
MOCK: update_status: commit-queue Pass
MOCK: release_work_item: commit-queue 10005
""" % {"port": "mac"},
"handle_script_error": "ScriptError error message\n\nMOCK output\n",
"handle_unexpected_error": "MOCK setting flag 'commit-queue' to '-' on attachment '10005' with comment 'Rejecting attachment 10005 from commit-queue.\n\nMock error message'\n",
}
self.assert_queue_outputs(CommitQueue(), tool=tool, work_item=rollout_patch, expected_logs=expected_logs)
def test_auto_retry(self):
queue = CommitQueue()
options = Mock()
options.parent_command = "commit-queue"
tool = AlwaysCommitQueueTool()
sequence = NeedsUpdateSequence(None)
expected_logs = """Commit failed because the checkout is out of date. Please update and try again.
MOCK: update_status: commit-queue Tests passed, but commit failed (checkout out of date). Updating, then landing without building or re-running tests.
"""
state = {'patch': None}
OutputCapture().assert_outputs(self, sequence.run_and_handle_errors, [tool, options, state], expected_exception=TryAgain, expected_logs=expected_logs)
self.assertTrue(options.update)
self.assertFalse(options.build)
self.assertFalse(options.test)
def test_manual_reject_during_processing(self):
queue = SecondThoughtsCommitQueue(MockTool())
queue.begin_work_queue()
queue._tool.filesystem.write_text_file('/tmp/layout-test-results/full_results.json', '') # Otherwise the commit-queue will hit a KeyError trying to read the results from the MockFileSystem.
queue._tool.filesystem.write_text_file('/tmp/layout-test-results/webkit_unit_tests_output.xml', '')
queue._options = Mock()
queue._options.port = None
expected_logs = """Running: webkit-patch --status-host=example.com clean --port=mac
MOCK: update_status: commit-queue Cleaned working directory
Running: webkit-patch --status-host=example.com update --port=mac
MOCK: update_status: commit-queue Updated working directory
Running: webkit-patch --status-host=example.com apply-attachment --no-update --non-interactive 10000 --port=mac
MOCK: update_status: commit-queue Applied patch
Running: webkit-patch --status-host=example.com validate-changelog --check-oops --non-interactive 10000 --port=mac
MOCK: update_status: commit-queue ChangeLog validated
Running: webkit-patch --status-host=example.com build --no-clean --no-update --build-style=release --port=mac
MOCK: update_status: commit-queue Built patch
Running: webkit-patch --status-host=example.com build-and-test --no-clean --no-update --test --non-interactive --port=mac
MOCK: update_status: commit-queue Passed tests
MOCK: update_status: commit-queue Retry
MOCK: release_work_item: commit-queue 10000
"""
self.maxDiff = None
OutputCapture().assert_outputs(self, queue.process_work_item, [QueuesTest.mock_work_item], expected_logs=expected_logs)
def test_report_flaky_tests(self):
queue = TestCommitQueue(MockTool())
expected_logs = """MOCK bug comment: bug_id=50002, cc=None
--- Begin comment ---
The commit-queue just saw foo/bar.html flake (text diff) while processing attachment 10000 on bug 50000.
Port: MockPort Platform: MockPlatform 1.0
--- End comment ---
MOCK add_attachment_to_bug: bug_id=50002, description=Failure diff from bot filename=failure.diff mimetype=None
MOCK bug comment: bug_id=50002, cc=None
--- Begin comment ---
The commit-queue just saw bar/baz.html flake (text diff) while processing attachment 10000 on bug 50000.
Port: MockPort Platform: MockPlatform 1.0
--- End comment ---
bar/baz-diffs.txt does not exist in results archive, uploading entire archive.
MOCK add_attachment_to_bug: bug_id=50002, description=Archive of layout-test-results from bot filename=layout-test-results.zip mimetype=None
MOCK bug comment: bug_id=50000, cc=None
--- Begin comment ---
The commit-queue encountered the following flaky tests while processing attachment 10000:
foo/bar.html bug 50002 (author: [email protected])
bar/baz.html bug 50002 (author: [email protected])
The commit-queue is continuing to process your patch.
--- End comment ---
"""
test_names = ["foo/bar.html", "bar/baz.html"]
test_results = [self._mock_test_result(name) for name in test_names]
class MockZipFile(object):
def __init__(self):
self.fp = StringIO.StringIO()
def read(self, path):
return ""
def namelist(self):
# This is intentionally missing one diffs.txt to exercise the "upload the whole zip" codepath.
return ['foo/bar-diffs.txt']
OutputCapture().assert_outputs(self, queue.report_flaky_tests, [QueuesTest.mock_work_item, test_results, MockZipFile()], expected_logs=expected_logs)
def test_did_pass_testing_ews(self):
tool = MockTool()
patch = tool.bugs.fetch_attachment(10000)
queue = TestCommitQueue(tool)
self.assertFalse(queue.did_pass_testing_ews(patch))
class StyleQueueTest(QueuesTest):
def test_style_queue_with_style_exception(self):
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("style-queue"),
"process_work_item": """Running: webkit-patch --status-host=example.com clean
MOCK: update_status: style-queue Cleaned working directory
Running: webkit-patch --status-host=example.com update
MOCK: update_status: style-queue Updated working directory
Running: webkit-patch --status-host=example.com apply-attachment --no-update --non-interactive 10000
MOCK: update_status: style-queue Applied patch
Running: webkit-patch --status-host=example.com apply-watchlist-local 50000
MOCK: update_status: style-queue Watchlist applied
Running: webkit-patch --status-host=example.com check-style-local --non-interactive --quiet
MOCK: update_status: style-queue Style checked
MOCK: update_status: style-queue Pass
MOCK: release_work_item: style-queue 10000
""",
"handle_unexpected_error": "Mock error message\n",
"handle_script_error": "MOCK output\n",
}
tool = MockTool(executive_throws_when_run=set(['check-style']))
self.assert_queue_outputs(StyleQueue(), expected_logs=expected_logs, tool=tool)
def test_style_queue_with_watch_list_exception(self):
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("style-queue"),
"process_work_item": """Running: webkit-patch --status-host=example.com clean
MOCK: update_status: style-queue Cleaned working directory
Running: webkit-patch --status-host=example.com update
MOCK: update_status: style-queue Updated working directory
Running: webkit-patch --status-host=example.com apply-attachment --no-update --non-interactive 10000
MOCK: update_status: style-queue Applied patch
Running: webkit-patch --status-host=example.com apply-watchlist-local 50000
Exception for ['echo', '--status-host=example.com', 'apply-watchlist-local', 50000]
MOCK command output
MOCK: update_status: style-queue Unabled to apply watchlist
Running: webkit-patch --status-host=example.com check-style-local --non-interactive --quiet
MOCK: update_status: style-queue Style checked
MOCK: update_status: style-queue Pass
MOCK: release_work_item: style-queue 10000
""",
"handle_unexpected_error": "Mock error message\n",
"handle_script_error": "MOCK output\n",
}
tool = MockTool(executive_throws_when_run=set(['apply-watchlist-local']))
self.assert_queue_outputs(StyleQueue(), expected_logs=expected_logs, tool=tool)
|
bsd-3-clause
|
jilir/shadowsocks
|
shadowsocks/crypto/rc4_md5.py
|
1042
|
1339
|
#!/usr/bin/env python
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import hashlib
from shadowsocks.crypto import openssl
__all__ = ['ciphers']
def create_cipher(alg, key, iv, op, key_as_bytes=0, d=None, salt=None,
i=1, padding=1):
md5 = hashlib.md5()
md5.update(key)
md5.update(iv)
rc4_key = md5.digest()
return openssl.OpenSSLCrypto(b'rc4', rc4_key, b'', op)
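# For reference, a minimal pure-Python sketch of the RC4 keystream that the
# OpenSSL 'rc4' cipher above implements (illustrative only; the actual
# encryption is delegated to openssl.OpenSSLCrypto). Assumes `key` is a byte
# string, as everywhere else in this module.
def _rc4_keystream_demo(key, length):
    s = list(range(256))
    j = 0
    for i in range(256):  # key-scheduling algorithm (KSA)
        j = (j + s[i] + ord(key[i % len(key)])) % 256
        s[i], s[j] = s[j], s[i]
    out = []
    i = j = 0
    for _ in range(length):  # pseudo-random generation algorithm (PRGA)
        i = (i + 1) % 256
        j = (j + s[i]) % 256
        s[i], s[j] = s[j], s[i]
        out.append(s[(s[i] + s[j]) % 256])
    return out  # keystream bytes; XOR with plaintext to encrypt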
ciphers = {
'rc4-md5': (16, 16, create_cipher),
}
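# Tuple layout is (key_length, iv_length, cipher_factory): rc4-md5 takes a
# 16-byte key and a 16-byte IV, both passed through to create_cipher above.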
def test():
from shadowsocks.crypto import util
cipher = create_cipher('rc4-md5', b'k' * 32, b'i' * 16, 1)
decipher = create_cipher('rc4-md5', b'k' * 32, b'i' * 16, 0)
util.run_cipher(cipher, decipher)
if __name__ == '__main__':
test()
|
apache-2.0
|
oberlin/django
|
django/test/client.py
|
29
|
26734
|
from __future__ import unicode_literals
import json
import mimetypes
import os
import re
import sys
from copy import copy
from importlib import import_module
from io import BytesIO
from django.apps import apps
from django.conf import settings
from django.core import urlresolvers
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import ISO_8859_1, UTF_8, WSGIRequest
from django.core.signals import (
got_request_exception, request_finished, request_started,
)
from django.db import close_old_connections
from django.http import HttpRequest, QueryDict, SimpleCookie
from django.template import TemplateDoesNotExist
from django.test import signals
from django.test.utils import ContextList
from django.utils import six
from django.utils.encoding import force_bytes, force_str, uri_to_iri
from django.utils.functional import SimpleLazyObject, curry
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils.six.moves.urllib.parse import urlparse, urlsplit
__all__ = ('Client', 'RedirectCycleError', 'RequestFactory', 'encode_file', 'encode_multipart')
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
CONTENT_TYPE_RE = re.compile(r'.*; charset=([\w\d-]+);?')
class RedirectCycleError(Exception):
"""
The test client has been asked to follow a redirect loop.
"""
def __init__(self, message, last_response):
super(RedirectCycleError, self).__init__(message)
self.last_response = last_response
self.redirect_chain = last_response.redirect_chain
class FakePayload(object):
"""
A wrapper around BytesIO that restricts what can be read, since data from
the network cannot be seeked and cannot be read past its content length.
This makes sure that views can't do anything under the test client that
wouldn't work in Real Life.
"""
def __init__(self, content=None):
self.__content = BytesIO()
self.__len = 0
self.read_started = False
if content is not None:
self.write(content)
def __len__(self):
return self.__len
def read(self, num_bytes=None):
if not self.read_started:
self.__content.seek(0)
self.read_started = True
if num_bytes is None:
num_bytes = self.__len or 0
assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
content = self.__content.read(num_bytes)
self.__len -= num_bytes
return content
def write(self, content):
if self.read_started:
raise ValueError("Unable to write a payload after he's been read")
content = force_bytes(content)
self.__content.write(content)
self.__len += len(content)
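def _fake_payload_demo():
    # Informal sketch, not part of the public API: FakePayload hands back
    # exactly the bytes written, in order, and the assertion in read() fires
    # if a caller tries to read past the declared content length, just as
    # reading past Content-Length would misbehave on a real socket.
    payload = FakePayload(b'abcdef')
    assert payload.read(4) == b'abcd'
    assert payload.read(2) == b'ef'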
def closing_iterator_wrapper(iterable, close):
try:
for item in iterable:
yield item
finally:
request_finished.disconnect(close_old_connections)
close() # will fire request_finished
request_finished.connect(close_old_connections)
class ClientHandler(BaseHandler):
"""
An HTTP handler that can be used for testing purposes. Uses the WSGI
interface to compose requests, but returns the raw HttpResponse object with
the originating WSGIRequest attached to its ``wsgi_request`` attribute.
"""
def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
self.enforce_csrf_checks = enforce_csrf_checks
super(ClientHandler, self).__init__(*args, **kwargs)
def __call__(self, environ):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._request_middleware is None:
self.load_middleware()
request_started.disconnect(close_old_connections)
request_started.send(sender=self.__class__, environ=environ)
request_started.connect(close_old_connections)
request = WSGIRequest(environ)
# sneaky little hack so that we can easily get round
# CsrfViewMiddleware. This makes life easier, and is probably
# required for backwards compatibility with external tests against
# admin views.
request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
# Request goes through middleware.
response = self.get_response(request)
# Attach the originating request to the response so that it can be
# retrieved later.
response.wsgi_request = request
# We're emulating a WSGI server; we must call the close method
# on completion.
if response.streaming:
response.streaming_content = closing_iterator_wrapper(
response.streaming_content, response.close)
else:
request_finished.disconnect(close_old_connections)
response.close() # will fire request_finished
request_finished.connect(close_old_connections)
return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
"""
Stores templates and contexts that are rendered.
The context is copied so that it is an accurate representation at the time
of rendering.
"""
store.setdefault('templates', []).append(template)
store.setdefault('context', ContextList()).append(copy(context))
def encode_multipart(boundary, data):
"""
Encodes multipart POST data from a dictionary of form values.
The key will be used as the form data name; the value will be transmitted
as content. If the value is a file, the contents of the file will be sent
as an application/octet-stream; otherwise, str(value) will be sent.
"""
lines = []
to_bytes = lambda s: force_bytes(s, settings.DEFAULT_CHARSET)
# Not by any means perfect, but good enough for our purposes.
is_file = lambda thing: hasattr(thing, "read") and callable(thing.read)
# Each bit of the multipart form data could be either a form value or a
# file, or a *list* of form values and/or files. Remember that HTTP field
# names can be duplicated!
for (key, value) in data.items():
if is_file(value):
lines.extend(encode_file(boundary, key, value))
elif not isinstance(value, six.string_types) and is_iterable(value):
for item in value:
if is_file(item):
lines.extend(encode_file(boundary, key, item))
else:
lines.extend(to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
item
])
else:
lines.extend(to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
value
])
lines.extend([
to_bytes('--%s--' % boundary),
b'',
])
return b'\r\n'.join(lines)
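# Illustrative sketch (assumes configured settings): list values produce
# one form-data part per item, all under the same field name.
def _example_encode_multipart():
    payload = encode_multipart(BOUNDARY, {'name': 'fred', 'tags': ['a', 'b']})
    assert payload.count(b'name="tags"') == 2
    return payload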
def encode_file(boundary, key, file):
to_bytes = lambda s: force_bytes(s, settings.DEFAULT_CHARSET)
filename = os.path.basename(file.name) if hasattr(file, 'name') else ''
if hasattr(file, 'content_type'):
content_type = file.content_type
elif filename:
content_type = mimetypes.guess_type(filename)[0]
else:
content_type = None
if content_type is None:
content_type = 'application/octet-stream'
if not filename:
filename = key
return [
to_bytes('--%s' % boundary),
to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"'
% (key, filename)),
to_bytes('Content-Type: %s' % content_type),
b'',
to_bytes(file.read())
]
class RequestFactory(object):
"""
Class that lets you create mock Request objects for use in testing.
Usage:
rf = RequestFactory()
get_request = rf.get('/hello/')
post_request = rf.post('/submit/', {'foo': 'bar'})
Once you have a request object you can pass it to any view function,
just as if that view had been hooked up using a URLconf.
"""
def __init__(self, **defaults):
self.defaults = defaults
self.cookies = SimpleCookie()
self.errors = BytesIO()
def _base_environ(self, **request):
"""
The base environment for a request.
"""
# This is a minimal valid WSGI environ dictionary, plus:
# - HTTP_COOKIE: for cookie support,
# - REMOTE_ADDR: often useful, see #8551.
# See http://www.python.org/dev/peps/pep-3333/#environ-variables
environ = {
'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
'PATH_INFO': str('/'),
'REMOTE_ADDR': str('127.0.0.1'),
'REQUEST_METHOD': str('GET'),
'SCRIPT_NAME': str(''),
'SERVER_NAME': str('testserver'),
'SERVER_PORT': str('80'),
'SERVER_PROTOCOL': str('HTTP/1.1'),
'wsgi.version': (1, 0),
'wsgi.url_scheme': str('http'),
'wsgi.input': FakePayload(b''),
'wsgi.errors': self.errors,
'wsgi.multiprocess': True,
'wsgi.multithread': False,
'wsgi.run_once': False,
}
environ.update(self.defaults)
environ.update(request)
return environ
def request(self, **request):
"Construct a generic request object."
return WSGIRequest(self._base_environ(**request))
def _encode_data(self, data, content_type):
if content_type is MULTIPART_CONTENT:
return encode_multipart(BOUNDARY, data)
else:
# Encode the content so that the byte representation is correct.
match = CONTENT_TYPE_RE.match(content_type)
if match:
charset = match.group(1)
else:
charset = settings.DEFAULT_CHARSET
return force_bytes(data, encoding=charset)
def _get_path(self, parsed):
path = force_str(parsed[2])
# If there are parameters, add them
if parsed[3]:
path += str(";") + force_str(parsed[3])
path = uri_to_iri(path).encode(UTF_8)
# Under Python 3, non-ASCII values in the WSGI environ are arbitrarily
# decoded with ISO-8859-1. We replicate this behavior here.
# Refs comment in `get_bytes_from_wsgi()`.
return path.decode(ISO_8859_1) if six.PY3 else path
def get(self, path, data=None, secure=False, **extra):
"Construct a GET request."
data = {} if data is None else data
r = {
'QUERY_STRING': urlencode(data, doseq=True),
}
r.update(extra)
return self.generic('GET', path, secure=secure, **r)
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
secure=False, **extra):
"Construct a POST request."
data = {} if data is None else data
post_data = self._encode_data(data, content_type)
return self.generic('POST', path, post_data, content_type,
secure=secure, **extra)
def head(self, path, data=None, secure=False, **extra):
"Construct a HEAD request."
data = {} if data is None else data
r = {
'QUERY_STRING': urlencode(data, doseq=True),
}
r.update(extra)
return self.generic('HEAD', path, secure=secure, **r)
def trace(self, path, secure=False, **extra):
"Construct a TRACE request."
return self.generic('TRACE', path, secure=secure, **extra)
def options(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct an OPTIONS request."
return self.generic('OPTIONS', path, data, content_type,
secure=secure, **extra)
def put(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a PUT request."
return self.generic('PUT', path, data, content_type,
secure=secure, **extra)
def patch(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a PATCH request."
return self.generic('PATCH', path, data, content_type,
secure=secure, **extra)
def delete(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a DELETE request."
return self.generic('DELETE', path, data, content_type,
secure=secure, **extra)
def generic(self, method, path, data='',
content_type='application/octet-stream', secure=False,
**extra):
"""Constructs an arbitrary HTTP request."""
parsed = urlparse(path)
data = force_bytes(data, settings.DEFAULT_CHARSET)
r = {
'PATH_INFO': self._get_path(parsed),
'REQUEST_METHOD': str(method),
'SERVER_PORT': str('443') if secure else str('80'),
'wsgi.url_scheme': str('https') if secure else str('http'),
}
if data:
r.update({
'CONTENT_LENGTH': len(data),
'CONTENT_TYPE': str(content_type),
'wsgi.input': FakePayload(data),
})
r.update(extra)
# If QUERY_STRING is absent or empty, we want to extract it from the URL.
if not r.get('QUERY_STRING'):
query_string = force_bytes(parsed[4])
# WSGI requires latin-1 encoded strings. See get_path_info().
if six.PY3:
query_string = query_string.decode('iso-8859-1')
r['QUERY_STRING'] = query_string
return self.request(**r)
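# A hedged sketch of the intended workflow; ``view`` stands for any plain
# view function, since no middleware or URL resolution is involved.
def _example_request_factory(view):
    rf = RequestFactory()
    request = rf.get('/hello/', {'q': 'test'})  # data becomes the query string
    response = view(request)  # the view is called like a regular function
    return response.status_code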
class Client(RequestFactory):
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(self, enforce_csrf_checks=False, **defaults):
super(Client, self).__init__(**defaults)
self.handler = ClientHandler(enforce_csrf_checks)
self.exc_info = None
def store_exc_info(self, **kwargs):
"""
Stores exceptions when they are generated by a view.
"""
self.exc_info = sys.exc_info()
def _session(self):
"""
Obtains the current session variables.
"""
if apps.is_installed('django.contrib.sessions'):
engine = import_module(settings.SESSION_ENGINE)
cookie = self.cookies.get(settings.SESSION_COOKIE_NAME)
if cookie:
return engine.SessionStore(cookie.value)
else:
s = engine.SessionStore()
s.save()
self.cookies[settings.SESSION_COOKIE_NAME] = s.session_key
return s
return {}
session = property(_session)
def request(self, **request):
"""
The master request method. Composes the environment dictionary
and passes to the handler, returning the result of the handler.
Assumes defaults for the query environment, which can be overridden
using the arguments to the request.
"""
environ = self._base_environ(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = curry(store_rendered_templates, data)
signal_uid = "template-render-%s" % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
exception_uid = "request-exception-%s" % id(request)
got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
try:
try:
response = self.handler(environ)
except TemplateDoesNotExist as e:
# If the view raises an exception, Django will attempt to show
# the 500.html template. If that template is not available,
# we should ignore the error in favor of re-raising the
# underlying exception that caused the 500 error. Any other
# template found to be missing during view error handling
# should be reported as-is.
if e.args != ('500.html',):
raise
# Look for a signalled exception, clear the current context
# exception data, then re-raise the signalled exception.
# Also make sure that the signalled exception is cleared from
# the local cache!
if self.exc_info:
exc_info = self.exc_info
self.exc_info = None
six.reraise(*exc_info)
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get("templates", [])
response.context = data.get("context")
response.json = curry(self._parse_json, response)
# Attach the ResolverMatch instance to the response
response.resolver_match = SimpleLazyObject(
lambda: urlresolvers.resolve(request['PATH_INFO']))
# Flatten a single context. Not really necessary anymore thanks to
# the __getattr__ flattening in ContextList, but has some edge-case
# backwards-compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid=exception_uid)
def get(self, path, data=None, follow=False, secure=False, **extra):
"""
Requests a response from the server using GET.
"""
response = super(Client, self).get(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
follow=False, secure=False, **extra):
"""
Requests a response from the server using POST.
"""
response = super(Client, self).post(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def head(self, path, data=None, follow=False, secure=False, **extra):
"""
Request a response from the server using HEAD.
"""
response = super(Client, self).head(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def options(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Request a response from the server using OPTIONS.
"""
response = super(Client, self).options(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def put(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a resource to the server using PUT.
"""
response = super(Client, self).put(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def patch(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a resource to the server using PATCH.
"""
response = super(Client, self).patch(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def delete(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a DELETE request to the server.
"""
response = super(Client, self).delete(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def trace(self, path, data='', follow=False, secure=False, **extra):
"""
Send a TRACE request to the server.
"""
response = super(Client, self).trace(path, data=data, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def login(self, **credentials):
"""
        Sets the Client to appear as if it has successfully logged into a site.
Returns True if login is possible; False if the provided credentials
are incorrect, or the user is inactive, or if the sessions framework is
not available.
"""
from django.contrib.auth import authenticate
user = authenticate(**credentials)
if (user and user.is_active and
apps.is_installed('django.contrib.sessions')):
self._login(user)
return True
else:
return False
def force_login(self, user, backend=None):
if backend is None:
backend = settings.AUTHENTICATION_BACKENDS[0]
user.backend = backend
self._login(user)
def _login(self, user):
from django.contrib.auth import login
engine = import_module(settings.SESSION_ENGINE)
# Create a fake request to store login details.
request = HttpRequest()
if self.session:
request.session = self.session
else:
request.session = engine.SessionStore()
login(request, user)
# Save the session values.
request.session.save()
# Set the cookie to represent the session.
session_cookie = settings.SESSION_COOKIE_NAME
self.cookies[session_cookie] = request.session.session_key
cookie_data = {
'max-age': None,
'path': '/',
'domain': settings.SESSION_COOKIE_DOMAIN,
'secure': settings.SESSION_COOKIE_SECURE or None,
'expires': None,
}
self.cookies[session_cookie].update(cookie_data)
def logout(self):
"""
Removes the authenticated user's cookies and session object.
Causes the authenticated user to be logged out.
"""
from django.contrib.auth import get_user, logout
request = HttpRequest()
engine = import_module(settings.SESSION_ENGINE)
if self.session:
request.session = self.session
request.user = get_user(request)
else:
request.session = engine.SessionStore()
logout(request)
self.cookies = SimpleCookie()
def _parse_json(self, response, **extra):
if 'application/json' not in response.get('Content-Type'):
raise ValueError(
'Content-Type header is "{0}", not "application/json"'
.format(response.get('Content-Type'))
)
return json.loads(response.content.decode(), **extra)
def _handle_redirects(self, response, **extra):
"Follows any redirects by requesting responses from the server using GET."
response.redirect_chain = []
while response.status_code in (301, 302, 303, 307):
response_url = response.url
redirect_chain = response.redirect_chain
redirect_chain.append((response_url, response.status_code))
url = urlsplit(response_url)
if url.scheme:
extra['wsgi.url_scheme'] = url.scheme
if url.hostname:
extra['SERVER_NAME'] = url.hostname
if url.port:
extra['SERVER_PORT'] = str(url.port)
response = self.get(url.path, QueryDict(url.query), follow=False, **extra)
response.redirect_chain = redirect_chain
if redirect_chain[-1] in redirect_chain[:-1]:
# Check that we're not redirecting to somewhere we've already
# been to, to prevent loops.
raise RedirectCycleError("Redirect loop detected.", last_response=response)
if len(redirect_chain) > 20:
# Such a lengthy chain likely also means a loop, but one with
# a growing path, changing view, or changing query argument;
# 20 is the value of "network.http.redirection-limit" from Firefox.
raise RedirectCycleError("Too many redirects.", last_response=response)
return response
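# Hypothetical usage sketch: assumes a URLconf that serves '/login/' and
# responds with a redirect; follow=True records each hop.
def _example_client_follow():
    client = Client()
    response = client.get('/login/', follow=True)
    return response.status_code, response.redirect_chain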
|
bsd-3-clause
|
KevinGoodsell/sympy
|
sympy/physics/secondquant.py
|
4
|
14763
|
"""
Second quantization operators and states for bosons.
This follows the formulation of Fetter and Walecka, "Quantum Theory
of Many-Particle Systems."
"""
from sympy import (
Basic, Function, var, Mul, sympify, Integer, Add, sqrt,
Number, Matrix, zeros, Pow, I
)
from sympy.utilities.decorator import deprecated
__all__ = [
'Dagger',
'KroneckerDelta',
'BosonicOperator',
'AnnihilateBoson',
'CreateBoson',
'FockState',
'FockStateBra',
'FockStateKet',
'Bra',
'Ket',
'B',
'Bd',
'apply_operators',
'InnerProduct',
'BosonicBasis',
'VarBosonicBasis',
'FixedBosonicBasis',
'commutator',
'matrix_rep'
]
class Dagger(Basic):
"""
Hermitian conjugate of creation/annihilation operators.
"""
def __new__(cls, arg):
arg = sympify(arg)
r = cls.eval(arg)
if isinstance(r, Basic):
return r
obj = Basic.__new__(cls, arg)
return obj
@classmethod
@deprecated
def canonize(cls, arg):
return cls.eval(arg)
@classmethod
def eval(cls, arg):
try:
d = arg._dagger_()
        except AttributeError:
if isinstance(arg, Basic):
if arg.is_Add:
return Add(*tuple(map(Dagger, arg.args)))
if arg.is_Mul:
return Mul(*tuple(map(Dagger, reversed(arg.args))))
if arg.is_Number:
return arg
if arg.is_Pow:
return Pow(Dagger(arg.args[0]),arg.args[1])
if arg == I:
return -arg
else:
return None
else:
return d
def _eval_subs(self, old, new):
r = Dagger(self.args[0].subs(old, new))
return r
def _dagger_(self):
return self.args[0]
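# A small illustration (not part of the original file): Dagger swaps the
# ladder operators and reverses the order of a product.
def _example_dagger():
    assert Dagger(B(0)) == Bd(0)
    assert Dagger(B(0)*B(1)) == Bd(1)*Bd(0)  # Mul arguments are reversed
    assert Dagger(I) == -I  # the imaginary unit is conjugated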
class KroneckerDelta(Basic):
"""
Discrete delta function.
"""
def __new__(cls, i, j):
i, j = map(sympify, (i, j))
r = cls.eval(i, j)
if isinstance(r, Basic):
return r
obj = Basic.__new__(cls, i, j, commutative=True)
return obj
@classmethod
@deprecated
def canonize(cls, i, j):
return cls.eval(i, j)
@classmethod
def eval(cls, i, j):
diff = i-j
if diff == 0:
return Integer(1)
elif diff.is_number:
return Integer(0)
def _eval_subs(self, old, new):
r = KroneckerDelta(self.args[0].subs(old, new), self.args[1].subs(old, new))
return r
    def _dagger_(self):
        return self
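# Quick sketch: the delta evaluates for concrete indices and stays
# unevaluated for symbolic ones.
def _example_kronecker_delta():
    from sympy import Symbol
    i = Symbol('i')
    assert KroneckerDelta(2, 2) == 1
    assert KroneckerDelta(2, 3) == 0
    assert isinstance(KroneckerDelta(i, 2), KroneckerDelta)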
class BosonicOperator(Basic):
"""
Base class for bosonic operators.
"""
op_symbol = 'bo'
def __new__(cls, k):
obj = Basic.__new__(cls, sympify(k), commutative=False)
return obj
def _eval_subs(self, old, new):
r = self.__class__(self.args[0].subs(old, new))
return r
@property
def state(self):
return self.args[0]
@property
def is_symbolic(self):
if self.state.is_Integer:
return False
else:
return True
def __repr__(self):
return "%s(%r)" % (self.op_symbol, self.state)
def __str__(self):
return self.__repr__()
def apply_operator(self, state):
raise NotImplementedError('implement apply_operator in a subclass')
class AnnihilateBoson(BosonicOperator):
"""
Bosonic annihilation operator
"""
op_symbol = 'b'
def _dagger_(self):
return CreateBoson(self.state)
def apply_operator(self, state):
if not self.is_symbolic and isinstance(state, FockStateKet):
element = self.state
amp = sqrt(state[element])
return amp*state.down(element)
else:
return Mul(self,state)
class CreateBoson(BosonicOperator):
"""
Bosonic creation operator
"""
op_symbol = 'b+'
def _dagger_(self):
return AnnihilateBoson(self.state)
def apply_operator(self, state):
if not self.is_symbolic and isinstance(state, FockStateKet):
element = self.state
amp = sqrt(state[element] + 1)
return amp*state.up(element)
else:
return Mul(self,state)
B = AnnihilateBoson
Bd = CreateBoson
class FockState(Basic):
"""
Many particle Fock state with a sequence of occupation numbers.
Anywhere you can have a FockState, you can also have Integer(0).
All code must check for this!
"""
def __new__(cls, occupations):
o = map(sympify, occupations)
obj = Basic.__new__(cls, tuple(o), commutative=False)
return obj
def _eval_subs(self, old, new):
r = self.__class__([o.subs(old, new) for o in self.args[0]])
return r
def up(self, i):
i = int(i)
new_occs = list(self.args[0])
new_occs[i] = new_occs[i]+Integer(1)
return self.__class__(new_occs)
def down(self, i):
i = int(i)
new_occs = list(self.args[0])
if new_occs[i]==Integer(0):
return Integer(0)
else:
new_occs[i] = new_occs[i]-Integer(1)
return self.__class__(new_occs)
def __getitem__(self, i):
i = int(i)
return self.args[0][i]
def __repr__(self):
return ("FockState(%r)") % (self.args)
def __str__(self):
return self.__repr__()
def __len__(self):
return len(self.args[0])
class FockStateKet(FockState):
def _dagger_(self):
return FockStateBra(*self.args)
def __repr__(self):
return ("|%r>") % (self.args)
class FockStateBra(FockState):
def _dagger_(self):
return FockStateKet(*self.args)
def __repr__(self):
return ("<%r|") % (self.args)
def __mul__(self, other):
if isinstance(other, FockStateKet):
return InnerProduct(self, other)
else:
return Basic.__mul__(self, other)
Bra = FockStateBra
Ket = FockStateKet
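# Sketch of the ladder action on a concrete ket: creation multiplies by
# sqrt(n+1) and raises the occupation number, annihilation by sqrt(n)
# and lowers it.
def _example_ladder_operators():
    ket = FockStateKet([2])
    assert Bd(0).apply_operator(ket) == sqrt(3)*FockStateKet([3])
    assert B(0).apply_operator(ket) == sqrt(2)*FockStateKet([1])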
def split_commutative_parts(m):
c_part = [p for p in m.args if p.is_commutative]
nc_part = [p for p in m.args if not p.is_commutative]
return c_part, nc_part
def apply_Mul(m):
"""
Take a Mul instance with operators and apply them to states.
This method applies all operators with integer state labels
to the actual states. For symbolic state labels, nothing is done.
When inner products of FockStates are encountered (like <a|b>),
    they are converted to instances of InnerProduct.
    This does not currently work on double inner products like
    <a|b><c|d>.
If the argument is not a Mul, it is simply returned as is.
"""
if not isinstance(m, Mul):
return m
c_part, nc_part = split_commutative_parts(m)
n_nc = len(nc_part)
if n_nc == 0 or n_nc == 1:
return m
else:
last = nc_part[-1]
next_to_last = nc_part[-2]
if isinstance(last, FockStateKet):
if isinstance(next_to_last, BosonicOperator):
if next_to_last.is_symbolic:
return m
else:
result = next_to_last.apply_operator(last)
if result == 0:
return 0
else:
return apply_Mul(Mul(*(c_part+nc_part[:-2]+[result])))
elif isinstance(next_to_last, Pow):
if isinstance(next_to_last.base, BosonicOperator) and \
next_to_last.exp.is_Integer:
if next_to_last.base.is_symbolic:
return m
else:
result = last
for i in range(next_to_last.exp):
result = next_to_last.base.apply_operator(result)
if result == 0: break
if result == 0:
return 0
else:
return apply_Mul(Mul(*(c_part+nc_part[:-2]+[result])))
else:
return m
elif isinstance(next_to_last, FockStateBra):
result = InnerProduct(next_to_last, last)
if result == 0:
return 0
else:
return apply_Mul(Mul(*(c_part+nc_part[:-2]+[result])))
else:
return m
else:
return m
def apply_operators(e):
"""
Take a sympy expression with operators and states and apply the operators.
"""
e = e.expand()
muls = e.atoms(Mul)
subs_list = [(m,apply_Mul(m)) for m in iter(muls)]
return e.subs(subs_list)
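# Minimal sketch: expand the expression and let every concrete operator
# act on the ket to its right.
def _example_apply_operators():
    assert apply_operators(Bd(0)*FockStateKet([0])) == FockStateKet([1])
    assert apply_operators(B(0)*FockStateKet([0])) == 0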
class InnerProduct(Basic):
"""
An unevaluated inner product between a bra and ket.
    Currently this class just reduces things to a product of
KroneckerDeltas. In the future, we could introduce abstract
states like |a> and |b>, and leave the inner product unevaluated as
<a|b>.
"""
def __new__(cls, bra, ket):
assert isinstance(bra, FockStateBra), 'must be a bra'
        assert isinstance(ket, FockStateKet), 'must be a ket'
r = cls.eval(bra, ket)
if isinstance(r, Basic):
return r
obj = Basic.__new__(cls, *(bra, ket), **dict(commutative=True))
return obj
@classmethod
@deprecated
def canonize(cls, bra, ket):
return cls.eval(bra, ket)
@classmethod
def eval(cls, bra, ket):
result = Integer(1)
for i,j in zip(bra.args[0], ket.args[0]):
result *= KroneckerDelta(i,j)
if result == 0: break
return result
@property
def bra(self):
return self.args[0]
@property
def ket(self):
return self.args[1]
def _eval_subs(self, old, new):
r = self.__class__(self.bra.subs(old,new), self.ket.subs(old,new))
return r
def __repr__(self):
sbra = repr(self.bra)
sket = repr(self.ket)
return "%s|%s" % (sbra[:-1], sket[1:])
def __str__(self):
return self.__repr__()
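# Sketch: inner products of concrete Fock states collapse to 0 or 1
# through products of KroneckerDeltas.
def _example_inner_product():
    assert InnerProduct(Bra([1, 0]), Ket([1, 0])) == 1
    assert InnerProduct(Bra([1, 0]), Ket([0, 1])) == 0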
def matrix_rep(op, basis):
"""
Find the representation of an operator in a basis.
"""
a = zeros((len(basis), len(basis)))
for i in range(len(basis)):
for j in range(len(basis)):
a[i,j] = apply_operators(Dagger(basis[i])*op*basis[j])
return a
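# Hypothetical check: the number operator b+(0)*b(0) is diagonal in a
# variable particle number basis, with occupation numbers on the diagonal.
def _example_matrix_rep():
    basis = VarBosonicBasis(3)  # kets |0>, |1>, |2>
    return matrix_rep(Bd(0)*B(0), basis)  # -> diag(0, 1, 2)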
class BosonicBasis(object):
"""
Base class for a basis set of bosonic Fock states.
"""
pass
class VarBosonicBasis(object):
"""
A single state, variable particle number basis set.
"""
def __init__(self, n_max):
self.n_max = n_max
self._build_states()
def _build_states(self):
self.basis = []
for i in range(self.n_max):
self.basis.append(FockStateKet([i]))
self.n_basis = len(self.basis)
def index(self, state):
return self.basis.index(state)
def state(self, i):
return self.basis[i]
def __getitem__(self, i):
return self.state(i)
def __len__(self):
return len(self.basis)
def __repr__(self):
return repr(self.basis)
class FixedBosonicBasis(BosonicBasis):
"""
Fixed particle number basis set.
"""
def __init__(self, n_particles, n_levels):
self.n_particles = n_particles
self.n_levels = n_levels
self._build_particle_locations()
self._build_states()
def _build_particle_locations(self):
tup = ["i"+str(i) for i in range(self.n_particles)]
first_loop = "for i0 in range(%i)" % self.n_levels
other_loops = ''
for i in range(len(tup)-1):
temp = "for %s in range(%s + 1) " % (tup[i+1],tup[i])
other_loops = other_loops + temp
var = "("
for i in tup[:-1]:
var = var + i + ","
var = var + tup[-1] + ")"
cmd = "result = [%s %s %s]" % (var, first_loop, other_loops)
exec cmd
if self.n_particles==1:
result = [(item,) for item in result]
self.particle_locations = result
def _build_states(self):
self.basis = []
for tuple_of_indices in self.particle_locations:
occ_numbers = self.n_levels*[0]
for level in tuple_of_indices:
occ_numbers[level] += 1
self.basis.append(FockStateKet(occ_numbers))
self.n_basis = len(self.basis)
def index(self, state):
return self.basis.index(state)
def state(self, i):
return self.basis[i]
def __getitem__(self, i):
return self.state(i)
def __len__(self):
return len(self.basis)
def __repr__(self):
return repr(self.basis)
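# Sketch: two particles over two levels give three basis kets,
# |2,0>, |1,1> and |0,2>.
def _example_fixed_basis():
    basis = FixedBosonicBasis(2, 2)
    assert len(basis) == 3
    assert basis.index(basis[0]) == 0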
# def move(e, i, d):
# """
# Takes the expression "e" and moves the operator at the position i by "d".
# """
# if e.is_Mul:
# if d == 1:
# # e = a*b*c*d
# a = Mul(*e.args[:i])
# b = e.args[i]
# c = e.args[i+1]
# d = Mul(*e.args[i+2:])
# if isinstance(b, Dagger) and not isinstance(c, Dagger):
# i, j = b.args[0].args[0], c.args[0]
# return a*c*b*d-a*KroneckerDelta(i, j)*d
# elif not isinstance(b, Dagger) and isinstance(c, Dagger):
# i, j = b.args[0], c.args[0].args[0]
# return a*c*b*d-a*KroneckerDelta(i, j)*d
# else:
# return a*c*b*d
# elif d == -1:
# # e = a*b*c*d
# a = Mul(*e.args[:i-1])
# b = e.args[i-1]
# c = e.args[i]
# d = Mul(*e.args[i+1:])
# if isinstance(b, Dagger) and not isinstance(c, Dagger):
# i, j = b.args[0].args[0], c.args[0]
# return a*c*b*d-a*KroneckerDelta(i, j)*d
# elif not isinstance(b, Dagger) and isinstance(c, Dagger):
# i, j = b.args[0], c.args[0].args[0]
# return a*c*b*d-a*KroneckerDelta(i, j)*d
# else:
# return a*c*b*d
# else:
# if d > 1:
# while d >= 1:
# e = move(e, i, 1)
# d -= 1
# i += 1
# return e
# elif d < -1:
# while d <= -1:
# e = move(e, i, -1)
# d += 1
# i -= 1
# return e
# elif isinstance(e, Add):
# a, b = e.as_two_terms()
# return move(a, i, d) + move(b, i, d)
# raise NotImplementedError()
def commutator(a, b):
"""
Return the commutator: [a, b] = a*b - b*a
"""
return a*b - b*a
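# Illustration: the canonical commutator [b, b+] acts as the identity on
# any concrete Fock state.
def _example_commutator():
    c = commutator(B(0), Bd(0))
    assert apply_operators(c*FockStateKet([2])) == FockStateKet([2])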
|
bsd-3-clause
|
zhhf/charging
|
charging/db/migration/alembic_migrations/versions/338d7508968c_vpnaas_peer_address_.py
|
11
|
1656
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""vpnaas peer_address size increase
Revision ID: 338d7508968c
Revises: 4a666eb208c2
Create Date: 2013-09-16 11:31:39.410189
"""
# revision identifiers, used by Alembic.
revision = '338d7508968c'
down_revision = '4a666eb208c2'
# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
'neutron.services.vpn.plugin.VPNDriverPlugin'
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
op.alter_column('ipsec_site_connections', 'peer_address',
type_=sa.String(255), existing_type=sa.String(64))
def downgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
op.alter_column('ipsec_site_connections', 'peer_address',
type_=sa.String(64), existing_type=sa.String(255))
|
apache-2.0
|
nexiles/odoo
|
addons/mail/tests/__init__.py
|
261
|
1173
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_mail_group, test_mail_message, test_mail_features, test_mail_gateway, test_message_read, test_invite
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
tmpgit/intellij-community
|
python/lib/Lib/site-packages/django/contrib/auth/management/commands/createsuperuser.py
|
122
|
5743
|
"""
Management utility to create superusers.
"""
import getpass
import re
import sys
from optparse import make_option
from django.contrib.auth.models import User
from django.core import exceptions
from django.core.management.base import BaseCommand, CommandError
from django.utils.translation import ugettext as _
RE_VALID_USERNAME = re.compile(r'[\w.@+-]+$')
EMAIL_RE = re.compile(
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"' # quoted-string
r')@(?:[A-Z0-9-]+\.)+[A-Z]{2,6}$', re.IGNORECASE) # domain
def is_valid_email(value):
if not EMAIL_RE.search(value):
raise exceptions.ValidationError(_('Enter a valid e-mail address.'))
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--username', dest='username', default=None,
help='Specifies the username for the superuser.'),
make_option('--email', dest='email', default=None,
help='Specifies the email address for the superuser.'),
make_option('--noinput', action='store_false', dest='interactive', default=True,
help=('Tells Django to NOT prompt the user for input of any kind. '
'You must use --username and --email with --noinput, and '
'superusers created with --noinput will not be able to log '
'in until they\'re given a valid password.')),
)
help = 'Used to create a superuser.'
def handle(self, *args, **options):
username = options.get('username', None)
email = options.get('email', None)
interactive = options.get('interactive')
verbosity = int(options.get('verbosity', 1))
# Do quick and dirty validation if --noinput
if not interactive:
if not username or not email:
raise CommandError("You must use --username and --email with --noinput.")
if not RE_VALID_USERNAME.match(username):
raise CommandError("Invalid username. Use only letters, digits, and underscores")
try:
is_valid_email(email)
except exceptions.ValidationError:
raise CommandError("Invalid email address.")
password = ''
# Try to determine the current system user's username to use as a default.
try:
default_username = getpass.getuser().replace(' ', '').lower()
except (ImportError, KeyError):
# KeyError will be raised by os.getpwuid() (called by getuser())
# if there is no corresponding entry in the /etc/passwd file
# (a very restricted chroot environment, for example).
default_username = ''
# Determine whether the default username is taken, so we don't display
# it as an option.
if default_username:
try:
User.objects.get(username=default_username)
except User.DoesNotExist:
pass
else:
default_username = ''
# Prompt for username/email/password. Enclose this whole thing in a
# try/except to trap for a keyboard interrupt and exit gracefully.
if interactive:
try:
# Get a username
while 1:
if not username:
input_msg = 'Username'
if default_username:
input_msg += ' (Leave blank to use %r)' % default_username
username = raw_input(input_msg + ': ')
if default_username and username == '':
username = default_username
if not RE_VALID_USERNAME.match(username):
sys.stderr.write("Error: That username is invalid. Use only letters, digits and underscores.\n")
username = None
continue
try:
User.objects.get(username=username)
except User.DoesNotExist:
break
else:
sys.stderr.write("Error: That username is already taken.\n")
username = None
# Get an email
while 1:
if not email:
email = raw_input('E-mail address: ')
try:
is_valid_email(email)
except exceptions.ValidationError:
sys.stderr.write("Error: That e-mail address is invalid.\n")
email = None
else:
break
# Get a password
while 1:
if not password:
password = getpass.getpass()
password2 = getpass.getpass('Password (again): ')
if password != password2:
sys.stderr.write("Error: Your passwords didn't match.\n")
password = None
continue
if password.strip() == '':
sys.stderr.write("Error: Blank passwords aren't allowed.\n")
password = None
continue
break
except KeyboardInterrupt:
sys.stderr.write("\nOperation cancelled.\n")
sys.exit(1)
User.objects.create_superuser(username, email, password)
if verbosity >= 1:
self.stdout.write("Superuser created successfully.\n")
|
apache-2.0
|
vovanec/supervisor_checks
|
supervisor_checks/bin/xmlrpc_check.py
|
1
|
3794
|
#! /usr/bin/env python3
"""Example configuration:
[eventlistener:example_check]
command=/usr/local/bin/supervisor_xmlrpc_check -g example_service -n example_check -u /ping -r 3 -p 8080
events=TICK_60
"""
import argparse
import sys
from supervisor_checks import check_runner
from supervisor_checks.check_modules import xmlrpc
__author__ = '[email protected]'
def _make_argument_parser():
"""Create the option parser.
"""
parser = argparse.ArgumentParser(
description='Run XML RPC check program.')
parser.add_argument('-n', '--check-name', dest='check_name',
type=str, required=True, default=None,
help='Health check name.')
parser.add_argument('-g', '--process-group', dest='process_group',
type=str, default=None,
help='Supervisor process group name.')
parser.add_argument('-N', '--process-name', dest='process_name',
type=str, default=None,
help='Supervisor process name. Process group argument is ignored if this ' +
'is passed in')
parser.add_argument('-u', '--url', dest='url', type=str,
help='XML RPC check url', required=False, default=None)
parser.add_argument('-s', '--socket-path', dest='sock_path', type=str,
help='Full path to XML RPC server local socket',
required=False, default=None)
parser.add_argument('-S', '--socket-dir', dest='sock_dir', type=str,
help='Path to XML RPC server socket directory. Socket '
'name will be constructed using process name: '
'<process_name>.sock.',
required=False, default=None)
parser.add_argument('-m', '--method', dest='method', type=str,
help='XML RPC method name. Default is %s' % (
xmlrpc.DEFAULT_METHOD,), required=False,
default=xmlrpc.DEFAULT_METHOD)
parser.add_argument('-U', '--username', dest='username', type=str,
help='XMLRPC check username', required=False,
default=None)
parser.add_argument('-P', '--password', dest='password', type=str,
help='XMLRPC check password', required=False,
default=None)
parser.add_argument(
'-p', '--port', dest='port', type=str,
default=None, required=False,
help='Port to query. Can be integer or regular expression which '
'will be used to extract port from a process name.')
parser.add_argument(
'-r', '--num-retries', dest='num_retries', type=int,
default=xmlrpc.DEFAULT_RETRIES, required=False,
help='Connection retries. Default: %s' % (xmlrpc.DEFAULT_RETRIES,))
return parser
def main():
arg_parser = _make_argument_parser()
args = arg_parser.parse_args()
checks_config = [(xmlrpc.XMLRPCCheck, {'url': args.url,
'sock_path': args.sock_path,
'sock_dir': args.sock_dir,
'num_retries': args.num_retries,
'port': args.port,
'method': args.method,
'username': args.username,
'password': args.password,
})]
return check_runner.CheckRunner(
args.check_name, args.process_group, args.process_name, checks_config).run()
if __name__ == '__main__':
sys.exit(main())
|
mit
|
TeoV/cgrates
|
data/scripts/migrator/dbsmerge_redis.py
|
3
|
2673
|
#!/usr/bin/python
# depends:
# ^ redis # install via easy_install redis
# asserts:
# ^ destination redis is not password protected when connected from source
# redis server (https://github.com/antirez/redis/pull/2507)
# behaviour:
# ^ the script will not overwrite keys on the destination server/database
import redis
from_host = '127.0.0.1'
from_port = 6379
from_db = 11
from_pass = ''
to_host = '127.0.0.1'
to_port = 6379
to_db = 10
to_pass = '' # Not used
keymask = '*'
timeout = 2000
from_redis = redis.Redis(
host=from_host,
port=from_port,
password=from_pass,
db=from_db
)
to_redis = redis.Redis(
host=to_host,
port=to_port,
db=to_db
)
to_keys = to_redis.keys(keymask)
from_keys = from_redis.keys(keymask)
print('Found %d keys on source.' % len(from_keys))
print('Found %d keys on destination.' % len(to_keys))
# keys found
if len(from_keys) > 0:
# same server
if from_host == to_host and from_port == to_port:
print('Migrating on same server...')
i = 0
for key in from_keys:
i += 1
print('Moving key %s (%d of %d)...' % (key, i, len(from_keys)))
from_redis.execute_command('MOVE', key, to_db)
# different servers
else:
print('Migrating between different servers...')
i = 0
for key in from_keys:
i += 1
            print('Moving key %s (%d of %d)...' % (key, i, len(from_keys)))
try:
from_redis.execute_command(
'MIGRATE',
to_host,
to_port,
key,
to_db,
timeout
)
except redis.exceptions.ResponseError as e:
if 'ERR Target key name is busy' not in str(e):
raise e
print('Done.')
# done
from_keys_after = from_redis.keys(keymask)
to_keys_after = to_redis.keys(keymask)
print('There are now %d keys on source.' % len(from_keys_after))
print('There are now %d keys on destination.' % len(to_keys_after))
print('%d keys were moved' % (len(to_keys_after) - len(to_keys)))
print('Migration complete.')
# no keys found
else:
print('No keys with keymask %s found in source database' % keymask)
|
gpl-3.0
|
sportorg/pysport
|
sportorg/modules/live/live.py
|
1
|
1390
|
from functools import partial
from threading import Thread
import requests
from sportorg.models.memory import race
from sportorg.modules.live import orgeo
class LiveClient:
@staticmethod
def is_enabled():
obj = race()
live_enabled = obj.get_setting('live_enabled', False)
urls = obj.get_setting('live_urls', [])
return live_enabled and urls
@staticmethod
def get_urls():
obj = race()
urls = obj.get_setting('live_urls', [])
return urls
def send(self, data):
if not self.is_enabled():
return
if not isinstance(data, list):
data = [data]
items = [item.to_dict() for item in data]
urls = self.get_urls()
race_data = race().to_dict()
for url in urls:
func = partial(orgeo.create, requests, url, items, race_data)
Thread(target=func, name='LiveThread').start()
def delete(self, data):
if not self.is_enabled():
return
if not isinstance(data, list):
data = [data]
items = [item.to_dict() for item in data]
urls = self.get_urls()
race_data = race().to_dict()
for url in urls:
func = partial(orgeo.delete, requests, url, items, race_data)
Thread(target=func, name='LiveThread').start()
live_client = LiveClient()
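# Hypothetical usage sketch: ``result`` stands for any object exposing a
# to_dict() method, like the memory-model objects used elsewhere in sportorg.
def _example_send(result):
    live_client.send([result])  # no-op unless live_enabled and live_urls are set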
|
gpl-3.0
|