# --- podhmo/boto | boto/route53/__init__.py | license: mit ---

# Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# this is here for backward compatibility
# originally, the Route53Connection class was defined here
from boto.route53.connection import Route53Connection
from boto.regioninfo import RegionInfo, get_regions
class Route53RegionInfo(RegionInfo):
def connect(self, **kw_params):
"""
        Connect to this Region's endpoint. Returns a connection
        object pointing to the endpoint associated with this region.
You may pass any of the arguments accepted by the connection
class's constructor as keyword arguments and they will be
passed along to the connection object.
:rtype: Connection object
        :return: The connection to this region's endpoint
"""
if self.connection_cls:
return self.connection_cls(host=self.endpoint, **kw_params)
def regions():
"""
Get all available regions for the Route53 service.
:rtype: list
:return: A list of :class:`boto.regioninfo.RegionInfo` instances
"""
regions = get_regions(
'route53',
region_cls=Route53RegionInfo,
connection_cls=Route53Connection
)
# For historical reasons, we had a "universal" endpoint as well.
regions.append(
Route53RegionInfo(
name='universal',
endpoint='route53.amazonaws.com',
connection_cls=Route53Connection
)
)
return regions
def connect_to_region(region_name, **kw_params):
"""
Given a valid region name, return a
:class:`boto.route53.connection.Route53Connection`.
:type: str
:param region_name: The name of the region to connect to.
:rtype: :class:`boto.route53.connection.Route53Connection` or ``None``
:return: A connection to the given region, or None if an invalid region
name is given
"""
for region in regions():
if region.name == region_name:
return region.connect(**kw_params)
return None
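
if __name__ == '__main__':
    # Editor's sketch (not part of boto): a quick exercise of the helpers
    # above. Listing regions works offline; actually constructing a
    # connection assumes AWS credentials are configured in the environment.
    print([region.name for region in regions()])  # includes 'universal'
    conn = connect_to_region('universal')
    if conn is not None:
        # conn is a Route53Connection bound to route53.amazonaws.com
        print(conn)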
# --- JCBarahona/edX | lms/djangoapps/shoppingcart/migrations/0009_auto__del_coupons__add_courseregistrationcode__add_coupon__chg_field_c.py | license: agpl-3.0 ---

# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting model 'Coupons'
db.delete_table('shoppingcart_coupons')
# Adding model 'CourseRegistrationCode'
db.create_table('shoppingcart_courseregistrationcode', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('code', self.gf('django.db.models.fields.CharField')(max_length=32, db_index=True)),
('course_id', self.gf('xmodule_django.models.CourseKeyField')(max_length=255, db_index=True)),
('transaction_group_name', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=255, null=True, blank=True)),
('created_by', self.gf('django.db.models.fields.related.ForeignKey')(related_name='created_by_user', to=orm['auth.User'])),
('created_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 7, 1, 0, 0))),
('redeemed_by', self.gf('django.db.models.fields.related.ForeignKey')(related_name='redeemed_by_user', null=True, to=orm['auth.User'])),
('redeemed_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 7, 1, 0, 0), null=True)),
))
db.send_create_signal('shoppingcart', ['CourseRegistrationCode'])
# Adding model 'Coupon'
db.create_table('shoppingcart_coupon', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('code', self.gf('django.db.models.fields.CharField')(max_length=32, db_index=True)),
('description', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('course_id', self.gf('xmodule_django.models.CourseKeyField')(max_length=255)),
('percentage_discount', self.gf('django.db.models.fields.IntegerField')(default=0)),
('created_by', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('created_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 7, 1, 0, 0))),
('is_active', self.gf('django.db.models.fields.BooleanField')(default=True)),
))
db.send_create_signal('shoppingcart', ['Coupon'])
# Changing field 'CouponRedemption.coupon'
db.alter_column('shoppingcart_couponredemption', 'coupon_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['shoppingcart.Coupon']))
# Deleting field 'OrderItem.discount_price'
db.delete_column('shoppingcart_orderitem', 'discount_price')
# Adding field 'OrderItem.list_price'
db.add_column('shoppingcart_orderitem', 'list_price',
self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=30, decimal_places=2),
keep_default=False)
def backwards(self, orm):
# Adding model 'Coupons'
db.create_table('shoppingcart_coupons', (
('code', self.gf('django.db.models.fields.CharField')(max_length=32, db_index=True)),
('percentage_discount', self.gf('django.db.models.fields.IntegerField')(default=0)),
('description', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('course_id', self.gf('xmodule_django.models.CourseKeyField')(max_length=255)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 6, 24, 0, 0))),
('is_active', self.gf('django.db.models.fields.BooleanField')(default=True)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_by', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
))
db.send_create_signal('shoppingcart', ['Coupons'])
# Deleting model 'CourseRegistrationCode'
db.delete_table('shoppingcart_courseregistrationcode')
# Deleting model 'Coupon'
db.delete_table('shoppingcart_coupon')
# Changing field 'CouponRedemption.coupon'
db.alter_column('shoppingcart_couponredemption', 'coupon_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['shoppingcart.Coupons']))
# Adding field 'OrderItem.discount_price'
db.add_column('shoppingcart_orderitem', 'discount_price',
self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=30, decimal_places=2),
keep_default=False)
# Deleting field 'OrderItem.list_price'
db.delete_column('shoppingcart_orderitem', 'list_price')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'shoppingcart.certificateitem': {
'Meta': {'object_name': 'CertificateItem', '_ormbases': ['shoppingcart.OrderItem']},
'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']"}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.coupon': {
'Meta': {'object_name': 'Coupon'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 7, 1, 0, 0)'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'percentage_discount': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'shoppingcart.couponredemption': {
'Meta': {'object_name': 'CouponRedemption'},
'coupon': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Coupon']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.courseregistrationcode': {
'Meta': {'object_name': 'CourseRegistrationCode'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 7, 1, 0, 0)'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_by_user'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'redeemed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 7, 1, 0, 0)', 'null': 'True'}),
'redeemed_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'redeemed_by_user'", 'null': 'True', 'to': "orm['auth.User']"}),
'transaction_group_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'shoppingcart.order': {
'Meta': {'object_name': 'Order'},
'bill_to_cardtype': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'bill_to_ccnum': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_city': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_first': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_last': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_postalcode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'bill_to_state': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_street1': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'bill_to_street2': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'processor_reply_dump': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'purchase_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'refunded_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.orderitem': {
'Meta': {'object_name': 'OrderItem'},
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'fulfilled_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_desc': ('django.db.models.fields.CharField', [], {'default': "'Misc. Item'", 'max_length': '1024'}),
'list_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '30', 'decimal_places': '2'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
'qty': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'refund_requested_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'report_comments': ('django.db.models.fields.TextField', [], {'default': "''"}),
'service_fee': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32', 'db_index': 'True'}),
'unit_cost': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.paidcourseregistration': {
'Meta': {'object_name': 'PaidCourseRegistration', '_ormbases': ['shoppingcart.OrderItem']},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'default': "'honor'", 'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.paidcourseregistrationannotation': {
'Meta': {'object_name': 'PaidCourseRegistrationAnnotation'},
'annotation': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'student.courseenrollment': {
'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['shoppingcart']
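
# Editor's note (not part of the generated migration): a hedged sketch of how
# a numbered south migration such as this one is applied or rolled back.
# It assumes 'south' is in INSTALLED_APPS and DJANGO_SETTINGS_MODULE is set,
# so it is left commented out rather than executed at import time:
#
#     from django.core.management import call_command
#     call_command('migrate', 'shoppingcart', '0009')  # runs forwards()
#     call_command('migrate', 'shoppingcart', '0008')  # runs backwards()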
# --- nhicher/ansible | test/runner/injector/injector.py | license: gpl-3.0 ---

#!/usr/bin/env python
"""Interpreter and code coverage injector for use with ansible-test.
The injector serves two main purposes:
1) Control the python interpreter used to run test tools and ansible code.
2) Provide optional code coverage analysis of ansible code.
The injector is executed one of two ways:
1) On the controller via a symbolic link such as ansible or pytest.
This is accomplished by prepending the injector directory to the PATH by ansible-test.
2) As the python interpreter when running ansible modules.
This is only supported when connecting to the local host.
Otherwise set the ANSIBLE_TEST_REMOTE_INTERPRETER environment variable.
It can be empty to auto-detect the python interpreter on the remote host.
If not empty it will be used to set ansible_python_interpreter.
NOTE: Running ansible-test with the --tox option or inside a virtual environment
may prevent the injector from working for tests which use connection
types other than local, or which use become, due to lack of permissions
to access the interpreter for the virtual environment.
"""
from __future__ import absolute_import, print_function
import json
import os
import sys
import pipes
import logging
import getpass
import resource
logger = logging.getLogger('injector') # pylint: disable=locally-disabled, invalid-name
# pylint: disable=locally-disabled, invalid-name
config = None # type: InjectorConfig
class InjectorConfig(object):
"""Mandatory configuration."""
def __init__(self, config_path):
"""Initialize config."""
with open(config_path) as config_fd:
_config = json.load(config_fd)
self.python_interpreter = _config['python_interpreter']
self.coverage_file = _config['coverage_file']
# Read from the environment instead of config since it needs to be changed by integration test scripts.
# It also does not need to flow from the controller to the remote. It is only used on the controller.
self.remote_interpreter = os.environ.get('ANSIBLE_TEST_REMOTE_INTERPRETER', None)
self.arguments = [to_text(c) for c in sys.argv]
def to_text(value):
"""
:type value: str | None
:rtype: str | None
"""
if value is None:
return None
if isinstance(value, bytes):
return value.decode('utf-8')
return u'%s' % value
def main():
"""Main entry point."""
global config # pylint: disable=locally-disabled, global-statement
formatter = logging.Formatter('%(asctime)s %(process)d %(levelname)s %(message)s')
log_name = 'ansible-test-coverage.%s.log' % getpass.getuser()
self_dir = os.path.dirname(os.path.abspath(__file__))
handler = logging.FileHandler(os.path.join('/tmp', log_name))
handler.setFormatter(formatter)
logger.addHandler(handler)
handler = logging.FileHandler(os.path.abspath(os.path.join(self_dir, '..', 'logs', log_name)))
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
try:
logger.debug('Self: %s', __file__)
# to achieve a consistent nofile ulimit, set to 16k here, this can affect performance in subprocess.Popen when
# being called with close_fds=True on Python (8x the time on some environments)
nofile_limit = 16 * 1024
current_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
new_limit = (nofile_limit, nofile_limit)
if current_limit > new_limit:
logger.debug('RLIMIT_NOFILE: %s -> %s', current_limit, new_limit)
resource.setrlimit(resource.RLIMIT_NOFILE, (nofile_limit, nofile_limit))
else:
logger.debug('RLIMIT_NOFILE: %s', current_limit)
config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'injector.json')
try:
config = InjectorConfig(config_path)
except IOError:
logger.exception('Error reading config: %s', config_path)
exit('No injector config found. Set ANSIBLE_TEST_REMOTE_INTERPRETER if the test is not connecting to the local host.')
logger.debug('Arguments: %s', ' '.join(pipes.quote(c) for c in config.arguments))
logger.debug('Python interpreter: %s', config.python_interpreter)
logger.debug('Remote interpreter: %s', config.remote_interpreter)
logger.debug('Coverage file: %s', config.coverage_file)
if os.path.basename(__file__) == 'injector.py':
args, env = runner() # code coverage collection is baked into the AnsiballZ wrapper when needed
elif os.path.basename(__file__) == 'python.py':
args, env = python() # run arbitrary python commands using the correct python and with optional code coverage
else:
args, env = injector()
logger.debug('Run command: %s', ' '.join(pipes.quote(c) for c in args))
for key in sorted(env.keys()):
logger.debug('%s=%s', key, env[key])
os.execvpe(args[0], args, env)
except Exception as ex:
logger.fatal(ex)
raise
def python():
"""
:rtype: list[str], dict[str, str]
"""
if config.coverage_file:
args, env = coverage_command()
else:
args, env = [config.python_interpreter], os.environ.copy()
args += config.arguments[1:]
return args, env
def injector():
"""
:rtype: list[str], dict[str, str]
"""
command = os.path.basename(__file__)
executable = find_executable(command)
if config.coverage_file:
args, env = coverage_command()
else:
args, env = [config.python_interpreter], os.environ.copy()
args += [executable]
if command in ('ansible', 'ansible-playbook', 'ansible-pull'):
if config.remote_interpreter is None:
interpreter = os.path.join(os.path.dirname(__file__), 'injector.py')
elif config.remote_interpreter == '':
interpreter = None
else:
interpreter = config.remote_interpreter
if interpreter:
args += ['--extra-vars', 'ansible_python_interpreter=' + interpreter]
args += config.arguments[1:]
return args, env
def runner():
"""
:rtype: list[str], dict[str, str]
"""
args, env = [config.python_interpreter], os.environ.copy()
args += config.arguments[1:]
return args, env
def coverage_command():
"""
:rtype: list[str], dict[str, str]
"""
self_dir = os.path.dirname(os.path.abspath(__file__))
args = [
config.python_interpreter,
'-m',
'coverage.__main__',
'run',
'--rcfile',
os.path.join(self_dir, '.coveragerc'),
]
env = os.environ.copy()
env['COVERAGE_FILE'] = config.coverage_file
return args, env
def find_executable(executable):
"""
:type executable: str
:rtype: str
"""
self = os.path.abspath(__file__)
path = os.environ.get('PATH', os.path.defpath)
seen_dirs = set()
for path_dir in path.split(os.path.pathsep):
if path_dir in seen_dirs:
continue
seen_dirs.add(path_dir)
candidate = os.path.abspath(os.path.join(path_dir, executable))
if candidate == self:
continue
if os.path.exists(candidate) and os.access(candidate, os.F_OK | os.X_OK):
return candidate
raise Exception('Executable "%s" not found in path: %s' % (executable, path))
if __name__ == '__main__':
main()
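
# Editor's sketch (not part of ansible-test): InjectorConfig above expects an
# injector.json next to this script. A hedged example of generating one by
# hand for local experimentation -- the interpreter path is an assumption,
# and an empty coverage_file disables coverage collection (see the
# `if config.coverage_file:` checks above):
#
#     import json
#     with open('injector.json', 'w') as config_fd:
#         json.dump({
#             'python_interpreter': '/usr/bin/python2.7',  # assumed path
#             'coverage_file': '',
#         }, config_fd)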
# --- DonHilborn/DataGenerator | faker/providers/internet.py | license: mit ---

# coding=utf-8
from __future__ import unicode_literals
from . import BaseProvider
import random
from faker.providers.lorem import Provider as Lorem
from faker.utils.decorators import slugify, slugify_domain
class Provider(BaseProvider):
safe_email_tlds = ('org', 'com', 'net')
free_email_domains = ('gmail.com', 'yahoo.com', 'hotmail.com')
tlds = ('com', 'com', 'com', 'com', 'com', 'com', 'biz', 'info', 'net', 'org')
uri_pages = (
'index', 'home', 'search', 'main', 'post', 'homepage', 'category', 'register', 'login', 'faq', 'about', 'terms',
'privacy', 'author')
uri_paths = (
'app', 'main', 'wp-content', 'search', 'category', 'tag', 'categories', 'tags', 'blog', 'posts', 'list', 'explore')
uri_extensions = ('.html', '.html', '.html', '.htm', '.htm', '.php', '.php', '.jsp', '.asp')
user_name_formats = (
'{{last_name}}.{{first_name}}',
'{{first_name}}.{{last_name}}',
'{{first_name}}##',
'?{{last_name}}',
)
email_formats = (
'{{user_name}}@{{domain_name}}',
'{{user_name}}@{{free_email_domain}}',
)
url_formats = (
'http://www.{{domain_name}}/',
'http://{{domain_name}}/',
)
uri_formats = (
'{{url}}',
'{{url}}{{uri_page}}/',
'{{url}}{{uri_page}}{{uri_extension}}',
'{{url}}{{uri_path}}/{{uri_page}}/',
'{{url}}{{uri_path}}/{{uri_page}}{{uri_extension}}',
)
image_placeholder_services = (
'http://placekitten.com/{width}/{height}',
'http://placehold.it/{width}x{height}',
'http://www.lorempixum.com/{width}/{height}',
'http://dummyimage.com/{width}x{height}',
)
def email(self):
pattern = self.random_element(self.email_formats)
return "".join(self.generator.parse(pattern).split(" "))
def safe_email(self):
return self.user_name() + '@example.' + self.random_element(self.safe_email_tlds)
def free_email(self):
return self.user_name() + '@' + self.free_email_domain()
def company_email(self):
return self.user_name() + '@' + self.domain_name()
@classmethod
def free_email_domain(cls):
return cls.random_element(cls.free_email_domains)
@slugify_domain
def user_name(self):
pattern = self.random_element(self.user_name_formats)
return self.bothify(self.generator.parse(pattern))
def domain_name(self):
return self.domain_word() + '.' + self.tld()
@slugify
def domain_word(self):
company = self.generator.format('company')
company_elements = company.split(' ')
company = company_elements.pop(0)
return company
def tld(self):
return self.random_element(self.tlds)
def url(self):
pattern = self.random_element(self.url_formats)
return self.generator.parse(pattern)
def ipv4(self):
"""
Convert 32-bit integer to dotted IPv4 address.
"""
return ".".join(map(lambda n: str(random.randint(-2147483648, 2147483647) >> n & 0xFF), [24, 16, 8, 0]))
def ipv6(self):
res = [hex(random.randint(0, 65535))[2:].zfill(4) for i in range(0, 8)]
return ":".join(res)
def mac_address(self):
mac = [random.randint(0x00, 0xff) for i in range(0, 6)]
return ":".join(map(lambda x: "%02x" % x, mac))
@classmethod
def uri_page(cls):
return cls.random_element(cls.uri_pages)
@classmethod
def uri_path(cls, deep=None):
deep = deep if deep else random.randint(1, 3)
return "/".join([cls.random_element(cls.uri_paths) for _ in range(0, deep)])
@classmethod
def uri_extension(cls):
return cls.random_element(cls.uri_extensions)
def uri(self):
pattern = self.random_element(self.uri_formats)
return self.generator.parse(pattern)
@classmethod
@slugify
def slug(cls, value=None):
"""
Django algorithm
"""
if value is None:
value = Lorem.text(20)
return value
@classmethod
def image_url(cls, width=None, height=None):
"""
Returns URL to placeholder image
Example: http://placehold.it/640x480
"""
width_ = width or cls.random_int(max=1024)
height_ = height or cls.random_int(max=1024)
placeholder_url = cls.random_element(cls.image_placeholder_services)
return placeholder_url.format(width=width_, height=height_)
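
if __name__ == '__main__':
    # Editor's sketch (not part of faker): exercising this provider through
    # the public Faker facade. Assumes the faker package is importable; every
    # method called below is defined in the Provider class above.
    from faker import Faker
    fake = Faker()
    print(fake.email())
    print(fake.ipv4())
    print(fake.mac_address())
    print(fake.image_url(width=640, height=480))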
# --- drewmiller/tornado | docs/conf.py | license: apache-2.0 ---

# Ensure we get the local copy of tornado instead of what's on the standard path
import os
import sys
sys.path.insert(0, os.path.abspath(".."))
import tornado
master_doc = "index"
project = "Tornado"
copyright = "2011, Facebook"
version = release = tornado.version
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.doctest",
"sphinx.ext.extlinks",
"sphinx.ext.intersphinx",
"sphinx.ext.viewcode",
]
primary_domain = 'py'
default_role = 'py:obj'
autodoc_member_order = "bysource"
autoclass_content = "both"
# Without this line sphinx includes a copy of object.__init__'s docstring
# on any class that doesn't define __init__.
# https://bitbucket.org/birkenfeld/sphinx/issue/1337/autoclass_content-both-uses-object__init__
autodoc_docstring_signature = False
coverage_skip_undoc_in_source = True
coverage_ignore_modules = [
"tornado.platform.asyncio",
"tornado.platform.caresresolver",
"tornado.platform.twisted",
]
# I wish this could go in a per-module file...
coverage_ignore_classes = [
# tornado.concurrent
"TracebackFuture",
# tornado.gen
"Runner",
# tornado.ioloop
"PollIOLoop",
# tornado.web
"ChunkedTransferEncoding",
"GZipContentEncoding",
"OutputTransform",
"TemplateModule",
"url",
# tornado.websocket
"WebSocketProtocol",
"WebSocketProtocol13",
"WebSocketProtocol76",
]
coverage_ignore_functions = [
# various modules
"doctests",
"main",
# tornado.escape
# parse_qs_bytes should probably be documented but it's complicated by
# having different implementations between py2 and py3.
"parse_qs_bytes",
]
html_favicon = 'favicon.ico'
latex_documents = [
('documentation', 'tornado.tex', 'Tornado Documentation', 'Facebook', 'manual', False),
]
# HACK: sphinx has limited support for substitutions with the |version|
# variable, but there doesn't appear to be any way to use this in a link
# target.
# http://stackoverflow.com/questions/1227037/substitutions-inside-links-in-rest-sphinx
# The extlink extension can be used to do link substitutions, but it requires a
# portion of the url to be literally contained in the document. Therefore,
# this link must be referenced as :current_tarball:`z`
extlinks = {
'current_tarball': (
'https://pypi.python.org/packages/source/t/tornado/tornado-%s.tar.g%%s' % version,
'tornado-%s.tar.g' % version),
}
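
# Editor's note (not part of the original conf.py): a hedged usage sketch of
# the extlink defined above. An .rst file in these docs can write, e.g.:
#
#     Download the :current_tarball:`z` for this release.
#
# The role argument ("z") fills the remaining %s slot left in the URL
# template, so the text renders as "tornado-<version>.tar.gz" linking to the
# matching PyPI tarball.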
intersphinx_mapping = {
'python': ('https://docs.python.org/3.4/', None),
}
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
# On RTD we can't import sphinx_rtd_theme, but it will be applied by
# default anyway. This block will use the same theme when building locally
# as on RTD.
if not on_rtd:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# --- gfyoung/pandas | pandas/tests/indexes/datetimes/test_scalar_compat.py | license: bsd-3-clause ---

"""
Tests for DatetimeIndex methods behaving like their Timestamp counterparts
"""
from datetime import datetime
import numpy as np
import pytest
from pandas._libs.tslibs import OutOfBoundsDatetime, to_offset
from pandas._libs.tslibs.offsets import INVALID_FREQ_ERR_MSG
import pandas as pd
from pandas import DatetimeIndex, Timestamp, date_range
import pandas._testing as tm
class TestDatetimeIndexOps:
def test_dti_time(self):
rng = date_range("1/1/2000", freq="12min", periods=10)
result = pd.Index(rng).time
expected = [t.time() for t in rng]
assert (result == expected).all()
def test_dti_date(self):
rng = date_range("1/1/2000", freq="12H", periods=10)
result = pd.Index(rng).date
expected = [t.date() for t in rng]
assert (result == expected).all()
@pytest.mark.parametrize("data", [["1400-01-01"], [datetime(1400, 1, 1)]])
def test_dti_date_out_of_range(self, data):
# GH#1475
msg = "Out of bounds nanosecond timestamp: 1400-01-01 00:00:00"
with pytest.raises(OutOfBoundsDatetime, match=msg):
DatetimeIndex(data)
@pytest.mark.parametrize(
"field",
[
"dayofweek",
"day_of_week",
"dayofyear",
"day_of_year",
"quarter",
"days_in_month",
"is_month_start",
"is_month_end",
"is_quarter_start",
"is_quarter_end",
"is_year_start",
"is_year_end",
],
)
def test_dti_timestamp_fields(self, field):
# extra fields from DatetimeIndex like quarter and week
idx = tm.makeDateIndex(100)
expected = getattr(idx, field)[-1]
result = getattr(Timestamp(idx[-1]), field)
assert result == expected
def test_dti_timestamp_isocalendar_fields(self):
idx = tm.makeDateIndex(100)
expected = tuple(idx.isocalendar().iloc[-1].to_list())
result = idx[-1].isocalendar()
assert result == expected
def test_dti_timestamp_freq_fields(self):
# extra fields from DatetimeIndex like quarter and week
idx = tm.makeDateIndex(100)
assert idx.freq == Timestamp(idx[-1], idx.freq).freq
assert idx.freqstr == Timestamp(idx[-1], idx.freq).freqstr
# ----------------------------------------------------------------
# DatetimeIndex.round
def test_round_daily(self):
dti = date_range("20130101 09:10:11", periods=5)
result = dti.round("D")
expected = date_range("20130101", periods=5)
tm.assert_index_equal(result, expected)
dti = dti.tz_localize("UTC").tz_convert("US/Eastern")
result = dti.round("D")
expected = date_range("20130101", periods=5).tz_localize("US/Eastern")
tm.assert_index_equal(result, expected)
result = dti.round("s")
tm.assert_index_equal(result, dti)
@pytest.mark.parametrize(
"freq, error_msg",
[
("Y", "<YearEnd: month=12> is a non-fixed frequency"),
("M", "<MonthEnd> is a non-fixed frequency"),
("foobar", "Invalid frequency: foobar"),
],
)
def test_round_invalid(self, freq, error_msg):
dti = date_range("20130101 09:10:11", periods=5)
dti = dti.tz_localize("UTC").tz_convert("US/Eastern")
with pytest.raises(ValueError, match=error_msg):
dti.round(freq)
def test_round(self, tz_naive_fixture):
tz = tz_naive_fixture
rng = date_range(start="2016-01-01", periods=5, freq="30Min", tz=tz)
elt = rng[1]
expected_rng = DatetimeIndex(
[
Timestamp("2016-01-01 00:00:00", tz=tz, freq="30T"),
Timestamp("2016-01-01 00:00:00", tz=tz, freq="30T"),
Timestamp("2016-01-01 01:00:00", tz=tz, freq="30T"),
Timestamp("2016-01-01 02:00:00", tz=tz, freq="30T"),
Timestamp("2016-01-01 02:00:00", tz=tz, freq="30T"),
]
)
expected_elt = expected_rng[1]
tm.assert_index_equal(rng.round(freq="H"), expected_rng)
assert elt.round(freq="H") == expected_elt
msg = INVALID_FREQ_ERR_MSG
with pytest.raises(ValueError, match=msg):
rng.round(freq="foo")
with pytest.raises(ValueError, match=msg):
elt.round(freq="foo")
msg = "<MonthEnd> is a non-fixed frequency"
with pytest.raises(ValueError, match=msg):
rng.round(freq="M")
with pytest.raises(ValueError, match=msg):
elt.round(freq="M")
# GH#14440 & GH#15578
index = DatetimeIndex(["2016-10-17 12:00:00.0015"], tz=tz)
result = index.round("ms")
expected = DatetimeIndex(["2016-10-17 12:00:00.002000"], tz=tz)
tm.assert_index_equal(result, expected)
for freq in ["us", "ns"]:
tm.assert_index_equal(index, index.round(freq))
index = DatetimeIndex(["2016-10-17 12:00:00.00149"], tz=tz)
result = index.round("ms")
expected = DatetimeIndex(["2016-10-17 12:00:00.001000"], tz=tz)
tm.assert_index_equal(result, expected)
index = DatetimeIndex(["2016-10-17 12:00:00.001501031"])
result = index.round("10ns")
expected = DatetimeIndex(["2016-10-17 12:00:00.001501030"])
tm.assert_index_equal(result, expected)
with tm.assert_produces_warning(False):
ts = "2016-10-17 12:00:00.001501031"
DatetimeIndex([ts]).round("1010ns")
def test_no_rounding_occurs(self, tz_naive_fixture):
# GH 21262
tz = tz_naive_fixture
rng = date_range(start="2016-01-01", periods=5, freq="2Min", tz=tz)
expected_rng = DatetimeIndex(
[
Timestamp("2016-01-01 00:00:00", tz=tz, freq="2T"),
Timestamp("2016-01-01 00:02:00", tz=tz, freq="2T"),
Timestamp("2016-01-01 00:04:00", tz=tz, freq="2T"),
Timestamp("2016-01-01 00:06:00", tz=tz, freq="2T"),
Timestamp("2016-01-01 00:08:00", tz=tz, freq="2T"),
]
)
tm.assert_index_equal(rng.round(freq="2T"), expected_rng)
@pytest.mark.parametrize(
"test_input, rounder, freq, expected",
[
(["2117-01-01 00:00:45"], "floor", "15s", ["2117-01-01 00:00:45"]),
(["2117-01-01 00:00:45"], "ceil", "15s", ["2117-01-01 00:00:45"]),
(
["2117-01-01 00:00:45.000000012"],
"floor",
"10ns",
["2117-01-01 00:00:45.000000010"],
),
(
["1823-01-01 00:00:01.000000012"],
"ceil",
"10ns",
["1823-01-01 00:00:01.000000020"],
),
(["1823-01-01 00:00:01"], "floor", "1s", ["1823-01-01 00:00:01"]),
(["1823-01-01 00:00:01"], "ceil", "1s", ["1823-01-01 00:00:01"]),
(["2018-01-01 00:15:00"], "ceil", "15T", ["2018-01-01 00:15:00"]),
(["2018-01-01 00:15:00"], "floor", "15T", ["2018-01-01 00:15:00"]),
(["1823-01-01 03:00:00"], "ceil", "3H", ["1823-01-01 03:00:00"]),
(["1823-01-01 03:00:00"], "floor", "3H", ["1823-01-01 03:00:00"]),
(
("NaT", "1823-01-01 00:00:01"),
"floor",
"1s",
("NaT", "1823-01-01 00:00:01"),
),
(
("NaT", "1823-01-01 00:00:01"),
"ceil",
"1s",
("NaT", "1823-01-01 00:00:01"),
),
],
)
def test_ceil_floor_edge(self, test_input, rounder, freq, expected):
dt = DatetimeIndex(list(test_input))
func = getattr(dt, rounder)
result = func(freq)
expected = DatetimeIndex(list(expected))
assert expected.equals(result)
@pytest.mark.parametrize(
"start, index_freq, periods",
[("2018-01-01", "12H", 25), ("2018-01-01 0:0:0.124999", "1ns", 1000)],
)
@pytest.mark.parametrize(
"round_freq",
[
"2ns",
"3ns",
"4ns",
"5ns",
"6ns",
"7ns",
"250ns",
"500ns",
"750ns",
"1us",
"19us",
"250us",
"500us",
"750us",
"1s",
"2s",
"3s",
"12H",
"1D",
],
)
def test_round_int64(self, start, index_freq, periods, round_freq):
dt = date_range(start=start, freq=index_freq, periods=periods)
unit = to_offset(round_freq).nanos
# test floor
result = dt.floor(round_freq)
diff = dt.asi8 - result.asi8
mod = result.asi8 % unit
assert (mod == 0).all(), f"floor not a {round_freq} multiple"
assert (0 <= diff).all() and (diff < unit).all(), "floor error"
# test ceil
result = dt.ceil(round_freq)
diff = result.asi8 - dt.asi8
mod = result.asi8 % unit
assert (mod == 0).all(), f"ceil not a {round_freq} multiple"
assert (0 <= diff).all() and (diff < unit).all(), "ceil error"
# test round
result = dt.round(round_freq)
diff = abs(result.asi8 - dt.asi8)
mod = result.asi8 % unit
assert (mod == 0).all(), f"round not a {round_freq} multiple"
assert (diff <= unit // 2).all(), "round error"
if unit % 2 == 0:
assert (
result.asi8[diff == unit // 2] % 2 == 0
).all(), "round half to even error"
# ----------------------------------------------------------------
# DatetimeIndex.normalize
def test_normalize(self):
rng = date_range("1/1/2000 9:30", periods=10, freq="D")
result = rng.normalize()
expected = date_range("1/1/2000", periods=10, freq="D")
tm.assert_index_equal(result, expected)
arr_ns = np.array([1380585623454345752, 1380585612343234312]).astype(
"datetime64[ns]"
)
rng_ns = DatetimeIndex(arr_ns)
rng_ns_normalized = rng_ns.normalize()
arr_ns = np.array([1380585600000000000, 1380585600000000000]).astype(
"datetime64[ns]"
)
expected = DatetimeIndex(arr_ns)
tm.assert_index_equal(rng_ns_normalized, expected)
assert result.is_normalized
assert not rng.is_normalized
def test_normalize_nat(self):
dti = DatetimeIndex([pd.NaT, Timestamp("2018-01-01 01:00:00")])
result = dti.normalize()
expected = DatetimeIndex([pd.NaT, Timestamp("2018-01-01")])
tm.assert_index_equal(result, expected)
class TestDateTimeIndexToJulianDate:
def test_1700(self):
dr = date_range(start=Timestamp("1710-10-01"), periods=5, freq="D")
r1 = pd.Index([x.to_julian_date() for x in dr])
r2 = dr.to_julian_date()
assert isinstance(r2, pd.Float64Index)
tm.assert_index_equal(r1, r2)
def test_2000(self):
dr = date_range(start=Timestamp("2000-02-27"), periods=5, freq="D")
r1 = pd.Index([x.to_julian_date() for x in dr])
r2 = dr.to_julian_date()
assert isinstance(r2, pd.Float64Index)
tm.assert_index_equal(r1, r2)
def test_hour(self):
dr = date_range(start=Timestamp("2000-02-27"), periods=5, freq="H")
r1 = pd.Index([x.to_julian_date() for x in dr])
r2 = dr.to_julian_date()
assert isinstance(r2, pd.Float64Index)
tm.assert_index_equal(r1, r2)
def test_minute(self):
dr = date_range(start=Timestamp("2000-02-27"), periods=5, freq="T")
r1 = pd.Index([x.to_julian_date() for x in dr])
r2 = dr.to_julian_date()
assert isinstance(r2, pd.Float64Index)
tm.assert_index_equal(r1, r2)
def test_second(self):
dr = date_range(start=Timestamp("2000-02-27"), periods=5, freq="S")
r1 = pd.Index([x.to_julian_date() for x in dr])
r2 = dr.to_julian_date()
assert isinstance(r2, pd.Float64Index)
tm.assert_index_equal(r1, r2)
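
if __name__ == "__main__":
    # Editor's sketch (not part of the pandas test suite): a minimal
    # demonstration of the rounding semantics the tests above assert.
    dti = date_range("2016-01-01 00:10", periods=3, freq="30T")
    print(dti.round("H"))      # each timestamp rounded to the nearest hour
    print(dti.floor("H"))      # rounded down to the hour
    print(dti.ceil("H"))       # rounded up to the hour
    print(dti.normalize())     # time component reset to midnight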
# --- meredith-digops/ansible | docs/bin/dump_keywords.py | license: gpl-3.0 ---

#!/usr/bin/env python
import optparse
import yaml
from jinja2 import Environment, FileSystemLoader
from ansible.playbook import Play
from ansible.playbook.block import Block
from ansible.playbook.role import Role
from ansible.playbook.task import Task
template_file = 'playbooks_keywords.rst.j2'
oblist = {}
clist = []
class_list = [ Play, Role, Block, Task ]
p = optparse.OptionParser(
version='%prog 1.0',
usage='usage: %prog [options]',
description='Generate module documentation from metadata',
)
p.add_option("-T", "--template-dir", action="store", dest="template_dir", default="../templates", help="directory containing Jinja2 templates")
p.add_option("-o", "--output-dir", action="store", dest="output_dir", default='/tmp/', help="Output directory for rst files")
p.add_option("-d", "--docs-source", action="store", dest="docs", default=None, help="Source for attribute docs")
(options, args) = p.parse_args()
for aclass in class_list:
aobj = aclass()
name = type(aobj).__name__
if options.docs:
with open(options.docs) as f:
docs = yaml.safe_load(f)
else:
docs = {}
# build ordered list to loop over and dict with attributes
clist.append(name)
oblist[name] = dict((x, aobj.__dict__['_attributes'][x]) for x in aobj.__dict__['_attributes'] if 'private' not in x or not x.private)
# pick up docs if they exist
for a in oblist[name]:
if a in docs:
oblist[name][a] = docs[a]
else:
oblist[name][a] = ' UNDOCUMENTED!! '
# loop is really with_ for users
if name == 'Task':
oblist[name]['with_<lookup_plugin>'] = 'with_ is how loops are defined, it can use any available lookup plugin to generate the item list'
# local_action is implicit with action
if 'action' in oblist[name]:
oblist[name]['local_action'] = 'Same as action but also implies ``delegate_to: localhost``'
# remove unusable (used to be private?)
for nouse in ('loop', 'loop_args'):
if nouse in oblist[name]:
del oblist[name][nouse]
env = Environment(loader=FileSystemLoader(options.template_dir), trim_blocks=True,)
template = env.get_template(template_file)
outputname = options.output_dir + template_file.replace('.j2','')
tempvars = { 'oblist': oblist, 'clist': clist }
with open( outputname, 'w') as f:
f.write(template.render(tempvars))
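
# Editor's note (not part of the original script): a hedged invocation
# example, run from the docs/bin directory. The attribute-docs YAML filename
# is an assumption -- point -d at whatever file holds the keyword docs:
#
#     python dump_keywords.py -T ../templates -o /tmp/ -d keyword_desc.yml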
# --- xiaohaidao007/pandoraBox-SDK-mt7620 | staging_dir/host/lib/scons-2.5.0/SCons/Job.py | license: gpl-2.0 ---

"""SCons.Job
This module defines the Serial and Parallel classes that execute tasks to
complete a build. The Jobs class provides a higher level interface to start,
stop, and wait on jobs.
"""
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Job.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import SCons.compat
import os
import signal
import SCons.Errors
# The default stack size (in kilobytes) of the threads used to execute
# jobs in parallel.
#
# We use a stack size of 256 kilobytes. The default on some platforms
# is too large and prevents us from creating enough threads to fully
# parallelized the build. For example, the default stack size on linux
# is 8 MBytes.
explicit_stack_size = None
default_stack_size = 256
interrupt_msg = 'Build interrupted.'
class InterruptState(object):
def __init__(self):
self.interrupted = False
def set(self):
self.interrupted = True
def __call__(self):
return self.interrupted
class Jobs(object):
"""An instance of this class initializes N jobs, and provides
methods for starting, stopping, and waiting on all N jobs.
"""
def __init__(self, num, taskmaster):
"""
Create 'num' jobs using the given taskmaster.
If 'num' is 1 or less, then a serial job will be used,
otherwise a parallel job with 'num' worker threads will
be used.
The 'num_jobs' attribute will be set to the actual number of jobs
allocated. If more than one job is requested but the Parallel
class can't do it, it gets reset to 1. Wrapping interfaces that
care should check the value of 'num_jobs' after initialization.
"""
self.job = None
if num > 1:
stack_size = explicit_stack_size
if stack_size is None:
stack_size = default_stack_size
try:
self.job = Parallel(taskmaster, num, stack_size)
self.num_jobs = num
except NameError:
pass
if self.job is None:
self.job = Serial(taskmaster)
self.num_jobs = 1
def run(self, postfunc=lambda: None):
"""Run the jobs.
postfunc() will be invoked after the jobs has run. It will be
invoked even if the jobs are interrupted by a keyboard
interrupt (well, in fact by a signal such as either SIGINT,
SIGTERM or SIGHUP). The execution of postfunc() is protected
against keyboard interrupts and is guaranteed to run to
completion."""
self._setup_sig_handler()
try:
self.job.start()
finally:
postfunc()
self._reset_sig_handler()
def were_interrupted(self):
"""Returns whether the jobs were interrupted by a signal."""
return self.job.interrupted()
def _setup_sig_handler(self):
"""Setup an interrupt handler so that SCons can shutdown cleanly in
various conditions:
a) SIGINT: Keyboard interrupt
b) SIGTERM: kill or system shutdown
c) SIGHUP: Controlling shell exiting
We handle all of these cases by stopping the taskmaster. It
turns out that it's very difficult to stop the build process
by throwing asynchronously an exception such as
KeyboardInterrupt. For example, the python Condition
variables (threading.Condition) and queues do not seem to be
asynchronous-exception-safe. It would require adding a whole
bunch of try/finally block and except KeyboardInterrupt all
over the place.
Note also that we have to be careful to handle the case when
SCons forks before executing another process. In that case, we
want the child to exit immediately.
"""
def handler(signum, stack, self=self, parentpid=os.getpid()):
if os.getpid() == parentpid:
self.job.taskmaster.stop()
self.job.interrupted.set()
else:
os._exit(2)
self.old_sigint = signal.signal(signal.SIGINT, handler)
self.old_sigterm = signal.signal(signal.SIGTERM, handler)
try:
self.old_sighup = signal.signal(signal.SIGHUP, handler)
except AttributeError:
pass
def _reset_sig_handler(self):
"""Restore the signal handlers to their previous state (before the
call to _setup_sig_handler()."""
signal.signal(signal.SIGINT, self.old_sigint)
signal.signal(signal.SIGTERM, self.old_sigterm)
try:
signal.signal(signal.SIGHUP, self.old_sighup)
except AttributeError:
pass
class Serial(object):
"""This class is used to execute tasks in series, and is more efficient
than Parallel, but is only appropriate for non-parallel builds. Only
one instance of this class should be in existence at a time.
This class is not thread safe.
"""
def __init__(self, taskmaster):
"""Create a new serial job given a taskmaster.
The taskmaster's next_task() method should return the next task
that needs to be executed, or None if there are no more tasks. The
taskmaster's executed() method will be called for each task when it
is successfully executed, or failed() will be called if it failed to
execute (e.g. execute() raised an exception)."""
self.taskmaster = taskmaster
self.interrupted = InterruptState()
def start(self):
"""Start the job. This will begin pulling tasks from the taskmaster
and executing them, and return when there are no more tasks. If a task
fails to execute (i.e. execute() raises an exception), then the job will
stop."""
while True:
task = self.taskmaster.next_task()
if task is None:
break
try:
task.prepare()
if task.needs_execute():
task.execute()
except:
if self.interrupted():
try:
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
except:
task.exception_set()
else:
task.exception_set()
# Let the failed() callback function arrange for the
# build to stop if that's appropriate.
task.failed()
else:
task.executed()
task.postprocess()
self.taskmaster.cleanup()
# Trap import failure so that everything in the Job module but the
# Parallel class (and its dependent classes) will work if the interpreter
# doesn't support threads.
try:
import queue
import threading
except ImportError:
pass
else:
class Worker(threading.Thread):
"""A worker thread waits on a task to be posted to its request queue,
dequeues the task, executes it, and posts a tuple including the task
and a boolean indicating whether the task executed successfully. """
def __init__(self, requestQueue, resultsQueue, interrupted):
threading.Thread.__init__(self)
self.setDaemon(1)
self.requestQueue = requestQueue
self.resultsQueue = resultsQueue
self.interrupted = interrupted
self.start()
def run(self):
while True:
task = self.requestQueue.get()
if task is None:
# The "None" value is used as a sentinel by
# ThreadPool.cleanup(). This indicates that there
# are no more tasks, so we should quit.
break
try:
if self.interrupted():
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
task.execute()
except:
task.exception_set()
ok = False
else:
ok = True
self.resultsQueue.put((task, ok))
class ThreadPool(object):
"""This class is responsible for spawning and managing worker threads."""
def __init__(self, num, stack_size, interrupted):
"""Create the request and reply queues, and 'num' worker threads.
One must specify the stack size of the worker threads. The
stack size is specified in kilobytes.
"""
self.requestQueue = queue.Queue(0)
self.resultsQueue = queue.Queue(0)
try:
prev_size = threading.stack_size(stack_size*1024)
except AttributeError, e:
# Only print a warning if the stack size has been
# explicitly set.
if not explicit_stack_size is None:
msg = "Setting stack size is unsupported by this version of Python:\n " + \
e.args[0]
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
except ValueError, e:
msg = "Setting stack size failed:\n " + str(e)
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
# Create worker threads
self.workers = []
for _ in range(num):
worker = Worker(self.requestQueue, self.resultsQueue, interrupted)
self.workers.append(worker)
if 'prev_size' in locals():
threading.stack_size(prev_size)
def put(self, task):
"""Put task into request queue."""
self.requestQueue.put(task)
def get(self):
"""Remove and return a result tuple from the results queue."""
return self.resultsQueue.get()
def preparation_failed(self, task):
self.resultsQueue.put((task, False))
def cleanup(self):
"""
Shuts down the thread pool, giving each worker thread a
chance to shut down gracefully.
"""
# For each worker thread, put a sentinel "None" value
# on the requestQueue (indicating that there's no work
# to be done) so that each worker thread will get one and
# terminate gracefully.
for _ in self.workers:
self.requestQueue.put(None)
# Wait for all of the workers to terminate.
#
# If we don't do this, later Python versions (2.4, 2.5) often
# seem to raise exceptions during shutdown. This happens
# in requestQueue.get(), as an assertion failure that
# requestQueue.not_full is notified while not acquired,
# seemingly because the main thread has shut down (or is
# in the process of doing so) while the workers are still
# trying to pull sentinels off the requestQueue.
#
# Normally these terminations should happen fairly quickly,
# but we'll stick a one-second timeout on here just in case
# someone gets hung.
for worker in self.workers:
worker.join(1.0)
self.workers = []
class Parallel(object):
"""This class is used to execute tasks in parallel, and is somewhat
less efficient than Serial, but is appropriate for parallel builds.
This class is thread safe.
"""
def __init__(self, taskmaster, num, stack_size):
"""Create a new parallel job given a taskmaster.
The taskmaster's next_task() method should return the next
task that needs to be executed, or None if there are no more
tasks. The taskmaster's executed() method will be called
for each task when it is successfully executed, or failed()
will be called if the task failed to execute (i.e. execute()
raised an exception).
Note: calls to taskmaster are serialized, but calls to
execute() on distinct tasks are not serialized, because
that is the whole point of parallel jobs: they can execute
multiple tasks simultaneously. """
self.taskmaster = taskmaster
self.interrupted = InterruptState()
self.tp = ThreadPool(num, stack_size, self.interrupted)
self.maxjobs = num
def start(self):
"""Start the job. This will begin pulling tasks from the
taskmaster and executing them, and return when there are no
more tasks. If a task fails to execute (i.e. execute() raises
an exception), then the job will stop."""
jobs = 0
while True:
# Start up as many available tasks as we're
# allowed to.
while jobs < self.maxjobs:
task = self.taskmaster.next_task()
if task is None:
break
try:
# prepare task for execution
task.prepare()
except:
task.exception_set()
task.failed()
task.postprocess()
else:
if task.needs_execute():
# dispatch task
self.tp.put(task)
jobs = jobs + 1
else:
task.executed()
task.postprocess()
if not task and not jobs: break
# Let any/all completed tasks finish up before we go
# back and put the next batch of tasks on the queue.
while True:
task, ok = self.tp.get()
jobs = jobs - 1
if ok:
task.executed()
else:
if self.interrupted():
try:
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
except:
task.exception_set()
# Let the failed() callback function arrange
# for the build to stop if that's appropriate.
task.failed()
task.postprocess()
if self.tp.resultsQueue.empty():
break
self.tp.cleanup()
self.taskmaster.cleanup()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
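
if __name__ == '__main__':
    # Editor's sketch (not part of SCons): a minimal duck-typed taskmaster
    # driving the Jobs/Serial machinery above. Real builds use
    # SCons.Taskmaster; this stub implements only the methods the Job
    # classes actually call.
    class _Task(object):
        def __init__(self, name):
            self.name = name
        def prepare(self):
            pass
        def needs_execute(self):
            return True
        def execute(self):
            print 'executing', self.name
        def executed(self):
            pass
        def postprocess(self):
            pass
        def failed(self):
            pass
        def exception_set(self):
            pass
    class _Taskmaster(object):
        def __init__(self, names):
            self.pending = list(names)
        def next_task(self):
            return _Task(self.pending.pop(0)) if self.pending else None
        def stop(self):
            self.pending = []
        def cleanup(self):
            pass
    Jobs(1, _Taskmaster(['compile', 'link'])).run()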
# --- SwainLi/TeamTalk | win-client/3rdParty/src/json/amalgamate.py ---

"""Amalgamate json-cpp library sources into a single source and header file.

Requires Python 2.6

Example of invocation (must be invoked from json-cpp top directory):
python amalgamate.py
"""
import os
import os.path
import sys
class AmalgamationFile:
def __init__( self, top_dir ):
self.top_dir = top_dir
self.blocks = []
def add_text( self, text ):
if not text.endswith( '\n' ):
text += '\n'
self.blocks.append( text )
def add_file( self, relative_input_path, wrap_in_comment=False ):
def add_marker( prefix ):
self.add_text( '' )
self.add_text( '// ' + '/'*70 )
self.add_text( '// %s of content of file: %s' % (prefix, relative_input_path.replace('\\','/')) )
self.add_text( '// ' + '/'*70 )
self.add_text( '' )
add_marker( 'Beginning' )
f = open( os.path.join( self.top_dir, relative_input_path ), 'rt' )
content = f.read()
if wrap_in_comment:
content = '/*\n' + content + '\n*/'
self.add_text( content )
f.close()
add_marker( 'End' )
self.add_text( '\n\n\n\n' )
def get_value( self ):
return ''.join( self.blocks ).replace('\r\n','\n')
def write_to( self, output_path ):
output_dir = os.path.dirname( output_path )
if output_dir and not os.path.isdir( output_dir ):
os.makedirs( output_dir )
f = open( output_path, 'wb' )
f.write( self.get_value() )
f.close()
def amalgamate_source( source_top_dir=None,
target_source_path=None,
header_include_path=None ):
"""Produces amalgated source.
Parameters:
source_top_dir: top-directory
target_source_path: output .cpp path
header_include_path: generated header path relative to target_source_path.
"""
    print 'Amalgamating header...'
    header = AmalgamationFile( source_top_dir )
    header.add_text( '/// Json-cpp amalgamated header (http://jsoncpp.sourceforge.net/).' )
    header.add_text( '/// It is intended to be used with #include <%s>' % header_include_path )
header.add_file( 'LICENSE', wrap_in_comment=True )
header.add_text( '#ifndef JSON_AMALGATED_H_INCLUDED' )
header.add_text( '# define JSON_AMALGATED_H_INCLUDED' )
    header.add_text( '/// If defined, indicates that the source file is amalgamated' )
header.add_text( '/// to prevent private header inclusion.' )
header.add_text( '#define JSON_IS_AMALGAMATION' )
#header.add_file( 'include/json/version.h' )
header.add_file( 'include/json/config.h' )
header.add_file( 'include/json/forwards.h' )
header.add_file( 'include/json/features.h' )
header.add_file( 'include/json/value.h' )
header.add_file( 'include/json/reader.h' )
header.add_file( 'include/json/writer.h' )
header.add_file( 'include/json/assertions.h' )
header.add_text( '#endif //ifndef JSON_AMALGATED_H_INCLUDED' )
target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path )
    print 'Writing amalgamated header to %r' % target_header_path
header.write_to( target_header_path )
base, ext = os.path.splitext( header_include_path )
forward_header_include_path = base + '-forwards' + ext
    print 'Amalgamating forward header...'
    header = AmalgamationFile( source_top_dir )
    header.add_text( '/// Json-cpp amalgamated forward header (http://jsoncpp.sourceforge.net/).' )
    header.add_text( '/// It is intended to be used with #include <%s>' % forward_header_include_path )
header.add_text( '/// This header provides forward declaration for all JsonCpp types.' )
header.add_file( 'LICENSE', wrap_in_comment=True )
header.add_text( '#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' )
header.add_text( '# define JSON_FORWARD_AMALGATED_H_INCLUDED' )
    header.add_text( '/// If defined, indicates that the source file is amalgamated' )
header.add_text( '/// to prevent private header inclusion.' )
header.add_text( '#define JSON_IS_AMALGAMATION' )
header.add_file( 'include/json/config.h' )
header.add_file( 'include/json/forwards.h' )
header.add_text( '#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' )
target_forward_header_path = os.path.join( os.path.dirname(target_source_path),
forward_header_include_path )
    print 'Writing amalgamated forward header to %r' % target_forward_header_path
header.write_to( target_forward_header_path )
    print 'Amalgamating source...'
    source = AmalgamationFile( source_top_dir )
    source.add_text( '/// Json-cpp amalgamated source (http://jsoncpp.sourceforge.net/).' )
    source.add_text( '/// It is intended to be used with #include "%s"' % target_source_path )
source.add_file( 'LICENSE', wrap_in_comment=True )
source.add_text( '' )
source.add_text( '#include "%s"' % header_include_path )
source.add_text( '' )
lib_json = 'src/lib_json'
source.add_file( os.path.join(lib_json, 'json_tool.h') )
source.add_file( os.path.join(lib_json, 'json_reader.cpp') )
source.add_file( os.path.join(lib_json, 'json_batchallocator.h') )
source.add_file( os.path.join(lib_json, 'json_valueiterator.inl') )
source.add_file( os.path.join(lib_json, 'json_value.cpp') )
source.add_file( os.path.join(lib_json, 'json_writer.cpp') )
    print 'Writing amalgamated source to %r' % target_source_path
source.write_to( target_source_path )
def main():
usage = """%prog [options]
Generate a single amalgamated source and header file from the sources.
"""
from optparse import OptionParser
parser = OptionParser(usage=usage)
parser.allow_interspersed_args = False
parser.add_option('-s', '--source', dest="target_source_path", action='store', default='dist/jsoncpp.cpp',
help="""Output .cpp source path. [Default: %default]""")
parser.add_option('-i', '--include', dest="header_include_path", action='store', default='json/json.h',
help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""")
parser.add_option('-t', '--top-dir', dest="top_dir", action='store', default=os.getcwd(),
help="""Source top-directory. [Default: %default]""")
parser.enable_interspersed_args()
options, args = parser.parse_args()
msg = amalgamate_source( source_top_dir=options.top_dir,
target_source_path=options.target_source_path,
header_include_path=options.header_include_path )
if msg:
sys.stderr.write( msg + '\n' )
sys.exit( 1 )
else:
        print 'Source successfully amalgamated'
if __name__ == '__main__':
main()
| apache-2.0 | 8,084,577,438,781,655,000 | 44.38 | 119 | 0.627736 | false |
eesatfan/openpli-enigma2 | lib/python/Tools/NumericalTextInput.py | 16 | 3334 | # -*- coding: UTF-8 -*-
from enigma import eTimer
from Components.Language import language
# Multi-tap key maps: tuple index i lists the characters cycled by digit
# key i; MAPPINGS (below) maps a language code to one of these tuples.
MAP_SEARCH = (
u"%_0",
u" 1",
u"abc2",
u"def3",
u"ghi4",
u"jkl5",
u"mno6",
u"pqrs7",
u"tuv8",
u"wxyz9",
)
MAP_SEARCH_UPCASE = (
U"0%_",
U"1 ",
U"ABC2",
U"DEF3",
U"GHI4",
U"JKL5",
U"MNO6",
U"PQRS7",
U"TUV8",
U"WXYZ9",
)
MAP_DEFAULT = (
u"0,?!&@=*'+\"()$~%",
u" 1.:;/-_",
u"abc2ABC",
u"def3DEF",
u"ghi4GHI",
u"jkl5JKL",
u"mno6MNO",
u"pqrs7PQRS",
u"tuv8TUV",
u"wxyz9WXYZ",
)
MAP_DE = (
u"0,?!&@=*'+\"()$~%",
u" 1.:;/-_",
u"abcä2ABCÄ",
u"def3DEF",
u"ghi4GHI",
u"jkl5JKL",
u"mnoö6MNOÖ",
u"pqrsß7PQRSß",
u"tuvü8TUVÜ",
u"wxyz9WXYZ",
)
MAP_ES = (
u"0,?!&@=*'+\"()$~%",
u" 1.:;/-_",
u"abcáà2ABCÁÀ",
u"deéèf3DEFÉÈ",
u"ghiíì4GHIÍÌ",
u"jkl5JKL",
u"mnñoóò6MNÑOÓÒ",
u"pqrs7PQRS",
u"tuvúù8TUVÚÙ",
u"wxyz9WXYZ",
)
MAP_SE = (
u"0,?!&@=*'+\"()$~%",
u" 1.:;/-_",
u"abcåä2ABCÅÄ",
u"defé3DEFÉ",
u"ghi4GHI",
u"jkl5JKL",
u"mnoö6MNOÖ",
u"pqrs7PQRS",
u"tuv8TUV",
u"wxyz9WXYZ",
)
MAP_CZ = (
u"0,?'+\"()@$!=&*%",
u" 1.:;/-_",
u"abc2áäčABCÁÄČ",
u"def3ďéěDEFĎÉĚ",
u"ghi4íGHIÍ",
u"jkl5ľĺJKLĽĹ",
u"mno6ňóöôMNOŇÓÖÔ",
u"pqrs7řŕšPQRSŘŔŠ",
u"tuv8ťúůüTUVŤÚŮÜ",
u"wxyz9ýžWXYZÝŽ",
)
MAP_PL = (
u"0,?'+\"()@$!=&*%",
u" 1.:;/-_",
u"abcąć2ABCĄĆ",
u"defę3DEFĘ",
u"ghi4GHI",
u"jklł5JKLŁ",
u"mnońó6MNOŃÓ",
u"pqrsś7PQRSŚ",
u"tuv8TUV",
u"wxyzźż9WXYZŹŻ",
)
MAP_RU = (
u"0,?'+\"()@$!=&*%",
u" 1.:;/-_",
u"abcабвг2ABCАБВГ",
u"defдежз3DEFДЕЖЗ",
u"ghiийкл4GHIИЙКЛ",
u"jklмноп5JKLМНОП",
u"mnoрсту6MNOРСТУ",
u"pqrsфхцч7PQRSФХЦЧ",
u"tuvшщьы8TUVШЩЬЫ",
u"wxyzъэюя9WXYZЪЭЮЯ",
)
MAPPINGS = {
'de_DE': MAP_DE,
'es_ES': MAP_ES,
'sv_SE': MAP_SE,
'fi_FI': MAP_SE,
'cs_CZ': MAP_CZ,
'sk_SK': MAP_CZ,
'pl_PL': MAP_PL,
'ru_RU': MAP_RU,
}
class NumericalTextInput:
def __init__(self, nextFunc=None, handleTimeout = True, search = False, mapping = None):
self.useableChars=None
self.nextFunction=nextFunc
if handleTimeout:
self.timer = eTimer()
self.timer.callback.append(self.timeout)
else:
self.timer = None
self.lastKey = -1
self.pos = -1
if mapping is not None:
self.mapping = mapping
elif search:
self.mapping = MAP_SEARCH
else:
self.mapping = MAPPINGS.get(language.getLanguage(), MAP_DEFAULT)
def setUseableChars(self, useable):
self.useableChars = unicode(useable)
def getKey(self, num):
cnt=0
if self.lastKey != num:
if self.lastKey != -1:
self.nextChar()
self.lastKey = num
self.pos = -1
if self.timer is not None:
self.timer.start(1000, True)
while True:
self.pos += 1
if len(self.mapping[num]) <= self.pos:
self.pos = 0
if self.useableChars:
pos = self.useableChars.find(self.mapping[num][self.pos])
if pos == -1:
cnt += 1
if cnt < len(self.mapping[num]):
continue
else:
return None
break
return self.mapping[num][self.pos]
def nextKey(self):
if self.timer is not None:
self.timer.stop()
self.lastKey = -1
def nextChar(self):
self.nextKey()
if self.nextFunction:
self.nextFunction()
def timeout(self):
if self.lastKey != -1:
self.nextChar()
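# A minimal usage sketch for NumericalTextInput (key presses are illustrative;
# handleTimeout=False avoids the enigma eTimer dependency outside the UI):
#
#     nti = NumericalTextInput(handleTimeout=False)
#     nti.getKey(2)   # -> u'a'  (first character mapped to key 2)
#     nti.getKey(2)   # -> u'b'  (pressing the same key cycles its characters)
#     nti.nextKey()   # commit; the next getKey() starts a fresh character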
| gpl-2.0 | 8,630,109,322,025,628,000 | 16.417582 | 89 | 0.598107 | false |
glove747/liberty-neutron | neutron/tests/unit/extensions/test_external_net.py | 6 | 7741 | # Copyright (c) 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_log import log as logging
from oslo_utils import uuidutils
import testtools
from webob import exc
from neutron import context
from neutron.db import models_v2
from neutron.extensions import external_net as external_net
from neutron import manager
from neutron.tests.unit.api.v2 import test_base
from neutron.tests.unit.db import test_db_base_plugin_v2
LOG = logging.getLogger(__name__)
_uuid = uuidutils.generate_uuid
_get_path = test_base._get_path
class ExtNetTestExtensionManager(object):
def get_resources(self):
return []
def get_actions(self):
return []
def get_request_extensions(self):
return []
class ExtNetDBTestCase(test_db_base_plugin_v2.NeutronDbPluginV2TestCase):
def _create_network(self, fmt, name, admin_state_up, **kwargs):
"""Override the routine for allowing the router:external attribute."""
# attributes containing a colon should be passed with
# a double underscore
new_args = dict(zip(map(lambda x: x.replace('__', ':'), kwargs),
kwargs.values()))
arg_list = new_args.pop('arg_list', ()) + (external_net.EXTERNAL,)
return super(ExtNetDBTestCase, self)._create_network(
fmt, name, admin_state_up, arg_list=arg_list, **new_args)
def setUp(self):
plugin = 'neutron.tests.unit.extensions.test_l3.TestNoL3NatPlugin'
ext_mgr = ExtNetTestExtensionManager()
super(ExtNetDBTestCase, self).setUp(plugin=plugin, ext_mgr=ext_mgr)
def _set_net_external(self, net_id):
self._update('networks', net_id,
{'network': {external_net.EXTERNAL: True}})
def test_list_nets_external(self):
with self.network() as n1:
self._set_net_external(n1['network']['id'])
with self.network():
body = self._list('networks')
self.assertEqual(len(body['networks']), 2)
body = self._list('networks',
query_params="%s=True" %
external_net.EXTERNAL)
self.assertEqual(len(body['networks']), 1)
body = self._list('networks',
query_params="%s=False" %
external_net.EXTERNAL)
self.assertEqual(len(body['networks']), 1)
def test_list_nets_external_pagination(self):
if self._skip_native_pagination:
self.skipTest("Skip test for not implemented pagination feature")
with self.network(name='net1') as n1, self.network(name='net3') as n3:
self._set_net_external(n1['network']['id'])
self._set_net_external(n3['network']['id'])
with self.network(name='net2') as n2:
self._test_list_with_pagination(
'network', (n1, n3), ('name', 'asc'), 1, 3,
query_params='router:external=True')
self._test_list_with_pagination(
'network', (n2, ), ('name', 'asc'), 1, 2,
query_params='router:external=False')
def test_get_network_succeeds_without_filter(self):
plugin = manager.NeutronManager.get_plugin()
ctx = context.Context(None, None, is_admin=True)
result = plugin.get_networks(ctx, filters=None)
self.assertEqual(result, [])
def test_update_network_set_external_non_admin_fails(self):
# Assert that a non-admin user cannot update the
# router:external attribute
with self.network(tenant_id='noadmin') as network:
data = {'network': {'router:external': True}}
req = self.new_update_request('networks',
data,
network['network']['id'])
req.environ['neutron.context'] = context.Context('', 'noadmin')
res = req.get_response(self.api)
self.assertEqual(exc.HTTPForbidden.code, res.status_int)
def test_network_filter_hook_admin_context(self):
plugin = manager.NeutronManager.get_plugin()
ctx = context.Context(None, None, is_admin=True)
model = models_v2.Network
conditions = plugin._network_filter_hook(ctx, model, [])
self.assertEqual(conditions, [])
def test_network_filter_hook_nonadmin_context(self):
plugin = manager.NeutronManager.get_plugin()
ctx = context.Context('edinson', 'cavani')
model = models_v2.Network
txt = "externalnetworks.network_id IS NOT NULL"
conditions = plugin._network_filter_hook(ctx, model, [])
self.assertEqual(conditions.__str__(), txt)
# Try to concatenate conditions
conditions = plugin._network_filter_hook(ctx, model, conditions)
self.assertEqual(conditions.__str__(), "%s OR %s" % (txt, txt))
def test_create_port_external_network_non_admin_fails(self):
with self.network(router__external=True) as ext_net:
with self.subnet(network=ext_net) as ext_subnet:
with testtools.ExpectedException(
exc.HTTPClientError) as ctx_manager:
with self.port(subnet=ext_subnet,
set_context='True',
tenant_id='noadmin'):
pass
self.assertEqual(ctx_manager.exception.code, 403)
def test_create_port_external_network_admin_succeeds(self):
with self.network(router__external=True) as ext_net:
with self.subnet(network=ext_net) as ext_subnet:
with self.port(subnet=ext_subnet) as port:
self.assertEqual(port['port']['network_id'],
ext_net['network']['id'])
def test_create_external_network_non_admin_fails(self):
with testtools.ExpectedException(exc.HTTPClientError) as ctx_manager:
with self.network(router__external=True,
set_context='True',
tenant_id='noadmin'):
pass
self.assertEqual(ctx_manager.exception.code, 403)
def test_create_external_network_admin_succeeds(self):
with self.network(router__external=True) as ext_net:
self.assertTrue(ext_net['network'][external_net.EXTERNAL])
def test_delete_network_check_disassociated_floatingips(self):
with mock.patch.object(manager.NeutronManager,
'get_service_plugins') as srv_plugins:
l3_mock = mock.Mock()
srv_plugins.return_value = {'L3_ROUTER_NAT': l3_mock}
with self.network() as net:
req = self.new_delete_request('networks', net['network']['id'])
res = req.get_response(self.api)
self.assertEqual(res.status_int, exc.HTTPNoContent.code)
(l3_mock.delete_disassociated_floatingips
.assert_called_once_with(mock.ANY, net['network']['id']))
| apache-2.0 | 7,486,398,218,559,680,000 | 42.982955 | 79 | 0.594884 | false |
lgarren/spack | lib/spack/external/_pytest/recwarn.py | 10 | 7361 | """ recording warnings during test function execution. """
import inspect
import _pytest._code
import py
import sys
import warnings
import pytest
@pytest.yield_fixture
def recwarn(request):
"""Return a WarningsRecorder instance that provides these methods:
* ``pop(category=None)``: return last warning matching the category.
* ``clear()``: clear list of warnings
See http://docs.python.org/library/warnings.html for information
on warning categories.
"""
wrec = WarningsRecorder()
with wrec:
warnings.simplefilter('default')
yield wrec
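# A minimal usage sketch for the fixture above (the test body is illustrative,
# not part of this module):
#
#     def test_deprecation_warning(recwarn):
#         warnings.warn("use the new API", DeprecationWarning)
#         w = recwarn.pop(DeprecationWarning)
#         assert "new API" in str(w.message)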
def pytest_namespace():
return {'deprecated_call': deprecated_call,
'warns': warns}
def deprecated_call(func=None, *args, **kwargs):
""" assert that calling ``func(*args, **kwargs)`` triggers a
``DeprecationWarning`` or ``PendingDeprecationWarning``.
This function can be used as a context manager::
>>> import warnings
>>> def api_call_v2():
... warnings.warn('use v3 of this api', DeprecationWarning)
... return 200
>>> with deprecated_call():
... assert api_call_v2() == 200
Note: we cannot use WarningsRecorder here because it is still subject
to the mechanism that prevents warnings of the same type from being
triggered twice for the same module. See #1190.
"""
if not func:
return WarningsChecker(expected_warning=DeprecationWarning)
categories = []
def warn_explicit(message, category, *args, **kwargs):
categories.append(category)
old_warn_explicit(message, category, *args, **kwargs)
def warn(message, category=None, *args, **kwargs):
if isinstance(message, Warning):
categories.append(message.__class__)
else:
categories.append(category)
old_warn(message, category, *args, **kwargs)
old_warn = warnings.warn
old_warn_explicit = warnings.warn_explicit
warnings.warn_explicit = warn_explicit
warnings.warn = warn
try:
ret = func(*args, **kwargs)
finally:
warnings.warn_explicit = old_warn_explicit
warnings.warn = old_warn
deprecation_categories = (DeprecationWarning, PendingDeprecationWarning)
if not any(issubclass(c, deprecation_categories) for c in categories):
__tracebackhide__ = True
raise AssertionError("%r did not produce DeprecationWarning" % (func,))
return ret
def warns(expected_warning, *args, **kwargs):
"""Assert that code raises a particular class of warning.
Specifically, the input @expected_warning can be a warning class or
tuple of warning classes, and the code must return that warning
(if a single class) or one of those warnings (if a tuple).
This helper produces a list of ``warnings.WarningMessage`` objects,
one for each warning raised.
This function can be used as a context manager, or any of the other ways
``pytest.raises`` can be used::
>>> with warns(RuntimeWarning):
... warnings.warn("my warning", RuntimeWarning)
"""
wcheck = WarningsChecker(expected_warning)
if not args:
return wcheck
elif isinstance(args[0], str):
code, = args
assert isinstance(code, str)
frame = sys._getframe(1)
loc = frame.f_locals.copy()
loc.update(kwargs)
with wcheck:
code = _pytest._code.Source(code).compile()
py.builtin.exec_(code, frame.f_globals, loc)
else:
func = args[0]
with wcheck:
return func(*args[1:], **kwargs)
class RecordedWarning(object):
def __init__(self, message, category, filename, lineno, file, line):
self.message = message
self.category = category
self.filename = filename
self.lineno = lineno
self.file = file
self.line = line
class WarningsRecorder(object):
"""A context manager to record raised warnings.
Adapted from `warnings.catch_warnings`.
"""
def __init__(self, module=None):
self._module = sys.modules['warnings'] if module is None else module
self._entered = False
self._list = []
@property
def list(self):
"""The list of recorded warnings."""
return self._list
def __getitem__(self, i):
"""Get a recorded warning by index."""
return self._list[i]
def __iter__(self):
"""Iterate through the recorded warnings."""
return iter(self._list)
def __len__(self):
"""The number of recorded warnings."""
return len(self._list)
def pop(self, cls=Warning):
"""Pop the first recorded warning, raise exception if not exists."""
for i, w in enumerate(self._list):
if issubclass(w.category, cls):
return self._list.pop(i)
__tracebackhide__ = True
raise AssertionError("%r not found in warning list" % cls)
def clear(self):
"""Clear the list of recorded warnings."""
self._list[:] = []
def __enter__(self):
if self._entered:
__tracebackhide__ = True
raise RuntimeError("Cannot enter %r twice" % self)
self._entered = True
self._filters = self._module.filters
self._module.filters = self._filters[:]
self._showwarning = self._module.showwarning
def showwarning(message, category, filename, lineno,
file=None, line=None):
self._list.append(RecordedWarning(
message, category, filename, lineno, file, line))
# still perform old showwarning functionality
self._showwarning(
message, category, filename, lineno, file=file, line=line)
self._module.showwarning = showwarning
# allow the same warning to be raised more than once
self._module.simplefilter('always')
return self
def __exit__(self, *exc_info):
if not self._entered:
__tracebackhide__ = True
raise RuntimeError("Cannot exit %r without entering first" % self)
self._module.filters = self._filters
self._module.showwarning = self._showwarning
class WarningsChecker(WarningsRecorder):
def __init__(self, expected_warning=None, module=None):
super(WarningsChecker, self).__init__(module=module)
msg = ("exceptions must be old-style classes or "
"derived from Warning, not %s")
if isinstance(expected_warning, tuple):
for exc in expected_warning:
if not inspect.isclass(exc):
raise TypeError(msg % type(exc))
elif inspect.isclass(expected_warning):
expected_warning = (expected_warning,)
elif expected_warning is not None:
raise TypeError(msg % type(expected_warning))
self.expected_warning = expected_warning
def __exit__(self, *exc_info):
super(WarningsChecker, self).__exit__(*exc_info)
# only check if we're not currently handling an exception
if all(a is None for a in exc_info):
if self.expected_warning is not None:
if not any(r.category in self.expected_warning for r in self):
__tracebackhide__ = True
pytest.fail("DID NOT WARN")
| lgpl-2.1 | -3,810,861,104,813,229,600 | 31.570796 | 79 | 0.616764 | false |
jjanssen/django-cms-patches | cms/admin/useradmin.py | 2 | 2246 | from django.conf import settings
from cms.admin.forms import PageUserForm, PageUserGroupForm
from cms.admin.permissionadmin import GenericCmsPermissionAdmin
from cms.exceptions import NoPermissionsException
from cms.models import PageUser, PageUserGroup
from cms.utils.permissions import get_subordinate_users
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import ugettext as _
class PageUserAdmin(UserAdmin, GenericCmsPermissionAdmin):
form = PageUserForm
model = PageUser
list_display = ('username', 'email', 'first_name', 'last_name', 'created_by')
# get_fieldsets method may add fieldsets depending on user
fieldsets = [
(None, {'fields': ('username', ('password1', 'password2'), 'notify_user')}),
(_('User details'), {'fields': (('first_name', 'last_name'), 'email')}),
(_('Groups'), {'fields': ('groups',)}),
]
def get_fieldsets(self, request, obj=None):
fieldsets = self.update_permission_fieldsets(request, obj)
        if '/add' not in request.path:
fieldsets[0] = (None, {'fields': ('username', 'notify_user')})
fieldsets.append((_('Password'), {'fields': ('password1', 'password2'), 'classes': ('collapse',)}))
return fieldsets
def queryset(self, request):
qs = super(PageUserAdmin, self).queryset(request)
try:
user_id_set = get_subordinate_users(request.user).values_list('id', flat=True)
return qs.filter(pk__in=user_id_set)
except NoPermissionsException:
return self.model.objects.get_empty_query_set()
def add_view(self, request):
return super(UserAdmin, self).add_view(request)
class PageUserGroupAdmin(admin.ModelAdmin, GenericCmsPermissionAdmin):
form = PageUserGroupForm
list_display = ('name', 'created_by')
fieldsets = [
(None, {'fields': ('name',)}),
]
def get_fieldsets(self, request, obj=None):
return self.update_permission_fieldsets(request, obj)
if settings.CMS_PERMISSION:
admin.site.register(PageUser, PageUserAdmin)
admin.site.register(PageUserGroup, PageUserGroupAdmin) | bsd-3-clause | 9,025,991,281,404,371,000 | 37.741379 | 111 | 0.660285 | false |
UniMOOC/AAClassroom | modules/dashboard/course_settings.py | 5 | 15703 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes supporting updates to basic course settings."""
__author__ = 'Abhinav Khandelwal ([email protected])'
import cgi
import urllib
from common import schema_fields
from controllers.utils import ApplicationHandler
from controllers.utils import BaseRESTHandler
from controllers.utils import XsrfTokenManager
from models import courses
from models import models
from models import roles
from models import transforms
from modules.dashboard import filer
from modules.dashboard import messages
from modules.oeditor import oeditor
class CourseSettingsRights(object):
"""Manages view/edit rights for files."""
@classmethod
def can_view(cls, handler):
return roles.Roles.is_course_admin(handler.app_context)
@classmethod
def can_edit(cls, handler):
return roles.Roles.is_course_admin(handler.app_context)
@classmethod
def can_delete(cls, handler):
return cls.can_edit(handler)
@classmethod
def can_add(cls, handler):
return cls.can_edit(handler)
class CourseSettingsHandler(ApplicationHandler):
"""Course settings handler."""
EXTRA_CSS_FILES = []
EXTRA_JS_FILES = []
ADDITIONAL_DIRS = []
def post_course_availability(self):
course = self.get_course()
settings = course.get_environ(self.app_context)
availability = self.request.get('availability') == 'True'
settings['course']['now_available'] = availability
course.save_settings(settings)
self.redirect('/dashboard')
def post_course_browsability(self):
course = self.get_course()
settings = course.get_environ(self.app_context)
browsability = self.request.get('browsability') == 'True'
settings['course']['browsable'] = browsability
course.save_settings(settings)
self.redirect('/dashboard')
def post_edit_course_settings(self):
"""Handles editing of course.yaml."""
filer.create_course_file_if_not_exists(self)
extra_args = {}
for name in ('section_names', 'tab', 'tab_title'):
value = self.request.get(name)
if value:
extra_args[name] = value
self.redirect(self.get_action_url(
'edit_basic_settings', key='/course.yaml', extra_args=extra_args))
def get_edit_basic_settings(self):
"""Shows editor for course.yaml."""
key = self.request.get('key')
# The editor for all course settings is getting rather large. Here,
# prune out all sections except the one named. Names can name either
# entire sub-registries, or a single item. E.g., "course" selects all
# items under the 'course' sub-registry, while
# "base:before_head_tag_ends" selects just that one field.
registry = self.get_course().create_settings_schema()
section_names = urllib.unquote(self.request.get('section_names'))
if section_names:
registry = registry.clone_only_items_named(section_names.split(','))
tab = self.request.get('tab')
exit_url = self.canonicalize_url('/dashboard?action=settings&tab=%s' %
tab)
rest_url = self.canonicalize_url(CourseSettingsRESTHandler.URI)
form_html = oeditor.ObjectEditor.get_html_for(
self, registry.get_json_schema(), registry.get_schema_dict(),
key, rest_url, exit_url, extra_css_files=self.EXTRA_CSS_FILES,
extra_js_files=self.EXTRA_JS_FILES,
additional_dirs=self.ADDITIONAL_DIRS,
required_modules=CourseSettingsRESTHandler.REQUIRED_MODULES)
template_values = {
'page_title': self.format_title(
'Settings > %s' %
urllib.unquote(self.request.get('tab_title'))),
'page_description': messages.EDIT_SETTINGS_DESCRIPTION,
'main_content': form_html,
}
self.render_page(template_values, in_action='settings')
class CourseYamlRESTHandler(BaseRESTHandler):
"""Common base for REST handlers in this file."""
def get_course_dict(self):
return self.get_course().get_environ(self.app_context)
def get(self):
"""Handles REST GET verb and returns an object as JSON payload."""
assert self.app_context.is_editable_fs()
key = self.request.get('key')
if not CourseSettingsRights.can_view(self):
transforms.send_json_response(
self, 401, 'Access denied.', {'key': key})
return
# Load data if possible.
fs = self.app_context.fs.impl
filename = fs.physical_to_logical('/course.yaml')
try:
stream = fs.get(filename)
except: # pylint: disable=bare-except
stream = None
if not stream:
transforms.send_json_response(
self, 404, 'Object not found.', {'key': key})
return
# Prepare data.
json_payload = self.process_get()
transforms.send_json_response(
self, 200, 'Success.',
payload_dict=json_payload,
xsrf_token=XsrfTokenManager.create_xsrf_token(self.XSRF_ACTION))
def put(self):
"""Handles REST PUT verb with JSON payload."""
assert self.app_context.is_editable_fs()
request_param = self.request.get('request')
if not request_param:
transforms.send_json_response(
self, 400, 'Missing "request" parameter.')
return
try:
request = transforms.loads(request_param)
except ValueError:
transforms.send_json_response(
self, 400, 'Malformed "request" parameter.')
return
key = request.get('key')
if not key:
transforms.send_json_response(
self, 400, 'Request missing "key" parameter.')
return
payload_param = request.get('payload')
if not payload_param:
transforms.send_json_response(
self, 400, 'Request missing "payload" parameter.')
return
try:
payload = transforms.loads(payload_param)
except ValueError:
transforms.send_json_response(
self, 400, 'Malformed "payload" parameter.')
return
if not self.assert_xsrf_token_or_fail(
request, self.XSRF_ACTION, {'key': key}):
return
if not CourseSettingsRights.can_edit(self):
transforms.send_json_response(
self, 401, 'Access denied.', {'key': key})
return
request_data = self.process_put(request, payload)
if request_data:
course_settings = courses.deep_dict_merge(
request_data, self.get_course_dict())
            if not self.get_course().save_settings(course_settings):
                transforms.send_json_response(self, 412, 'Validation error.')
                return
            transforms.send_json_response(self, 200, 'Saved.')
def delete(self):
"""Handles REST DELETE verb with JSON payload."""
key = self.request.get('key')
if not self.assert_xsrf_token_or_fail(
self.request, self.XSRF_ACTION, {'key': key}):
return
if (not CourseSettingsRights.can_delete(self) or
not self.is_deletion_allowed()):
transforms.send_json_response(
self, 401, 'Access denied.', {'key': key})
return
entity = self.process_delete()
if self.get_course().save_settings(entity):
transforms.send_json_response(self, 200, 'Deleted.')
class CourseSettingsRESTHandler(CourseYamlRESTHandler):
"""Provides REST API for a file."""
REQUIRED_MODULES = [
'inputex-date', 'inputex-string', 'inputex-textarea', 'inputex-url',
'inputex-checkbox', 'inputex-select', 'inputex-uneditable', 'gcb-rte']
URI = '/rest/course/settings'
XSRF_ACTION = 'basic-course-settings-put'
def get_group_id(self, email):
if not email or '@googlegroups.com' not in email:
return None
return email.split('@')[0]
def get_groups_web_url(self, email):
group_id = self.get_group_id(email)
if not group_id:
return None
return 'https://groups.google.com/group/' + group_id
def get_groups_embed_url(self, email):
group_id = self.get_group_id(email)
if not group_id:
return None
return 'https://groups.google.com/forum/embed/?place=forum/' + group_id
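    # For example (a hypothetical list address):
    #   get_groups_embed_url('[email protected]')
    #   -> 'https://groups.google.com/forum/embed/?place=forum/my-course-forum'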
def process_get(self):
entity = {}
schema = self.get_course().create_settings_schema()
schema.convert_entity_to_json_entity(
self.get_course_dict(), entity)
json_payload = transforms.dict_to_json(
entity, schema.get_json_schema_dict())
return json_payload
def _process_course_data(self, course_data):
if 'forum_email' in course_data:
forum_email = course_data['forum_email']
forum_web_url = self.get_groups_web_url(forum_email)
if forum_web_url:
course_data['forum_url'] = forum_web_url
forum_web_url = self.get_groups_embed_url(forum_email)
if forum_web_url:
course_data['forum_embed_url'] = forum_web_url
if 'announcement_list_email' in course_data:
announcement_email = course_data['announcement_list_email']
announcement_web_url = self.get_groups_web_url(
announcement_email)
if announcement_web_url:
course_data['announcement_list_url'] = announcement_web_url
def _process_extra_locales(self, extra_locales):
"""Make sure each locale has a label to go along."""
existing = set([
label.title for label in models.LabelDAO.get_all_of_type(
models.LabelDTO.LABEL_TYPE_LOCALE)])
course_locale = self.app_context.default_locale
for extra_locale in extra_locales + [{'locale': course_locale}]:
locale = extra_locale['locale']
if locale in existing:
continue
models.LabelDAO.save(models.LabelDTO(
None, {'title': locale,
'version': '1.0',
'description': '[%s] locale' % locale,
'type': models.LabelDTO.LABEL_TYPE_LOCALE}))
def process_put(self, request, payload):
errors = []
request_data = {}
schema = self.get_course().create_settings_schema()
schema.convert_json_to_entity(payload, request_data)
schema.validate(request_data, errors)
if errors:
transforms.send_json_response(
self, 400, 'Invalid data: \n' + '\n'.join(errors))
return
if 'extra_locales' in request_data:
self._process_extra_locales(request_data['extra_locales'])
if 'course' in request_data:
self._process_course_data(request_data['course'])
return request_data
def is_deletion_allowed(self):
return False
class HtmlHookHandler(ApplicationHandler):
"""Set up for OEditor manipulation of HTML hook contents.
A separate handler and REST handler is required for hook contents,
since the set of hooks is not statically known. Users are free to add
    whatever hooks they want wherever they want with fairly arbitrary
names. This class and its companion REST class deal with persisting the
hook values into the course.yaml settings.
"""
def post_edit_html_hook(self):
filer.create_course_file_if_not_exists(self)
self.redirect(self.get_action_url(
'edit_html_hook', key=self.request.get('html_hook')))
def get_edit_html_hook(self):
key = self.request.get('key')
registry = HtmlHookRESTHandler.REGISTRY
exit_url = self.canonicalize_url(self.request.referer)
rest_url = self.canonicalize_url(HtmlHookRESTHandler.URI)
delete_url = '%s?%s' % (
self.canonicalize_url(HtmlHookRESTHandler.URI),
urllib.urlencode({
'key': key,
'xsrf_token': cgi.escape(
self.create_xsrf_token(HtmlHookRESTHandler.XSRF_ACTION))
}))
form_html = oeditor.ObjectEditor.get_html_for(
self, registry.get_json_schema(), registry.get_schema_dict(),
key, rest_url, exit_url,
delete_url=delete_url, delete_method='delete',
required_modules=HtmlHookRESTHandler.REQUIRED_MODULES)
template_values = {}
template_values['page_title'] = self.format_title('Edit Hook HTML')
template_values['page_description'] = (
messages.EDIT_HTML_HOOK_DESCRIPTION)
template_values['main_content'] = form_html
self.render_page(template_values)
def _create_hook_registry():
reg = schema_fields.FieldRegistry('Html Hook', description='Html Hook')
reg.add_property(schema_fields.SchemaField(
'hook_content', 'HTML Hook Content', 'html',
optional=True))
return reg
class HtmlHookRESTHandler(CourseYamlRESTHandler):
"""REST API for individual HTML hook entries in course.yaml."""
REGISTRY = _create_hook_registry()
REQUIRED_MODULES = [
'inputex-textarea', 'inputex-uneditable', 'gcb-rte', 'inputex-hidden']
URI = '/rest/course/html_hook'
XSRF_ACTION = 'html-hook-put'
def process_get(self):
course_dict = self.get_course_dict()
html_hook = self.request.get('key')
path = html_hook.split(':')
for element in path:
item = course_dict.get(element)
if type(item) == dict:
course_dict = item
return {'hook_content': item}
def process_put(self, request, payload):
request_data = {}
HtmlHookRESTHandler.REGISTRY.convert_json_to_entity(
payload, request_data)
if 'hook_content' not in request_data:
transforms.send_json_response(
self, 400, 'Payload missing "hook_content" parameter.')
return None
# Walk from bottom to top of hook element name building up
# dict-in-dict until we are at outermost level, which is
# the course_dict we will return.
course_dict = request_data['hook_content']
for element in reversed(request['key'].split(':')):
course_dict = {element: course_dict}
return course_dict
def is_deletion_allowed(self):
return True
def process_delete(self):
html_hook = self.request.get('key')
course_dict = self.get_course_dict()
pruned_dict = course_dict
for element in html_hook.split(':'):
if element in pruned_dict:
if type(pruned_dict[element]) == dict:
pruned_dict = pruned_dict[element]
else:
del pruned_dict[element]
return course_dict
| apache-2.0 | -4,669,929,664,366,945,000 | 36.122931 | 80 | 0.610329 | false |
rossburton/yocto-autobuilder | lib/python2.7/site-packages/Twisted-12.2.0-py2.7-linux-x86_64.egg/twisted/manhole/telnet.py | 37 | 3494 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""Telnet-based shell."""
# twisted imports
from twisted.protocols import telnet
from twisted.internet import protocol
from twisted.python import log, failure
# system imports
import string, copy, sys
from cStringIO import StringIO
class Shell(telnet.Telnet):
"""A Python command-line shell."""
def connectionMade(self):
telnet.Telnet.connectionMade(self)
self.lineBuffer = []
def loggedIn(self):
self.transport.write(">>> ")
def checkUserAndPass(self, username, password):
return ((self.factory.username == username) and (password == self.factory.password))
def write(self, data):
"""Write some data to the transport.
"""
self.transport.write(data)
def telnet_Command(self, cmd):
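        # Multi-line mode: while lineBuffer is non-empty, an empty line
        # flushes the buffered block to doCommand(); anything else keeps
        # accumulating and prints a continuation prompt.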
if self.lineBuffer:
if not cmd:
cmd = string.join(self.lineBuffer, '\n') + '\n\n\n'
self.doCommand(cmd)
self.lineBuffer = []
return "Command"
else:
self.lineBuffer.append(cmd)
self.transport.write("... ")
return "Command"
else:
self.doCommand(cmd)
return "Command"
def doCommand(self, cmd):
# TODO -- refactor this, Reality.author.Author, and the manhole shell
        # to use common functionality (perhaps a twisted.python.code module?)
fn = '$telnet$'
result = None
try:
out = sys.stdout
sys.stdout = self
try:
code = compile(cmd,fn,'eval')
result = eval(code, self.factory.namespace)
except:
try:
code = compile(cmd, fn, 'exec')
exec code in self.factory.namespace
except SyntaxError, e:
if not self.lineBuffer and str(e)[:14] == "unexpected EOF":
self.lineBuffer.append(cmd)
self.transport.write("... ")
return
else:
failure.Failure().printTraceback(file=self)
log.deferr()
self.write('\r\n>>> ')
return
except:
io = StringIO()
failure.Failure().printTraceback(file=self)
log.deferr()
self.write('\r\n>>> ')
return
finally:
sys.stdout = out
self.factory.namespace['_'] = result
if result is not None:
self.transport.write(repr(result))
self.transport.write('\r\n')
self.transport.write(">>> ")
class ShellFactory(protocol.Factory):
username = "admin"
password = "admin"
protocol = Shell
service = None
def __init__(self):
self.namespace = {
'factory': self,
'service': None,
'_': None
}
def setService(self, service):
self.namespace['service'] = self.service = service
def __getstate__(self):
"""This returns the persistent state of this shell factory.
"""
dict = self.__dict__
ns = copy.copy(dict['namespace'])
dict['namespace'] = ns
if ns.has_key('__builtins__'):
del ns['__builtins__']
return dict
| gpl-2.0 | 6,508,369,808,368,335,000 | 28.863248 | 92 | 0.508872 | false |
TeamEOS/external_chromium_org | tools/cr/cr/commands/info.py | 44 | 1236 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module for the info implementation of Command."""
import cr
class InfoCommand(cr.Command):
"""The cr info command implementation."""
def __init__(self):
super(InfoCommand, self).__init__()
self.help = 'Print information about the cr environment'
def AddArguments(self, subparsers):
parser = super(InfoCommand, self).AddArguments(subparsers)
parser.add_argument(
'-s', '--short', dest='_short',
action='store_true', default=False,
help='Short form results, useful for scripting.'
)
self.ConsumeArgs(parser, 'the environment')
return parser
def EarlyArgProcessing(self):
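    # Short-form output is meant for scripting (see the --short help text), so
    # presumably there is no need to require a configured build directory just
    # to print a value.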
if getattr(cr.context.args, '_short', False):
self.requires_build_dir = False
def Run(self):
if cr.context.remains:
for var in cr.context.remains:
if getattr(cr.context.args, '_short', False):
val = cr.context.Find(var)
if val is None:
val = ''
print val
else:
print var, '=', cr.context.Find(var)
else:
cr.base.client.PrintInfo()
| bsd-3-clause | -6,554,393,157,461,776,000 | 27.744186 | 72 | 0.63835 | false |
alexandregz/simian | src/simian/mac/admin/packages.py | 1 | 7391 | #!/usr/bin/env python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
"""Packages admin handler."""
import datetime
from simian.mac import admin
from simian.mac import common
from simian.mac import models
from simian.mac.common import auth
DEFAULT_PACKAGE_LOG_FETCH_LIMIT = 25
class Packages(admin.AdminHandler):
"""Handler for /admin/packages."""
XSRF_PROTECT = True
DATASTORE_MODEL = models.PackageInfo
LOGGING_MODEL = models.AdminPackageLog
TEMPLATE = 'packages.html'
REPORT_TYPE = 'packages'
LOG_REPORT_TYPE = 'package_logs'
def get(self, report=None):
"""GET handler."""
auth.DoUserAuth()
if report == 'logs':
self._DisplayLogs()
else:
historical = self.request.get('historical') == '1'
applesus = self.request.get('applesus') == '1'
if historical or applesus:
self._DisplayPackagesListFromCache(applesus=applesus)
else:
self._DisplayPackagesList()
def _GetPackageQuery(self):
"""Build query."""
all_packages = self.request.get('all_packages') == '1'
query = self.DATASTORE_MODEL.all()
if self.REPORT_TYPE == 'packages' and not all_packages:
query.filter('catalogs IN', common.TRACKS)
return query
def _DisplayPackagesList(self):
"""Displays list of all installs/removals/etc."""
installs, counts_mtime = models.ReportsCache.GetInstallCounts()
pending, pending_mtime = models.ReportsCache.GetPendingCounts()
packages = []
all_packages = self.request.get('all_packages') == '1'
query = self._GetPackageQuery()
for p in query:
if not p.plist:
self.error(403)
self.response.out.write('Package %s has a broken plist!' % p.filename)
return
pkg = {}
pkg['count'] = installs.get(p.munki_name, {}).get('install_count', 'N/A')
pkg['fail_count'] = installs.get(p.munki_name, {}).get(
'install_fail_count', 'N/A')
pkg['pending_count'] = pending.get(p.munki_name, 'N/A')
pkg['duration_seconds_avg'] = installs.get(p.munki_name, {}).get(
'duration_seconds_avg', None) or 'N/A'
pkg['unattended'] = p.plist.get('unattended_install', False)
force_install_after_date = p.plist.get('force_install_after_date', None)
if force_install_after_date:
pkg['force_install_after_date'] = force_install_after_date
pkg['catalogs'] = p.catalog_matrix
pkg['manifests'] = p.manifest_matrix
pkg['munki_name'] = p.munki_name or p.plist.GetMunkiName()
pkg['filename'] = p.filename
pkg['file_size'] = p.plist.get('installer_item_size', 0) * 1024
pkg['install_types'] = p.install_types
pkg['manifest_mod_access'] = p.manifest_mod_access
pkg['description'] = p.description
packages.append(pkg)
packages.sort(key=lambda pkg: pkg['munki_name'].lower())
self.Render(self.TEMPLATE,
{'packages': packages, 'counts_mtime': counts_mtime,
'pending_mtime': pending_mtime,
'report_type': self.REPORT_TYPE,
'active_pkg': self.request.GET.get('activepkg'),
'is_support_user': auth.IsSupportUser(),
'can_upload': auth.HasPermission(auth.UPLOAD),
'is_admin': auth.IsAdminUser(),
'all_packages': all_packages,})
def _DisplayPackagesListFromCache(self, applesus=False):
installs, counts_mtime = models.ReportsCache.GetInstallCounts()
pkgs = []
names = installs.keys()
names.sort()
for name in names:
install = installs[name]
if applesus and install.get('applesus', False):
d = {'name': name,
'count': install.get('install_count', 'N/A'),
'fail_count': install.get('install_fail_count', 'N/A'),
'duration_seconds_avg': install.get('duration_seconds_avg', 'N/A')
}
pkgs.append(d)
elif not applesus and not install['applesus']:
d = {'name': name,
'count': install.get('install_count', 'N/A'),
'fail_count': install.get('install_fail_count', 'N/A'),
'duration_seconds_avg': install.get('duration_seconds_avg', 'N/A')
}
pkgs.append(d)
if applesus:
report_type = 'apple_historical'
else:
report_type = 'packages_historical'
self.Render(self.TEMPLATE,
{'packages': pkgs, 'counts_mtime': counts_mtime,
'applesus': applesus, 'cached_pkgs_list': True,
'report_type': report_type})
def _DisplayLogs(self):
"""Displays all models.AdminPackageLog entities."""
key_id = self.request.get('plist')
if key_id:
try:
key_id = int(key_id)
except ValueError:
self.error(404)
return
log = self.LOGGING_MODEL.get_by_id(key_id)
if self.request.get('format') == 'xml':
self.response.headers['Content-Type'] = 'text/xml; charset=utf-8'
self.response.out.write(log.plist)
else:
time = datetime.datetime.strftime(log.mtime, '%Y-%m-%d %H:%M:%S')
title = 'plist for Package Log <b>%s - %s</b>' % (log.filename, time)
raw_xml = '/admin/packages/logs?plist=%d&format=xml' % key_id
self.Render('plist.html',
{'plist_type': 'package_log',
'xml': admin.XmlToHtml(log.plist.GetXml()),
'title': title,
'raw_xml_link': raw_xml,
})
else:
filename = self.request.get('filename')
query = self.LOGGING_MODEL.all()
if filename:
query.filter('filename =', filename)
query.order('-mtime')
logs = self.Paginate(query, DEFAULT_PACKAGE_LOG_FETCH_LIMIT)
formatted_logs = []
for log in logs:
formatted_log = {}
formatted_log['data'] = log
if (hasattr(log, 'proposed_catalogs')
            and hasattr(log, 'proposed_manifests')):
formatted_log['catalogs'] = common.util.MakeTrackMatrix(
log.catalogs, log.proposed_catalogs)
formatted_log['manifests'] = common.util.MakeTrackMatrix(
log.manifests, log.proposed_manifests)
else:
formatted_log['catalogs'] = common.util.MakeTrackMatrix(log.catalogs)
formatted_log['manifests'] = common.util.MakeTrackMatrix(
log.manifests)
formatted_logs.append(formatted_log)
self.Render(
'package_logs.html',
{'logs': formatted_logs,
'report_type': self.LOG_REPORT_TYPE,
'filename': filename})
class PackageProposals(Packages):
"""Handler for /admin/proposals."""
DATASTORE_MODEL = models.PackageInfoProposal
LOGGING_MODEL = models.AdminPackageProposalLog
TEMPLATE = 'packages.html'
LOG_REPORT_TYPE = 'proposal_logs'
REPORT_TYPE = 'proposals'
def _GetPackageQuery(self):
return self.DATASTORE_MODEL.all()
| apache-2.0 | 2,467,566,175,966,245,000 | 35.053659 | 79 | 0.621837 | false |
DwangoMediaVillage/pqkmeans | test/encoder/test_pq_encoder.py | 2 | 1590 | import unittest
import pqkmeans
import numpy
import pipe
class TestPQEncoder(unittest.TestCase):
def data_source(self, n: int):
for i in range(n):
for _ in range(3):
yield [i * 100] * 6
def setUp(self):
self.encoder = pqkmeans.encoder.PQEncoder(num_subdim=2)
def test_just_train_array(self):
input_array = numpy.random.random((300, 10))
self.encoder.fit(numpy.array(input_array))
encoded = list(self.encoder.transform(numpy.array(input_array)))
self.assertEqual(len(input_array), len(encoded))
def test_fit_and_transform_generator(self):
self.encoder.fit(numpy.array(list(self.data_source(300))))
# infinite list
encoded = self.encoder.transform_generator(self.data_source(100000000)) | pipe.take(60) | pipe.as_list
for i in range(0, len(encoded), 3):
numpy.testing.assert_array_almost_equal(encoded[i], encoded[i + 1])
numpy.testing.assert_array_almost_equal(encoded[i], encoded[i + 2])
def test_transform_and_inverse_transform(self):
input_array = numpy.random.random((300, 10))
self.encoder.fit(numpy.array(input_array))
encoded = self.encoder.transform(numpy.array(input_array))
decoded = self.encoder.inverse_transform(encoded)
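        # PQ shape contract: every input vector is encoded into M sub-codes
        # (one per subspace) and decodes back to D = Ds * M dimensions.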
N1, M = encoded.shape
N2, D = decoded.shape
self.assertEqual(N1, N2)
self.assertEqual(M, self.encoder.M)
self.assertEqual(D, self.encoder.Ds * self.encoder.M)
self.assertEqual(encoded.dtype, self.encoder.code_dtype)
| mit | -1,177,935,312,318,469,000 | 35.976744 | 110 | 0.646541 | false |
rs2/pandas | pandas/tests/indexes/test_base.py | 1 | 93051 | from collections import defaultdict
from datetime import datetime, timedelta
from io import StringIO
import math
import operator
import re
import numpy as np
import pytest
import pandas._config.config as cf
from pandas._libs.tslib import Timestamp
from pandas.compat.numpy import np_datetime64_compat
from pandas.util._test_decorators import async_mark
from pandas.core.dtypes.generic import ABCIndex
import pandas as pd
from pandas import (
CategoricalIndex,
DataFrame,
DatetimeIndex,
Float64Index,
Int64Index,
PeriodIndex,
RangeIndex,
Series,
TimedeltaIndex,
UInt64Index,
date_range,
isna,
period_range,
)
import pandas._testing as tm
from pandas.core.indexes.api import (
Index,
MultiIndex,
_get_combined_index,
ensure_index,
ensure_index_from_sequences,
)
from pandas.tests.indexes.common import Base
class TestIndex(Base):
_holder = Index
def create_index(self) -> Index:
return Index(list("abcde"))
def test_can_hold_identifiers(self):
index = self.create_index()
key = index[0]
assert index._can_hold_identifiers_and_holds_name(key) is True
@pytest.mark.parametrize("index", ["datetime"], indirect=True)
def test_new_axis(self, index):
with tm.assert_produces_warning(FutureWarning):
# GH#30588 multi-dimensional indexing deprecated
new_index = index[None, :]
assert new_index.ndim == 2
assert isinstance(new_index, np.ndarray)
def test_constructor_regular(self, index):
tm.assert_contains_all(index, index)
@pytest.mark.parametrize("index", ["string"], indirect=True)
def test_constructor_casting(self, index):
# casting
arr = np.array(index)
new_index = Index(arr)
tm.assert_contains_all(arr, new_index)
tm.assert_index_equal(index, new_index)
@pytest.mark.parametrize("index", ["string"], indirect=True)
def test_constructor_copy(self, index):
# copy
# index = self.create_index()
arr = np.array(index)
new_index = Index(arr, copy=True, name="name")
assert isinstance(new_index, Index)
assert new_index.name == "name"
tm.assert_numpy_array_equal(arr, new_index.values)
arr[0] = "SOMEBIGLONGSTRING"
assert new_index[0] != "SOMEBIGLONGSTRING"
        # FIXME: don't leave commented-out
# what to do here?
# arr = np.array(5.)
# pytest.raises(Exception, arr.view, Index)
@pytest.mark.parametrize("cast_as_obj", [True, False])
@pytest.mark.parametrize(
"index",
[
pd.date_range(
"2015-01-01 10:00",
freq="D",
periods=3,
tz="US/Eastern",
name="Green Eggs & Ham",
), # DTI with tz
pd.date_range("2015-01-01 10:00", freq="D", periods=3), # DTI no tz
pd.timedelta_range("1 days", freq="D", periods=3), # td
pd.period_range("2015-01-01", freq="D", periods=3), # period
],
)
def test_constructor_from_index_dtlike(self, cast_as_obj, index):
if cast_as_obj:
result = pd.Index(index.astype(object))
else:
result = pd.Index(index)
tm.assert_index_equal(result, index)
if isinstance(index, pd.DatetimeIndex):
assert result.tz == index.tz
if cast_as_obj:
# GH#23524 check that Index(dti, dtype=object) does not
# incorrectly raise ValueError, and that nanoseconds are not
# dropped
index += pd.Timedelta(nanoseconds=50)
result = pd.Index(index, dtype=object)
assert result.dtype == np.object_
assert list(result) == list(index)
@pytest.mark.parametrize(
"index,has_tz",
[
(
pd.date_range("2015-01-01 10:00", freq="D", periods=3, tz="US/Eastern"),
True,
), # datetimetz
(pd.timedelta_range("1 days", freq="D", periods=3), False), # td
(pd.period_range("2015-01-01", freq="D", periods=3), False), # period
],
)
def test_constructor_from_series_dtlike(self, index, has_tz):
result = pd.Index(pd.Series(index))
tm.assert_index_equal(result, index)
if has_tz:
assert result.tz == index.tz
def test_constructor_from_series_freq(self):
# GH 6273
# create from a series, passing a freq
dts = ["1-1-1990", "2-1-1990", "3-1-1990", "4-1-1990", "5-1-1990"]
expected = DatetimeIndex(dts, freq="MS")
s = Series(pd.to_datetime(dts))
result = DatetimeIndex(s, freq="MS")
tm.assert_index_equal(result, expected)
def test_constructor_from_frame_series_freq(self):
# GH 6273
# create from a series, passing a freq
dts = ["1-1-1990", "2-1-1990", "3-1-1990", "4-1-1990", "5-1-1990"]
expected = DatetimeIndex(dts, freq="MS")
df = pd.DataFrame(np.random.rand(5, 3))
df["date"] = dts
result = DatetimeIndex(df["date"], freq="MS")
assert df["date"].dtype == object
expected.name = "date"
tm.assert_index_equal(result, expected)
expected = pd.Series(dts, name="date")
tm.assert_series_equal(df["date"], expected)
# GH 6274
# infer freq of same
freq = pd.infer_freq(df["date"])
assert freq == "MS"
@pytest.mark.parametrize(
"array",
[
np.arange(5),
np.array(["a", "b", "c"]),
date_range("2000-01-01", periods=3).values,
],
)
def test_constructor_ndarray_like(self, array):
# GH 5460#issuecomment-44474502
# it should be possible to convert any object that satisfies the numpy
# ndarray interface directly into an Index
class ArrayLike:
def __init__(self, array):
self.array = array
def __array__(self, dtype=None) -> np.ndarray:
return self.array
expected = pd.Index(array)
result = pd.Index(ArrayLike(array))
tm.assert_index_equal(result, expected)
def test_constructor_int_dtype_nan(self):
# see gh-15187
data = [np.nan]
expected = Float64Index(data)
result = Index(data, dtype="float")
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("dtype", ["int64", "uint64"])
def test_constructor_int_dtype_nan_raises(self, dtype):
# see gh-15187
data = [np.nan]
msg = "cannot convert"
with pytest.raises(ValueError, match=msg):
Index(data, dtype=dtype)
def test_constructor_no_pandas_array(self):
ser = pd.Series([1, 2, 3])
result = pd.Index(ser.array)
expected = pd.Index([1, 2, 3])
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"klass,dtype,na_val",
[
(pd.Float64Index, np.float64, np.nan),
(pd.DatetimeIndex, "datetime64[ns]", pd.NaT),
],
)
def test_index_ctor_infer_nan_nat(self, klass, dtype, na_val):
# GH 13467
na_list = [na_val, na_val]
expected = klass(na_list)
assert expected.dtype == dtype
result = Index(na_list)
tm.assert_index_equal(result, expected)
result = Index(np.array(na_list))
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"vals,dtype",
[
([1, 2, 3, 4, 5], "int"),
([1.1, np.nan, 2.2, 3.0], "float"),
(["A", "B", "C", np.nan], "obj"),
],
)
def test_constructor_simple_new(self, vals, dtype):
index = Index(vals, name=dtype)
result = index._simple_new(index.values, dtype)
tm.assert_index_equal(result, index)
@pytest.mark.parametrize(
"vals",
[
[1, 2, 3],
np.array([1, 2, 3]),
np.array([1, 2, 3], dtype=int),
# below should coerce
[1.0, 2.0, 3.0],
np.array([1.0, 2.0, 3.0], dtype=float),
],
)
def test_constructor_dtypes_to_int64(self, vals):
index = Index(vals, dtype=int)
assert isinstance(index, Int64Index)
@pytest.mark.parametrize(
"vals",
[
[1, 2, 3],
[1.0, 2.0, 3.0],
np.array([1.0, 2.0, 3.0]),
np.array([1, 2, 3], dtype=int),
np.array([1.0, 2.0, 3.0], dtype=float),
],
)
def test_constructor_dtypes_to_float64(self, vals):
index = Index(vals, dtype=float)
assert isinstance(index, Float64Index)
@pytest.mark.parametrize(
"vals",
[
[1, 2, 3],
np.array([1, 2, 3], dtype=int),
np.array(
[np_datetime64_compat("2011-01-01"), np_datetime64_compat("2011-01-02")]
),
[datetime(2011, 1, 1), datetime(2011, 1, 2)],
],
)
def test_constructor_dtypes_to_categorical(self, vals):
index = Index(vals, dtype="category")
assert isinstance(index, CategoricalIndex)
@pytest.mark.parametrize("cast_index", [True, False])
@pytest.mark.parametrize(
"vals",
[
Index(
np.array(
[
np_datetime64_compat("2011-01-01"),
np_datetime64_compat("2011-01-02"),
]
)
),
Index([datetime(2011, 1, 1), datetime(2011, 1, 2)]),
],
)
def test_constructor_dtypes_to_datetime(self, cast_index, vals):
if cast_index:
index = Index(vals, dtype=object)
assert isinstance(index, Index)
assert index.dtype == object
else:
index = Index(vals)
assert isinstance(index, DatetimeIndex)
@pytest.mark.parametrize("cast_index", [True, False])
@pytest.mark.parametrize(
"vals",
[
np.array([np.timedelta64(1, "D"), np.timedelta64(1, "D")]),
[timedelta(1), timedelta(1)],
],
)
def test_constructor_dtypes_to_timedelta(self, cast_index, vals):
if cast_index:
index = Index(vals, dtype=object)
assert isinstance(index, Index)
assert index.dtype == object
else:
index = Index(vals)
assert isinstance(index, TimedeltaIndex)
@pytest.mark.parametrize("attr", ["values", "asi8"])
@pytest.mark.parametrize("klass", [pd.Index, pd.DatetimeIndex])
def test_constructor_dtypes_datetime(self, tz_naive_fixture, attr, klass):
# Test constructing with a datetimetz dtype
# .values produces numpy datetimes, so these are considered naive
# .asi8 produces integers, so these are considered epoch timestamps
# ^the above will be true in a later version. Right now we `.view`
# the i8 values as NS_DTYPE, effectively treating them as wall times.
index = pd.date_range("2011-01-01", periods=5)
arg = getattr(index, attr)
index = index.tz_localize(tz_naive_fixture)
dtype = index.dtype
if attr == "asi8":
result = pd.DatetimeIndex(arg).tz_localize(tz_naive_fixture)
else:
result = klass(arg, tz=tz_naive_fixture)
tm.assert_index_equal(result, index)
if attr == "asi8":
result = pd.DatetimeIndex(arg).astype(dtype)
else:
result = klass(arg, dtype=dtype)
tm.assert_index_equal(result, index)
if attr == "asi8":
result = pd.DatetimeIndex(list(arg)).tz_localize(tz_naive_fixture)
else:
result = klass(list(arg), tz=tz_naive_fixture)
tm.assert_index_equal(result, index)
if attr == "asi8":
result = pd.DatetimeIndex(list(arg)).astype(dtype)
else:
result = klass(list(arg), dtype=dtype)
tm.assert_index_equal(result, index)
@pytest.mark.parametrize("attr", ["values", "asi8"])
@pytest.mark.parametrize("klass", [pd.Index, pd.TimedeltaIndex])
def test_constructor_dtypes_timedelta(self, attr, klass):
index = pd.timedelta_range("1 days", periods=5)
        index = index._with_freq(None)  # won't be preserved by the constructors
dtype = index.dtype
values = getattr(index, attr)
result = klass(values, dtype=dtype)
tm.assert_index_equal(result, index)
result = klass(list(values), dtype=dtype)
tm.assert_index_equal(result, index)
@pytest.mark.parametrize("value", [[], iter([]), (_ for _ in [])])
@pytest.mark.parametrize(
"klass",
[
Index,
Float64Index,
Int64Index,
UInt64Index,
CategoricalIndex,
DatetimeIndex,
TimedeltaIndex,
],
)
def test_constructor_empty(self, value, klass):
empty = klass(value)
assert isinstance(empty, klass)
assert not len(empty)
@pytest.mark.parametrize(
"empty,klass",
[
(PeriodIndex([], freq="B"), PeriodIndex),
(PeriodIndex(iter([]), freq="B"), PeriodIndex),
(PeriodIndex((_ for _ in []), freq="B"), PeriodIndex),
(RangeIndex(step=1), pd.RangeIndex),
(MultiIndex(levels=[[1, 2], ["blue", "red"]], codes=[[], []]), MultiIndex),
],
)
def test_constructor_empty_special(self, empty, klass):
assert isinstance(empty, klass)
assert not len(empty)
def test_constructor_overflow_int64(self):
# see gh-15832
msg = (
"The elements provided in the data cannot "
"all be casted to the dtype int64"
)
with pytest.raises(OverflowError, match=msg):
Index([np.iinfo(np.uint64).max - 1], dtype="int64")
@pytest.mark.parametrize(
"index",
[
"datetime",
"float",
"int",
"period",
"range",
"repeats",
"timedelta",
"tuples",
"uint",
],
indirect=True,
)
def test_view_with_args(self, index):
index.view("i8")
@pytest.mark.parametrize(
"index",
[
"unicode",
"string",
pytest.param("categorical", marks=pytest.mark.xfail(reason="gh-25464")),
"bool",
"empty",
],
indirect=True,
)
def test_view_with_args_object_array_raises(self, index):
msg = "Cannot change data-type for object array"
with pytest.raises(TypeError, match=msg):
index.view("i8")
@pytest.mark.parametrize("index", ["int", "range"], indirect=True)
def test_astype(self, index):
casted = index.astype("i8")
# it works!
casted.get_loc(5)
# pass on name
index.name = "foobar"
casted = index.astype("i8")
assert casted.name == "foobar"
def test_equals_object(self):
# same
assert Index(["a", "b", "c"]).equals(Index(["a", "b", "c"]))
@pytest.mark.parametrize(
"comp", [Index(["a", "b"]), Index(["a", "b", "d"]), ["a", "b", "c"]]
)
def test_not_equals_object(self, comp):
assert not Index(["a", "b", "c"]).equals(comp)
def test_insert_missing(self, nulls_fixture):
# GH 22295
# test there is no mangling of NA values
expected = Index(["a", nulls_fixture, "b", "c"])
result = Index(list("abc")).insert(1, nulls_fixture)
tm.assert_index_equal(result, expected)
def test_delete_raises(self):
index = Index(["a", "b", "c", "d"], name="index")
msg = "index 5 is out of bounds for axis 0 with size 4"
with pytest.raises(IndexError, match=msg):
index.delete(5)
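    def test_delete_sketch(self):
        # Hedged illustrative sketch, not part of the original suite: an
        # in-bounds position is removed and the index name is preserved.
        index = Index(["a", "b", "c", "d"], name="index")
        result = index.delete(1)
        expected = Index(["a", "c", "d"], name="index")
        tm.assert_index_equal(result, expected)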
def test_identical(self):
# index
i1 = Index(["a", "b", "c"])
i2 = Index(["a", "b", "c"])
assert i1.identical(i2)
i1 = i1.rename("foo")
assert i1.equals(i2)
assert not i1.identical(i2)
i2 = i2.rename("foo")
assert i1.identical(i2)
i3 = Index([("a", "a"), ("a", "b"), ("b", "a")])
i4 = Index([("a", "a"), ("a", "b"), ("b", "a")], tupleize_cols=False)
assert not i3.identical(i4)
def test_is_(self):
ind = Index(range(10))
assert ind.is_(ind)
assert ind.is_(ind.view().view().view().view())
assert not ind.is_(Index(range(10)))
assert not ind.is_(ind.copy())
assert not ind.is_(ind.copy(deep=False))
assert not ind.is_(ind[:])
assert not ind.is_(np.array(range(10)))
# quasi-implementation dependent
assert ind.is_(ind.view())
ind2 = ind.view()
ind2.name = "bob"
assert ind.is_(ind2)
assert ind2.is_(ind)
        # It doesn't matter whether Indices are *actually* views of the
        # underlying data:
assert not ind.is_(Index(ind.values))
arr = np.array(range(1, 11))
ind1 = Index(arr, copy=False)
ind2 = Index(arr, copy=False)
assert not ind1.is_(ind2)
@pytest.mark.parametrize("index", ["datetime"], indirect=True)
def test_asof(self, index):
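        # asof returns the most recent existing label at or before the key;
        # keys before the first label yield a missing value.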
d = index[0]
assert index.asof(d) == d
assert isna(index.asof(d - timedelta(1)))
d = index[-1]
assert index.asof(d + timedelta(1)) == d
d = index[0].to_pydatetime()
assert isinstance(index.asof(d), Timestamp)
def test_asof_datetime_partial(self):
index = pd.date_range("2010-01-01", periods=2, freq="m")
expected = Timestamp("2010-02-28")
result = index.asof("2010-02")
assert result == expected
assert not isinstance(result, Index)
def test_nanosecond_index_access(self):
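        # Take the timestamp's epoch-nanosecond integer via an i8 view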
s = Series([Timestamp("20130101")]).values.view("i8")[0]
r = DatetimeIndex([s + 50 + i for i in range(100)])
x = Series(np.random.randn(100), index=r)
first_value = x.asof(x.index[0])
# this does not yet work, as parsing strings is done via dateutil
# assert first_value == x['2013-01-01 00:00:00.000000050+0000']
expected_ts = np_datetime64_compat("2013-01-01 00:00:00.000000050+0000", "ns")
assert first_value == x[Timestamp(expected_ts)]
@pytest.mark.parametrize("index", ["string"], indirect=True)
def test_booleanindex(self, index):
bool_index = np.ones(len(index), dtype=bool)
bool_index[5:30:2] = False
sub_index = index[bool_index]
for i, val in enumerate(sub_index):
assert sub_index.get_loc(val) == i
sub_index = index[list(bool_index)]
for i, val in enumerate(sub_index):
assert sub_index.get_loc(val) == i
def test_fancy(self):
index = self.create_index()
sl = index[[1, 2, 3]]
for i in sl:
assert i == sl[sl.get_loc(i)]
@pytest.mark.parametrize("index", ["string", "int", "float"], indirect=True)
@pytest.mark.parametrize("dtype", [np.int_, np.bool_])
def test_empty_fancy(self, index, dtype):
empty_arr = np.array([], dtype=dtype)
empty_index = type(index)([])
assert index[[]].identical(empty_index)
assert index[empty_arr].identical(empty_index)
@pytest.mark.parametrize("index", ["string", "int", "float"], indirect=True)
def test_empty_fancy_raises(self, index):
        # pd.DatetimeIndex is excluded because it overrides __getitem__ and
        # should be tested separately.
empty_farr = np.array([], dtype=np.float_)
empty_index = type(index)([])
assert index[[]].identical(empty_index)
        # np.ndarray only accepts indexing with ndarrays of int & bool dtype;
        # Index should behave the same way
msg = r"arrays used as indices must be of integer \(or boolean\) type"
with pytest.raises(IndexError, match=msg):
index[empty_farr]
@pytest.mark.parametrize("index", ["string"], indirect=True)
def test_intersection(self, index, sort):
first = index[:20]
second = index[:10]
intersect = first.intersection(second, sort=sort)
if sort is None:
tm.assert_index_equal(intersect, second.sort_values())
assert tm.equalContents(intersect, second)
# Corner cases
inter = first.intersection(first, sort=sort)
assert inter is first
@pytest.mark.parametrize(
"index2,keeps_name",
[
(Index([3, 4, 5, 6, 7], name="index"), True), # preserve same name
(Index([3, 4, 5, 6, 7], name="other"), False), # drop diff names
(Index([3, 4, 5, 6, 7]), False),
],
)
def test_intersection_name_preservation(self, index2, keeps_name, sort):
index1 = Index([1, 2, 3, 4, 5], name="index")
expected = Index([3, 4, 5])
result = index1.intersection(index2, sort)
if keeps_name:
expected.name = "index"
assert result.name == expected.name
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("index", ["string"], indirect=True)
@pytest.mark.parametrize(
"first_name,second_name,expected_name",
[("A", "A", "A"), ("A", "B", None), (None, "B", None)],
)
def test_intersection_name_preservation2(
self, index, first_name, second_name, expected_name, sort
):
first = index[5:20]
second = index[:10]
first.name = first_name
second.name = second_name
intersect = first.intersection(second, sort=sort)
assert intersect.name == expected_name
@pytest.mark.parametrize(
"index2,keeps_name",
[
(Index([4, 7, 6, 5, 3], name="index"), True),
(Index([4, 7, 6, 5, 3], name="other"), False),
],
)
def test_intersection_monotonic(self, index2, keeps_name, sort):
index1 = Index([5, 3, 2, 4, 1], name="index")
expected = Index([5, 3, 4])
if keeps_name:
expected.name = "index"
result = index1.intersection(index2, sort=sort)
if sort is None:
expected = expected.sort_values()
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"index2,expected_arr",
[(Index(["B", "D"]), ["B"]), (Index(["B", "D", "A"]), ["A", "B", "A"])],
)
def test_intersection_non_monotonic_non_unique(self, index2, expected_arr, sort):
# non-monotonic non-unique
index1 = Index(["A", "B", "A", "C"])
expected = Index(expected_arr, dtype="object")
result = index1.intersection(index2, sort=sort)
if sort is None:
expected = expected.sort_values()
tm.assert_index_equal(result, expected)
def test_intersect_str_dates(self, sort):
dt_dates = [datetime(2012, 2, 9), datetime(2012, 2, 22)]
i1 = Index(dt_dates, dtype=object)
i2 = Index(["aa"], dtype=object)
result = i2.intersection(i1, sort=sort)
assert len(result) == 0
@pytest.mark.xfail(reason="Not implemented")
def test_intersection_equal_sort_true(self):
# TODO decide on True behaviour
idx = pd.Index(["c", "a", "b"])
sorted_ = pd.Index(["a", "b", "c"])
tm.assert_index_equal(idx.intersection(idx, sort=True), sorted_)
def test_chained_union(self, sort):
# Chained unions handles names correctly
i1 = Index([1, 2], name="i1")
i2 = Index([5, 6], name="i2")
i3 = Index([3, 4], name="i3")
union = i1.union(i2.union(i3, sort=sort), sort=sort)
expected = i1.union(i2, sort=sort).union(i3, sort=sort)
tm.assert_index_equal(union, expected)
j1 = Index([1, 2], name="j1")
j2 = Index([], name="j2")
j3 = Index([], name="j3")
union = j1.union(j2.union(j3, sort=sort), sort=sort)
expected = j1.union(j2, sort=sort).union(j3, sort=sort)
tm.assert_index_equal(union, expected)
@pytest.mark.parametrize("index", ["string"], indirect=True)
def test_union(self, index, sort):
first = index[5:20]
second = index[:10]
everything = index[:20]
union = first.union(second, sort=sort)
if sort is None:
tm.assert_index_equal(union, everything.sort_values())
assert tm.equalContents(union, everything)
@pytest.mark.parametrize("slice_", [slice(None), slice(0)])
def test_union_sort_other_special(self, slice_):
# https://github.com/pandas-dev/pandas/issues/24959
idx = pd.Index([1, 0, 2])
# default, sort=None
other = idx[slice_]
tm.assert_index_equal(idx.union(other), idx)
tm.assert_index_equal(other.union(idx), idx)
# sort=False
tm.assert_index_equal(idx.union(other, sort=False), idx)
@pytest.mark.xfail(reason="Not implemented")
@pytest.mark.parametrize("slice_", [slice(None), slice(0)])
def test_union_sort_special_true(self, slice_):
# TODO decide on True behaviour
# sort=True
idx = pd.Index([1, 0, 2])
# default, sort=None
other = idx[slice_]
result = idx.union(other, sort=True)
expected = pd.Index([0, 1, 2])
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("klass", [np.array, Series, list])
@pytest.mark.parametrize("index", ["string"], indirect=True)
def test_union_from_iterables(self, index, klass, sort):
# GH 10149
first = index[5:20]
second = index[:10]
everything = index[:20]
case = klass(second.values)
result = first.union(case, sort=sort)
if sort is None:
tm.assert_index_equal(result, everything.sort_values())
assert tm.equalContents(result, everything)
@pytest.mark.parametrize("index", ["string"], indirect=True)
def test_union_identity(self, index, sort):
first = index[5:20]
union = first.union(first, sort=sort)
# i.e. identity is not preserved when sort is True
assert (union is first) is (not sort)
        # This should no longer be the same object, since [] is not consistent;
        # both objects will be recast to dtype('O')
union = first.union([], sort=sort)
assert (union is first) is (not sort)
union = Index([]).union(first, sort=sort)
assert (union is first) is (not sort)
@pytest.mark.parametrize("first_list", [list("ba"), list()])
@pytest.mark.parametrize("second_list", [list("ab"), list()])
@pytest.mark.parametrize(
"first_name, second_name, expected_name",
[("A", "B", None), (None, "B", None), ("A", None, None)],
)
def test_union_name_preservation(
self, first_list, second_list, first_name, second_name, expected_name, sort
):
first = Index(first_list, name=first_name)
second = Index(second_list, name=second_name)
union = first.union(second, sort=sort)
vals = set(first_list).union(second_list)
if sort is None and len(first_list) > 0 and len(second_list) > 0:
expected = Index(sorted(vals), name=expected_name)
tm.assert_index_equal(union, expected)
else:
expected = Index(vals, name=expected_name)
assert tm.equalContents(union, expected)
def test_union_dt_as_obj(self, sort):
        # TODO: Replace with fixture
index = self.create_index()
date_index = pd.date_range("2019-01-01", periods=10)
first_cat = index.union(date_index)
second_cat = index.union(index)
if date_index.dtype == np.object_:
appended = np.append(index, date_index)
else:
appended = np.append(index, date_index.astype("O"))
assert tm.equalContents(first_cat, appended)
assert tm.equalContents(second_cat, index)
tm.assert_contains_all(index, first_cat)
tm.assert_contains_all(index, second_cat)
tm.assert_contains_all(date_index, first_cat)
def test_map_identity_mapping(self, index):
# GH 12766
tm.assert_index_equal(index, index.map(lambda x: x))
def test_map_with_tuples(self):
# GH 12766
# Test that returning a single tuple from an Index
# returns an Index.
index = tm.makeIntIndex(3)
result = tm.makeIntIndex(3).map(lambda x: (x,))
expected = Index([(i,) for i in index])
tm.assert_index_equal(result, expected)
# Test that returning a tuple from a map of a single index
# returns a MultiIndex object.
result = index.map(lambda x: (x, x == 1))
expected = MultiIndex.from_tuples([(i, i == 1) for i in index])
tm.assert_index_equal(result, expected)
def test_map_with_tuples_mi(self):
# Test that returning a single object from a MultiIndex
# returns an Index.
first_level = ["foo", "bar", "baz"]
multi_index = MultiIndex.from_tuples(zip(first_level, [1, 2, 3]))
reduced_index = multi_index.map(lambda x: x[0])
tm.assert_index_equal(reduced_index, Index(first_level))
@pytest.mark.parametrize(
"attr", ["makeDateIndex", "makePeriodIndex", "makeTimedeltaIndex"]
)
def test_map_tseries_indices_return_index(self, attr):
index = getattr(tm, attr)(10)
expected = Index([1] * 10)
result = index.map(lambda x: 1)
tm.assert_index_equal(expected, result)
def test_map_tseries_indices_accsr_return_index(self):
date_index = tm.makeDateIndex(24, freq="h", name="hourly")
expected = Index(range(24), name="hourly")
tm.assert_index_equal(expected, date_index.map(lambda x: x.hour))
@pytest.mark.parametrize(
"mapper",
[
lambda values, index: {i: e for e, i in zip(values, index)},
lambda values, index: pd.Series(values, index),
],
)
def test_map_dictlike_simple(self, mapper):
# GH 12756
expected = Index(["foo", "bar", "baz"])
index = tm.makeIntIndex(3)
result = index.map(mapper(expected.values, index))
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"mapper",
[
lambda values, index: {i: e for e, i in zip(values, index)},
lambda values, index: pd.Series(values, index),
],
)
def test_map_dictlike(self, index, mapper):
# GH 12756
if isinstance(index, CategoricalIndex):
# Tested in test_categorical
return
elif not index.is_unique:
# Cannot map duplicated index
return
if index.empty:
# to match proper result coercion for uints
expected = Index([])
else:
expected = Index(np.arange(len(index), 0, -1))
result = index.map(mapper(expected, index))
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"mapper",
[Series(["foo", 2.0, "baz"], index=[0, 2, -1]), {0: "foo", 2: 2.0, -1: "baz"}],
)
def test_map_with_non_function_missing_values(self, mapper):
# GH 12756
expected = Index([2.0, np.nan, "foo"])
result = Index([2, 1, 0]).map(mapper)
tm.assert_index_equal(expected, result)
def test_map_na_exclusion(self):
index = Index([1.5, np.nan, 3, np.nan, 5])
result = index.map(lambda x: x * 2, na_action="ignore")
expected = index * 2
tm.assert_index_equal(result, expected)
def test_map_defaultdict(self):
index = Index([1, 2, 3])
default_dict = defaultdict(lambda: "blank")
default_dict[1] = "stuff"
result = index.map(default_dict)
expected = Index(["stuff", "blank", "blank"])
tm.assert_index_equal(result, expected)
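    def test_map_dict_missing_sketch(self):
        # Hedged illustrative sketch, not part of the original suite: plain
        # dicts (unlike the defaultdict above) map missing labels to NaN.
        index = Index([1, 2, 3])
        result = index.map({1: "stuff"})
        expected = Index(["stuff", np.nan, np.nan])
        tm.assert_index_equal(result, expected)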
@pytest.mark.parametrize("name,expected", [("foo", "foo"), ("bar", None)])
def test_append_empty_preserve_name(self, name, expected):
left = Index([], name="foo")
right = Index([1, 2, 3], name=name)
result = left.append(right)
assert result.name == expected
@pytest.mark.parametrize("index", ["string"], indirect=True)
@pytest.mark.parametrize("second_name,expected", [(None, None), ("name", "name")])
def test_difference_name_preservation(self, index, second_name, expected, sort):
first = index[5:20]
second = index[:10]
answer = index[10:20]
first.name = "name"
second.name = second_name
result = first.difference(second, sort=sort)
assert tm.equalContents(result, answer)
if expected is None:
assert result.name is None
else:
assert result.name == expected
@pytest.mark.parametrize("index", ["string"], indirect=True)
def test_difference_empty_arg(self, index, sort):
first = index[5:20]
first.name = "name"
result = first.difference([], sort)
assert tm.equalContents(result, first)
assert result.name == first.name
@pytest.mark.parametrize("index", ["string"], indirect=True)
def test_difference_identity(self, index, sort):
first = index[5:20]
first.name = "name"
result = first.difference(first, sort)
assert len(result) == 0
assert result.name == first.name
@pytest.mark.parametrize("index", ["string"], indirect=True)
def test_difference_sort(self, index, sort):
first = index[5:20]
second = index[:10]
result = first.difference(second, sort)
expected = index[10:20]
if sort is None:
expected = expected.sort_values()
tm.assert_index_equal(result, expected)
def test_symmetric_difference(self, sort):
# smoke
index1 = Index([5, 2, 3, 4], name="index1")
index2 = Index([2, 3, 4, 1])
result = index1.symmetric_difference(index2, sort=sort)
expected = Index([5, 1])
assert tm.equalContents(result, expected)
assert result.name is None
if sort is None:
expected = expected.sort_values()
tm.assert_index_equal(result, expected)
# __xor__ syntax
expected = index1 ^ index2
assert tm.equalContents(result, expected)
assert result.name is None
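    def test_symmetric_difference_sketch(self):
        # Hedged illustrative sketch, not part of the original suite:
        # symmetric_difference keeps elements present in exactly one operand.
        result = Index([1, 2, 3]).symmetric_difference(Index([2, 3, 4]))
        tm.assert_index_equal(result, Index([1, 4]))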
@pytest.mark.parametrize("opname", ["difference", "symmetric_difference"])
def test_difference_incomparable(self, opname):
a = pd.Index([3, pd.Timestamp("2000"), 1])
b = pd.Index([2, pd.Timestamp("1999"), 1])
op = operator.methodcaller(opname, b)
# sort=None, the default
result = op(a)
expected = pd.Index([3, pd.Timestamp("2000"), 2, pd.Timestamp("1999")])
if opname == "difference":
expected = expected[:2]
tm.assert_index_equal(result, expected)
# sort=False
op = operator.methodcaller(opname, b, sort=False)
result = op(a)
tm.assert_index_equal(result, expected)
@pytest.mark.xfail(reason="Not implemented")
@pytest.mark.parametrize("opname", ["difference", "symmetric_difference"])
def test_difference_incomparable_true(self, opname):
# TODO decide on True behaviour
# # sort=True, raises
a = pd.Index([3, pd.Timestamp("2000"), 1])
b = pd.Index([2, pd.Timestamp("1999"), 1])
op = operator.methodcaller(opname, b, sort=True)
with pytest.raises(TypeError, match="Cannot compare"):
op(a)
def test_symmetric_difference_mi(self, sort):
index1 = MultiIndex.from_tuples(zip(["foo", "bar", "baz"], [1, 2, 3]))
index2 = MultiIndex.from_tuples([("foo", 1), ("bar", 3)])
result = index1.symmetric_difference(index2, sort=sort)
expected = MultiIndex.from_tuples([("bar", 2), ("baz", 3), ("bar", 3)])
if sort is None:
expected = expected.sort_values()
tm.assert_index_equal(result, expected)
assert tm.equalContents(result, expected)
@pytest.mark.parametrize(
"index2,expected",
[
(Index([0, 1, np.nan]), Index([2.0, 3.0, 0.0])),
(Index([0, 1]), Index([np.nan, 2.0, 3.0, 0.0])),
],
)
def test_symmetric_difference_missing(self, index2, expected, sort):
# GH 13514 change: {nan} - {nan} == {}
# (GH 6444, sorting of nans, is no longer an issue)
index1 = Index([1, np.nan, 2, 3])
result = index1.symmetric_difference(index2, sort=sort)
if sort is None:
expected = expected.sort_values()
tm.assert_index_equal(result, expected)
def test_symmetric_difference_non_index(self, sort):
index1 = Index([1, 2, 3, 4], name="index1")
index2 = np.array([2, 3, 4, 5])
expected = Index([1, 5])
result = index1.symmetric_difference(index2, sort=sort)
assert tm.equalContents(result, expected)
assert result.name == "index1"
result = index1.symmetric_difference(index2, result_name="new_name", sort=sort)
assert tm.equalContents(result, expected)
assert result.name == "new_name"
def test_difference_type(self, index, sort):
# GH 20040
# If taking difference of a set and itself, it
# needs to preserve the type of the index
if not index.is_unique:
return
result = index.difference(index, sort=sort)
expected = index.drop(index)
tm.assert_index_equal(result, expected)
def test_intersection_difference(self, index, sort):
# GH 20040
# Test that the intersection of an index with an
# empty index produces the same index as the difference
# of an index with itself. Test for all types
if not index.is_unique:
return
inter = index.intersection(index.drop(index))
diff = index.difference(index, sort=sort)
tm.assert_index_equal(inter, diff)
def test_is_mixed_deprecated(self):
# GH#32922
index = self.create_index()
with tm.assert_produces_warning(FutureWarning):
index.is_mixed()
@pytest.mark.parametrize(
"index, expected",
[
("string", False),
("bool", False),
("categorical", False),
("int", True),
("datetime", False),
("float", True),
],
indirect=["index"],
)
def test_is_numeric(self, index, expected):
assert index.is_numeric() is expected
@pytest.mark.parametrize(
"index, expected",
[
("string", True),
("bool", True),
("categorical", False),
("int", False),
("datetime", False),
("float", False),
],
indirect=["index"],
)
def test_is_object(self, index, expected):
assert index.is_object() is expected
@pytest.mark.parametrize(
"index, expected",
[
("string", False),
("bool", False),
("categorical", False),
("int", False),
("datetime", True),
("float", False),
],
indirect=["index"],
)
def test_is_all_dates(self, index, expected):
assert index.is_all_dates is expected
def test_summary(self, index):
self._check_method_works(Index._summary, index)
def test_summary_bug(self):
        # GH3869
ind = Index(["{other}%s", "~:{range}:0"], name="A")
result = ind._summary()
# shouldn't be formatted accidentally.
assert "~:{range}:0" in result
assert "{other}%s" in result
def test_format_different_scalar_lengths(self):
# GH35439
idx = Index(["aaaaaaaaa", "b"])
expected = ["aaaaaaaaa", "b"]
assert idx.format() == expected
def test_format_bug(self):
# GH 14626
        # windows has different precision on datetime.datetime.now (it doesn't
        # include microseconds). The default Timestamp repr shows these, but
        # Index formatting does not, so we skip that case.
now = datetime.now()
if not str(now).endswith("000"):
index = Index([now])
formatted = index.format()
expected = [str(index[0])]
assert formatted == expected
Index([]).format()
@pytest.mark.parametrize("vals", [[1, 2.0 + 3.0j, 4.0], ["a", "b", "c"]])
def test_format_missing(self, vals, nulls_fixture):
# 2845
vals = list(vals) # Copy for each iteration
vals.append(nulls_fixture)
index = Index(vals)
formatted = index.format()
expected = [str(index[0]), str(index[1]), str(index[2]), "NaN"]
assert formatted == expected
assert index[3] is nulls_fixture
def test_format_with_name_time_info(self):
# bug I fixed 12/20/2011
dates = date_range("2011-01-01 04:00:00", periods=10, name="something")
formatted = dates.format(name=True)
assert formatted[0] == "something"
def test_format_datetime_with_time(self):
t = Index([datetime(2012, 2, 7), datetime(2012, 2, 7, 23)])
result = t.format()
expected = ["2012-02-07 00:00:00", "2012-02-07 23:00:00"]
assert len(result) == 2
assert result == expected
@pytest.mark.parametrize("op", ["any", "all"])
def test_logical_compat(self, op):
index = self.create_index()
assert getattr(index, op)() == getattr(index.values, op)()
def _check_method_works(self, method, index):
method(index)
def test_get_indexer(self):
index1 = Index([1, 2, 3, 4, 5])
index2 = Index([2, 4, 6])
r1 = index1.get_indexer(index2)
e1 = np.array([1, 3, -1], dtype=np.intp)
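        # 2 and 4 sit at positions 1 and 3 of index1; 6 is absent, hence -1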
tm.assert_almost_equal(r1, e1)
@pytest.mark.parametrize("reverse", [True, False])
@pytest.mark.parametrize(
"expected,method",
[
(np.array([-1, 0, 0, 1, 1], dtype=np.intp), "pad"),
(np.array([-1, 0, 0, 1, 1], dtype=np.intp), "ffill"),
(np.array([0, 0, 1, 1, 2], dtype=np.intp), "backfill"),
(np.array([0, 0, 1, 1, 2], dtype=np.intp), "bfill"),
],
)
def test_get_indexer_methods(self, reverse, expected, method):
index1 = Index([1, 2, 3, 4, 5])
index2 = Index([2, 4, 6])
if reverse:
index1 = index1[::-1]
expected = expected[::-1]
result = index2.get_indexer(index1, method=method)
tm.assert_almost_equal(result, expected)
def test_get_indexer_invalid(self):
# GH10411
index = Index(np.arange(10))
with pytest.raises(ValueError, match="tolerance argument"):
index.get_indexer([1, 0], tolerance=1)
with pytest.raises(ValueError, match="limit argument"):
index.get_indexer([1, 0], limit=1)
@pytest.mark.parametrize(
"method, tolerance, indexer, expected",
[
("pad", None, [0, 5, 9], [0, 5, 9]),
("backfill", None, [0, 5, 9], [0, 5, 9]),
("nearest", None, [0, 5, 9], [0, 5, 9]),
("pad", 0, [0, 5, 9], [0, 5, 9]),
("backfill", 0, [0, 5, 9], [0, 5, 9]),
("nearest", 0, [0, 5, 9], [0, 5, 9]),
("pad", None, [0.2, 1.8, 8.5], [0, 1, 8]),
("backfill", None, [0.2, 1.8, 8.5], [1, 2, 9]),
("nearest", None, [0.2, 1.8, 8.5], [0, 2, 9]),
("pad", 1, [0.2, 1.8, 8.5], [0, 1, 8]),
("backfill", 1, [0.2, 1.8, 8.5], [1, 2, 9]),
("nearest", 1, [0.2, 1.8, 8.5], [0, 2, 9]),
("pad", 0.2, [0.2, 1.8, 8.5], [0, -1, -1]),
("backfill", 0.2, [0.2, 1.8, 8.5], [-1, 2, -1]),
("nearest", 0.2, [0.2, 1.8, 8.5], [0, 2, -1]),
],
)
def test_get_indexer_nearest(self, method, tolerance, indexer, expected):
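        # A tolerance turns near matches beyond the threshold into -1, e.g.
        # with method="pad" and tolerance=0.2 the key 1.8 is 0.8 away from its
        # pad candidate 1, so it maps to -1.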
index = Index(np.arange(10))
actual = index.get_indexer(indexer, method=method, tolerance=tolerance)
tm.assert_numpy_array_equal(actual, np.array(expected, dtype=np.intp))
@pytest.mark.parametrize("listtype", [list, tuple, Series, np.array])
@pytest.mark.parametrize(
"tolerance, expected",
list(
zip(
[[0.3, 0.3, 0.1], [0.2, 0.1, 0.1], [0.1, 0.5, 0.5]],
[[0, 2, -1], [0, -1, -1], [-1, 2, 9]],
)
),
)
def test_get_indexer_nearest_listlike_tolerance(
self, tolerance, expected, listtype
):
index = Index(np.arange(10))
actual = index.get_indexer(
[0.2, 1.8, 8.5], method="nearest", tolerance=listtype(tolerance)
)
tm.assert_numpy_array_equal(actual, np.array(expected, dtype=np.intp))
def test_get_indexer_nearest_error(self):
index = Index(np.arange(10))
with pytest.raises(ValueError, match="limit argument"):
index.get_indexer([1, 0], method="nearest", limit=1)
with pytest.raises(ValueError, match="tolerance size must match"):
index.get_indexer([1, 0], method="nearest", tolerance=[1, 2, 3])
@pytest.mark.parametrize(
"method,expected",
[("pad", [8, 7, 0]), ("backfill", [9, 8, 1]), ("nearest", [9, 7, 0])],
)
def test_get_indexer_nearest_decreasing(self, method, expected):
index = Index(np.arange(10))[::-1]
actual = index.get_indexer([0, 5, 9], method=method)
tm.assert_numpy_array_equal(actual, np.array([9, 4, 0], dtype=np.intp))
actual = index.get_indexer([0.2, 1.8, 8.5], method=method)
tm.assert_numpy_array_equal(actual, np.array(expected, dtype=np.intp))
@pytest.mark.parametrize(
"method,expected",
[
("pad", np.array([-1, 0, 1, 1], dtype=np.intp)),
("backfill", np.array([0, 0, 1, -1], dtype=np.intp)),
],
)
def test_get_indexer_strings(self, method, expected):
index = pd.Index(["b", "c"])
actual = index.get_indexer(["a", "b", "c", "d"], method=method)
tm.assert_numpy_array_equal(actual, expected)
def test_get_indexer_strings_raises(self):
index = pd.Index(["b", "c"])
msg = r"unsupported operand type\(s\) for -: 'str' and 'str'"
with pytest.raises(TypeError, match=msg):
index.get_indexer(["a", "b", "c", "d"], method="nearest")
with pytest.raises(TypeError, match=msg):
index.get_indexer(["a", "b", "c", "d"], method="pad", tolerance=2)
with pytest.raises(TypeError, match=msg):
index.get_indexer(
["a", "b", "c", "d"], method="pad", tolerance=[2, 2, 2, 2]
)
@pytest.mark.parametrize("idx_class", [Int64Index, RangeIndex, Float64Index])
def test_get_indexer_numeric_index_boolean_target(self, idx_class):
# GH 16877
numeric_index = idx_class(RangeIndex(4))
result = numeric_index.get_indexer([True, False, True])
expected = np.array([-1, -1, -1], dtype=np.intp)
tm.assert_numpy_array_equal(result, expected)
def test_get_indexer_with_NA_values(
self, unique_nulls_fixture, unique_nulls_fixture2
):
# GH 22332
        # check pairwise that no pair of na values is mangled
if unique_nulls_fixture is unique_nulls_fixture2:
return # skip it, values are not unique
arr = np.array([unique_nulls_fixture, unique_nulls_fixture2], dtype=object)
index = pd.Index(arr, dtype=object)
result = index.get_indexer(
[unique_nulls_fixture, unique_nulls_fixture2, "Unknown"]
)
expected = np.array([0, 1, -1], dtype=np.intp)
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize("method", [None, "pad", "backfill", "nearest"])
def test_get_loc(self, method):
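        # get_loc returns the integer position of a label; with a fill method,
        # a tolerance may additionally bound how far a near match can be.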
index = pd.Index([0, 1, 2])
assert index.get_loc(1, method=method) == 1
if method:
assert index.get_loc(1, method=method, tolerance=0) == 1
@pytest.mark.parametrize("method", [None, "pad", "backfill", "nearest"])
def test_get_loc_raises_bad_label(self, method):
index = pd.Index([0, 1, 2])
if method:
msg = "not supported between"
else:
msg = "invalid key"
with pytest.raises(TypeError, match=msg):
index.get_loc([1, 2], method=method)
@pytest.mark.parametrize(
"method,loc", [("pad", 1), ("backfill", 2), ("nearest", 1)]
)
def test_get_loc_tolerance(self, method, loc):
index = pd.Index([0, 1, 2])
assert index.get_loc(1.1, method) == loc
assert index.get_loc(1.1, method, tolerance=1) == loc
@pytest.mark.parametrize("method", ["pad", "backfill", "nearest"])
def test_get_loc_outside_tolerance_raises(self, method):
index = pd.Index([0, 1, 2])
with pytest.raises(KeyError, match="1.1"):
index.get_loc(1.1, method, tolerance=0.05)
def test_get_loc_bad_tolerance_raises(self):
index = pd.Index([0, 1, 2])
with pytest.raises(ValueError, match="must be numeric"):
index.get_loc(1.1, "nearest", tolerance="invalid")
def test_get_loc_tolerance_no_method_raises(self):
index = pd.Index([0, 1, 2])
with pytest.raises(ValueError, match="tolerance .* valid if"):
index.get_loc(1.1, tolerance=1)
def test_get_loc_raises_missized_tolerance(self):
index = pd.Index([0, 1, 2])
with pytest.raises(ValueError, match="tolerance size must match"):
index.get_loc(1.1, "nearest", tolerance=[1, 1])
def test_get_loc_raises_object_nearest(self):
index = pd.Index(["a", "c"])
with pytest.raises(TypeError, match="unsupported operand type"):
index.get_loc("a", method="nearest")
def test_get_loc_raises_object_tolerance(self):
index = pd.Index(["a", "c"])
with pytest.raises(TypeError, match="unsupported operand type"):
index.get_loc("a", method="pad", tolerance="invalid")
@pytest.mark.parametrize("dtype", [int, float])
def test_slice_locs(self, dtype):
index = Index(np.array([0, 1, 2, 5, 6, 7, 9, 10], dtype=dtype))
n = len(index)
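        # slice_locs maps label bounds to positional (start, stop) bounds; the
        # bounds need not themselves be present in the index.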
assert index.slice_locs(start=2) == (2, n)
assert index.slice_locs(start=3) == (3, n)
assert index.slice_locs(3, 8) == (3, 6)
assert index.slice_locs(5, 10) == (3, n)
assert index.slice_locs(end=8) == (0, 6)
assert index.slice_locs(end=9) == (0, 7)
# reversed
index2 = index[::-1]
assert index2.slice_locs(8, 2) == (2, 6)
assert index2.slice_locs(7, 3) == (2, 5)
@pytest.mark.parametrize("dtype", [int, float])
def test_slice_float_locs(self, dtype):
index = Index(np.array([0, 1, 2, 5, 6, 7, 9, 10], dtype=dtype))
n = len(index)
assert index.slice_locs(5.0, 10.0) == (3, n)
assert index.slice_locs(4.5, 10.5) == (3, 8)
index2 = index[::-1]
assert index2.slice_locs(8.5, 1.5) == (2, 6)
assert index2.slice_locs(10.5, -1) == (0, n)
def test_slice_locs_dup(self):
index = Index(["a", "a", "b", "c", "d", "d"])
assert index.slice_locs("a", "d") == (0, 6)
assert index.slice_locs(end="d") == (0, 6)
assert index.slice_locs("a", "c") == (0, 4)
assert index.slice_locs("b", "d") == (2, 6)
index2 = index[::-1]
assert index2.slice_locs("d", "a") == (0, 6)
assert index2.slice_locs(end="a") == (0, 6)
assert index2.slice_locs("d", "b") == (0, 4)
assert index2.slice_locs("c", "a") == (2, 6)
@pytest.mark.parametrize("dtype", [int, float])
def test_slice_locs_dup_numeric(self, dtype):
index = Index(np.array([10, 12, 12, 14], dtype=dtype))
assert index.slice_locs(12, 12) == (1, 3)
assert index.slice_locs(11, 13) == (1, 3)
index2 = index[::-1]
assert index2.slice_locs(12, 12) == (1, 3)
assert index2.slice_locs(13, 11) == (1, 3)
def test_slice_locs_na(self):
index = Index([np.nan, 1, 2])
assert index.slice_locs(1) == (1, 3)
assert index.slice_locs(np.nan) == (0, 3)
index = Index([0, np.nan, np.nan, 1, 2])
assert index.slice_locs(np.nan) == (1, 5)
def test_slice_locs_na_raises(self):
index = Index([np.nan, 1, 2])
with pytest.raises(KeyError, match=""):
index.slice_locs(start=1.5)
with pytest.raises(KeyError, match=""):
index.slice_locs(end=1.5)
@pytest.mark.parametrize(
"in_slice,expected",
[
# error: Slice index must be an integer or None
(pd.IndexSlice[::-1], "yxdcb"),
(pd.IndexSlice["b":"y":-1], ""), # type: ignore[misc]
(pd.IndexSlice["b"::-1], "b"), # type: ignore[misc]
(pd.IndexSlice[:"b":-1], "yxdcb"), # type: ignore[misc]
(pd.IndexSlice[:"y":-1], "y"), # type: ignore[misc]
(pd.IndexSlice["y"::-1], "yxdcb"), # type: ignore[misc]
(pd.IndexSlice["y"::-4], "yb"), # type: ignore[misc]
# absent labels
(pd.IndexSlice[:"a":-1], "yxdcb"), # type: ignore[misc]
(pd.IndexSlice[:"a":-2], "ydb"), # type: ignore[misc]
(pd.IndexSlice["z"::-1], "yxdcb"), # type: ignore[misc]
(pd.IndexSlice["z"::-3], "yc"), # type: ignore[misc]
(pd.IndexSlice["m"::-1], "dcb"), # type: ignore[misc]
(pd.IndexSlice[:"m":-1], "yx"), # type: ignore[misc]
(pd.IndexSlice["a":"a":-1], ""), # type: ignore[misc]
(pd.IndexSlice["z":"z":-1], ""), # type: ignore[misc]
(pd.IndexSlice["m":"m":-1], ""), # type: ignore[misc]
],
)
def test_slice_locs_negative_step(self, in_slice, expected):
index = Index(list("bcdxy"))
s_start, s_stop = index.slice_locs(in_slice.start, in_slice.stop, in_slice.step)
result = index[s_start : s_stop : in_slice.step]
expected = pd.Index(list(expected))
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("index", ["string", "int", "float"], indirect=True)
def test_drop_by_str_label(self, index):
n = len(index)
drop = index[list(range(5, 10))]
dropped = index.drop(drop)
expected = index[list(range(5)) + list(range(10, n))]
tm.assert_index_equal(dropped, expected)
dropped = index.drop(index[0])
expected = index[1:]
tm.assert_index_equal(dropped, expected)
@pytest.mark.parametrize("index", ["string", "int", "float"], indirect=True)
@pytest.mark.parametrize("keys", [["foo", "bar"], ["1", "bar"]])
def test_drop_by_str_label_raises_missing_keys(self, index, keys):
with pytest.raises(KeyError, match=""):
index.drop(keys)
@pytest.mark.parametrize("index", ["string", "int", "float"], indirect=True)
def test_drop_by_str_label_errors_ignore(self, index):
n = len(index)
drop = index[list(range(5, 10))]
mixed = drop.tolist() + ["foo"]
dropped = index.drop(mixed, errors="ignore")
expected = index[list(range(5)) + list(range(10, n))]
tm.assert_index_equal(dropped, expected)
dropped = index.drop(["foo", "bar"], errors="ignore")
expected = index[list(range(n))]
tm.assert_index_equal(dropped, expected)
def test_drop_by_numeric_label_loc(self):
# TODO: Parametrize numeric and str tests after self.strIndex fixture
index = Index([1, 2, 3])
dropped = index.drop(1)
expected = Index([2, 3])
tm.assert_index_equal(dropped, expected)
def test_drop_by_numeric_label_raises_missing_keys(self):
index = Index([1, 2, 3])
with pytest.raises(KeyError, match=""):
index.drop([3, 4])
@pytest.mark.parametrize(
"key,expected", [(4, Index([1, 2, 3])), ([3, 4, 5], Index([1, 2]))]
)
def test_drop_by_numeric_label_errors_ignore(self, key, expected):
index = Index([1, 2, 3])
dropped = index.drop(key, errors="ignore")
tm.assert_index_equal(dropped, expected)
@pytest.mark.parametrize(
"values",
[["a", "b", ("c", "d")], ["a", ("c", "d"), "b"], [("c", "d"), "a", "b"]],
)
@pytest.mark.parametrize("to_drop", [[("c", "d"), "a"], ["a", ("c", "d")]])
def test_drop_tuple(self, values, to_drop):
# GH 18304
index = pd.Index(values)
expected = pd.Index(["b"])
result = index.drop(to_drop)
tm.assert_index_equal(result, expected)
removed = index.drop(to_drop[0])
for drop_me in to_drop[1], [to_drop[1]]:
result = removed.drop(drop_me)
tm.assert_index_equal(result, expected)
removed = index.drop(to_drop[1])
        msg = fr"\"\[{re.escape(repr(to_drop[1]))}\] not found in axis\""
for drop_me in to_drop[1], [to_drop[1]]:
with pytest.raises(KeyError, match=msg):
removed.drop(drop_me)
@pytest.mark.parametrize(
"method,expected,sort",
[
(
"intersection",
np.array(
[(1, "A"), (2, "A"), (1, "B"), (2, "B")],
dtype=[("num", int), ("let", "a1")],
),
False,
),
(
"intersection",
np.array(
[(1, "A"), (1, "B"), (2, "A"), (2, "B")],
dtype=[("num", int), ("let", "a1")],
),
None,
),
(
"union",
np.array(
[(1, "A"), (1, "B"), (1, "C"), (2, "A"), (2, "B"), (2, "C")],
dtype=[("num", int), ("let", "a1")],
),
None,
),
],
)
def test_tuple_union_bug(self, method, expected, sort):
index1 = Index(
np.array(
[(1, "A"), (2, "A"), (1, "B"), (2, "B")],
dtype=[("num", int), ("let", "a1")],
)
)
index2 = Index(
np.array(
[(1, "A"), (2, "A"), (1, "B"), (2, "B"), (1, "C"), (2, "C")],
dtype=[("num", int), ("let", "a1")],
)
)
result = getattr(index1, method)(index2, sort=sort)
assert result.ndim == 1
expected = Index(expected)
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"attr",
[
"is_monotonic_increasing",
"is_monotonic_decreasing",
"_is_strictly_monotonic_increasing",
"_is_strictly_monotonic_decreasing",
],
)
def test_is_monotonic_incomparable(self, attr):
index = Index([5, datetime.now(), 7])
assert not getattr(index, attr)
def test_set_value_deprecated(self):
# GH 28621
idx = self.create_index()
arr = np.array([1, 2, 3])
with tm.assert_produces_warning(FutureWarning):
idx.set_value(arr, idx[1], 80)
assert arr[1] == 80
@pytest.mark.parametrize(
"index", ["string", "int", "datetime", "timedelta"], indirect=True
)
def test_get_value(self, index):
# TODO: Remove function? GH 19728
values = np.random.randn(100)
value = index[67]
with pytest.raises(AttributeError, match="has no attribute '_values'"):
# Index.get_value requires a Series, not an ndarray
with tm.assert_produces_warning(FutureWarning):
index.get_value(values, value)
with tm.assert_produces_warning(FutureWarning):
result = index.get_value(Series(values, index=values), value)
tm.assert_almost_equal(result, values[67])
@pytest.mark.parametrize("values", [["foo", "bar", "quux"], {"foo", "bar", "quux"}])
@pytest.mark.parametrize(
"index,expected",
[
(Index(["qux", "baz", "foo", "bar"]), np.array([False, False, True, True])),
(Index([]), np.array([], dtype=bool)), # empty
],
)
def test_isin(self, values, index, expected):
result = index.isin(values)
tm.assert_numpy_array_equal(result, expected)
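    def test_isin_sketch(self):
        # Hedged illustrative sketch, not part of the original suite: isin is
        # elementwise membership of index labels in the passed values.
        index = Index(["a", "b", "c"])
        result = index.isin(["b", "z"])
        tm.assert_numpy_array_equal(result, np.array([False, True, False]))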
def test_isin_nan_common_object(self, nulls_fixture, nulls_fixture2):
# Test cartesian product of null fixtures and ensure that we don't
# mangle the various types (save a corner case with PyPy)
# all nans are the same
if (
isinstance(nulls_fixture, float)
and isinstance(nulls_fixture2, float)
and math.isnan(nulls_fixture)
and math.isnan(nulls_fixture2)
):
tm.assert_numpy_array_equal(
Index(["a", nulls_fixture]).isin([nulls_fixture2]),
np.array([False, True]),
)
elif nulls_fixture is nulls_fixture2: # should preserve NA type
tm.assert_numpy_array_equal(
Index(["a", nulls_fixture]).isin([nulls_fixture2]),
np.array([False, True]),
)
else:
tm.assert_numpy_array_equal(
Index(["a", nulls_fixture]).isin([nulls_fixture2]),
np.array([False, False]),
)
def test_isin_nan_common_float64(self, nulls_fixture):
if nulls_fixture is pd.NaT:
pytest.skip("pd.NaT not compatible with Float64Index")
# Float64Index overrides isin, so must be checked separately
if nulls_fixture is pd.NA:
pytest.xfail("Float64Index cannot contain pd.NA")
tm.assert_numpy_array_equal(
Float64Index([1.0, nulls_fixture]).isin([np.nan]), np.array([False, True])
)
# we cannot compare NaT with NaN
tm.assert_numpy_array_equal(
Float64Index([1.0, nulls_fixture]).isin([pd.NaT]), np.array([False, False])
)
@pytest.mark.parametrize("level", [0, -1])
@pytest.mark.parametrize(
"index",
[
Index(["qux", "baz", "foo", "bar"]),
# Float64Index overrides isin, so must be checked separately
Float64Index([1.0, 2.0, 3.0, 4.0]),
],
)
def test_isin_level_kwarg(self, level, index):
values = index.tolist()[-2:] + ["nonexisting"]
expected = np.array([False, False, True, True])
tm.assert_numpy_array_equal(expected, index.isin(values, level=level))
index.name = "foobar"
tm.assert_numpy_array_equal(expected, index.isin(values, level="foobar"))
def test_isin_level_kwarg_bad_level_raises(self, index):
for level in [10, index.nlevels, -(index.nlevels + 1)]:
with pytest.raises(IndexError, match="Too many levels"):
index.isin([], level=level)
@pytest.mark.parametrize("label", [1.0, "foobar", "xyzzy", np.nan])
def test_isin_level_kwarg_bad_label_raises(self, label, index):
if isinstance(index, MultiIndex):
index = index.rename(["foo", "bar"] + index.names[2:])
msg = f"'Level {label} not found'"
else:
index = index.rename("foo")
msg = fr"Requested level \({label}\) does not match index name \(foo\)"
with pytest.raises(KeyError, match=msg):
index.isin([], level=label)
@pytest.mark.parametrize("empty", [[], Series(dtype=object), np.array([])])
def test_isin_empty(self, empty):
# see gh-16991
index = Index(["a", "b"])
expected = np.array([False, False])
result = index.isin(empty)
tm.assert_numpy_array_equal(expected, result)
@pytest.mark.parametrize(
"values",
[
[1, 2, 3, 4],
[1.0, 2.0, 3.0, 4.0],
[True, True, True, True],
["foo", "bar", "baz", "qux"],
pd.date_range("2018-01-01", freq="D", periods=4),
],
)
def test_boolean_cmp(self, values):
index = Index(values)
result = index == values
expected = np.array([True, True, True, True], dtype=bool)
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize("index", ["string"], indirect=True)
@pytest.mark.parametrize("name,level", [(None, 0), ("a", "a")])
def test_get_level_values(self, index, name, level):
expected = index.copy()
if name:
expected.name = name
result = expected.get_level_values(level)
tm.assert_index_equal(result, expected)
def test_slice_keep_name(self):
index = Index(["a", "b"], name="asdf")
assert index.name == index[1:].name
@pytest.mark.parametrize(
"index",
["unicode", "string", "datetime", "int", "uint", "float"],
indirect=True,
)
def test_join_self(self, index, join_type):
joined = index.join(index, how=join_type)
assert index is joined
@pytest.mark.parametrize("method", ["strip", "rstrip", "lstrip"])
def test_str_attribute(self, method):
# GH9068
index = Index([" jack", "jill ", " jesse ", "frank"])
expected = Index([getattr(str, method)(x) for x in index.values])
result = getattr(index.str, method)()
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"index",
[
Index(range(5)),
tm.makeDateIndex(10),
MultiIndex.from_tuples([("foo", "1"), ("bar", "3")]),
period_range(start="2000", end="2010", freq="A"),
],
)
def test_str_attribute_raises(self, index):
with pytest.raises(AttributeError, match="only use .str accessor"):
index.str.repeat(2)
@pytest.mark.parametrize(
"expand,expected",
[
(None, Index([["a", "b", "c"], ["d", "e"], ["f"]])),
(False, Index([["a", "b", "c"], ["d", "e"], ["f"]])),
(
True,
MultiIndex.from_tuples(
[("a", "b", "c"), ("d", "e", np.nan), ("f", np.nan, np.nan)]
),
),
],
)
def test_str_split(self, expand, expected):
index = Index(["a b c", "d e", "f"])
if expand is not None:
result = index.str.split(expand=expand)
else:
result = index.str.split()
tm.assert_index_equal(result, expected)
def test_str_bool_return(self):
# test boolean case, should return np.array instead of boolean Index
index = Index(["a1", "a2", "b1", "b2"])
result = index.str.startswith("a")
expected = np.array([True, True, False, False])
tm.assert_numpy_array_equal(result, expected)
assert isinstance(result, np.ndarray)
def test_str_bool_series_indexing(self):
index = Index(["a1", "a2", "b1", "b2"])
s = Series(range(4), index=index)
result = s[s.index.str.startswith("a")]
expected = Series(range(2), index=["a1", "a2"])
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
"index,expected", [(Index(list("abcd")), True), (Index(range(4)), False)]
)
def test_tab_completion(self, index, expected):
# GH 9910
result = "str" in dir(index)
assert result == expected
def test_indexing_doesnt_change_class(self):
index = Index([1, 2, 3, "a", "b", "c"])
assert index[1:3].identical(pd.Index([2, 3], dtype=np.object_))
assert index[[0, 1]].identical(pd.Index([1, 2], dtype=np.object_))
def test_outer_join_sort(self):
left_index = Index(np.random.permutation(15))
right_index = tm.makeDateIndex(10)
with tm.assert_produces_warning(RuntimeWarning):
result = left_index.join(right_index, how="outer")
# right_index in this case because DatetimeIndex has join precedence
# over Int64Index
with tm.assert_produces_warning(RuntimeWarning):
expected = right_index.astype(object).union(left_index.astype(object))
tm.assert_index_equal(result, expected)
def test_nan_first_take_datetime(self):
index = Index([pd.NaT, Timestamp("20130101"), Timestamp("20130102")])
result = index.take([-1, 0, 1])
expected = Index([index[-1], index[0], index[1]])
tm.assert_index_equal(result, expected)
def test_take_fill_value(self):
# GH 12631
index = pd.Index(list("ABC"), name="xxx")
result = index.take(np.array([1, 0, -1]))
expected = pd.Index(list("BAC"), name="xxx")
tm.assert_index_equal(result, expected)
# fill_value
result = index.take(np.array([1, 0, -1]), fill_value=True)
expected = pd.Index(["B", "A", np.nan], name="xxx")
tm.assert_index_equal(result, expected)
        # allow_fill=False: -1 means "take from the end", not "fill"
result = index.take(np.array([1, 0, -1]), allow_fill=False, fill_value=True)
expected = pd.Index(["B", "A", "C"], name="xxx")
tm.assert_index_equal(result, expected)
def test_take_fill_value_none_raises(self):
index = pd.Index(list("ABC"), name="xxx")
msg = (
"When allow_fill=True and fill_value is not None, "
"all indices must be >= -1"
)
with pytest.raises(ValueError, match=msg):
index.take(np.array([1, 0, -2]), fill_value=True)
with pytest.raises(ValueError, match=msg):
index.take(np.array([1, 0, -5]), fill_value=True)
def test_take_bad_bounds_raises(self):
index = pd.Index(list("ABC"), name="xxx")
with pytest.raises(IndexError, match="out of bounds"):
index.take(np.array([1, -5]))
@pytest.mark.parametrize("name", [None, "foobar"])
@pytest.mark.parametrize(
"labels",
[
[],
np.array([]),
["A", "B", "C"],
["C", "B", "A"],
np.array(["A", "B", "C"]),
np.array(["C", "B", "A"]),
# Must preserve name even if dtype changes
pd.date_range("20130101", periods=3).values,
pd.date_range("20130101", periods=3).tolist(),
],
)
def test_reindex_preserves_name_if_target_is_list_or_ndarray(self, name, labels):
# GH6552
index = pd.Index([0, 1, 2])
index.name = name
assert index.reindex(labels)[0].name == name
@pytest.mark.parametrize("labels", [[], np.array([]), np.array([], dtype=np.int64)])
def test_reindex_preserves_type_if_target_is_empty_list_or_array(self, labels):
# GH7774
index = pd.Index(list("abc"))
assert index.reindex(labels)[0].dtype.type == np.object_
@pytest.mark.parametrize(
"labels,dtype",
[
(pd.Int64Index([]), np.int64),
(pd.Float64Index([]), np.float64),
(pd.DatetimeIndex([]), np.datetime64),
],
)
def test_reindex_doesnt_preserve_type_if_target_is_empty_index(self, labels, dtype):
# GH7774
index = pd.Index(list("abc"))
assert index.reindex(labels)[0].dtype.type == dtype
def test_reindex_no_type_preserve_target_empty_mi(self):
index = pd.Index(list("abc"))
result = index.reindex(
pd.MultiIndex([pd.Int64Index([]), pd.Float64Index([])], [[], []])
)[0]
assert result.levels[0].dtype.type == np.int64
assert result.levels[1].dtype.type == np.float64
def test_groupby(self):
index = Index(range(5))
result = index.groupby(np.array([1, 1, 2, 2, 2]))
expected = {1: pd.Index([0, 1]), 2: pd.Index([2, 3, 4])}
tm.assert_dict_equal(result, expected)
@pytest.mark.parametrize(
"mi,expected",
[
(MultiIndex.from_tuples([(1, 2), (4, 5)]), np.array([True, True])),
(MultiIndex.from_tuples([(1, 2), (4, 6)]), np.array([True, False])),
],
)
def test_equals_op_multiindex(self, mi, expected):
# GH9785
# test comparisons of multiindex
df = pd.read_csv(StringIO("a,b,c\n1,2,3\n4,5,6"), index_col=[0, 1])
result = df.index == mi
tm.assert_numpy_array_equal(result, expected)
def test_equals_op_multiindex_identify(self):
df = pd.read_csv(StringIO("a,b,c\n1,2,3\n4,5,6"), index_col=[0, 1])
result = df.index == df.index
expected = np.array([True, True])
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize(
"index",
[
MultiIndex.from_tuples([(1, 2), (4, 5), (8, 9)]),
Index(["foo", "bar", "baz"]),
],
)
def test_equals_op_mismatched_multiindex_raises(self, index):
df = pd.read_csv(StringIO("a,b,c\n1,2,3\n4,5,6"), index_col=[0, 1])
with pytest.raises(ValueError, match="Lengths must match"):
df.index == index
def test_equals_op_index_vs_mi_same_length(self):
mi = MultiIndex.from_tuples([(1, 2), (4, 5), (8, 9)])
index = Index(["foo", "bar", "baz"])
result = mi == index
expected = np.array([False, False, False])
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize("dt_conv", [pd.to_datetime, pd.to_timedelta])
def test_dt_conversion_preserves_name(self, dt_conv):
# GH 10875
index = pd.Index(["01:02:03", "01:02:04"], name="label")
assert index.name == dt_conv(index).name
@pytest.mark.parametrize(
"index,expected",
[
# ASCII
# short
(
pd.Index(["a", "bb", "ccc"]),
"""Index(['a', 'bb', 'ccc'], dtype='object')""",
),
# multiple lines
(
pd.Index(["a", "bb", "ccc"] * 10),
"""\
Index(['a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc',
'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc',
'a', 'bb', 'ccc', 'a', 'bb', 'ccc'],
dtype='object')""",
),
# truncated
(
pd.Index(["a", "bb", "ccc"] * 100),
"""\
Index(['a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a',
...
'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc'],
dtype='object', length=300)""",
),
# Non-ASCII
# short
(
pd.Index(["あ", "いい", "ううう"]),
"""Index(['あ', 'いい', 'ううう'], dtype='object')""",
),
# multiple lines
(
pd.Index(["あ", "いい", "ううう"] * 10),
(
"Index(['あ', 'いい', 'ううう', 'あ', 'いい', 'ううう', "
"'あ', 'いい', 'ううう', 'あ', 'いい', 'ううう',\n"
" 'あ', 'いい', 'ううう', 'あ', 'いい', 'ううう', "
"'あ', 'いい', 'ううう', 'あ', 'いい', 'ううう',\n"
" 'あ', 'いい', 'ううう', 'あ', 'いい', "
"'ううう'],\n"
" dtype='object')"
),
),
# truncated
(
pd.Index(["あ", "いい", "ううう"] * 100),
(
"Index(['あ', 'いい', 'ううう', 'あ', 'いい', 'ううう', "
"'あ', 'いい', 'ううう', 'あ',\n"
" ...\n"
" 'ううう', 'あ', 'いい', 'ううう', 'あ', 'いい', "
"'ううう', 'あ', 'いい', 'ううう'],\n"
" dtype='object', length=300)"
),
),
],
)
def test_string_index_repr(self, index, expected):
result = repr(index)
assert result == expected
@pytest.mark.parametrize(
"index,expected",
[
# short
(
pd.Index(["あ", "いい", "ううう"]),
("Index(['あ', 'いい', 'ううう'], dtype='object')"),
),
# multiple lines
(
pd.Index(["あ", "いい", "ううう"] * 10),
(
"Index(['あ', 'いい', 'ううう', 'あ', 'いい', "
"'ううう', 'あ', 'いい', 'ううう',\n"
" 'あ', 'いい', 'ううう', 'あ', 'いい', "
"'ううう', 'あ', 'いい', 'ううう',\n"
" 'あ', 'いい', 'ううう', 'あ', 'いい', "
"'ううう', 'あ', 'いい', 'ううう',\n"
" 'あ', 'いい', 'ううう'],\n"
" dtype='object')"
""
),
),
# truncated
(
pd.Index(["あ", "いい", "ううう"] * 100),
(
"Index(['あ', 'いい', 'ううう', 'あ', 'いい', "
"'ううう', 'あ', 'いい', 'ううう',\n"
" 'あ',\n"
" ...\n"
" 'ううう', 'あ', 'いい', 'ううう', 'あ', "
"'いい', 'ううう', 'あ', 'いい',\n"
" 'ううう'],\n"
" dtype='object', length=300)"
),
),
],
)
def test_string_index_repr_with_unicode_option(self, index, expected):
# Enable Unicode option -----------------------------------------
with cf.option_context("display.unicode.east_asian_width", True):
result = repr(index)
assert result == expected
def test_cached_properties_not_settable(self):
index = pd.Index([1, 2, 3])
with pytest.raises(AttributeError, match="Can't set attribute"):
index.is_unique = False
@async_mark()
async def test_tab_complete_warning(self, ip):
# https://github.com/pandas-dev/pandas/issues/16409
pytest.importorskip("IPython", minversion="6.0.0")
from IPython.core.completer import provisionalcompleter
code = "import pandas as pd; idx = pd.Index([1, 2])"
await ip.run_code(code)
# GH 31324 newer jedi version raises Deprecation warning
import jedi
if jedi.__version__ < "0.16.0":
warning = tm.assert_produces_warning(None)
else:
warning = tm.assert_produces_warning(
DeprecationWarning, check_stacklevel=False
)
with warning:
with provisionalcompleter("ignore"):
list(ip.Completer.completions("idx.", 4))
def test_contains_method_removed(self, index):
# GH#30103 method removed for all types except IntervalIndex
if isinstance(index, pd.IntervalIndex):
index.contains(1)
else:
msg = f"'{type(index).__name__}' object has no attribute 'contains'"
with pytest.raises(AttributeError, match=msg):
index.contains(1)
class TestMixedIntIndex(Base):
# Mostly the tests from common.py for which the results differ
# in py2 and py3 because ints and strings are uncomparable in py3
# (GH 13514)
_holder = Index
@pytest.fixture(params=[[0, "a", 1, "b", 2, "c"]], ids=["mixedIndex"])
def index(self, request):
return Index(request.param)
def create_index(self) -> Index:
return Index([0, "a", 1, "b", 2, "c"])
def test_argsort(self):
index = self.create_index()
with pytest.raises(TypeError, match="'>|<' not supported"):
index.argsort()
def test_numpy_argsort(self):
index = self.create_index()
with pytest.raises(TypeError, match="'>|<' not supported"):
np.argsort(index)
def test_copy_name(self):
# Check that "name" argument passed at initialization is honoured
# GH12309
index = self.create_index()
first = type(index)(index, copy=True, name="mario")
second = type(first)(first, copy=False)
# Even though "copy=False", we want a new object.
assert first is not second
tm.assert_index_equal(first, second)
assert first.name == "mario"
assert second.name == "mario"
s1 = Series(2, index=first)
s2 = Series(3, index=second[:-1])
s3 = s1 * s2
assert s3.index.name == "mario"
def test_copy_name2(self):
# Check that adding a "name" parameter to the copy is honored
# GH14302
index = pd.Index([1, 2], name="MyName")
index1 = index.copy()
tm.assert_index_equal(index, index1)
index2 = index.copy(name="NewName")
tm.assert_index_equal(index, index2, check_names=False)
assert index.name == "MyName"
assert index2.name == "NewName"
index3 = index.copy(names=["NewName"])
tm.assert_index_equal(index, index3, check_names=False)
assert index.name == "MyName"
assert index.names == ["MyName"]
assert index3.name == "NewName"
assert index3.names == ["NewName"]
def test_unique_na(self):
idx = pd.Index([2, np.nan, 2, 1], name="my_index")
expected = pd.Index([2, np.nan, 1], name="my_index")
result = idx.unique()
tm.assert_index_equal(result, expected)
def test_logical_compat(self):
index = self.create_index()
assert index.all() == index.values.all()
assert index.any() == index.values.any()
@pytest.mark.parametrize("how", ["any", "all"])
@pytest.mark.parametrize("dtype", [None, object, "category"])
@pytest.mark.parametrize(
"vals,expected",
[
([1, 2, 3], [1, 2, 3]),
([1.0, 2.0, 3.0], [1.0, 2.0, 3.0]),
([1.0, 2.0, np.nan, 3.0], [1.0, 2.0, 3.0]),
(["A", "B", "C"], ["A", "B", "C"]),
(["A", np.nan, "B", "C"], ["A", "B", "C"]),
],
)
def test_dropna(self, how, dtype, vals, expected):
# GH 6194
index = pd.Index(vals, dtype=dtype)
result = index.dropna(how=how)
expected = pd.Index(expected, dtype=dtype)
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("how", ["any", "all"])
@pytest.mark.parametrize(
"index,expected",
[
(
pd.DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03"]),
pd.DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03"]),
),
(
pd.DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03", pd.NaT]),
pd.DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03"]),
),
(
pd.TimedeltaIndex(["1 days", "2 days", "3 days"]),
pd.TimedeltaIndex(["1 days", "2 days", "3 days"]),
),
(
pd.TimedeltaIndex([pd.NaT, "1 days", "2 days", "3 days", pd.NaT]),
pd.TimedeltaIndex(["1 days", "2 days", "3 days"]),
),
(
pd.PeriodIndex(["2012-02", "2012-04", "2012-05"], freq="M"),
pd.PeriodIndex(["2012-02", "2012-04", "2012-05"], freq="M"),
),
(
pd.PeriodIndex(["2012-02", "2012-04", "NaT", "2012-05"], freq="M"),
pd.PeriodIndex(["2012-02", "2012-04", "2012-05"], freq="M"),
),
],
)
def test_dropna_dt_like(self, how, index, expected):
result = index.dropna(how=how)
tm.assert_index_equal(result, expected)
def test_dropna_invalid_how_raises(self):
msg = "invalid how option: xxx"
with pytest.raises(ValueError, match=msg):
pd.Index([1, 2, 3]).dropna(how="xxx")
def test_get_combined_index(self):
result = _get_combined_index([])
expected = Index([])
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"index",
[
pd.Index([np.nan]),
pd.Index([np.nan, 1]),
pd.Index([1, 2, np.nan]),
pd.Index(["a", "b", np.nan]),
pd.to_datetime(["NaT"]),
pd.to_datetime(["NaT", "2000-01-01"]),
pd.to_datetime(["2000-01-01", "NaT", "2000-01-02"]),
pd.to_timedelta(["1 day", "NaT"]),
],
)
def test_is_monotonic_na(self, index):
assert index.is_monotonic_increasing is False
assert index.is_monotonic_decreasing is False
assert index._is_strictly_monotonic_increasing is False
assert index._is_strictly_monotonic_decreasing is False
def test_repr_summary(self):
with cf.option_context("display.max_seq_items", 10):
result = repr(pd.Index(np.arange(1000)))
assert len(result) < 200
assert "..." in result
@pytest.mark.parametrize("klass", [Series, DataFrame])
def test_int_name_format(self, klass):
index = Index(["a", "b", "c"], name=0)
result = klass(list(range(3)), index=index)
assert "0" in repr(result)
def test_str_to_bytes_raises(self):
# GH 26447
index = Index([str(x) for x in range(10)])
msg = "^'str' object cannot be interpreted as an integer$"
with pytest.raises(TypeError, match=msg):
bytes(index)
def test_intersect_str_dates(self):
dt_dates = [datetime(2012, 2, 9), datetime(2012, 2, 22)]
index1 = Index(dt_dates, dtype=object)
index2 = Index(["aa"], dtype=object)
result = index2.intersection(index1)
expected = Index([], dtype=object)
tm.assert_index_equal(result, expected)
def test_index_repr_bool_nan(self):
# GH32146
arr = Index([True, False, np.nan], dtype=object)
exp1 = arr.format()
out1 = ["True", "False", "NaN"]
assert out1 == exp1
exp2 = repr(arr)
out2 = "Index([True, False, nan], dtype='object')"
assert out2 == exp2
@pytest.mark.filterwarnings("ignore:elementwise comparison failed:FutureWarning")
def test_index_with_tuple_bool(self):
# GH34123
# TODO: remove tupleize_cols=False once correct behaviour is restored
# TODO: also this op right now produces FutureWarning from numpy
idx = Index([("a", "b"), ("b", "c"), ("c", "a")], tupleize_cols=False)
result = idx == ("c", "a")
expected = np.array([False, False, True])
tm.assert_numpy_array_equal(result, expected)
class TestIndexUtils:
@pytest.mark.parametrize(
"data, names, expected",
[
([[1, 2, 3]], None, Index([1, 2, 3])),
([[1, 2, 3]], ["name"], Index([1, 2, 3], name="name")),
(
[["a", "a"], ["c", "d"]],
None,
MultiIndex([["a"], ["c", "d"]], [[0, 0], [0, 1]]),
),
(
[["a", "a"], ["c", "d"]],
["L1", "L2"],
MultiIndex([["a"], ["c", "d"]], [[0, 0], [0, 1]], names=["L1", "L2"]),
),
],
)
def test_ensure_index_from_sequences(self, data, names, expected):
result = ensure_index_from_sequences(data, names)
tm.assert_index_equal(result, expected)
def test_ensure_index_mixed_closed_intervals(self):
# GH27172
intervals = [
pd.Interval(0, 1, closed="left"),
pd.Interval(1, 2, closed="right"),
pd.Interval(2, 3, closed="neither"),
pd.Interval(3, 4, closed="both"),
]
result = ensure_index(intervals)
expected = Index(intervals, dtype=object)
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"opname",
[
"eq",
"ne",
"le",
"lt",
"ge",
"gt",
"add",
"radd",
"sub",
"rsub",
"mul",
"rmul",
"truediv",
"rtruediv",
"floordiv",
"rfloordiv",
"pow",
"rpow",
"mod",
"divmod",
],
)
def test_generated_op_names(opname, index):
if isinstance(index, ABCIndex) and opname == "rsub":
# pd.Index.__rsub__ does not exist; though the method does exist
# for subclasses. see GH#19723
return
opname = f"__{opname}__"
method = getattr(index, opname)
assert method.__name__ == opname
@pytest.mark.parametrize("index_maker", tm.index_subclass_makers_generator())
def test_index_subclass_constructor_wrong_kwargs(index_maker):
# GH #19348
with pytest.raises(TypeError, match="unexpected keyword argument"):
index_maker(foo="bar")
def test_deprecated_fastpath():
msg = "[Uu]nexpected keyword argument"
with pytest.raises(TypeError, match=msg):
pd.Index(np.array(["a", "b"], dtype=object), name="test", fastpath=True)
with pytest.raises(TypeError, match=msg):
pd.Int64Index(np.array([1, 2, 3], dtype="int64"), name="test", fastpath=True)
with pytest.raises(TypeError, match=msg):
pd.RangeIndex(0, 5, 2, name="test", fastpath=True)
with pytest.raises(TypeError, match=msg):
pd.CategoricalIndex(["a", "b", "c"], name="test", fastpath=True)
def test_shape_of_invalid_index():
# Currently, it is possible to create "invalid" index objects backed by
# a multi-dimensional array (see https://github.com/pandas-dev/pandas/issues/27125
    # about this). However, as long as this is not solved in general, this test ensures
# that the returned shape is consistent with this underlying array for
# compat with matplotlib (see https://github.com/pandas-dev/pandas/issues/27775)
idx = pd.Index([0, 1, 2, 3])
with tm.assert_produces_warning(FutureWarning):
# GH#30588 multi-dimensional indexing deprecated
assert idx[:, None].shape == (4, 1)
def test_validate_1d_input():
# GH#27125 check that we do not have >1-dimensional input
msg = "Index data must be 1-dimensional"
arr = np.arange(8).reshape(2, 2, 2)
with pytest.raises(ValueError, match=msg):
pd.Index(arr)
with pytest.raises(ValueError, match=msg):
pd.Float64Index(arr.astype(np.float64))
with pytest.raises(ValueError, match=msg):
pd.Int64Index(arr.astype(np.int64))
with pytest.raises(ValueError, match=msg):
pd.UInt64Index(arr.astype(np.uint64))
df = pd.DataFrame(arr.reshape(4, 2))
with pytest.raises(ValueError, match=msg):
pd.Index(df)
# GH#13601 trying to assign a multi-dimensional array to an index is not
# allowed
ser = pd.Series(0, range(4))
with pytest.raises(ValueError, match=msg):
ser.index = np.array([[2, 3]] * 4)
def test_convert_almost_null_slice(index):
# slice with None at both ends, but not step
key = slice(None, None, "foo")
if isinstance(index, pd.IntervalIndex):
msg = "label-based slicing with step!=1 is not supported for IntervalIndex"
with pytest.raises(ValueError, match=msg):
index._convert_slice_indexer(key, "loc")
else:
msg = "'>=' not supported between instances of 'str' and 'int'"
with pytest.raises(TypeError, match=msg):
index._convert_slice_indexer(key, "loc")
dtlike_dtypes = [
np.dtype("timedelta64[ns]"),
np.dtype("datetime64[ns]"),
pd.DatetimeTZDtype("ns", "Asia/Tokyo"),
pd.PeriodDtype("ns"),
]
@pytest.mark.parametrize("ldtype", dtlike_dtypes)
@pytest.mark.parametrize("rdtype", dtlike_dtypes)
def test_get_indexer_non_unique_wrong_dtype(ldtype, rdtype):
vals = np.tile(3600 * 10 ** 9 * np.arange(3), 2)
def construct(dtype):
if dtype is dtlike_dtypes[-1]:
# PeriodArray will try to cast ints to strings
return pd.DatetimeIndex(vals).astype(dtype)
return pd.Index(vals, dtype=dtype)
left = construct(ldtype)
right = construct(rdtype)
result = left.get_indexer_non_unique(right)
if ldtype is rdtype:
ex1 = np.array([0, 3, 1, 4, 2, 5] * 2, dtype=np.intp)
ex2 = np.array([], dtype=np.intp)
tm.assert_numpy_array_equal(result[0], ex1)
tm.assert_numpy_array_equal(result[1], ex2)
else:
no_matches = np.array([-1] * 6, dtype=np.intp)
tm.assert_numpy_array_equal(result[0], no_matches)
tm.assert_numpy_array_equal(result[1], no_matches)
| bsd-3-clause | 8,710,660,139,587,784,000 | 34.393881 | 88 | 0.542197 | false |
kennethgillen/ansible | lib/ansible/modules/network/ldap_attr.py | 28 | 11014 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Peter Sagerson <[email protected]>
# (c) 2016, Jiri Tyr <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: ldap_attr
short_description: Add or remove LDAP attribute values.
description:
- Add or remove LDAP attribute values.
notes:
- This only deals with attributes on existing entries. To add or remove
whole entries, see M(ldap_entry).
- The default authentication settings will attempt to use a SASL EXTERNAL
bind over a UNIX domain socket. This works well with the default Ubuntu
install for example, which includes a cn=peercred,cn=external,cn=auth ACL
rule allowing root to modify the server configuration. If you need to use
a simple bind to access your server, pass the credentials in I(bind_dn)
and I(bind_pw).
- For I(state=present) and I(state=absent), all value comparisons are
performed on the server for maximum accuracy. For I(state=exact), values
have to be compared in Python, which obviously ignores LDAP matching
rules. This should work out in most cases, but it is theoretically
possible to see spurious changes when target and actual values are
semantically identical but lexically distinct.
version_added: '2.3'
author:
- Jiri Tyr (@jtyr)
requirements:
- python-ldap
options:
bind_dn:
required: false
default: null
description:
- A DN to bind with. If this is omitted, we'll try a SASL bind with
the EXTERNAL mechanism. If this is blank, we'll use an anonymous
bind.
bind_pw:
required: false
default: null
description:
- The password to use with I(bind_dn).
dn:
required: true
description:
- The DN of the entry to modify.
name:
required: true
description:
- The name of the attribute to modify.
server_uri:
required: false
default: ldapi:///
description:
- A URI to the LDAP server. The default value lets the underlying
LDAP client library look for a UNIX domain socket in its default
location.
start_tls:
required: false
choices: ['yes', 'no']
default: 'no'
description:
- If true, we'll use the START_TLS LDAP extension.
state:
required: false
choices: [present, absent, exact]
default: present
description:
- The state of the attribute values. If C(present), all given
values will be added if they're missing. If C(absent), all given
values will be removed if present. If C(exact), the set of values
will be forced to exactly those provided and no others. If
I(state=exact) and I(value) is empty, all values for this
attribute will be removed.
values:
required: true
description:
- The value(s) to add or remove. This can be a string or a list of
strings. The complex argument format is required in order to pass
a list of strings (see examples).
"""
EXAMPLES = """
- name: Configure directory number 1 for example.com
ldap_attr:
dn: olcDatabase={1}hdb,cn=config
name: olcSuffix
values: dc=example,dc=com
state: exact
# The complex argument format is required here to pass a list of ACL strings.
- name: Set up the ACL
ldap_attr:
dn: olcDatabase={1}hdb,cn=config
name: olcAccess
values:
- >-
{0}to attrs=userPassword,shadowLastChange
by self write
by anonymous auth
by dn="cn=admin,dc=example,dc=com" write
        by * none
- >-
{1}to dn.base="dc=example,dc=com"
by dn="cn=admin,dc=example,dc=com" write
by * read
state: exact
- name: Declare some indexes
ldap_attr:
dn: olcDatabase={1}hdb,cn=config
name: olcDbIndex
values: "{{ item }}"
with_items:
- objectClass eq
- uid eq
- name: Set up a root user, which we can use later to bootstrap the directory
ldap_attr:
dn: olcDatabase={1}hdb,cn=config
name: "{{ item.key }}"
values: "{{ item.value }}"
state: exact
with_dict:
olcRootDN: cn=root,dc=example,dc=com
olcRootPW: "{SSHA}tabyipcHzhwESzRaGA7oQ/SDoBZQOGND"
- name: Get rid of an unneeded attribute
ldap_attr:
dn: uid=jdoe,ou=people,dc=example,dc=com
name: shadowExpire
values: ""
state: exact
server_uri: ldap://localhost/
bind_dn: cn=admin,dc=example,dc=com
bind_pw: password
#
# The same as in the previous example but with the authentication details
# stored in the ldap_auth variable:
#
# ldap_auth:
# server_uri: ldap://localhost/
# bind_dn: cn=admin,dc=example,dc=com
# bind_pw: password
- name: Get rid of an unneeded attribute
ldap_attr:
dn: uid=jdoe,ou=people,dc=example,dc=com
name: shadowExpire
values: ""
state: exact
params: "{{ ldap_auth }}"
"""
RETURN = """
modlist:
description: list of modified parameters
returned: success
type: list
sample: '[[2, "olcRootDN", ["cn=root,dc=example,dc=com"]]]'
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
try:
import ldap
import ldap.sasl
HAS_LDAP = True
except ImportError:
HAS_LDAP = False
class LdapAttr(object):
def __init__(self, module):
# Shortcuts
self.module = module
self.bind_dn = self.module.params['bind_dn']
self.bind_pw = self.module.params['bind_pw']
self.dn = self.module.params['dn']
self.name = self.module.params['name']
self.server_uri = self.module.params['server_uri']
self.start_tls = self.module.params['start_tls']
self.state = self.module.params['state']
# Normalize values
if isinstance(self.module.params['values'], list):
self.values = map(str, self.module.params['values'])
else:
self.values = [str(self.module.params['values'])]
# Establish connection
self.connection = self._connect_to_ldap()
def add(self):
values_to_add = filter(self._is_value_absent, self.values)
if len(values_to_add) > 0:
modlist = [(ldap.MOD_ADD, self.name, values_to_add)]
else:
modlist = []
return modlist
def delete(self):
values_to_delete = filter(self._is_value_present, self.values)
if len(values_to_delete) > 0:
modlist = [(ldap.MOD_DELETE, self.name, values_to_delete)]
else:
modlist = []
return modlist
def exact(self):
try:
results = self.connection.search_s(
self.dn, ldap.SCOPE_BASE, attrlist=[self.name])
except ldap.LDAPError:
e = get_exception()
self.module.fail_json(
msg="Cannot search for attribute %s" % self.name,
details=str(e))
current = results[0][1].get(self.name, [])
modlist = []
if frozenset(self.values) != frozenset(current):
if len(current) == 0:
modlist = [(ldap.MOD_ADD, self.name, self.values)]
elif len(self.values) == 0:
modlist = [(ldap.MOD_DELETE, self.name, None)]
else:
modlist = [(ldap.MOD_REPLACE, self.name, self.values)]
return modlist
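    # Summary of the branches above: with no current values we MOD_ADD the new
    # set; with an empty target set we MOD_DELETE the attribute entirely;
    # otherwise we MOD_REPLACE the whole value list in a single operation.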
def _is_value_present(self, value):
""" True if the target attribute has the given value. """
try:
is_present = bool(
self.connection.compare_s(self.dn, self.name, value))
except ldap.NO_SUCH_ATTRIBUTE:
is_present = False
return is_present
def _is_value_absent(self, value):
""" True if the target attribute doesn't have the given value. """
return not self._is_value_present(value)
def _connect_to_ldap(self):
connection = ldap.initialize(self.server_uri)
if self.start_tls:
try:
connection.start_tls_s()
except ldap.LDAPError:
e = get_exception()
self.module.fail_json(msg="Cannot start TLS.", details=str(e))
try:
if self.bind_dn is not None:
connection.simple_bind_s(self.bind_dn, self.bind_pw)
else:
connection.sasl_interactive_bind_s('', ldap.sasl.external())
except ldap.LDAPError:
e = get_exception()
self.module.fail_json(
msg="Cannot bind to the server.", details=str(e))
return connection
def main():
module = AnsibleModule(
argument_spec={
'bind_dn': dict(default=None),
'bind_pw': dict(default='', no_log=True),
'dn': dict(required=True),
'name': dict(required=True),
'params': dict(type='dict'),
'server_uri': dict(default='ldapi:///'),
'start_tls': dict(default=False, type='bool'),
'state': dict(
default='present',
choices=['present', 'absent', 'exact']),
'values': dict(required=True, type='raw'),
},
supports_check_mode=True,
)
if not HAS_LDAP:
module.fail_json(
msg="Missing requried 'ldap' module (pip install python-ldap)")
# Update module parameters with user's parameters if defined
if 'params' in module.params and isinstance(module.params['params'], dict):
module.params.update(module.params['params'])
# Remove the params
module.params.pop('params', None)
# Instantiate the LdapAttr object
ldap = LdapAttr(module)
state = module.params['state']
# Perform action
if state == 'present':
modlist = ldap.add()
elif state == 'absent':
modlist = ldap.delete()
elif state == 'exact':
modlist = ldap.exact()
changed = False
if len(modlist) > 0:
changed = True
if not module.check_mode:
try:
ldap.connection.modify_s(ldap.dn, modlist)
except Exception:
e = get_exception()
module.fail_json(
msg="Attribute action failed.", details=str(e))
module.exit_json(changed=changed, modlist=modlist)
if __name__ == '__main__':
main()
| gpl-3.0 | -7,757,366,830,278,018,000 | 29.679666 | 79 | 0.615217 | false |
cchurch/ansible-modules-core | network/junos/junos_config.py | 11 | 11221 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = """
---
module: junos_config
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Manage configuration on devices running Juniper JUNOS
description:
- This module provides an implementation for working with the active
configuration running on Juniper JUNOS devices. It provides a set
of arguments for loading configuration, performing rollback operations
and zeroing the active configuration on the device.
extends_documentation_fragment: junos
options:
lines:
description:
- This argument takes a list of C(set) or C(delete) configuration
lines to push into the remote device. Each line must start with
either C(set) or C(delete). This argument is mutually exclusive
with the I(src) argument.
required: false
default: null
src:
description:
- The I(src) argument provides a path to the configuration file
to load into the remote system. The path can either be a full
system path to the configuration file if the value starts with /
or relative to the root of the implemented role or playbook.
This argument is mutually exclusive with the I(lines) argument.
required: false
default: null
version_added: "2.2"
src_format:
description:
- The I(src_format) argument specifies the format of the configuration
found int I(src). If the I(src_format) argument is not provided,
the module will attempt to determine the format of the configuration
file specified in I(src).
required: false
default: null
choices: ['xml', 'set', 'text', 'json']
version_added: "2.2"
rollback:
description:
- The C(rollback) argument instructs the module to rollback the
current configuration to the identifier specified in the
argument. If the specified rollback identifier does not
exist on the remote device, the module will fail. To rollback
to the most recent commit, set the C(rollback) argument to 0.
required: false
default: null
zeroize:
description:
- The C(zeroize) argument is used to completely sanitize the
remote device configuration back to initial defaults. This
argument will effectively remove all current configuration
statements on the remote device.
required: false
default: null
confirm:
description:
- The C(confirm) argument will configure a time out value for
the commit to be confirmed before it is automatically
rolled back. If the C(confirm) argument is set to False, this
argument is silently ignored. If the value for this argument
is set to 0, the commit is confirmed immediately.
required: false
default: 0
comment:
description:
- The C(comment) argument specifies a text string to be used
when committing the configuration. If the C(confirm) argument
is set to False, this argument is silently ignored.
required: false
default: configured by junos_config
replace:
description:
- The C(replace) argument will instruct the remote device to
replace the current configuration hierarchy with the one specified
in the corresponding hierarchy of the source configuration loaded
from this module.
- Note this argument should be considered deprecated. To achieve
the equivalent, set the I(update) argument to C(replace). This argument
will be removed in a future release.
required: false
choices: ['yes', 'no']
default: false
backup:
description:
- This argument will cause the module to create a full backup of
the current C(running-config) from the remote device before any
changes are made. The backup file is written to the C(backup)
folder in the playbook root directory. If the directory does not
exist, it is created.
required: false
default: no
choices: ['yes', 'no']
version_added: "2.2"
requirements:
- junos-eznc
notes:
- This module requires the netconf system service be enabled on
the remote device being managed.
"""
EXAMPLES = """
# Note: examples below use the following provider dict to handle
# transport and authentication to the node.
vars:
netconf:
host: "{{ inventory_hostname }}"
username: ansible
password: Ansible
- name: load configure file into device
junos_config:
src: srx.cfg
comment: update config
provider: "{{ netconf }}"
- name: rollback the configuration to id 10
junos_config:
rollback: 10
provider: "{{ netconf }}"
- name: zero out the current configuration
junos_config:
zeroize: yes
provider: "{{ netconf }}"
- name: confirm a previous commit
junos_config:
provider: "{{ netconf }}"
"""
RETURN = """
backup_path:
description: The full path to the backup file
returned: when backup is yes
type: path
sample: /playbooks/ansible/backup/config.2016-07-16@22:28:34
"""
import json
from xml.etree import ElementTree
import ansible.module_utils.junos
from ansible.module_utils.basic import get_exception
from ansible.module_utils.network import NetworkModule, NetworkError
from ansible.module_utils.netcfg import NetworkConfig
DEFAULT_COMMENT = 'configured by junos_config'
def guess_format(config):
try:
json.loads(config)
return 'json'
except ValueError:
pass
try:
ElementTree.fromstring(config)
return 'xml'
except ElementTree.ParseError:
pass
if config.startswith('set') or config.startswith('delete'):
return 'set'
return 'text'
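# A minimal sketch of guess_format() in use (inputs are illustrative only):
#
#     guess_format('{"system": {"host-name": "r1"}}')   # -> 'json'
#     guess_format('<configuration/>')                  # -> 'xml'
#     guess_format('set system host-name r1')           # -> 'set'
#     guess_format('system { host-name r1; }')          # -> 'text'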
def config_to_commands(config):
set_format = config.startswith('set') or config.startswith('delete')
candidate = NetworkConfig(indent=4, contents=config, device_os='junos')
if not set_format:
candidate = [c.line for c in candidate.items]
commands = list()
# this filters out less specific lines
for item in candidate:
for index, entry in enumerate(commands):
if item.startswith(entry):
del commands[index]
break
commands.append(item)
else:
commands = str(candidate).split('\n')
return commands
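# Worked example of the de-duplication above (hypothetical lines): given
# ['set system', 'set system host-name r1'], the less specific first entry is
# dropped because the second line starts with it, leaving only
# ['set system host-name r1'].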
def diff_commands(commands, config):
config = [unicode(c).replace("'", '') for c in config]
updates = list()
visited = set()
for item in commands:
if len(item) > 0:
if not item.startswith('set') and not item.startswith('delete'):
raise ValueError('line must start with either `set` or `delete`')
elif item.startswith('set') and item[4:] not in config:
updates.append(item)
elif item.startswith('delete'):
for entry in config:
if entry.startswith(item[7:]) and item not in visited:
updates.append(item)
visited.add(item)
return updates
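# Hedged summary of the diff rules above: a 'set ...' line is queued only when
# its body (the text after 'set ') is absent from the current config, and a
# 'delete ...' line only when some config entry actually starts with the
# deleted path, so deletes that would match nothing are filtered out.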
def load_config(module, result):
candidate = module.params['lines'] or module.params['src']
if isinstance(candidate, basestring):
candidate = candidate.split('\n')
kwargs = dict()
kwargs['comment'] = module.params['comment']
kwargs['confirm'] = module.params['confirm']
kwargs['replace'] = module.params['replace']
kwargs['commit'] = not module.check_mode
if module.params['src']:
config_format = module.params['src_format'] or guess_format(candidate)
elif module.params['lines']:
config_format = 'set'
kwargs['config_format'] = config_format
# this is done to filter out `delete ...` statements which map to
# nothing in the config as that will cause an exception to be raised
if config_format == 'set':
config = module.config.get_config()
config = config_to_commands(config)
candidate = diff_commands(candidate, config)
diff = module.config.load_config(candidate, **kwargs)
if diff:
result['changed'] = True
result['diff'] = dict(prepared=diff)
def rollback_config(module, result):
rollback = module.params['rollback']
    kwargs = dict(comment=module.params['comment'],
commit=not module.check_mode)
diff = module.connection.rollback_config(rollback, **kwargs)
if diff:
result['changed'] = True
result['diff'] = dict(prepared=diff)
def zeroize_config(module, result):
if not module.check_mode:
module.cli.run_commands('request system zeroize')
result['changed'] = True
def confirm_config(module, result):
checkonly = module.check_mode
result['changed'] = module.connection.confirm_commit(checkonly)
def run(module, result):
if module.params['rollback']:
return rollback_config(module, result)
elif module.params['zeroize']:
return zeroize_config(module, result)
elif not any((module.params['src'], module.params['lines'])):
return confirm_config(module, result)
else:
return load_config(module, result)
def main():
""" main entry point for module execution
"""
argument_spec = dict(
lines=dict(type='list'),
src=dict(type='path'),
src_format=dict(choices=['xml', 'text', 'set', 'json']),
# update operations
replace=dict(default=False, type='bool'),
confirm=dict(default=0, type='int'),
comment=dict(default=DEFAULT_COMMENT),
# config operations
backup=dict(type='bool', default=False),
rollback=dict(type='int'),
zeroize=dict(default=False, type='bool'),
transport=dict(default='netconf', choices=['netconf'])
)
mutually_exclusive = [('lines', 'rollback'), ('lines', 'zeroize'),
('rollback', 'zeroize'), ('lines', 'src'),
('src', 'zeroize'), ('src', 'rollback')]
required_if = [('replace', True, ['src'])]
module = NetworkModule(argument_spec=argument_spec,
mutually_exclusive=mutually_exclusive,
required_if=required_if,
supports_check_mode=True)
result = dict(changed=False)
if module.params['backup']:
result['__backup__'] = module.config.get_config()
try:
run(module, result)
except NetworkError:
exc = get_exception()
module.fail_json(msg=str(exc), **exc.kwargs)
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | -528,848,127,193,416,000 | 31.714286 | 81 | 0.654309 | false |
projecthamster/hamster-gtk | hamster_gtk/overview/widgets/misc.py | 1 | 5056 | # -*- coding: utf-8 -*-
# This file is part of 'hamster-gtk'.
#
# 'hamster-gtk' is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# 'hamster-gtk' is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with 'hamster-gtk'. If not, see <http://www.gnu.org/licenses/>.
"""This module provide widgets that did not fit in the other modules."""
from __future__ import absolute_import, unicode_literals
from gettext import gettext as _
from gi.repository import GObject, Gtk
from six import text_type
from hamster_gtk.helpers import get_parent_window
from hamster_gtk.misc.dialogs import DateRangeSelectDialog
from hamster_gtk.overview.dialogs import ExportDialog
class HeaderBar(Gtk.HeaderBar):
"""Headerbar used by the overview screen."""
def __init__(self, controller, *args, **kwargs):
"""Initialize headerbar."""
super(HeaderBar, self).__init__(*args, **kwargs)
self.set_show_close_button(True)
self.set_title(_("Overview"))
self._daterange_button = self._get_daterange_button()
self.pack_start(self._get_prev_daterange_button())
self.pack_start(self._get_next_daterange_button())
self.pack_start(self._daterange_button)
self.pack_end(self._get_export_button())
controller.signal_handler.connect('daterange-changed', self._on_daterange_changed)
# Widgets
def _get_export_button(self):
"""Return a button to export facts."""
button = Gtk.Button(_("Export"))
button.connect('clicked', self._on_export_button_clicked)
return button
def _get_daterange_button(self):
"""Return a button that opens the *select daterange* dialog."""
# We add a dummy label which will be set properly once a daterange is
# set.
button = Gtk.Button('')
button.connect('clicked', self._on_daterange_button_clicked)
return button
def _get_prev_daterange_button(self):
"""Return a 'previous dateframe' widget."""
button = Gtk.Button(_("Earlier"))
button.connect('clicked', self._on_previous_daterange_button_clicked)
return button
def _get_next_daterange_button(self):
"""Return a 'next dateframe' widget."""
button = Gtk.Button(_("Later"))
button.connect('clicked', self._on_next_daterange_button_clicked)
return button
# Callbacks
def _on_daterange_button_clicked(self, button):
"""Callback for when the 'daterange' button is clicked."""
parent = get_parent_window(self)
dialog = DateRangeSelectDialog(parent)
response = dialog.run()
if response == Gtk.ResponseType.APPLY:
parent._daterange = dialog.daterange
dialog.destroy()
def _on_daterange_changed(self, sender, daterange):
"""Callback to be triggered if the 'daterange' changed."""
def get_label_text(daterange):
start, end = daterange
if start == end:
text = text_type(start)
else:
text = '{} - {}'.format(start, end)
return text
self._daterange_button.set_label(get_label_text(daterange))
def _on_previous_daterange_button_clicked(self, button):
"""Callback for when the 'previous' button is clicked."""
get_parent_window(self).apply_previous_daterange()
def _on_next_daterange_button_clicked(self, button):
"""Callback for when the 'next' button is clicked."""
get_parent_window(self).apply_next_daterange()
def _on_export_button_clicked(self, button):
"""
Trigger fact export if button clicked.
This is the place to run extra logic about where to save/which format.
``parent._export_facts`` only deals with the actual export.
"""
parent = get_parent_window(self)
dialog = ExportDialog(parent)
response = dialog.run()
if response == Gtk.ResponseType.OK:
parent._export_facts(dialog.get_export_format(), dialog.get_filename())
else:
pass
dialog.destroy()
class Summary(Gtk.Box):
"""A widget that shows categories with highest commutative ``Fact.delta``."""
def __init__(self, category_totals):
"""Initialize widget."""
super(Summary, self).__init__()
for category, total in category_totals:
label = Gtk.Label()
label.set_markup("<b>{}:</b> {} minutes".format(
GObject.markup_escape_text(text_type(category)),
int(total.total_seconds() / 60)))
self.pack_start(label, False, False, 10)
| gpl-3.0 | 5,358,059,220,522,168,000 | 37.015038 | 90 | 0.641218 | false |
healpy/healpy | healpy/newvisufunc.py | 1 | 17516 | __all__ = ["projview", "newprojplot"]
import numpy as np
from .pixelfunc import ang2pix, npix2nside
from .rotator import Rotator
import matplotlib.pyplot as plt
from matplotlib.projections.geo import GeoAxes
from matplotlib.ticker import MultipleLocator, FormatStrFormatter, AutoMinorLocator
import warnings
class ThetaFormatterCounterclockwisePhi(GeoAxes.ThetaFormatter):
"""Convert tick labels from rads to degs and shifts labelling from -180|-90|0|90|180 to conterclockwise periodic 180|90|0|270|180 """
def __call__(self, x, pos=None):
if x != 0:
x *= -1
if x < 0:
x += 2 * np.pi
return super(ThetaFormatterCounterclockwisePhi, self).__call__(x, pos)
class ThetaFormatterClockwisePhi(GeoAxes.ThetaFormatter):
"""Convert tick labels from rads to degs and shifts labelling from -180|-90|0|90|180 to clockwise periodic 180|270|0|90|180 """
def __call__(self, x, pos=None):
if x < 0:
x += 2 * np.pi
# return super(ThetaFormatterShiftPhi, self).__call__(x, pos)
return super(ThetaFormatterClockwisePhi, self).__call__(x, pos)
class ThetaFormatterSymmetricPhi(GeoAxes.ThetaFormatter):
"""Just convert phi ticks from rad to degs and keep the true -180|-90|0|90|180 """
def __call__(self, x, pos=None):
return super(ThetaFormatterSymmetricPhi, self).__call__(x, pos)
class ThetaFormatterTheta(GeoAxes.ThetaFormatter):
"""Convert theta ticks from rads to degs"""
def __call__(self, x, pos=None):
return super(ThetaFormatterTheta, self).__call__(x, pos)
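# Worked example of the phi tick conventions above, in degrees, assuming
# ticks at -180|-90|0|90|180:
#   counterclockwise: x -> -x, then +360 if negative  => 180|90|0|270|180
#   clockwise:        x -> x, then +360 if negative   => 180|270|0|90|180
#   symmetrical:      ticks kept as given             => -180|-90|0|90|180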
def lonlat(theta, phi):
"""Converts theta and phi to longitude and latitude"""
longitude = np.asarray(phi)
latitude = np.pi / 2 - np.asarray(theta)
return longitude, latitude
def update_dictionary(main_dict, update_dict):
for key, key_val in main_dict.items():
if key in update_dict:
main_dict[key] = update_dict[key]
return main_dict
def projview(
m=None,
rot=None,
coord=None,
unit="",
xsize=1000,
nest=False,
min=None,
max=None,
flip="astro",
format="%g",
cbar=True,
cmap="viridis",
norm=None,
graticule=False,
graticule_labels=False,
return_only_data=False,
projection_type="mollweide",
cb_orientation="horizontal",
xlabel=None,
ylabel=None,
longitude_grid_spacing=60,
latitude_grid_spacing=30,
override_plot_properties=None,
title=None,
xtick_label_color="black",
ytick_label_color="black",
graticule_color=None,
fontsize=None,
phi_convention="counterclockwise",
custom_xtick_labels=None,
custom_ytick_labels=None,
**kwargs
):
"""Plot a healpix map (given as an array) in the chosen projection.
See examples of using this function in the documentation under "Other tutorials".
Overplot points or lines using :func:`newprojplot`.
.. warning::
this function is work in progress, the aim is to reimplement the healpy
plot functions using the new features of matplotlib and remove most
of the custom projection code.
Please report bugs or submit feature requests via Github.
The interface will change in future releases.
Parameters
----------
map : float, array-like or None
An array containing the map, supports masked maps, see the `ma` function.
If None, will display a blank map, useful for overplotting.
rot : scalar or sequence, optional
Describe the rotation to apply.
In the form (lon, lat, psi) (unit: degrees) : the point at
longitude *lon* and latitude *lat* will be at the center. An additional rotation
of angle *psi* around this direction is applied.
coord : sequence of character, optional
Either one of 'G', 'E' or 'C' to describe the coordinate
system of the map, or a sequence of 2 of these to rotate
the map from the first to the second coordinate system.
unit : str, optional
A text describing the unit of the data. Default: ''
xsize : int, optional
        The size of the image. Default: 1000
nest : bool, optional
If True, ordering scheme is NESTED. Default: False (RING)
min : float, optional
The minimum range value
max : float, optional
The maximum range value
flip : {'astro', 'geo'}, optional
Defines the convention of projection : 'astro' (default, east towards left, west towards right)
        or 'geo' (east towards right, west towards left)
It creates the `healpy_flip` attribute on the Axes to save the convention in the figure.
format : str, optional
The format of the scale label. Default: '%g'
cbar : bool, optional
Display the colorbar. Default: True
norm : {'hist', 'log', None}
Color normalization, hist= histogram equalized color mapping,
log= logarithmic color mapping, default: None (linear color mapping)
kwargs : keywords
any additional keyword is passed to pcolormesh
graticule : bool
add graticule
graticule_labels : bool
longitude and latitude labels
projection_type : {'aitoff', 'hammer', 'lambert', 'mollweide', 'cart', '3d', 'polar'}
type of the plot
cb_orientation : {'horizontal', 'vertical'}
color bar orientation
xlabel : str
set x axis label
ylabel : str
set y axis label
longitude_grid_spacing : float
set x axis grid spacing
latitude_grid_spacing : float
set y axis grid spacing
override_plot_properties : dict
        Override the following plot properties: "cbar_shrink", "cbar_pad", "cbar_label_pad", "figure_width", "figure_size_ratio".
title : str
set title of the plot
    xtick_label_color : str
        change the color of the longitude (x-axis) tick labels, some color maps make it hard to read black tick labels
    ytick_label_color : str
        change the color of the latitude (y-axis) tick labels
    graticule_color : str
        color of the graticule lines (defaults to matplotlib's grid color)
fontsize: dict
Override fontsize of labels: "xlabel", "ylabel", "title", "xtick_label", "ytick_label", "cbar_label", "cbar_tick_label".
phi_convention : string
convention on x-axis (phi), 'counterclockwise' (default), 'clockwise', 'symmetrical' (phi as it is truly given)
if `flip` is "geo", `phi_convention` should be set to 'clockwise'.
custom_xtick_labels : list
override x-axis tick labels
custom_ytick_labels : list
override y-axis tick labels
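
    Examples
    --------
    A minimal usage sketch (illustrative only; assumes a valid RING-ordered
    HEALPix map array)::

        import numpy as np
        import healpy as hp
        from healpy.newvisufunc import projview
        m = np.arange(hp.nside2npix(32), dtype=float)
        projview(m, graticule=True, graticule_labels=True,
                 unit="arbitrary", projection_type="mollweide")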
"""
geographic_projections = ["aitoff", "hammer", "lambert", "mollweide"]
if not m is None:
# auto min and max
if min is None:
min = m.min()
if max is None:
max = m.max()
# do this to find how many decimals are in the colorbar labels, so that the padding in the vertical cbar can done properly
def find_number_of_decimals(number):
try:
return len(str(number).split(".")[1])
        except IndexError:
return 0
# default font sizes
fontsize_defaults = {
"xlabel": 12,
"ylabel": 12,
"title": 14,
"xtick_label": 12,
"ytick_label": 12,
"cbar_label": 12,
"cbar_tick_label": 12,
}
if fontsize is not None:
fontsize_defaults = update_dictionary(fontsize_defaults, fontsize)
# default plot settings
decs = np.max([find_number_of_decimals(min), find_number_of_decimals(max)])
if decs >= 3:
lpad = -27
else:
lpad = -9 * decs
ratio = 0.63
if projection_type == "3d":
if cb_orientation == "vertical":
shrink = 0.55
pad = 0.02
lpad = lpad
width = 11.5
if cb_orientation == "horizontal":
shrink = 0.2
pad = 0
lpad = -10
width = 14
if projection_type in geographic_projections:
if cb_orientation == "vertical":
shrink = 0.6
pad = 0.01
lpad = lpad
width = 10
if cb_orientation == "horizontal":
shrink = 0.6
pad = 0.05
lpad = -8
width = 8.5
if projection_type == "cart":
if cb_orientation == "vertical":
shrink = 1
pad = 0.01
lpad = lpad
width = 9.6
ratio = 0.42
if cb_orientation == "horizontal":
shrink = 0.4
pad = 0.1
lpad = -12
width = 8.8
if xlabel == None:
pad = 0.01
ratio = 0.63
if projection_type == "polar":
if cb_orientation == "vertical":
shrink = 1
pad = 0.01
lpad = lpad
width = 10
if cb_orientation == "horizontal":
shrink = 0.4
pad = 0.01
lpad = 0
width = 12
# pass the default settings to the plot_properties dictionary
plot_properties = {
"cbar_shrink": shrink,
"cbar_pad": pad,
"cbar_label_pad": lpad,
"figure_width": width,
"figure_size_ratio": ratio,
}
if override_plot_properties is not None:
warnings.warn(
"\n *** Overriding default plot properies: " + str(plot_properties) + " ***"
)
plot_properties = update_dictionary(plot_properties, override_plot_properties)
warnings.warn("\n *** New plot properies: " + str(plot_properties) + " ***")
# not implemented features
if not (norm is None):
raise NotImplementedError()
# Create the figure
    if not return_only_data:  # suppress figure creation when only dumping the data
fig = plt.figure(
figsize=(
plot_properties["figure_width"],
plot_properties["figure_width"] * plot_properties["figure_size_ratio"],
)
)
if projection_type == "cart":
ax = fig.add_subplot(111)
else:
ax = fig.add_subplot(111, projection=projection_type)
# FIXME: make a more general axes creation that works also with subplots
# ax = plt.gcf().add_axes((.125, .1, .9, .9), projection="mollweide")
# remove white space around the image
plt.subplots_adjust(left=0.02, right=0.98, top=0.95, bottom=0.05)
# end if not
if graticule and graticule_labels:
plt.subplots_adjust(left=0.04, right=0.98, top=0.95, bottom=0.05)
# allow callers to override the hold state by passing hold=True|False
# washold = ax.ishold() # commented out
hold = kwargs.pop("hold", None)
# if hold is not None:
# ax.hold(hold)
# try:
ysize = xsize // 2
theta = np.linspace(np.pi, 0, ysize)
phi = np.linspace(-np.pi, np.pi, xsize)
longitude = np.radians(np.linspace(-180, 180, xsize))
if flip == "astro":
longitude = longitude[::-1]
if not return_only_data:
# set property on ax so it can be used in newprojplot
ax.healpy_flip = flip
latitude = np.radians(np.linspace(-90, 90, ysize))
# project the map to a rectangular matrix xsize x ysize
PHI, THETA = np.meshgrid(phi, theta)
# coord or rotation
if coord or rot:
r = Rotator(coord=coord, rot=rot, inv=True)
THETA, PHI = r(THETA.flatten(), PHI.flatten())
THETA = THETA.reshape(ysize, xsize)
PHI = PHI.reshape(ysize, xsize)
    if m is not None:
        nside = npix2nside(len(m))
        grid_pix = ang2pix(nside, THETA, PHI, nest=nest)
        grid_map = m[grid_pix]
    else:
        # no map given: show a blank (all-zero) grid, as documented above
        grid_map = np.zeros((ysize, xsize))
# plot
if return_only_data: # exit here when dumping the data
return [longitude, latitude, grid_map]
if projection_type != "3d": # test for 3d plot
ret = plt.pcolormesh(
longitude,
latitude,
grid_map,
vmin=min,
vmax=max,
rasterized=True,
cmap=cmap,
shading="auto",
**kwargs
)
elif projection_type == "3d": # test for 3d plot
LONGITUDE, LATITUDE = np.meshgrid(longitude, latitude)
ret = ax.plot_surface(
LONGITUDE,
LATITUDE,
grid_map,
cmap=cmap,
vmin=min,
vmax=max,
rasterized=True,
**kwargs
)
# graticule
if graticule_color is None:
plt.grid(graticule)
else:
plt.grid(graticule, color=graticule_color)
if graticule:
if projection_type in geographic_projections:
ax.set_longitude_grid(longitude_grid_spacing)
ax.set_latitude_grid(latitude_grid_spacing)
ax.set_longitude_grid_ends(90)
else:
ax.xaxis.set_major_locator(
MultipleLocator(np.deg2rad(longitude_grid_spacing))
) # longitude
ax.yaxis.set_major_locator(
MultipleLocator(np.deg2rad(latitude_grid_spacing))
        )  # latitude
# labelling
if graticule_labels & graticule:
if phi_convention == "counterclockwise":
xtick_formatter = ThetaFormatterCounterclockwisePhi(longitude_grid_spacing)
elif phi_convention == "clockwise":
xtick_formatter = ThetaFormatterClockwisePhi(longitude_grid_spacing)
elif phi_convention == "symmetrical":
xtick_formatter = ThetaFormatterSymmetricPhi(longitude_grid_spacing)
ax.xaxis.set_major_formatter(xtick_formatter)
ax.yaxis.set_major_formatter(ThetaFormatterTheta(latitude_grid_spacing))
if custom_xtick_labels is not None:
try:
ax.xaxis.set_ticklabels(custom_xtick_labels)
except:
warnings.warn(
"Put names for all "
+ str(len(ax.xaxis.get_ticklabels()))
+ " x-tick labels!. No re-labelling done."
)
if custom_ytick_labels is not None:
try:
ax.yaxis.set_ticklabels(custom_ytick_labels)
except:
warnings.warn(
"Put names for all "
+ str(len(ax.yaxis.get_ticklabels()))
+ " y-tick labels!. No re-labelling done."
)
if not graticule:
# remove longitude and latitude labels
ax.xaxis.set_ticklabels([])
ax.yaxis.set_ticklabels([])
ax.tick_params(axis=u"both", which=u"both", length=0)
ax.set_title(title, fontsize=fontsize_defaults["title"])
# tick font size
ax.tick_params(
axis="x", labelsize=fontsize_defaults["xtick_label"], colors=xtick_label_color
)
ax.tick_params(
axis="y", labelsize=fontsize_defaults["ytick_label"], colors=ytick_label_color
)
# colorbar
if projection_type == "cart":
ax.set_aspect(1)
extend = "neither"
if min > np.min(m):
extend = "min"
if max < np.max(m):
extend = "max"
if min > np.min(m) and max < np.max(m):
extend = "both"
if cbar:
cb = fig.colorbar(
ret,
orientation=cb_orientation,
shrink=plot_properties["cbar_shrink"],
pad=plot_properties["cbar_pad"],
ticks=[min, max],
extend=extend,
)
if cb_orientation == "horizontal":
cb.ax.xaxis.set_label_text(unit, fontsize=fontsize_defaults["cbar_label"])
cb.ax.tick_params(axis="x", labelsize=fontsize_defaults["cbar_tick_label"])
cb.ax.xaxis.labelpad = plot_properties["cbar_label_pad"]
if cb_orientation == "vertical":
cb.ax.yaxis.set_label_text(unit, fontsize=fontsize_defaults["cbar_label"])
cb.ax.tick_params(axis="y", labelsize=fontsize_defaults["cbar_tick_label"])
cb.ax.yaxis.labelpad = plot_properties["cbar_label_pad"]
# workaround for issue with viewers, see colorbar docstring
cb.solids.set_edgecolor("face")
ax.set_xlabel(xlabel, fontsize=fontsize_defaults["xlabel"])
ax.set_ylabel(ylabel, fontsize=fontsize_defaults["ylabel"])
plt.draw()
# except:
# pass
return ret
def newprojplot(theta, phi, fmt=None, **kwargs):
"""newprojplot is a wrapper around :func:`matplotlib.Axes.plot` to support
colatitude theta and longitude phi and take into account the longitude convention
(see the `flip` keyword of :func:`projview`)
You can call this function as::
newprojplot(theta, phi) # plot a line going through points at coord (theta, phi)
newprojplot(theta, phi, 'bo') # plot 'o' in blue at coord (theta, phi)
Parameters
----------
theta, phi : float, array-like
Coordinates of point to plot in radians.
fmt : str
A format string (see :func:`matplotlib.Axes.plot` for details)
Notes
-----
Other keywords are passed to :func:`matplotlib.Axes.plot`.
"""
import matplotlib.pyplot as plt
ax = plt.gca()
flip = getattr(ax, "healpy_flip", "astro")
longitude, latitude = lonlat(theta, phi)
if flip == "astro":
longitude = longitude * -1
if fmt is None:
ret = plt.plot(longitude, latitude, **kwargs)
else:
ret = plt.plot(longitude, latitude, fmt, **kwargs)
return ret
| gpl-2.0 | 7,630,636,766,318,857,000 | 33.753968 | 141 | 0.594028 | false |
indykish/heroku-buildpack-python | vendor/distribute-0.6.36/setuptools/command/setopt.py | 167 | 5053 | import distutils, os
from setuptools import Command
from distutils.util import convert_path
from distutils import log
from distutils.errors import *
__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
def config_file(kind="local"):
"""Get the filename of the distutils, local, global, or per-user config
`kind` must be one of "local", "global", or "user"
"""
if kind=='local':
return 'setup.cfg'
if kind=='global':
return os.path.join(
os.path.dirname(distutils.__file__),'distutils.cfg'
)
if kind=='user':
dot = os.name=='posix' and '.' or ''
return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
raise ValueError(
"config_file() type must be 'local', 'global', or 'user'", kind
)
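# Illustrative results (actual paths depend on the installation):
# config_file('local') -> 'setup.cfg'; config_file('user') -> the expanded
# per-user '~/.pydistutils.cfg' (no leading dot on non-POSIX systems).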
def edit_config(filename, settings, dry_run=False):
"""Edit a configuration file to include `settings`
`settings` is a dictionary of dictionaries or ``None`` values, keyed by
command/section name. A ``None`` value means to delete the entire section,
while a dictionary lists settings to be changed or deleted in that section.
A setting of ``None`` means to delete that setting.
"""
from ConfigParser import RawConfigParser
log.debug("Reading configuration from %s", filename)
opts = RawConfigParser()
opts.read([filename])
for section, options in settings.items():
if options is None:
log.info("Deleting section [%s] from %s", section, filename)
opts.remove_section(section)
else:
if not opts.has_section(section):
log.debug("Adding new section [%s] to %s", section, filename)
opts.add_section(section)
for option,value in options.items():
if value is None:
log.debug("Deleting %s.%s from %s",
section, option, filename
)
opts.remove_option(section,option)
if not opts.options(section):
log.info("Deleting empty [%s] section from %s",
section, filename)
opts.remove_section(section)
else:
log.debug(
"Setting %s.%s to %r in %s",
section, option, value, filename
)
opts.set(section,option,value)
log.info("Writing %s", filename)
if not dry_run:
f = open(filename,'w'); opts.write(f); f.close()
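# A hedged usage sketch of edit_config() (section and option names are made up):
#
#     edit_config('setup.cfg', {
#         'easy_install': {'index_url': 'https://example.invalid/simple',
#                          'find_links': None},  # delete just this option
#         'bdist_rpm': None,                     # delete the whole section
#     })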
class option_base(Command):
"""Abstract base class for commands that mess with config files"""
user_options = [
('global-config', 'g',
"save options to the site-wide distutils.cfg file"),
('user-config', 'u',
"save options to the current user's pydistutils.cfg file"),
('filename=', 'f',
"configuration file to use (default=setup.cfg)"),
]
boolean_options = [
'global-config', 'user-config',
]
def initialize_options(self):
self.global_config = None
self.user_config = None
self.filename = None
def finalize_options(self):
filenames = []
if self.global_config:
filenames.append(config_file('global'))
if self.user_config:
filenames.append(config_file('user'))
if self.filename is not None:
filenames.append(self.filename)
if not filenames:
filenames.append(config_file('local'))
if len(filenames)>1:
raise DistutilsOptionError(
"Must specify only one configuration file option",
filenames
)
self.filename, = filenames
class setopt(option_base):
"""Save command-line options to a file"""
description = "set an option in setup.cfg or another config file"
user_options = [
('command=', 'c', 'command to set an option for'),
('option=', 'o', 'option to set'),
('set-value=', 's', 'value of the option'),
('remove', 'r', 'remove (unset) the value'),
] + option_base.user_options
boolean_options = option_base.boolean_options + ['remove']
def initialize_options(self):
option_base.initialize_options(self)
self.command = None
self.option = None
self.set_value = None
self.remove = None
def finalize_options(self):
option_base.finalize_options(self)
if self.command is None or self.option is None:
raise DistutilsOptionError("Must specify --command *and* --option")
if self.set_value is None and not self.remove:
raise DistutilsOptionError("Must specify --set-value or --remove")
def run(self):
edit_config(
self.filename, {
self.command: {self.option.replace('-','_'):self.set_value}
},
self.dry_run
)
| mit | -2,111,368,828,108,263,700 | 29.810976 | 79 | 0.56323 | false |
ntt-pf-lab/backup_openstackx | openstackx/auth/tokens.py | 1 | 1872 | from openstackx.api import base
class Tenant(base.Resource):
def __repr__(self):
return "<Tenant %s>" % self._info
@property
def id(self):
return self._info['id']
@property
def description(self):
return self._info['description']
@property
def enabled(self):
return self._info['enabled']
class Token(base.Resource):
def __repr__(self):
return "<Token %s>" % self._info
@property
def id(self):
return self._info['token']['id']
@property
def username(self):
try:
return self._info['user']['username']
except:
return "?"
@property
def tenant_id(self):
try:
return self._info['user']['tenantId']
except:
return "?"
def delete(self):
self.manager.delete(self)
class TokenManager(base.ManagerWithFind):
resource_class = Token
def create(self, tenant, username, password):
params = {"auth": {"passwordCredentials": {"username": username,
"password": password},
"tenantId": tenant}}
return self._create('tokens', params, "access")
def create_scoped_with_token(self, tenant, token):
params = {"auth": {"tenantId": tenant, "tokenId": token}}
return self._create('tokens', params, "access")
class TenantManager(base.ManagerWithFind):
resource_class = Tenant
def for_token(self, token):
# FIXME(ja): now that tenants & tokens are separate managers we shouldn't
# need the uglyness of setting token this way?
orig = self.api.connection.auth_token
self.api.connection.auth_token = token
rval = self._list('tenants', "tenants")
self.api.connection.auth_token = orig
return rval
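# Hedged usage sketch (the `api` client object below is hypothetical):
#
#     token = api.tokens.create(tenant='demo', username='admin', password='secret')
#     tenants = api.tenants.for_token(token.id)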
| bsd-3-clause | -8,851,234,189,612,941,000 | 24.643836 | 81 | 0.568376 | false |
arnavd96/Cinemiezer | myvenv/lib/python3.4/site-packages/django/contrib/contenttypes/models.py | 49 | 6717 | from __future__ import unicode_literals
from django.apps import apps
from django.db import models
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
class ContentTypeManager(models.Manager):
use_in_migrations = True
def __init__(self, *args, **kwargs):
super(ContentTypeManager, self).__init__(*args, **kwargs)
# Cache shared by all the get_for_* methods to speed up
# ContentType retrieval.
self._cache = {}
def get_by_natural_key(self, app_label, model):
try:
ct = self._cache[self.db][(app_label, model)]
except KeyError:
ct = self.get(app_label=app_label, model=model)
self._add_to_cache(self.db, ct)
return ct
def _get_opts(self, model, for_concrete_model):
if for_concrete_model:
model = model._meta.concrete_model
return model._meta
def _get_from_cache(self, opts):
key = (opts.app_label, opts.model_name)
return self._cache[self.db][key]
def get_for_model(self, model, for_concrete_model=True):
"""
Returns the ContentType object for a given model, creating the
ContentType if necessary. Lookups are cached so that subsequent lookups
for the same model don't hit the database.
"""
opts = self._get_opts(model, for_concrete_model)
try:
return self._get_from_cache(opts)
except KeyError:
pass
# The ContentType entry was not found in the cache, therefore we
# proceed to load or create it.
try:
# Start with get() and not get_or_create() in order to use
# the db_for_read (see #20401).
ct = self.get(app_label=opts.app_label, model=opts.model_name)
except self.model.DoesNotExist:
# Not found in the database; we proceed to create it. This time
# use get_or_create to take care of any race conditions.
ct, created = self.get_or_create(
app_label=opts.app_label,
model=opts.model_name,
)
self._add_to_cache(self.db, ct)
return ct
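    # Illustrative use (the model name is an example): get_for_model(User)
    # returns the cached ContentType row for auth.User, creating it on first
    # access and caching it for later lookups.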
def get_for_models(self, *models, **kwargs):
"""
Given *models, returns a dictionary mapping {model: content_type}.
"""
for_concrete_models = kwargs.pop('for_concrete_models', True)
# Final results
results = {}
# models that aren't already in the cache
needed_app_labels = set()
needed_models = set()
needed_opts = set()
for model in models:
opts = self._get_opts(model, for_concrete_models)
try:
ct = self._get_from_cache(opts)
except KeyError:
needed_app_labels.add(opts.app_label)
needed_models.add(opts.model_name)
needed_opts.add(opts)
else:
results[model] = ct
if needed_opts:
cts = self.filter(
app_label__in=needed_app_labels,
model__in=needed_models
)
for ct in cts:
model = ct.model_class()
if model._meta in needed_opts:
results[model] = ct
needed_opts.remove(model._meta)
self._add_to_cache(self.db, ct)
for opts in needed_opts:
# These weren't in the cache, or the DB, create them.
ct = self.create(
app_label=opts.app_label,
model=opts.model_name,
)
self._add_to_cache(self.db, ct)
results[ct.model_class()] = ct
return results
def get_for_id(self, id):
"""
Lookup a ContentType by ID. Uses the same shared cache as get_for_model
(though ContentTypes are obviously not created on-the-fly by get_by_id).
"""
try:
ct = self._cache[self.db][id]
except KeyError:
# This could raise a DoesNotExist; that's correct behavior and will
# make sure that only correct ctypes get stored in the cache dict.
ct = self.get(pk=id)
self._add_to_cache(self.db, ct)
return ct
def clear_cache(self):
"""
Clear out the content-type cache. This needs to happen during database
flushes to prevent caching of "stale" content type IDs (see
django.contrib.contenttypes.management.update_contenttypes for where
this gets called).
"""
self._cache.clear()
def _add_to_cache(self, using, ct):
"""Insert a ContentType into the cache."""
# Note it's possible for ContentType objects to be stale; model_class() will return None.
# Hence, there is no reliance on model._meta.app_label here, just using the model fields instead.
key = (ct.app_label, ct.model)
self._cache.setdefault(using, {})[key] = ct
self._cache.setdefault(using, {})[ct.id] = ct
@python_2_unicode_compatible
class ContentType(models.Model):
app_label = models.CharField(max_length=100)
model = models.CharField(_('python model class name'), max_length=100)
objects = ContentTypeManager()
class Meta:
verbose_name = _('content type')
verbose_name_plural = _('content types')
db_table = 'django_content_type'
unique_together = (('app_label', 'model'),)
def __str__(self):
return self.name
@property
def name(self):
model = self.model_class()
if not model:
return self.model
return force_text(model._meta.verbose_name)
def model_class(self):
"Returns the Python model class for this type of content."
try:
return apps.get_model(self.app_label, self.model)
except LookupError:
return None
def get_object_for_this_type(self, **kwargs):
"""
Returns an object of this type for the keyword arguments given.
Basically, this is a proxy around this object_type's get_object() model
method. The ObjectNotExist exception, if thrown, will not be caught,
so code that calls this method should catch it.
"""
return self.model_class()._base_manager.using(self._state.db).get(**kwargs)
def get_all_objects_for_this_type(self, **kwargs):
"""
Returns all objects of this type for the keyword arguments given.
"""
return self.model_class()._base_manager.using(self._state.db).filter(**kwargs)
def natural_key(self):
return (self.app_label, self.model)
| mit | -1,299,013,781,508,792,600 | 35.906593 | 105 | 0.584934 | false |
Princeton-CDH/derrida-django | derrida/people/migrations/0001_initial.py | 1 | 3871 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-06 19:54
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('places', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Person',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('notes', models.TextField(blank=True)),
('start_year', models.PositiveIntegerField(blank=True, null=True)),
('end_year', models.PositiveIntegerField(blank=True, null=True)),
('authorized_name', models.CharField(max_length=255)),
('viaf_id', models.URLField(blank=True, null=True)),
('sort_name', models.CharField(blank=True, max_length=255)),
('family_group', models.CharField(blank=True, max_length=255)),
],
options={
'ordering': ['authorized_name'],
'verbose_name_plural': 'People',
},
),
migrations.CreateModel(
name='Relationship',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('notes', models.TextField(blank=True)),
('start_year', models.PositiveIntegerField(blank=True, null=True)),
('end_year', models.PositiveIntegerField(blank=True, null=True)),
('from_person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='from_relationships', to='people.Person')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='RelationshipType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True)),
('notes', models.TextField(blank=True)),
('is_symmetric', models.BooleanField(default=False)),
],
options={
'ordering': ['name'],
'abstract': False,
},
),
migrations.CreateModel(
name='Residence',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('notes', models.TextField(blank=True)),
('start_year', models.PositiveIntegerField(blank=True, null=True)),
('end_year', models.PositiveIntegerField(blank=True, null=True)),
('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='people.Person')),
('place', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='places.Place')),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='relationship',
name='relationship_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='people.RelationshipType'),
),
migrations.AddField(
model_name='relationship',
name='to_person',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='to_relationships', to='people.Person'),
),
migrations.AddField(
model_name='person',
name='relationships',
field=models.ManyToManyField(related_name='related_to', through='people.Relationship', to='people.Person'),
),
]
| apache-2.0 | -1,408,887,249,575,479,000 | 42.011111 | 151 | 0.55567 | false |
lz1988/company-site | django/conf/locale/ru/formats.py | 107 | 1134 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j E Y г.'
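# e.g. with DATE_FORMAT above, date(2006, 10, 25) should render as
# '25 октября 2006 г.' ('E' is the locale's alternative month name).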
TIME_FORMAT = 'G:i:s'
DATETIME_FORMAT = 'j E Y г. G:i:s'
YEAR_MONTH_FORMAT = 'F Y г.'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d.%m.%Y', # '25.10.2006'
'%d.%m.%y', # '25.10.06'
)
DATETIME_INPUT_FORMATS = (
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
'%d.%m.%y %H:%M:%S', # '25.10.06 14:30:59'
'%d.%m.%y %H:%M', # '25.10.06 14:30'
'%d.%m.%y', # '25.10.06'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
| bsd-3-clause | 5,382,774,954,390,505,000 | 33.272727 | 77 | 0.597701 | false |
earonesty/bitcoin | test/functional/p2p-segwit.py | 5 | 90114 | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test segwit transactions and blocks on P2P network."""
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment, get_witness_script, WITNESS_COMMITMENT_HEADER
from test_framework.key import CECKey, CPubKey
import time
import random
from binascii import hexlify
# The versionbit bit used to signal activation of SegWit
VB_WITNESS_BIT = 1
VB_PERIOD = 144
VB_ACTIVATION_THRESHOLD = 108
VB_TOP_BITS = 0x20000000
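# Under standard BIP9 rules, these regtest parameters mean that versionbits
# state transitions happen on 144-block boundaries and that at least 108 of
# the 144 blocks in a period must signal bit 1 for segwit to lock in.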
MAX_SIGOP_COST = 80000
# Calculate the virtual size of a witness block:
# (base + witness/4)
def get_virtual_size(witness_block):
base_size = len(witness_block.serialize())
total_size = len(witness_block.serialize(with_witness=True))
# the "+3" is so we round up
vsize = int((3*base_size + total_size + 3)/4)
return vsize
class TestNode(NodeConnCB):
def __init__(self):
super().__init__()
self.getdataset = set()
def on_getdata(self, conn, message):
for inv in message.inv:
self.getdataset.add(inv.hash)
def announce_tx_and_wait_for_getdata(self, tx, timeout=60):
with mininode_lock:
self.last_message.pop("getdata", None)
self.send_message(msg_inv(inv=[CInv(1, tx.sha256)]))
self.wait_for_getdata(timeout)
def announce_block_and_wait_for_getdata(self, block, use_header, timeout=60):
with mininode_lock:
self.last_message.pop("getdata", None)
self.last_message.pop("getheaders", None)
msg = msg_headers()
msg.headers = [ CBlockHeader(block) ]
if use_header:
self.send_message(msg)
else:
self.send_message(msg_inv(inv=[CInv(2, block.sha256)]))
self.wait_for_getheaders()
self.send_message(msg)
self.wait_for_getdata()
def request_block(self, blockhash, inv_type, timeout=60):
with mininode_lock:
self.last_message.pop("block", None)
self.send_message(msg_getdata(inv=[CInv(inv_type, blockhash)]))
self.wait_for_block(blockhash, timeout)
return self.last_message["block"].block
def test_transaction_acceptance(self, tx, with_witness, accepted, reason=None):
tx_message = msg_tx(tx)
if with_witness:
tx_message = msg_witness_tx(tx)
self.send_message(tx_message)
self.sync_with_ping()
assert_equal(tx.hash in self.connection.rpc.getrawmempool(), accepted)
        if reason is not None and not accepted:
# Check the rejection reason as well.
with mininode_lock:
assert_equal(self.last_message["reject"].reason, reason)
# Test whether a witness block had the correct effect on the tip
def test_witness_block(self, block, accepted, with_witness=True):
if with_witness:
self.send_message(msg_witness_block(block))
else:
self.send_message(msg_block(block))
self.sync_with_ping()
assert_equal(self.connection.rpc.getbestblockhash() == block.hash, accepted)
# Used to keep track of anyone-can-spend outputs that we can use in the tests
class UTXO(object):
def __init__(self, sha256, n, nValue):
self.sha256 = sha256
self.n = n
self.nValue = nValue
# Helper for getting the script associated with a P2PKH
def GetP2PKHScript(pubkeyhash):
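    # Standard P2PKH template:
    # OP_DUP OP_HASH160 <20-byte pubkey hash> OP_EQUALVERIFY OP_CHECKSIG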
return CScript([CScriptOp(OP_DUP), CScriptOp(OP_HASH160), pubkeyhash, CScriptOp(OP_EQUALVERIFY), CScriptOp(OP_CHECKSIG)])
# Add signature for a P2PK witness program.
def sign_P2PK_witness_input(script, txTo, inIdx, hashtype, value, key):
tx_hash = SegwitVersion1SignatureHash(script, txTo, inIdx, hashtype, value)
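    # SegwitVersion1SignatureHash computes the BIP143-style digest, which
    # commits to the amount being spent; the stack below follows the P2WSH
    # convention of [signature, witnessScript].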
signature = key.sign(tx_hash) + chr(hashtype).encode('latin-1')
txTo.wit.vtxinwit[inIdx].scriptWitness.stack = [signature, script]
txTo.rehash()
class SegWitTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [["-whitelist=127.0.0.1"], ["-whitelist=127.0.0.1", "-acceptnonstdtxn=0"], ["-whitelist=127.0.0.1", "-bip9params=segwit:0:0"]]
def setup_network(self):
self.setup_nodes()
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[0], 2)
self.sync_all()
''' Helpers '''
# Build a block on top of node0's tip.
def build_next_block(self, nVersion=4):
tip = self.nodes[0].getbestblockhash()
height = self.nodes[0].getblockcount() + 1
block_time = self.nodes[0].getblockheader(tip)["mediantime"] + 1
block = create_block(int(tip, 16), create_coinbase(height), block_time)
block.nVersion = nVersion
block.rehash()
return block
# Adds list of transactions to block, adds witness commitment, then solves.
def update_witness_block_with_transactions(self, block, tx_list, nonce=0):
block.vtx.extend(tx_list)
add_witness_commitment(block, nonce)
block.solve()
return
''' Individual tests '''
def test_witness_services(self):
self.log.info("Verifying NODE_WITNESS service bit")
assert((self.test_node.connection.nServices & NODE_WITNESS) != 0)
# See if sending a regular transaction works, and create a utxo
# to use in later tests.
def test_non_witness_transaction(self):
# Mine a block with an anyone-can-spend coinbase,
# let it mature, then try to spend it.
self.log.info("Testing non-witness transaction")
block = self.build_next_block(nVersion=1)
block.solve()
self.test_node.send_message(msg_block(block))
self.test_node.sync_with_ping() # make sure the block was processed
txid = block.vtx[0].sha256
self.nodes[0].generate(99) # let the block mature
# Create a transaction that spends the coinbase
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(txid, 0), b""))
tx.vout.append(CTxOut(49*100000000, CScript([OP_TRUE])))
tx.calc_sha256()
# Check that serializing it with or without witness is the same
# This is a sanity check of our testing framework.
assert_equal(msg_tx(tx).serialize(), msg_witness_tx(tx).serialize())
self.test_node.send_message(msg_witness_tx(tx))
self.test_node.sync_with_ping() # make sure the tx was processed
assert(tx.hash in self.nodes[0].getrawmempool())
# Save this transaction for later
self.utxo.append(UTXO(tx.sha256, 0, 49*100000000))
self.nodes[0].generate(1)
# Verify that blocks with witnesses are rejected before activation.
def test_unnecessary_witness_before_segwit_activation(self):
self.log.info("Testing behavior of unnecessary witnesses")
# For now, rely on earlier tests to have created at least one utxo for
# us to use
assert(len(self.utxo) > 0)
assert(get_bip9_status(self.nodes[0], 'segwit')['status'] != 'active')
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE])))
tx.wit.vtxinwit.append(CTxInWitness())
tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)])]
# Verify the hash with witness differs from the txid
# (otherwise our testing framework must be broken!)
tx.rehash()
assert(tx.sha256 != tx.calc_sha256(with_witness=True))
# Construct a segwit-signaling block that includes the transaction.
block = self.build_next_block(nVersion=(VB_TOP_BITS|(1 << VB_WITNESS_BIT)))
self.update_witness_block_with_transactions(block, [tx])
# Sending witness data before activation is not allowed (anti-spam
# rule).
self.test_node.test_witness_block(block, accepted=False)
# TODO: fix synchronization so we can test reject reason
        # Right now, bitcoind defers sending reject messages for blocks to a
        # later point, which makes synchronizing on the reject here difficult.
#assert_equal(self.test_node.last_message["reject"].reason, "unexpected-witness")
# But it should not be permanently marked bad...
# Resend without witness information.
self.test_node.send_message(msg_block(block))
self.test_node.sync_with_ping()
assert_equal(self.nodes[0].getbestblockhash(), block.hash)
sync_blocks(self.nodes)
# Create a p2sh output -- this is so we can pass the standardness
# rules (an anyone-can-spend OP_TRUE would be rejected, if not wrapped
# in P2SH).
p2sh_program = CScript([OP_TRUE])
p2sh_pubkey = hash160(p2sh_program)
scriptPubKey = CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL])
# Now check that unnecessary witnesses can't be used to blind a node
# to a transaction, eg by violating standardness checks.
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, scriptPubKey))
tx2.rehash()
self.test_node.test_transaction_acceptance(tx2, False, True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
# We'll add an unnecessary witness to this transaction that would cause
# it to be non-standard, to test that violating policy with a witness before
# segwit activation doesn't blind a node to a transaction. Transactions
# rejected for having a witness before segwit activation shouldn't be added
# to the rejection cache.
tx3 = CTransaction()
tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), CScript([p2sh_program])))
tx3.vout.append(CTxOut(tx2.vout[0].nValue-1000, scriptPubKey))
tx3.wit.vtxinwit.append(CTxInWitness())
tx3.wit.vtxinwit[0].scriptWitness.stack = [b'a'*400000]
tx3.rehash()
# Note that this should be rejected for the premature witness reason,
# rather than a policy check, since segwit hasn't activated yet.
self.std_node.test_transaction_acceptance(tx3, True, False, b'no-witness-yet')
# If we send without witness, it should be accepted.
self.std_node.test_transaction_acceptance(tx3, False, True)
# Now create a new anyone-can-spend utxo for the next test.
tx4 = CTransaction()
tx4.vin.append(CTxIn(COutPoint(tx3.sha256, 0), CScript([p2sh_program])))
tx4.vout.append(CTxOut(tx3.vout[0].nValue-1000, CScript([OP_TRUE])))
tx4.rehash()
self.test_node.test_transaction_acceptance(tx3, False, True)
self.test_node.test_transaction_acceptance(tx4, False, True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
# Update our utxo list; we spent the first entry.
self.utxo.pop(0)
self.utxo.append(UTXO(tx4.sha256, 0, tx4.vout[0].nValue))
# Mine enough blocks for segwit's vb state to be 'started'.
def advance_to_segwit_started(self):
height = self.nodes[0].getblockcount()
# Will need to rewrite the tests here if we are past the first period
assert(height < VB_PERIOD - 1)
# Genesis block is 'defined'.
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'defined')
# Advance to end of period, status should now be 'started'
self.nodes[0].generate(VB_PERIOD-height-1)
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started')
# Mine enough blocks to lock in segwit, but don't activate.
# TODO: we could verify that lockin only happens at the right threshold of
# signalling blocks, rather than just at the right period boundary.
def advance_to_segwit_lockin(self):
height = self.nodes[0].getblockcount()
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started')
# Advance to end of period, and verify lock-in happens at the end
self.nodes[0].generate(VB_PERIOD-1)
height = self.nodes[0].getblockcount()
assert((height % VB_PERIOD) == VB_PERIOD - 2)
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started')
self.nodes[0].generate(1)
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in')
# Mine enough blocks to activate segwit.
# TODO: we could verify that activation only happens at the right threshold
# of signalling blocks, rather than just at the right period boundary.
def advance_to_segwit_active(self):
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in')
height = self.nodes[0].getblockcount()
self.nodes[0].generate(VB_PERIOD - (height%VB_PERIOD) - 2)
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in')
self.nodes[0].generate(1)
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'active')
# This test can only be run after segwit has activated
def test_witness_commitments(self):
self.log.info("Testing witness commitments")
# First try a correct witness commitment.
block = self.build_next_block()
add_witness_commitment(block)
block.solve()
# Test the test -- witness serialization should be different
assert(msg_witness_block(block).serialize() != msg_block(block).serialize())
# This empty block should be valid.
self.test_node.test_witness_block(block, accepted=True)
# Try to tweak the nonce
block_2 = self.build_next_block()
add_witness_commitment(block_2, nonce=28)
block_2.solve()
# The commitment should have changed!
assert(block_2.vtx[0].vout[-1] != block.vtx[0].vout[-1])
# This should also be valid.
self.test_node.test_witness_block(block_2, accepted=True)
# Now test commitments with actual transactions
assert (len(self.utxo) > 0)
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
# Let's construct a witness program
witness_program = CScript([OP_TRUE])
witness_hash = sha256(witness_program)
scriptPubKey = CScript([OP_0, witness_hash])
tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
tx.rehash()
# tx2 will spend tx1, and send back to a regular anyone-can-spend address
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, witness_program))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
tx2.rehash()
block_3 = self.build_next_block()
self.update_witness_block_with_transactions(block_3, [tx, tx2], nonce=1)
# Add an extra OP_RETURN output that matches the witness commitment template,
# even though it has extra data after the incorrect commitment.
# This block should fail.
block_3.vtx[0].vout.append(CTxOut(0, CScript([OP_RETURN, WITNESS_COMMITMENT_HEADER + ser_uint256(2), 10])))
block_3.vtx[0].rehash()
block_3.hashMerkleRoot = block_3.calc_merkle_root()
block_3.rehash()
block_3.solve()
self.test_node.test_witness_block(block_3, accepted=False)
# Add a different commitment with different nonce, but in the
# right location, and with some funds burned(!).
# This should succeed (nValue shouldn't affect finding the
# witness commitment).
add_witness_commitment(block_3, nonce=0)
block_3.vtx[0].vout[0].nValue -= 1
block_3.vtx[0].vout[-1].nValue += 1
block_3.vtx[0].rehash()
block_3.hashMerkleRoot = block_3.calc_merkle_root()
block_3.rehash()
        assert(len(block_3.vtx[0].vout) == 4) # the reward output plus 3 OP_RETURNs
block_3.solve()
self.test_node.test_witness_block(block_3, accepted=True)
# Finally test that a block with no witness transactions can
# omit the commitment.
block_4 = self.build_next_block()
tx3 = CTransaction()
tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
tx3.vout.append(CTxOut(tx.vout[0].nValue-1000, witness_program))
tx3.rehash()
block_4.vtx.append(tx3)
block_4.hashMerkleRoot = block_4.calc_merkle_root()
block_4.solve()
self.test_node.test_witness_block(block_4, with_witness=False, accepted=True)
# Update available utxo's for use in later test.
self.utxo.pop(0)
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
def test_block_malleability(self):
self.log.info("Testing witness block malleability")
# Make sure that a block that has too big a virtual size
# because of a too-large coinbase witness is not permanently
# marked bad.
block = self.build_next_block()
add_witness_commitment(block)
block.solve()
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.append(b'a'*5000000)
assert(get_virtual_size(block) > MAX_BLOCK_BASE_SIZE)
# We can't send over the p2p network, because this is too big to relay
# TODO: repeat this test with a block that can be relayed
self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
assert(self.nodes[0].getbestblockhash() != block.hash)
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.pop()
assert(get_virtual_size(block) < MAX_BLOCK_BASE_SIZE)
self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
assert(self.nodes[0].getbestblockhash() == block.hash)
# Now make sure that malleating the witness nonce doesn't
# result in a block permanently marked bad.
block = self.build_next_block()
add_witness_commitment(block)
block.solve()
# Change the nonce -- should not cause the block to be permanently
# failed
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(1) ]
self.test_node.test_witness_block(block, accepted=False)
# Changing the witness nonce doesn't change the block hash
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(0) ]
self.test_node.test_witness_block(block, accepted=True)
def test_witness_block_size(self):
self.log.info("Testing witness block size limit")
# TODO: Test that non-witness carrying blocks can't exceed 1MB
# Skipping this test for now; this is covered in p2p-fullblocktest.py
# Test that witness-bearing blocks are limited at ceil(base + wit/4) <= 1MB.
block = self.build_next_block()
assert(len(self.utxo) > 0)
# Create a P2WSH transaction.
# The witness program will be a bunch of OP_2DROP's, followed by OP_TRUE.
# This should give us plenty of room to tweak the spending tx's
# virtual size.
NUM_DROPS = 200 # 201 max ops per script!
NUM_OUTPUTS = 50
witness_program = CScript([OP_2DROP]*NUM_DROPS + [OP_TRUE])
witness_hash = uint256_from_str(sha256(witness_program))
scriptPubKey = CScript([OP_0, ser_uint256(witness_hash)])
prevout = COutPoint(self.utxo[0].sha256, self.utxo[0].n)
value = self.utxo[0].nValue
parent_tx = CTransaction()
parent_tx.vin.append(CTxIn(prevout, b""))
child_value = int(value/NUM_OUTPUTS)
for i in range(NUM_OUTPUTS):
parent_tx.vout.append(CTxOut(child_value, scriptPubKey))
parent_tx.vout[0].nValue -= 50000
assert(parent_tx.vout[0].nValue > 0)
parent_tx.rehash()
child_tx = CTransaction()
for i in range(NUM_OUTPUTS):
child_tx.vin.append(CTxIn(COutPoint(parent_tx.sha256, i), b""))
child_tx.vout = [CTxOut(value - 100000, CScript([OP_TRUE]))]
for i in range(NUM_OUTPUTS):
child_tx.wit.vtxinwit.append(CTxInWitness())
child_tx.wit.vtxinwit[-1].scriptWitness.stack = [b'a'*195]*(2*NUM_DROPS) + [witness_program]
child_tx.rehash()
self.update_witness_block_with_transactions(block, [parent_tx, child_tx])
vsize = get_virtual_size(block)
additional_bytes = (MAX_BLOCK_BASE_SIZE - vsize)*4
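        # Each witness byte contributes one weight unit (1/4 vbyte), so we need
        # 4*(MAX_BLOCK_BASE_SIZE - vsize) extra witness bytes to reach the
        # limit; the "+1" below pushes the block one weight unit past it.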
i = 0
while additional_bytes > 0:
# Add some more bytes to each input until we hit MAX_BLOCK_BASE_SIZE+1
extra_bytes = min(additional_bytes+1, 55)
block.vtx[-1].wit.vtxinwit[int(i/(2*NUM_DROPS))].scriptWitness.stack[i%(2*NUM_DROPS)] = b'a'*(195+extra_bytes)
additional_bytes -= extra_bytes
i += 1
block.vtx[0].vout.pop() # Remove old commitment
add_witness_commitment(block)
block.solve()
vsize = get_virtual_size(block)
assert_equal(vsize, MAX_BLOCK_BASE_SIZE + 1)
# Make sure that our test case would exceed the old max-network-message
# limit
assert(len(block.serialize(True)) > 2*1024*1024)
self.test_node.test_witness_block(block, accepted=False)
# Now resize the second transaction to make the block fit.
cur_length = len(block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0])
block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0] = b'a'*(cur_length-1)
block.vtx[0].vout.pop()
add_witness_commitment(block)
block.solve()
assert(get_virtual_size(block) == MAX_BLOCK_BASE_SIZE)
self.test_node.test_witness_block(block, accepted=True)
# Update available utxo's
self.utxo.pop(0)
self.utxo.append(UTXO(block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue))
# submitblock will try to add the nonce automatically, so that mining
# software doesn't need to worry about doing so itself.
def test_submit_block(self):
block = self.build_next_block()
# Try using a custom nonce and then don't supply it.
# This shouldn't possibly work.
add_witness_commitment(block, nonce=1)
block.vtx[0].wit = CTxWitness() # drop the nonce
block.solve()
self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
assert(self.nodes[0].getbestblockhash() != block.hash)
# Now redo commitment with the standard nonce, but let bitcoind fill it in.
add_witness_commitment(block, nonce=0)
block.vtx[0].wit = CTxWitness()
block.solve()
self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
assert_equal(self.nodes[0].getbestblockhash(), block.hash)
# This time, add a tx with non-empty witness, but don't supply
# the commitment.
block_2 = self.build_next_block()
add_witness_commitment(block_2)
block_2.solve()
# Drop commitment and nonce -- submitblock should not fill in.
block_2.vtx[0].vout.pop()
block_2.vtx[0].wit = CTxWitness()
self.nodes[0].submitblock(bytes_to_hex_str(block_2.serialize(True)))
# Tip should not advance!
assert(self.nodes[0].getbestblockhash() != block_2.hash)
# Consensus tests of extra witness data in a transaction.
def test_extra_witness_data(self):
self.log.info("Testing extra witness data in tx")
assert(len(self.utxo) > 0)
block = self.build_next_block()
witness_program = CScript([OP_DROP, OP_TRUE])
witness_hash = sha256(witness_program)
scriptPubKey = CScript([OP_0, witness_hash])
# First try extra witness data on a tx that doesn't require a witness
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue-2000, scriptPubKey))
tx.vout.append(CTxOut(1000, CScript([OP_TRUE]))) # non-witness output
tx.wit.vtxinwit.append(CTxInWitness())
tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([])]
tx.rehash()
self.update_witness_block_with_transactions(block, [tx])
# Extra witness data should not be allowed.
self.test_node.test_witness_block(block, accepted=False)
# Try extra signature data. Ok if we're not spending a witness output.
block.vtx[1].wit.vtxinwit = []
block.vtx[1].vin[0].scriptSig = CScript([OP_0])
block.vtx[1].rehash()
add_witness_commitment(block)
block.solve()
self.test_node.test_witness_block(block, accepted=True)
# Now try extra witness/signature data on an input that DOES require a
# witness
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) # witness output
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 1), b"")) # non-witness
tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE])))
tx2.wit.vtxinwit.extend([CTxInWitness(), CTxInWitness()])
tx2.wit.vtxinwit[0].scriptWitness.stack = [ CScript([CScriptNum(1)]), CScript([CScriptNum(1)]), witness_program ]
tx2.wit.vtxinwit[1].scriptWitness.stack = [ CScript([OP_TRUE]) ]
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx2])
# This has extra witness data, so it should fail.
self.test_node.test_witness_block(block, accepted=False)
# Now get rid of the extra witness, but add extra scriptSig data
tx2.vin[0].scriptSig = CScript([OP_TRUE])
tx2.vin[1].scriptSig = CScript([OP_TRUE])
tx2.wit.vtxinwit[0].scriptWitness.stack.pop(0)
tx2.wit.vtxinwit[1].scriptWitness.stack = []
tx2.rehash()
add_witness_commitment(block)
block.solve()
# This has extra signature data for a witness input, so it should fail.
self.test_node.test_witness_block(block, accepted=False)
# Now get rid of the extra scriptsig on the witness input, and verify
# success (even with extra scriptsig data in the non-witness input)
tx2.vin[0].scriptSig = b""
tx2.rehash()
add_witness_commitment(block)
block.solve()
self.test_node.test_witness_block(block, accepted=True)
# Update utxo for later tests
self.utxo.pop(0)
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
def test_max_witness_push_length(self):
        ''' Should only allow up to 520-byte pushes in the witness stack '''
self.log.info("Testing maximum witness push size")
MAX_SCRIPT_ELEMENT_SIZE = 520
assert(len(self.utxo))
block = self.build_next_block()
witness_program = CScript([OP_DROP, OP_TRUE])
witness_hash = sha256(witness_program)
scriptPubKey = CScript([OP_0, witness_hash])
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
tx.rehash()
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE])))
tx2.wit.vtxinwit.append(CTxInWitness())
# First try a 521-byte stack element
tx2.wit.vtxinwit[0].scriptWitness.stack = [ b'a'*(MAX_SCRIPT_ELEMENT_SIZE+1), witness_program ]
tx2.rehash()
self.update_witness_block_with_transactions(block, [tx, tx2])
self.test_node.test_witness_block(block, accepted=False)
# Now reduce the length of the stack element
tx2.wit.vtxinwit[0].scriptWitness.stack[0] = b'a'*(MAX_SCRIPT_ELEMENT_SIZE)
add_witness_commitment(block)
block.solve()
self.test_node.test_witness_block(block, accepted=True)
# Update the utxo for later tests
self.utxo.pop()
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
def test_max_witness_program_length(self):
# Can create witness outputs that are long, but can't be greater than
# 10k bytes to successfully spend
self.log.info("Testing maximum witness program length")
assert(len(self.utxo))
MAX_PROGRAM_LENGTH = 10000
        # This program is 19 maximal (520-byte) pushes (9937 bytes including the
        # push opcodes), followed by 64 more opcode bytes.
long_witness_program = CScript([b'a'*520]*19 + [OP_DROP]*63 + [OP_TRUE])
assert(len(long_witness_program) == MAX_PROGRAM_LENGTH+1)
long_witness_hash = sha256(long_witness_program)
long_scriptPubKey = CScript([OP_0, long_witness_hash])
block = self.build_next_block()
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue-1000, long_scriptPubKey))
tx.rehash()
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE])))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a']*44 + [long_witness_program]
tx2.rehash()
self.update_witness_block_with_transactions(block, [tx, tx2])
self.test_node.test_witness_block(block, accepted=False)
# Try again with one less byte in the witness program
witness_program = CScript([b'a'*520]*19 + [OP_DROP]*62 + [OP_TRUE])
assert(len(witness_program) == MAX_PROGRAM_LENGTH)
witness_hash = sha256(witness_program)
scriptPubKey = CScript([OP_0, witness_hash])
tx.vout[0] = CTxOut(tx.vout[0].nValue, scriptPubKey)
tx.rehash()
tx2.vin[0].prevout.hash = tx.sha256
tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a']*43 + [witness_program]
tx2.rehash()
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx, tx2])
self.test_node.test_witness_block(block, accepted=True)
self.utxo.pop()
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
def test_witness_input_length(self):
        ''' Ensure that the vin length matches the vtxinwit length '''
self.log.info("Testing witness input length")
assert(len(self.utxo))
witness_program = CScript([OP_DROP, OP_TRUE])
witness_hash = sha256(witness_program)
scriptPubKey = CScript([OP_0, witness_hash])
# Create a transaction that splits our utxo into many outputs
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
nValue = self.utxo[0].nValue
for i in range(10):
tx.vout.append(CTxOut(int(nValue/10), scriptPubKey))
tx.vout[0].nValue -= 1000
assert(tx.vout[0].nValue >= 0)
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
self.test_node.test_witness_block(block, accepted=True)
# Try various ways to spend tx that should all break.
# This "broken" transaction serializer will not normalize
# the length of vtxinwit.
class BrokenCTransaction(CTransaction):
def serialize_with_witness(self):
flags = 0
if not self.wit.is_null():
flags |= 1
r = b""
r += struct.pack("<i", self.nVersion)
if flags:
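                    # ser_vector(dummy) emits the 0x00 BIP144 marker byte;
                    # the 0x01 flag byte follows to signal witness data.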
dummy = []
r += ser_vector(dummy)
r += struct.pack("<B", flags)
r += ser_vector(self.vin)
r += ser_vector(self.vout)
if flags & 1:
r += self.wit.serialize()
r += struct.pack("<I", self.nLockTime)
return r
tx2 = BrokenCTransaction()
for i in range(10):
tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b""))
tx2.vout.append(CTxOut(nValue-3000, CScript([OP_TRUE])))
# First try using a too long vtxinwit
for i in range(11):
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[i].scriptWitness.stack = [b'a', witness_program]
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx2])
self.test_node.test_witness_block(block, accepted=False)
# Now try using a too short vtxinwit
tx2.wit.vtxinwit.pop()
tx2.wit.vtxinwit.pop()
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx2])
self.test_node.test_witness_block(block, accepted=False)
# Now make one of the intermediate witnesses be incorrect
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[-1].scriptWitness.stack = [b'a', witness_program]
tx2.wit.vtxinwit[5].scriptWitness.stack = [ witness_program ]
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx2])
self.test_node.test_witness_block(block, accepted=False)
# Fix the broken witness and the block should be accepted.
tx2.wit.vtxinwit[5].scriptWitness.stack = [b'a', witness_program]
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx2])
self.test_node.test_witness_block(block, accepted=True)
self.utxo.pop()
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
def test_witness_tx_relay_before_segwit_activation(self):
self.log.info("Testing relay of witness transactions")
# Generate a transaction that doesn't require a witness, but send it
# with a witness. Should be rejected for premature-witness, but should
# not be added to recently rejected list.
assert(len(self.utxo))
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE])))
tx.wit.vtxinwit.append(CTxInWitness())
tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a' ]
tx.rehash()
tx_hash = tx.sha256
tx_value = tx.vout[0].nValue
# Verify that if a peer doesn't set nServices to include NODE_WITNESS,
# the getdata is just for the non-witness portion.
self.old_node.announce_tx_and_wait_for_getdata(tx)
assert(self.old_node.last_message["getdata"].inv[0].type == 1)
# Since we haven't delivered the tx yet, inv'ing the same tx from
# a witness transaction ought not result in a getdata.
        try:
            self.test_node.announce_tx_and_wait_for_getdata(tx, timeout=2)
        except AssertionError:
            # Expected: the announcement should time out without a getdata.
            pass
        else:
            # Fail outside the try block so this assert isn't swallowed by
            # the except clause above.
            self.log.error("Error: duplicate tx getdata!")
            assert False
        # Delivering this transaction with a witness should fail (no matter
        # whom it's from)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
assert_equal(len(self.nodes[1].getrawmempool()), 0)
self.old_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
# But eliminating the witness should fix it
self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
# Cleanup: mine the first transaction and update utxo
self.nodes[0].generate(1)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.utxo.pop(0)
self.utxo.append(UTXO(tx_hash, 0, tx_value))
# After segwit activates, verify that mempool:
# - rejects transactions with unnecessary/extra witnesses
# - accepts transactions with valid witnesses
# and that witness transactions are relayed to non-upgraded peers.
def test_tx_relay_after_segwit_activation(self):
self.log.info("Testing relay of witness transactions")
# Generate a transaction that doesn't require a witness, but send it
# with a witness. Should be rejected because we can't use a witness
# when spending a non-witness output.
assert(len(self.utxo))
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE])))
tx.wit.vtxinwit.append(CTxInWitness())
tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a' ]
tx.rehash()
tx_hash = tx.sha256
# Verify that unnecessary witnesses are rejected.
self.test_node.announce_tx_and_wait_for_getdata(tx)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
# Verify that removing the witness succeeds.
self.test_node.announce_tx_and_wait_for_getdata(tx)
self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
# Now try to add extra witness data to a valid witness tx.
witness_program = CScript([OP_TRUE])
witness_hash = sha256(witness_program)
scriptPubKey = CScript([OP_0, witness_hash])
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx_hash, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, scriptPubKey))
tx2.rehash()
tx3 = CTransaction()
tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
tx3.wit.vtxinwit.append(CTxInWitness())
        # Add a witness that is too large for IsStandard and check that the tx
        # does not enter the reject filter
p2sh_program = CScript([OP_TRUE])
p2sh_pubkey = hash160(p2sh_program)
witness_program2 = CScript([b'a'*400000])
tx3.vout.append(CTxOut(tx2.vout[0].nValue-1000, CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL])))
tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program2]
tx3.rehash()
# Node will not be blinded to the transaction
self.std_node.announce_tx_and_wait_for_getdata(tx3)
self.std_node.test_transaction_acceptance(tx3, True, False, b'tx-size')
self.std_node.announce_tx_and_wait_for_getdata(tx3)
self.std_node.test_transaction_acceptance(tx3, True, False, b'tx-size')
# Remove witness stuffing, instead add extra witness push on stack
tx3.vout[0] = CTxOut(tx2.vout[0].nValue-1000, CScript([OP_TRUE]))
tx3.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)]), witness_program ]
tx3.rehash()
self.test_node.test_transaction_acceptance(tx2, with_witness=True, accepted=True)
self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False)
# Get rid of the extra witness, and verify acceptance.
tx3.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ]
# Also check that old_node gets a tx announcement, even though this is
# a witness transaction.
self.old_node.wait_for_inv([CInv(1, tx2.sha256)]) # wait until tx2 was inv'ed
self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)
self.old_node.wait_for_inv([CInv(1, tx3.sha256)])
# Test that getrawtransaction returns correct witness information
# hash, size, vsize
raw_tx = self.nodes[0].getrawtransaction(tx3.hash, 1)
assert_equal(int(raw_tx["hash"], 16), tx3.calc_sha256(True))
assert_equal(raw_tx["size"], len(tx3.serialize_with_witness()))
        vsize = (len(tx3.serialize_with_witness()) + 3*len(tx3.serialize_without_witness()) + 3) // 4
assert_equal(raw_tx["vsize"], vsize)
assert_equal(len(raw_tx["vin"][0]["txinwitness"]), 1)
assert_equal(raw_tx["vin"][0]["txinwitness"][0], hexlify(witness_program).decode('ascii'))
assert(vsize != raw_tx["size"])
# Cleanup: mine the transactions and update utxo for next test
self.nodes[0].generate(1)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.utxo.pop(0)
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
# Test that block requests to NODE_WITNESS peer are with MSG_WITNESS_FLAG
# This is true regardless of segwit activation.
# Also test that we don't ask for blocks from unupgraded peers
def test_block_relay(self, segwit_activated):
self.log.info("Testing block relay")
blocktype = 2|MSG_WITNESS_FLAG
# test_node has set NODE_WITNESS, so all getdata requests should be for
# witness blocks.
# Test announcing a block via inv results in a getdata, and that
# announcing a version 4 or random VB block with a header results in a getdata
block1 = self.build_next_block()
block1.solve()
self.test_node.announce_block_and_wait_for_getdata(block1, use_header=False)
assert(self.test_node.last_message["getdata"].inv[0].type == blocktype)
self.test_node.test_witness_block(block1, True)
block2 = self.build_next_block(nVersion=4)
block2.solve()
self.test_node.announce_block_and_wait_for_getdata(block2, use_header=True)
assert(self.test_node.last_message["getdata"].inv[0].type == blocktype)
self.test_node.test_witness_block(block2, True)
block3 = self.build_next_block(nVersion=(VB_TOP_BITS | (1<<15)))
block3.solve()
self.test_node.announce_block_and_wait_for_getdata(block3, use_header=True)
assert(self.test_node.last_message["getdata"].inv[0].type == blocktype)
self.test_node.test_witness_block(block3, True)
# Check that we can getdata for witness blocks or regular blocks,
# and the right thing happens.
        if not segwit_activated:
# Before activation, we should be able to request old blocks with
# or without witness, and they should be the same.
chain_height = self.nodes[0].getblockcount()
# Pick 10 random blocks on main chain, and verify that getdata's
# for MSG_BLOCK, MSG_WITNESS_BLOCK, and rpc getblock() are equal.
all_heights = list(range(chain_height+1))
random.shuffle(all_heights)
all_heights = all_heights[0:10]
for height in all_heights:
block_hash = self.nodes[0].getblockhash(height)
rpc_block = self.nodes[0].getblock(block_hash, False)
block_hash = int(block_hash, 16)
block = self.test_node.request_block(block_hash, 2)
wit_block = self.test_node.request_block(block_hash, 2|MSG_WITNESS_FLAG)
assert_equal(block.serialize(True), wit_block.serialize(True))
assert_equal(block.serialize(), hex_str_to_bytes(rpc_block))
else:
# After activation, witness blocks and non-witness blocks should
# be different. Verify rpc getblock() returns witness blocks, while
# getdata respects the requested type.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [])
# This gives us a witness commitment.
assert(len(block.vtx[0].wit.vtxinwit) == 1)
assert(len(block.vtx[0].wit.vtxinwit[0].scriptWitness.stack) == 1)
self.test_node.test_witness_block(block, accepted=True)
# Now try to retrieve it...
rpc_block = self.nodes[0].getblock(block.hash, False)
non_wit_block = self.test_node.request_block(block.sha256, 2)
wit_block = self.test_node.request_block(block.sha256, 2|MSG_WITNESS_FLAG)
assert_equal(wit_block.serialize(True), hex_str_to_bytes(rpc_block))
assert_equal(wit_block.serialize(False), non_wit_block.serialize())
assert_equal(wit_block.serialize(True), block.serialize(True))
# Test size, vsize, weight
rpc_details = self.nodes[0].getblock(block.hash, True)
assert_equal(rpc_details["size"], len(block.serialize(True)))
assert_equal(rpc_details["strippedsize"], len(block.serialize(False)))
weight = 3*len(block.serialize(False)) + len(block.serialize(True))
assert_equal(rpc_details["weight"], weight)
# Upgraded node should not ask for blocks from unupgraded
block4 = self.build_next_block(nVersion=4)
block4.solve()
self.old_node.getdataset = set()
# Blocks can be requested via direct-fetch (immediately upon processing the announcement)
# or via parallel download (with an indeterminate delay from processing the announcement)
        # so to test that a block is NOT requested, we could guess a time period
        # to sleep for, and then check. We avoid the sleep() by exploiting the
        # fact that transaction getdatas are processed after block getdatas:
        # we announce a transaction as well, and then check whether that
        # particular getdata has been received.
# Since 0.14, inv's will only be responded to with a getheaders, so send a header
# to announce this block.
msg = msg_headers()
msg.headers = [ CBlockHeader(block4) ]
self.old_node.send_message(msg)
self.old_node.announce_tx_and_wait_for_getdata(block4.vtx[0])
assert(block4.sha256 not in self.old_node.getdataset)
# V0 segwit outputs should be standard after activation, but not before.
def test_standardness_v0(self, segwit_activated):
self.log.info("Testing standardness of v0 outputs (%s activation)" % ("after" if segwit_activated else "before"))
assert(len(self.utxo))
witness_program = CScript([OP_TRUE])
witness_hash = sha256(witness_program)
scriptPubKey = CScript([OP_0, witness_hash])
p2sh_pubkey = hash160(witness_program)
p2sh_scriptPubKey = CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL])
# First prepare a p2sh output (so that spending it will pass standardness)
p2sh_tx = CTransaction()
p2sh_tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
p2sh_tx.vout = [CTxOut(self.utxo[0].nValue-1000, p2sh_scriptPubKey)]
p2sh_tx.rehash()
# Mine it on test_node to create the confirmed output.
self.test_node.test_transaction_acceptance(p2sh_tx, with_witness=True, accepted=True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
# Now test standardness of v0 P2WSH outputs.
# Start by creating a transaction with two outputs.
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))]
tx.vout = [CTxOut(p2sh_tx.vout[0].nValue-10000, scriptPubKey)]
tx.vout.append(CTxOut(8000, scriptPubKey)) # Might burn this later
tx.rehash()
self.std_node.test_transaction_acceptance(tx, with_witness=True, accepted=segwit_activated)
# Now create something that looks like a P2PKH output. This won't be spendable.
scriptPubKey = CScript([OP_0, hash160(witness_hash)])
tx2 = CTransaction()
if segwit_activated:
# if tx was accepted, then we spend the second output.
tx2.vin = [CTxIn(COutPoint(tx.sha256, 1), b"")]
tx2.vout = [CTxOut(7000, scriptPubKey)]
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
else:
# if tx wasn't accepted, we just re-spend the p2sh output we started with.
tx2.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))]
tx2.vout = [CTxOut(p2sh_tx.vout[0].nValue-1000, scriptPubKey)]
tx2.rehash()
self.std_node.test_transaction_acceptance(tx2, with_witness=True, accepted=segwit_activated)
# Now update self.utxo for later tests.
tx3 = CTransaction()
if segwit_activated:
# tx and tx2 were both accepted. Don't bother trying to reclaim the
# P2PKH output; just send tx's first output back to an anyone-can-spend.
sync_mempools([self.nodes[0], self.nodes[1]])
tx3.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")]
tx3.vout = [CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE]))]
tx3.wit.vtxinwit.append(CTxInWitness())
tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
tx3.rehash()
self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)
else:
# tx and tx2 didn't go anywhere; just clean up the p2sh_tx output.
tx3.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))]
tx3.vout = [CTxOut(p2sh_tx.vout[0].nValue-1000, witness_program)]
tx3.rehash()
self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
self.utxo.pop(0)
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
assert_equal(len(self.nodes[1].getrawmempool()), 0)
# Verify that future segwit upgraded transactions are non-standard,
# but valid in blocks. Can run this before and after segwit activation.
def test_segwit_versions(self):
self.log.info("Testing standardness/consensus for segwit versions (0-16)")
assert(len(self.utxo))
        NUM_TESTS = 17 # will test OP_0, OP_1, ..., OP_16
if (len(self.utxo) < NUM_TESTS):
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
split_value = (self.utxo[0].nValue - 4000) // NUM_TESTS
for i in range(NUM_TESTS):
tx.vout.append(CTxOut(split_value, CScript([OP_TRUE])))
tx.rehash()
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
self.test_node.test_witness_block(block, accepted=True)
self.utxo.pop(0)
for i in range(NUM_TESTS):
self.utxo.append(UTXO(tx.sha256, i, split_value))
sync_blocks(self.nodes)
temp_utxo = []
tx = CTransaction()
count = 0
witness_program = CScript([OP_TRUE])
witness_hash = sha256(witness_program)
assert_equal(len(self.nodes[1].getrawmempool()), 0)
for version in list(range(OP_1, OP_16+1)) + [OP_0]:
count += 1
# First try to spend to a future version segwit scriptPubKey.
scriptPubKey = CScript([CScriptOp(version), witness_hash])
tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
tx.vout = [CTxOut(self.utxo[0].nValue-1000, scriptPubKey)]
tx.rehash()
self.std_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True)
self.utxo.pop(0)
temp_utxo.append(UTXO(tx.sha256, 0, tx.vout[0].nValue))
self.nodes[0].generate(1) # Mine all the transactions
sync_blocks(self.nodes)
assert(len(self.nodes[0].getrawmempool()) == 0)
# Finally, verify that version 0 -> version 1 transactions
# are non-standard
scriptPubKey = CScript([CScriptOp(OP_1), witness_hash])
tx2 = CTransaction()
tx2.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")]
tx2.vout = [CTxOut(tx.vout[0].nValue-1000, scriptPubKey)]
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ]
tx2.rehash()
# Gets accepted to test_node, because standardness of outputs isn't
# checked with fRequireStandard
self.test_node.test_transaction_acceptance(tx2, with_witness=True, accepted=True)
self.std_node.test_transaction_acceptance(tx2, with_witness=True, accepted=False)
temp_utxo.pop() # last entry in temp_utxo was the output we just spent
temp_utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
# Spend everything in temp_utxo back to an OP_TRUE output.
tx3 = CTransaction()
total_value = 0
for i in temp_utxo:
tx3.vin.append(CTxIn(COutPoint(i.sha256, i.n), b""))
tx3.wit.vtxinwit.append(CTxInWitness())
total_value += i.nValue
tx3.wit.vtxinwit[-1].scriptWitness.stack = [witness_program]
tx3.vout.append(CTxOut(total_value - 1000, CScript([OP_TRUE])))
tx3.rehash()
# Spending a higher version witness output is not allowed by policy,
# even with fRequireStandard=false.
self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False)
self.test_node.sync_with_ping()
with mininode_lock:
assert(b"reserved for soft-fork upgrades" in self.test_node.last_message["reject"].reason)
# Building a block with the transaction must be valid, however.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx2, tx3])
self.test_node.test_witness_block(block, accepted=True)
sync_blocks(self.nodes)
# Add utxo to our list
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
def test_premature_coinbase_witness_spend(self):
self.log.info("Testing premature coinbase witness spend")
block = self.build_next_block()
# Change the output of the block to be a witness output.
witness_program = CScript([OP_TRUE])
witness_hash = sha256(witness_program)
scriptPubKey = CScript([OP_0, witness_hash])
block.vtx[0].vout[0].scriptPubKey = scriptPubKey
# This next line will rehash the coinbase and update the merkle
# root, and solve.
self.update_witness_block_with_transactions(block, [])
self.test_node.test_witness_block(block, accepted=True)
spend_tx = CTransaction()
spend_tx.vin = [CTxIn(COutPoint(block.vtx[0].sha256, 0), b"")]
spend_tx.vout = [CTxOut(block.vtx[0].vout[0].nValue, witness_program)]
spend_tx.wit.vtxinwit.append(CTxInWitness())
spend_tx.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ]
spend_tx.rehash()
# Now test a premature spend.
self.nodes[0].generate(98)
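        # The tip is now 98 blocks past the coinbase, so a spend in the next
        # block is still one short of the 100-block coinbase maturity.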
sync_blocks(self.nodes)
block2 = self.build_next_block()
self.update_witness_block_with_transactions(block2, [spend_tx])
self.test_node.test_witness_block(block2, accepted=False)
# Advancing one more block should allow the spend.
self.nodes[0].generate(1)
block2 = self.build_next_block()
self.update_witness_block_with_transactions(block2, [spend_tx])
self.test_node.test_witness_block(block2, accepted=True)
sync_blocks(self.nodes)
def test_signature_version_1(self):
self.log.info("Testing segwit signature hash version 1")
key = CECKey()
key.set_secretbytes(b"9")
pubkey = CPubKey(key.get_pubkey())
witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)])
witness_hash = sha256(witness_program)
scriptPubKey = CScript([OP_0, witness_hash])
# First create a witness output for use in the tests.
assert(len(self.utxo))
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
tx.rehash()
self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True)
# Mine this transaction in preparation for following tests.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
self.test_node.test_witness_block(block, accepted=True)
sync_blocks(self.nodes)
self.utxo.pop(0)
# Test each hashtype
prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue)
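        # Exercise all six combinations:
        # {SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE} x {0, SIGHASH_ANYONECANPAY}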
for sigflag in [ 0, SIGHASH_ANYONECANPAY ]:
for hashtype in [SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE]:
hashtype |= sigflag
block = self.build_next_block()
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b""))
tx.vout.append(CTxOut(prev_utxo.nValue - 1000, scriptPubKey))
tx.wit.vtxinwit.append(CTxInWitness())
# Too-large input value
sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue+1, key)
self.update_witness_block_with_transactions(block, [tx])
self.test_node.test_witness_block(block, accepted=False)
# Too-small input value
sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue-1, key)
block.vtx.pop() # remove last tx
self.update_witness_block_with_transactions(block, [tx])
self.test_node.test_witness_block(block, accepted=False)
# Now try correct value
sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue, key)
block.vtx.pop()
self.update_witness_block_with_transactions(block, [tx])
self.test_node.test_witness_block(block, accepted=True)
prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue)
# Test combinations of signature hashes.
# Split the utxo into a lot of outputs.
# Randomly choose up to 10 to spend, sign with different hashtypes, and
# output to a random number of outputs. Repeat NUM_TESTS times.
# Ensure that we've tested a situation where we use SIGHASH_SINGLE with
# an input index > number of outputs.
NUM_TESTS = 500
temp_utxos = []
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b""))
split_value = prev_utxo.nValue // NUM_TESTS
for i in range(NUM_TESTS):
tx.vout.append(CTxOut(split_value, scriptPubKey))
tx.wit.vtxinwit.append(CTxInWitness())
sign_P2PK_witness_input(witness_program, tx, 0, SIGHASH_ALL, prev_utxo.nValue, key)
for i in range(NUM_TESTS):
temp_utxos.append(UTXO(tx.sha256, i, split_value))
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
self.test_node.test_witness_block(block, accepted=True)
block = self.build_next_block()
used_sighash_single_out_of_bounds = False
for i in range(NUM_TESTS):
# Ping regularly to keep the connection alive
if (not i % 100):
self.test_node.sync_with_ping()
# Choose random number of inputs to use.
num_inputs = random.randint(1, 10)
# Create a slight bias for producing more utxos
num_outputs = random.randint(1, 11)
random.shuffle(temp_utxos)
assert(len(temp_utxos) > num_inputs)
tx = CTransaction()
total_value = 0
for i in range(num_inputs):
tx.vin.append(CTxIn(COutPoint(temp_utxos[i].sha256, temp_utxos[i].n), b""))
tx.wit.vtxinwit.append(CTxInWitness())
total_value += temp_utxos[i].nValue
split_value = total_value // num_outputs
for i in range(num_outputs):
tx.vout.append(CTxOut(split_value, scriptPubKey))
for i in range(num_inputs):
# Now try to sign each input, using a random hashtype.
anyonecanpay = 0
if random.randint(0, 1):
anyonecanpay = SIGHASH_ANYONECANPAY
hashtype = random.randint(1, 3) | anyonecanpay
sign_P2PK_witness_input(witness_program, tx, i, hashtype, temp_utxos[i].nValue, key)
if (hashtype == SIGHASH_SINGLE and i >= num_outputs):
used_sighash_single_out_of_bounds = True
tx.rehash()
for i in range(num_outputs):
temp_utxos.append(UTXO(tx.sha256, i, split_value))
temp_utxos = temp_utxos[num_inputs:]
block.vtx.append(tx)
# Test the block periodically, if we're close to maxblocksize
if (get_virtual_size(block) > MAX_BLOCK_BASE_SIZE - 1000):
self.update_witness_block_with_transactions(block, [])
self.test_node.test_witness_block(block, accepted=True)
block = self.build_next_block()
if (not used_sighash_single_out_of_bounds):
self.log.info("WARNING: this test run didn't attempt SIGHASH_SINGLE with out-of-bounds index value")
# Test the transactions we've added to the block
if (len(block.vtx) > 1):
self.update_witness_block_with_transactions(block, [])
self.test_node.test_witness_block(block, accepted=True)
# Now test witness version 0 P2PKH transactions
pubkeyhash = hash160(pubkey)
scriptPKH = CScript([OP_0, pubkeyhash])
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(temp_utxos[0].sha256, temp_utxos[0].n), b""))
tx.vout.append(CTxOut(temp_utxos[0].nValue, scriptPKH))
tx.wit.vtxinwit.append(CTxInWitness())
sign_P2PK_witness_input(witness_program, tx, 0, SIGHASH_ALL, temp_utxos[0].nValue, key)
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE])))
script = GetP2PKHScript(pubkeyhash)
sig_hash = SegwitVersion1SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue)
signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL
# Check that we can't have a scriptSig
tx2.vin[0].scriptSig = CScript([signature, pubkey])
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx, tx2])
self.test_node.test_witness_block(block, accepted=False)
# Move the signature to the witness.
block.vtx.pop()
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [signature, pubkey]
tx2.vin[0].scriptSig = b""
tx2.rehash()
self.update_witness_block_with_transactions(block, [tx2])
self.test_node.test_witness_block(block, accepted=True)
temp_utxos.pop(0)
# Update self.utxos for later tests. Just spend everything in
# temp_utxos to a corresponding entry in self.utxos
tx = CTransaction()
index = 0
for i in temp_utxos:
# Just spend to our usual anyone-can-spend output
# Use SIGHASH_SINGLE|SIGHASH_ANYONECANPAY so we can build up
# the signatures as we go.
tx.vin.append(CTxIn(COutPoint(i.sha256, i.n), b""))
tx.vout.append(CTxOut(i.nValue, CScript([OP_TRUE])))
tx.wit.vtxinwit.append(CTxInWitness())
sign_P2PK_witness_input(witness_program, tx, index, SIGHASH_SINGLE|SIGHASH_ANYONECANPAY, i.nValue, key)
index += 1
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
self.test_node.test_witness_block(block, accepted=True)
for i in range(len(tx.vout)):
self.utxo.append(UTXO(tx.sha256, i, tx.vout[i].nValue))
# Test P2SH wrapped witness programs.
def test_p2sh_witness(self, segwit_activated):
self.log.info("Testing P2SH witness transactions")
assert(len(self.utxo))
# Prepare the p2sh-wrapped witness output
witness_program = CScript([OP_DROP, OP_TRUE])
witness_hash = sha256(witness_program)
p2wsh_pubkey = CScript([OP_0, witness_hash])
p2sh_witness_hash = hash160(p2wsh_pubkey)
scriptPubKey = CScript([OP_HASH160, p2sh_witness_hash, OP_EQUAL])
scriptSig = CScript([p2wsh_pubkey]) # a push of the redeem script
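        # Nesting recap: the scriptPubKey is ordinary P2SH over the redeem
        # script, the redeem script is the P2WSH program
        # OP_0 <sha256(witness script)>, and the witness script itself only
        # ever appears on the witness stack.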
# Fund the P2SH output
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
tx.rehash()
# Verify mempool acceptance and block validity
self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
self.test_node.test_witness_block(block, accepted=True, with_witness=segwit_activated)
sync_blocks(self.nodes)
# Now test attempts to spend the output.
spend_tx = CTransaction()
spend_tx.vin.append(CTxIn(COutPoint(tx.sha256, 0), scriptSig))
spend_tx.vout.append(CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE])))
spend_tx.rehash()
# This transaction should not be accepted into the mempool pre- or
# post-segwit. Mempool acceptance will use SCRIPT_VERIFY_WITNESS which
# will require a witness to spend a witness program regardless of
        # segwit activation. Note that older bitcoind versions that are not
# segwit-aware would also reject this for failing CLEANSTACK.
self.test_node.test_transaction_acceptance(spend_tx, with_witness=False, accepted=False)
# Try to put the witness script in the scriptSig, should also fail.
spend_tx.vin[0].scriptSig = CScript([p2wsh_pubkey, b'a'])
spend_tx.rehash()
self.test_node.test_transaction_acceptance(spend_tx, with_witness=False, accepted=False)
# Now put the witness script in the witness, should succeed after
# segwit activates.
spend_tx.vin[0].scriptSig = scriptSig
spend_tx.rehash()
spend_tx.wit.vtxinwit.append(CTxInWitness())
spend_tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a', witness_program ]
# Verify mempool acceptance
self.test_node.test_transaction_acceptance(spend_tx, with_witness=True, accepted=segwit_activated)
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [spend_tx])
# If we're before activation, then sending this without witnesses
# should be valid. If we're after activation, then sending this with
# witnesses should be valid.
if segwit_activated:
self.test_node.test_witness_block(block, accepted=True)
else:
self.test_node.test_witness_block(block, accepted=True, with_witness=False)
# Update self.utxo
self.utxo.pop(0)
self.utxo.append(UTXO(spend_tx.sha256, 0, spend_tx.vout[0].nValue))
# Test the behavior of starting up a segwit-aware node after the softfork
# has activated. As segwit requires different block data than pre-segwit
# nodes would have stored, this requires special handling.
# To enable this test, pass --oldbinary=<path-to-pre-segwit-bitcoind> to
# the test.
def test_upgrade_after_activation(self, node, node_id):
self.log.info("Testing software upgrade after softfork activation")
assert(node_id != 0) # node0 is assumed to be a segwit-active bitcoind
# Make sure the nodes are all up
sync_blocks(self.nodes)
# Restart with the new binary
stop_node(node, node_id)
self.nodes[node_id] = start_node(node_id, self.options.tmpdir)
connect_nodes(self.nodes[0], node_id)
sync_blocks(self.nodes)
# Make sure that this peer thinks segwit has activated.
assert(get_bip9_status(node, 'segwit')['status'] == "active")
        # Make sure this peer's blocks match those of node0.
height = node.getblockcount()
while height >= 0:
block_hash = node.getblockhash(height)
assert_equal(block_hash, self.nodes[0].getblockhash(height))
assert_equal(self.nodes[0].getblock(block_hash), node.getblock(block_hash))
height -= 1
def test_witness_sigops(self):
'''Ensure sigop counting is correct inside witnesses.'''
self.log.info("Testing sigops limit")
assert(len(self.utxo))
# Keep this under MAX_OPS_PER_SCRIPT (201)
witness_program = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKMULTISIG]*5 + [OP_CHECKSIG]*193 + [OP_ENDIF])
witness_hash = sha256(witness_program)
scriptPubKey = CScript([OP_0, witness_hash])
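        # Each bare OP_CHECKMULTISIG counts at the maximum of 20 sigops (no
        # preceding small-integer push) and each OP_CHECKSIG counts as 1, so
        # one execution of this script costs 20*5 + 193 = 293 sigops.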
sigops_per_script = 20*5 + 193*1
# We'll produce 2 extra outputs, one with a program that would take us
# over max sig ops, and one with a program that would exactly reach max
# sig ops
outputs = (MAX_SIGOP_COST // sigops_per_script) + 2
extra_sigops_available = MAX_SIGOP_COST % sigops_per_script
# We chose the number of checkmultisigs/checksigs to make this work:
assert(extra_sigops_available < 100) # steer clear of MAX_OPS_PER_SCRIPT
# This script, when spent with the first
# N(=MAX_SIGOP_COST//sigops_per_script) outputs of our transaction,
# would push us just over the block sigop limit.
witness_program_toomany = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG]*(extra_sigops_available + 1) + [OP_ENDIF])
witness_hash_toomany = sha256(witness_program_toomany)
scriptPubKey_toomany = CScript([OP_0, witness_hash_toomany])
# If we spend this script instead, we would exactly reach our sigop
# limit (for witness sigops).
witness_program_justright = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG]*(extra_sigops_available) + [OP_ENDIF])
witness_hash_justright = sha256(witness_program_justright)
scriptPubKey_justright = CScript([OP_0, witness_hash_justright])
# First split our available utxo into a bunch of outputs
split_value = self.utxo[0].nValue // outputs
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
for i in range(outputs):
tx.vout.append(CTxOut(split_value, scriptPubKey))
tx.vout[-2].scriptPubKey = scriptPubKey_toomany
tx.vout[-1].scriptPubKey = scriptPubKey_justright
tx.rehash()
block_1 = self.build_next_block()
self.update_witness_block_with_transactions(block_1, [tx])
self.test_node.test_witness_block(block_1, accepted=True)
tx2 = CTransaction()
# If we try to spend the first n-1 outputs from tx, that should be
# too many sigops.
total_value = 0
for i in range(outputs-1):
tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b""))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program ]
total_value += tx.vout[i].nValue
tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program_toomany ]
tx2.vout.append(CTxOut(total_value, CScript([OP_TRUE])))
tx2.rehash()
block_2 = self.build_next_block()
self.update_witness_block_with_transactions(block_2, [tx2])
self.test_node.test_witness_block(block_2, accepted=False)
# Try dropping the last input in tx2, and add an output that has
# too many sigops (contributing to legacy sigop count).
checksig_count = (extra_sigops_available // 4) + 1
scriptPubKey_checksigs = CScript([OP_CHECKSIG]*checksig_count)
tx2.vout.append(CTxOut(0, scriptPubKey_checksigs))
tx2.vin.pop()
tx2.wit.vtxinwit.pop()
tx2.vout[0].nValue -= tx.vout[-2].nValue
tx2.rehash()
block_3 = self.build_next_block()
self.update_witness_block_with_transactions(block_3, [tx2])
self.test_node.test_witness_block(block_3, accepted=False)
# If we drop the last checksig in this output, the tx should succeed.
block_4 = self.build_next_block()
tx2.vout[-1].scriptPubKey = CScript([OP_CHECKSIG]*(checksig_count-1))
tx2.rehash()
self.update_witness_block_with_transactions(block_4, [tx2])
self.test_node.test_witness_block(block_4, accepted=True)
# Reset the tip back down for the next test
sync_blocks(self.nodes)
for x in self.nodes:
x.invalidateblock(block_4.hash)
# Try replacing the last input of tx2 to be spending the last
# output of tx
block_5 = self.build_next_block()
tx2.vout.pop()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, outputs-1), b""))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program_justright ]
tx2.rehash()
self.update_witness_block_with_transactions(block_5, [tx2])
self.test_node.test_witness_block(block_5, accepted=True)
# TODO: test p2sh sigop counting
def test_getblocktemplate_before_lockin(self):
self.log.info("Testing getblocktemplate setting of segwit versionbit (before lockin)")
# Node0 is segwit aware, node2 is not.
for node in [self.nodes[0], self.nodes[2]]:
gbt_results = node.getblocktemplate()
block_version = gbt_results['version']
            # Without the segwit rule requested, only the segwit-aware node
            # (node0) signals the witness version bit; node2 does not.
assert_equal((block_version & (1 << VB_WITNESS_BIT) != 0), node == self.nodes[0])
# If we don't specify the segwit rule, then we won't get a default
# commitment.
assert('default_witness_commitment' not in gbt_results)
# Workaround:
# Can either change the tip, or change the mempool and wait 5 seconds
# to trigger a recomputation of getblocktemplate.
txid = int(self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1), 16)
# Using mocktime lets us avoid sleep()
sync_mempools(self.nodes)
self.nodes[0].setmocktime(int(time.time())+10)
self.nodes[2].setmocktime(int(time.time())+10)
for node in [self.nodes[0], self.nodes[2]]:
gbt_results = node.getblocktemplate({"rules" : ["segwit"]})
block_version = gbt_results['version']
if node == self.nodes[2]:
# If this is a non-segwit node, we should still not get a witness
# commitment, nor a version bit signalling segwit.
assert_equal(block_version & (1 << VB_WITNESS_BIT), 0)
assert('default_witness_commitment' not in gbt_results)
else:
# For segwit-aware nodes, check the version bit and the witness
# commitment are correct.
assert(block_version & (1 << VB_WITNESS_BIT) != 0)
assert('default_witness_commitment' in gbt_results)
witness_commitment = gbt_results['default_witness_commitment']
# Check that default_witness_commitment is present.
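                # get_witness_script builds the BIP141 commitment script: an
                # OP_RETURN whose 36-byte payload is the 0xaa21a9ed header
                # followed by SHA256d(witness merkle root || witness nonce),
                # with the coinbase wtxid pinned to zero in the root.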
witness_root = CBlock.get_merkle_root([ser_uint256(0),
ser_uint256(txid)])
script = get_witness_script(witness_root, 0)
assert_equal(witness_commitment, bytes_to_hex_str(script))
# undo mocktime
self.nodes[0].setmocktime(0)
self.nodes[2].setmocktime(0)
# Uncompressed pubkeys are no longer supported in default relay policy,
# but (for now) are still valid in blocks.
def test_uncompressed_pubkey(self):
self.log.info("Testing uncompressed pubkeys")
# Segwit transactions using uncompressed pubkeys are not accepted
# under default policy, but should still pass consensus.
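        # The policy flag responsible is SCRIPT_VERIFY_WITNESS_PUBKEYTYPE,
        # which applies to mempool acceptance but not to block validation.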
key = CECKey()
key.set_secretbytes(b"9")
key.set_compressed(False)
pubkey = CPubKey(key.get_pubkey())
assert_equal(len(pubkey), 65) # This should be an uncompressed pubkey
assert(len(self.utxo) > 0)
utxo = self.utxo.pop(0)
# Test 1: P2WPKH
# First create a P2WPKH output that uses an uncompressed pubkey
pubkeyhash = hash160(pubkey)
scriptPKH = CScript([OP_0, pubkeyhash])
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(utxo.sha256, utxo.n), b""))
tx.vout.append(CTxOut(utxo.nValue-1000, scriptPKH))
tx.rehash()
# Confirm it in a block.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
self.test_node.test_witness_block(block, accepted=True)
# Now try to spend it. Send it to a P2WSH output, which we'll
# use in the next test.
witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)])
witness_hash = sha256(witness_program)
scriptWSH = CScript([OP_0, witness_hash])
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, scriptWSH))
script = GetP2PKHScript(pubkeyhash)
sig_hash = SegwitVersion1SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue)
signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [ signature, pubkey ]
tx2.rehash()
# Should fail policy test.
self.test_node.test_transaction_acceptance(tx2, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
# But passes consensus.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx2])
self.test_node.test_witness_block(block, accepted=True)
# Test 2: P2WSH
# Try to spend the P2WSH output created in last test.
# Send it to a P2SH(P2WSH) output, which we'll use in the next test.
p2sh_witness_hash = hash160(scriptWSH)
scriptP2SH = CScript([OP_HASH160, p2sh_witness_hash, OP_EQUAL])
scriptSig = CScript([scriptWSH])
tx3 = CTransaction()
tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
tx3.vout.append(CTxOut(tx2.vout[0].nValue-1000, scriptP2SH))
tx3.wit.vtxinwit.append(CTxInWitness())
sign_P2PK_witness_input(witness_program, tx3, 0, SIGHASH_ALL, tx2.vout[0].nValue, key)
# Should fail policy test.
self.test_node.test_transaction_acceptance(tx3, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
# But passes consensus.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx3])
self.test_node.test_witness_block(block, accepted=True)
# Test 3: P2SH(P2WSH)
# Try to spend the P2SH output created in the last test.
# Send it to a P2PKH output, which we'll use in the next test.
scriptPubKey = GetP2PKHScript(pubkeyhash)
tx4 = CTransaction()
tx4.vin.append(CTxIn(COutPoint(tx3.sha256, 0), scriptSig))
tx4.vout.append(CTxOut(tx3.vout[0].nValue-1000, scriptPubKey))
tx4.wit.vtxinwit.append(CTxInWitness())
sign_P2PK_witness_input(witness_program, tx4, 0, SIGHASH_ALL, tx3.vout[0].nValue, key)
# Should fail policy test.
self.test_node.test_transaction_acceptance(tx4, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx4])
self.test_node.test_witness_block(block, accepted=True)
# Test 4: Uncompressed pubkeys should still be valid in non-segwit
# transactions.
tx5 = CTransaction()
tx5.vin.append(CTxIn(COutPoint(tx4.sha256, 0), b""))
tx5.vout.append(CTxOut(tx4.vout[0].nValue-1000, CScript([OP_TRUE])))
(sig_hash, err) = SignatureHash(scriptPubKey, tx5, 0, SIGHASH_ALL)
signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL
tx5.vin[0].scriptSig = CScript([signature, pubkey])
tx5.rehash()
# Should pass policy and consensus.
self.test_node.test_transaction_acceptance(tx5, True, True)
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx5])
self.test_node.test_witness_block(block, accepted=True)
self.utxo.append(UTXO(tx5.sha256, 0, tx5.vout[0].nValue))
def test_non_standard_witness(self):
self.log.info("Testing detection of non-standard P2WSH witness")
pad = chr(1).encode('latin-1')
# Create scripts for tests
scripts = []
scripts.append(CScript([OP_DROP] * 100))
scripts.append(CScript([OP_DROP] * 99))
scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 60))
scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 61))
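        # Size math: a 59-byte push serializes to 60 bytes, so scripts[2] is
        # 59*60 + 60 = 3600 bytes (exactly the standardness limit for a
        # witnessScript) and scripts[3] is one byte over it.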
p2wsh_scripts = []
assert(len(self.utxo))
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
# For each script, generate a pair of P2WSH and P2SH-P2WSH output.
outputvalue = (self.utxo[0].nValue - 1000) // (len(scripts) * 2)
for i in scripts:
p2wsh = CScript([OP_0, sha256(i)])
p2sh = hash160(p2wsh)
p2wsh_scripts.append(p2wsh)
tx.vout.append(CTxOut(outputvalue, p2wsh))
tx.vout.append(CTxOut(outputvalue, CScript([OP_HASH160, p2sh, OP_EQUAL])))
tx.rehash()
txid = tx.sha256
self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
# Creating transactions for tests
p2wsh_txs = []
p2sh_txs = []
for i in range(len(scripts)):
p2wsh_tx = CTransaction()
            p2wsh_tx.vin.append(CTxIn(COutPoint(txid, i*2)))
p2wsh_tx.vout.append(CTxOut(outputvalue - 5000, CScript([OP_0, hash160(hex_str_to_bytes(""))])))
p2wsh_tx.wit.vtxinwit.append(CTxInWitness())
p2wsh_tx.rehash()
p2wsh_txs.append(p2wsh_tx)
p2sh_tx = CTransaction()
            p2sh_tx.vin.append(CTxIn(COutPoint(txid, i*2+1), CScript([p2wsh_scripts[i]])))
p2sh_tx.vout.append(CTxOut(outputvalue - 5000, CScript([OP_0, hash160(hex_str_to_bytes(""))])))
p2sh_tx.wit.vtxinwit.append(CTxInWitness())
p2sh_tx.rehash()
p2sh_txs.append(p2sh_tx)
# Testing native P2WSH
# Witness stack size, excluding witnessScript, over 100 is non-standard
p2wsh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]]
self.std_node.test_transaction_acceptance(p2wsh_txs[0], True, False, b'bad-witness-nonstandard')
# Non-standard nodes should accept
self.test_node.test_transaction_acceptance(p2wsh_txs[0], True, True)
# Stack element size over 80 bytes is non-standard
p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]]
self.std_node.test_transaction_acceptance(p2wsh_txs[1], True, False, b'bad-witness-nonstandard')
# Non-standard nodes should accept
self.test_node.test_transaction_acceptance(p2wsh_txs[1], True, True)
# Standard nodes should accept if element size is not over 80 bytes
p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]]
self.std_node.test_transaction_acceptance(p2wsh_txs[1], True, True)
# witnessScript size at 3600 bytes is standard
p2wsh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]]
self.test_node.test_transaction_acceptance(p2wsh_txs[2], True, True)
self.std_node.test_transaction_acceptance(p2wsh_txs[2], True, True)
# witnessScript size at 3601 bytes is non-standard
p2wsh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]]
self.std_node.test_transaction_acceptance(p2wsh_txs[3], True, False, b'bad-witness-nonstandard')
# Non-standard nodes should accept
self.test_node.test_transaction_acceptance(p2wsh_txs[3], True, True)
# Repeating the same tests with P2SH-P2WSH
p2sh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]]
self.std_node.test_transaction_acceptance(p2sh_txs[0], True, False, b'bad-witness-nonstandard')
self.test_node.test_transaction_acceptance(p2sh_txs[0], True, True)
p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]]
self.std_node.test_transaction_acceptance(p2sh_txs[1], True, False, b'bad-witness-nonstandard')
self.test_node.test_transaction_acceptance(p2sh_txs[1], True, True)
p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]]
self.std_node.test_transaction_acceptance(p2sh_txs[1], True, True)
p2sh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]]
self.test_node.test_transaction_acceptance(p2sh_txs[2], True, True)
self.std_node.test_transaction_acceptance(p2sh_txs[2], True, True)
p2sh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]]
self.std_node.test_transaction_acceptance(p2sh_txs[3], True, False, b'bad-witness-nonstandard')
self.test_node.test_transaction_acceptance(p2sh_txs[3], True, True)
self.nodes[0].generate(1) # Mine and clean up the mempool of non-standard node
# Valid but non-standard transactions in a block should be accepted by standard node
sync_blocks(self.nodes)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
assert_equal(len(self.nodes[1].getrawmempool()), 0)
self.utxo.pop(0)
def run_test(self):
# Setup the p2p connections and start up the network thread.
self.test_node = TestNode() # sets NODE_WITNESS|NODE_NETWORK
self.old_node = TestNode() # only NODE_NETWORK
self.std_node = TestNode() # for testing node1 (fRequireStandard=true)
self.p2p_connections = [self.test_node, self.old_node]
self.connections = []
self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.test_node, services=NODE_NETWORK|NODE_WITNESS))
self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.old_node, services=NODE_NETWORK))
self.connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], self.std_node, services=NODE_NETWORK|NODE_WITNESS))
self.test_node.add_connection(self.connections[0])
self.old_node.add_connection(self.connections[1])
self.std_node.add_connection(self.connections[2])
NetworkThread().start() # Start up network handling in another thread
# Keep a place to store utxo's that can be used in later tests
self.utxo = []
# Test logic begins here
self.test_node.wait_for_verack()
self.log.info("Starting tests before segwit lock in:")
self.test_witness_services() # Verifies NODE_WITNESS
self.test_non_witness_transaction() # non-witness tx's are accepted
self.test_unnecessary_witness_before_segwit_activation()
self.test_block_relay(segwit_activated=False)
# Advance to segwit being 'started'
self.advance_to_segwit_started()
sync_blocks(self.nodes)
self.test_getblocktemplate_before_lockin()
sync_blocks(self.nodes)
# At lockin, nothing should change.
self.log.info("Testing behavior post lockin, pre-activation")
self.advance_to_segwit_lockin()
# Retest unnecessary witnesses
self.test_unnecessary_witness_before_segwit_activation()
self.test_witness_tx_relay_before_segwit_activation()
self.test_block_relay(segwit_activated=False)
self.test_p2sh_witness(segwit_activated=False)
self.test_standardness_v0(segwit_activated=False)
sync_blocks(self.nodes)
# Now activate segwit
self.log.info("Testing behavior after segwit activation")
self.advance_to_segwit_active()
sync_blocks(self.nodes)
# Test P2SH witness handling again
self.test_p2sh_witness(segwit_activated=True)
self.test_witness_commitments()
self.test_block_malleability()
self.test_witness_block_size()
self.test_submit_block()
self.test_extra_witness_data()
self.test_max_witness_push_length()
self.test_max_witness_program_length()
self.test_witness_input_length()
self.test_block_relay(segwit_activated=True)
self.test_tx_relay_after_segwit_activation()
self.test_standardness_v0(segwit_activated=True)
self.test_segwit_versions()
self.test_premature_coinbase_witness_spend()
self.test_uncompressed_pubkey()
self.test_signature_version_1()
self.test_non_standard_witness()
sync_blocks(self.nodes)
self.test_upgrade_after_activation(self.nodes[2], 2)
self.test_witness_sigops()
if __name__ == '__main__':
SegWitTest().main()
| mit | -5,399,243,964,587,180,000 | 45.164959 | 152 | 0.634496 | false |
kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/idlelib/Bindings.py | 7 | 2976 | """Define the menu contents, hotkeys, and event bindings.
There is additional configuration information in the EditorWindow class (and
subclasses): the menus are created there based on the menu_specs (class)
variable, and menus not created are silently skipped in the code here. This
makes it possible, for example, to define a Debug menu which is only present in
the PythonShell window, and a Format menu which is only present in the Editor
windows.
"""
from idlelib.configHandler import idleConf
# Warning: menudefs is altered in macosxSupport.overrideRootMenu()
# after it is determined that an OS X Aqua Tk is in use,
# which cannot be done until after Tk() is first called.
# Do not alter the 'file', 'options', or 'help' cascades here
# without altering overrideRootMenu() as well.
# TODO: Make this more robust
menudefs = [
    # a leading underscore marks the menu item's accelerator character
('file', [
('_New File', '<<open-new-window>>'),
('_Open...', '<<open-window-from-file>>'),
('Open _Module...', '<<open-module>>'),
('Class _Browser', '<<open-class-browser>>'),
('_Path Browser', '<<open-path-browser>>'),
None,
('_Save', '<<save-window>>'),
('Save _As...', '<<save-window-as-file>>'),
('Save Cop_y As...', '<<save-copy-of-window-as-file>>'),
None,
('Prin_t Window', '<<print-window>>'),
None,
('_Close', '<<close-window>>'),
('E_xit', '<<close-all-windows>>'),
]),
('edit', [
('_Undo', '<<undo>>'),
('_Redo', '<<redo>>'),
None,
('Cu_t', '<<cut>>'),
('_Copy', '<<copy>>'),
('_Paste', '<<paste>>'),
('Select _All', '<<select-all>>'),
None,
('_Find...', '<<find>>'),
('Find A_gain', '<<find-again>>'),
('Find _Selection', '<<find-selection>>'),
('Find in Files...', '<<find-in-files>>'),
('R_eplace...', '<<replace>>'),
('Go to _Line', '<<goto-line>>'),
]),
('format', [
('_Indent Region', '<<indent-region>>'),
('_Dedent Region', '<<dedent-region>>'),
('Comment _Out Region', '<<comment-region>>'),
('U_ncomment Region', '<<uncomment-region>>'),
('Tabify Region', '<<tabify-region>>'),
('Untabify Region', '<<untabify-region>>'),
('Toggle Tabs', '<<toggle-tabs>>'),
('New Indent Width', '<<change-indentwidth>>'),
]),
('run', [
('Python Shell', '<<open-python-shell>>'),
]),
('shell', [
('_View Last Restart', '<<view-restart>>'),
('_Restart Shell', '<<restart-shell>>'),
None,
('_Interrupt Execution', '<<interrupt-execution>>'),
]),
('debug', [
('_Go to File/Line', '<<goto-file-line>>'),
('!_Debugger', '<<toggle-debugger>>'),
('_Stack Viewer', '<<open-stack-viewer>>'),
('!_Auto-open Stack Viewer', '<<toggle-jit-stack-viewer>>'),
]),
('options', [
('Configure _IDLE', '<<open-config-dialog>>'),
None,
]),
('help', [
('_About IDLE', '<<about-idle>>'),
None,
('_IDLE Help', '<<help>>'),
('Python _Docs', '<<python-docs>>'),
]),
]
default_keydefs = idleConf.GetCurrentKeySet()
| gpl-3.0 | 2,501,025,880,566,990,000 | 31.703297 | 79 | 0.573925 | false |
johndpope/tensorflow | tensorflow/python/kernel_tests/shape_ops_test.py | 12 | 22678 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for various tensorflow.ops.tf."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.core.framework import node_def_pb2
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import importer
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import gradients_impl
from tensorflow.python.platform import test
# TODO(zongheng): it'd be great to factor out this function and various random
# SparseTensor gen funcs.
def _sparsify(x, thresh=0.5, index_dtype=np.int64):
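  """Zero out entries of x below thresh and return the result as a
  SparseTensor, along with the number of surviving non-zero values."""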
x[x < thresh] = 0
non_zero = np.where(x)
x_indices = np.vstack(non_zero).astype(index_dtype).T
x_values = x[non_zero]
x_shape = x.shape
return sparse_tensor.SparseTensor(
indices=x_indices, values=x_values, dense_shape=x_shape), len(x_values)
class ShapeOpsTest(test.TestCase):
def _compareShape(self, x, use_gpu=False):
np_ans = np.array(np.shape(x))
with self.test_session(use_gpu=use_gpu):
tf_ans = array_ops.shape(x)
tf_ans_64 = array_ops.shape(x, out_type=dtypes.int64)
result = tf_ans.eval()
result_64 = tf_ans_64.eval()
self.assertAllEqual(np_ans, result)
self.assertAllEqual(np_ans, result_64)
self.assertShapeEqual(np_ans, tf_ans)
def _compareShapeSparse(self, x_np, use_gpu=False):
np_ans = np.array(np.shape(x_np))
x_tf, unused_nnz = _sparsify(x_np)
with self.test_session(use_gpu=use_gpu):
tf_ans = array_ops.shape(x_tf)
result = tf_ans.eval()
self.assertAllEqual(np_ans, result)
self.assertShapeEqual(np_ans, tf_ans)
def _compareShapeN(self, x, use_gpu=False):
np_ans = np.array(np.shape(x))
with self.test_session(use_gpu=use_gpu) as sess:
tf_ans = array_ops.shape_n([x, x, x])
tf_ans_64 = array_ops.shape_n([x, x, x], out_type=dtypes.int64)
result = sess.run(tf_ans)
result_64 = sess.run(tf_ans_64)
for i in range(3):
self.assertAllEqual(np_ans, result[i])
self.assertAllEqual(np_ans, result_64[i])
self.assertShapeEqual(np_ans, tf_ans[i])
def _compareRank(self, x, use_gpu=False):
np_ans = np.asarray(np.ndim(x))
with self.test_session(use_gpu=use_gpu):
tf_ans = array_ops.rank(x)
result = tf_ans.eval()
self.assertAllEqual(np_ans, result)
self.assertShapeEqual(np_ans, tf_ans)
def _compareRankSparse(self, x_np, use_gpu=False):
np_ans = np.asarray(np.ndim(x_np))
x_tf, unused_nnz = _sparsify(x_np)
with self.test_session(use_gpu=use_gpu):
tf_ans = array_ops.rank(x_tf)
result = tf_ans.eval()
self.assertAllEqual(np_ans, result)
self.assertShapeEqual(np_ans, tf_ans)
def _compareSize(self, x, use_gpu=False):
np_ans = np.asarray(np.size(x))
with self.test_session(use_gpu=use_gpu):
tf_ans = array_ops.size(x)
result = tf_ans.eval()
tf_ans_64 = array_ops.size(x, out_type=dtypes.int64)
result_64 = tf_ans_64.eval()
self.assertAllEqual(np_ans, result)
self.assertAllEqual(np_ans, result_64)
self.assertShapeEqual(np_ans, tf_ans)
def _compareSizeSparse(self, x_np, use_gpu=False):
np_ans = np.asarray(np.size(x_np))
x_tf, unused_nnz = _sparsify(x_np)
with self.test_session(use_gpu=use_gpu):
tf_ans = array_ops.size(x_tf)
result = tf_ans.eval()
self.assertAllEqual(np_ans, result)
self.assertShapeEqual(np_ans, tf_ans)
def _testCpu(self, x):
self._compareShape(x, use_gpu=False)
self._compareShapeN(x, use_gpu=False)
self._compareRank(x, use_gpu=False)
self._compareSize(x, use_gpu=False)
self._compareShapeSparse(x, use_gpu=False)
self._compareRankSparse(x, use_gpu=False)
self._compareSizeSparse(x, use_gpu=False)
def _testGpu(self, x):
self._compareShape(x, use_gpu=True)
self._compareShapeN(x, use_gpu=True)
self._compareRank(x, use_gpu=True)
self._compareSize(x, use_gpu=True)
self._compareShapeSparse(x, use_gpu=True)
self._compareRankSparse(x, use_gpu=True)
self._compareSizeSparse(x, use_gpu=True)
def _testAll(self, x):
self._testCpu(x)
self._testGpu(x)
def testBasic(self):
self._testAll(np.random.randn(2))
self._testAll(np.random.randn(2, 3))
self._testAll(np.random.randn(2, 3, 5))
self._testAll(np.random.randn(2, 3, 5, 7))
self._testAll(np.random.randn(2, 3, 5, 7, 11))
self._testAll(np.random.randn(2, 3, 5, 7, 11, 13))
# Disabled because it takes too long to run, but manually verified
# as passing at time of writing.
def _test64BitOutput(self):
with self.test_session():
inp = array_ops.zeros([2**31])
num_elements = array_ops.size_internal(
inp, optimize=False, out_type=dtypes.int64)
self.assertEqual(2**31, num_elements.eval())
# Too large for tf.int32 output.
with self.assertRaises(errors_impl.InvalidArgumentError):
with self.test_session():
inp = array_ops.zeros([2**31])
num_elements = array_ops.size_internal(
inp, optimize=False, out_type=dtypes.int32)
self.assertEqual(2**31, num_elements.eval())
def _compareExpandDims(self, x, dim, use_gpu):
np_ans = np.expand_dims(x, axis=dim)
with self.test_session(use_gpu=use_gpu):
tensor = array_ops.expand_dims(x, dim)
tf_ans = tensor.eval()
self.assertShapeEqual(np_ans, tensor)
self.assertAllEqual(np_ans, tf_ans)
def _compareExpandDimsAll(self, x, dim):
self._compareExpandDims(x, dim, False)
self._compareExpandDims(x, dim, True)
def testExpandDims(self):
self._compareExpandDimsAll(np.zeros([2]), 0)
self._compareExpandDimsAll(np.zeros([2]), 1)
self._compareExpandDimsAll(np.zeros([2]), -1)
self._compareExpandDimsAll(np.zeros([2, 3]), 0)
self._compareExpandDimsAll(np.zeros([2, 3]), 1)
self._compareExpandDimsAll(np.zeros([2, 3]), 2)
self._compareExpandDimsAll(np.zeros([2, 3]), -1)
self._compareExpandDimsAll(np.zeros([2, 3]), -2)
self._compareExpandDimsAll(np.zeros([2, 3, 5]), 0)
self._compareExpandDimsAll(np.zeros([2, 3, 5]), 1)
self._compareExpandDimsAll(np.zeros([2, 3, 5]), 2)
self._compareExpandDimsAll(np.zeros([2, 3, 5]), 3)
self._compareExpandDimsAll(np.zeros([2, 3, 5]), -1)
self._compareExpandDimsAll(np.zeros([2, 3, 5]), -2)
self._compareExpandDimsAll(np.zeros([2, 3, 5]), -3)
self._compareExpandDimsAll(np.zeros([2, 3, 5]), -4)
def testExpandDimsErrors(self):
with self.test_session():
self.assertRaises(ValueError, array_ops.expand_dims,
np.zeros([2, 3, 5]), -5)
self.assertRaises(ValueError, array_ops.expand_dims,
np.zeros([2, 3, 5]), 4)
def testExpandDimsGradient(self):
with self.test_session():
inp = constant_op.constant(
np.random.rand(4, 2).astype("f"), dtype=dtypes.float32)
squeezed = array_ops.expand_dims(inp, 1)
err = gradient_checker.compute_gradient_error(inp, [4, 2], squeezed,
[4, 1, 2])
self.assertLess(err, 1e-3)
def testExpandDimsScalar(self):
with self.test_session():
inp = constant_op.constant(7)
self.assertAllEqual([7], array_ops.expand_dims(inp, 0).eval())
self.assertAllEqual([7], array_ops.expand_dims(inp, -1).eval())
def _compareSqueeze(self, x, squeeze_dims, use_gpu):
with self.test_session(use_gpu=use_gpu):
if squeeze_dims:
np_ans = np.squeeze(x, axis=tuple(squeeze_dims))
tensor = array_ops.squeeze(x, squeeze_dims)
tf_ans = tensor.eval()
else:
np_ans = np.squeeze(x)
tensor = array_ops.squeeze(x)
tf_ans = tensor.eval()
self.assertShapeEqual(np_ans, tensor)
self.assertAllEqual(np_ans, tf_ans)
def _compareSqueezeAll(self, x, squeeze_dims=None):
if squeeze_dims is None:
squeeze_dims = []
self._compareSqueeze(x, squeeze_dims, False)
self._compareSqueeze(x, squeeze_dims, True)
def testSqueeze(self):
# Nothing to squeeze.
self._compareSqueezeAll(np.zeros([2]))
self._compareSqueezeAll(np.zeros([2, 3]))
# Squeeze the middle element away.
self._compareSqueezeAll(np.zeros([2, 1, 2]))
# Squeeze on both ends.
self._compareSqueezeAll(np.zeros([1, 2, 1, 3, 1]))
def testSqueezeSpecificDimension(self):
# Positive squeeze dim index.
self._compareSqueezeAll(np.zeros([1, 2, 1, 3, 1]), [0])
self._compareSqueezeAll(np.zeros([1, 2, 1, 3, 1]), [2, 4])
self._compareSqueezeAll(np.zeros([1, 2, 1, 3, 1]), [0, 4, 2])
# Negative squeeze dim index.
self._compareSqueezeAll(np.zeros([1, 2, 1, 3, 1]), [-1])
self._compareSqueezeAll(np.zeros([1, 2, 1, 3, 1]), [-3, -5])
self._compareSqueezeAll(np.zeros([1, 2, 1, 3, 1]), [-3, -5, -1])
def testSqueezeAllOnes(self):
# Numpy squeezes a 1 element tensor into a zero dimensional tensor.
# Verify that we do the same.
for use_gpu in [False, True]:
with self.test_session(use_gpu=use_gpu):
tensor = array_ops.squeeze(np.zeros([1, 1, 1]), [])
self.assertEqual(np.shape(1), tensor.get_shape())
tf_ans = tensor.eval()
self.assertEqual(np.shape(1), tf_ans.shape)
def testSqueezeOnlyOnes(self):
for use_gpu in [False, True]:
with self.test_session(use_gpu=use_gpu):
input_1x1x3 = np.zeros([1, 1, 3])
self._compareSqueezeAll(input_1x1x3)
self._compareSqueezeAll(input_1x1x3, [0])
self._compareSqueezeAll(input_1x1x3, [1])
self.assertRaises(ValueError, array_ops.squeeze, input_1x1x3, [2])
def testSqueezeErrors(self):
for use_gpu in [False, True]:
with self.test_session(use_gpu=use_gpu):
self.assertRaises(ValueError, array_ops.squeeze,
np.zeros([1, 2, 1]), [-4])
self.assertRaises(ValueError, array_ops.squeeze,
np.zeros([1, 2, 1]), [0, -4])
self.assertRaises(ValueError, array_ops.squeeze,
np.zeros([1, 2, 1]), [3])
self.assertRaises(ValueError, array_ops.squeeze,
np.zeros([1, 2, 1]), [2, 3])
def testSqueezeGradient(self):
with self.test_session():
inp = np.random.rand(4, 2).astype("f")
a = array_ops.reshape(inp, [4, 1, 2])
squeezed = array_ops.squeeze(a, [])
err = gradient_checker.compute_gradient_error(a, [4, 1, 2], squeezed,
[4, 2])
self.assertLess(err, 1e-3)
def testSqueezeGradientWithSqueezeDims(self):
with self.test_session():
inp = np.random.rand(4, 2).astype("f")
a = array_ops.reshape(inp, [4, 1, 2, 1])
squeezed = array_ops.squeeze(a, [1])
err = gradient_checker.compute_gradient_error(a, [4, 1, 2, 1], squeezed,
[4, 2, 1])
self.assertLess(err, 1e-3)
def testSqueezeWithUnknownShape(self):
with self.test_session():
a = array_ops.placeholder(dtypes.float32, shape=[2, None])
squeezed = array_ops.squeeze(a, [1])
self.assertEqual([2], squeezed.get_shape().as_list())
squeezed = array_ops.squeeze(a)
self.assertEqual(None, squeezed.get_shape())
self.assertRaises(ValueError, array_ops.squeeze, a, [0])
self.assertRaises(ValueError, array_ops.squeeze, a, [100])
class TileTest(test.TestCase):
def testScalar(self):
for use_gpu in False, True:
with self.test_session(use_gpu=use_gpu):
a = constant_op.constant(7, shape=[], dtype=dtypes.float32)
tiled = array_ops.tile(a, [])
result = tiled.eval()
self.assertEqual(result.shape, ())
self.assertEqual([], tiled.get_shape())
self.assertEqual(7, result)
def testSimple(self):
with self.test_session():
inp = np.random.rand(4, 1).astype(np.float32)
a = constant_op.constant(inp)
tiled = array_ops.tile(a, [1, 4])
result = tiled.eval()
self.assertEqual(result.shape, (4, 4))
self.assertEqual([4, 4], tiled.get_shape())
self.assertTrue((result == np.tile(inp, (1, 4))).all())
def testIdentityTileAndGrad(self):
with self.test_session():
inp = np.random.rand(4, 1).astype(np.float32)
a = constant_op.constant(inp)
tiled = array_ops.tile(a, [1, 1])
result = tiled.eval()
self.assertEqual(result.shape, (4, 1))
self.assertEqual([4, 1], tiled.get_shape())
self.assertTrue((result == np.tile(inp, (1, 1))).all())
def testEmpty(self):
with self.test_session():
inp = np.random.rand(2, 3).astype(np.float32)
a = constant_op.constant(inp)
tiled = array_ops.tile(a, [5, 0])
result = tiled.eval()
self.assertEqual(result.shape, (10, 0))
self.assertEqual([10, 0], tiled.get_shape())
def testUnknownInputShape(self):
"""Importing can call _TileShape without shape of <multiples> known."""
with self.test_session():
inp = array_ops.placeholder(dtypes.float32) # unknown shape
multiples = constant_op.constant([1, 2, 3, 4], dtype=np.int32)
tiled = array_ops.tile(inp, multiples)
gdef = tiled.graph.as_graph_def()
# Move the tile op to the start of the graph so that shapes of its inputs
# are not available when the shape function runs on import.
swapped = False
for i, n in enumerate(gdef.node):
if n.op == "Tile":
# Swap tile op to be first in gdef.node
assert i != 0
new_node = node_def_pb2.NodeDef()
new_node.CopyFrom(gdef.node[i])
gdef.node[i].CopyFrom(gdef.node[0])
gdef.node[0].CopyFrom(new_node)
swapped = True
assert swapped
tiled_imported, = importer.import_graph_def(
gdef, return_elements=[tiled.name])
self.assertEqual(4, tiled_imported.get_shape().ndims)
def testTypes(self):
types_to_test = {
"bool": (dtypes.bool, bool),
"float32": (dtypes.float32, float),
"float64": (dtypes.float64, float),
"complex64": (dtypes.complex64, complex),
"complex128": (dtypes.complex128, complex),
"uint8": (dtypes.uint8, int),
"int32": (dtypes.int32, int),
"int64": (dtypes.int64, int),
bytes: (dtypes.string, bytes)
}
for dtype_np, (dtype_tf, cast) in types_to_test.items():
with self.test_session(use_gpu=True):
inp = np.random.rand(4, 1).astype(dtype_np)
a = constant_op.constant(
[cast(x) for x in inp.ravel(order="C")],
shape=[4, 1],
dtype=dtype_tf)
tiled = array_ops.tile(a, [1, 4])
result = tiled.eval()
self.assertEqual(result.shape, (4, 4))
self.assertEqual([4, 4], tiled.get_shape())
self.assertAllEqual(result, np.tile(inp, (1, 4)))
def testInvalidDim(self):
with self.test_session():
inp = np.random.rand(4, 1).astype("f")
a = constant_op.constant(
[float(x) for x in inp.ravel(order="C")],
shape=[4, 1],
dtype=dtypes.float32)
# Wrong length of multiples.
with self.assertRaises(ValueError):
array_ops.tile(a, [1, 4, 2])
# Wrong rank for multiples.
with self.assertRaises(ValueError):
array_ops.tile(a, [[2, 3], [3, 4]]).eval()
def _RunAndVerifyResult(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
# Random dims of rank 5
input_shape = np.random.randint(1, 4, size=5)
inp = np.random.rand(*input_shape).astype("f")
a = constant_op.constant(
[float(x) for x in inp.ravel(order="C")],
shape=input_shape,
dtype=dtypes.float32)
multiples = np.random.randint(1, 4, size=5).astype(np.int32)
tiled = array_ops.tile(a, multiples)
result = tiled.eval()
self.assertTrue((np.array(multiples) * np.array(inp.shape) == np.array(
result.shape)).all())
self.assertAllEqual(result, np.tile(inp, tuple(multiples)))
self.assertShapeEqual(result, tiled)
def testRandom(self):
for _ in range(5):
self._RunAndVerifyResult(use_gpu=False)
for _ in range(5):
self._RunAndVerifyResult(use_gpu=True)
def testGradientSimpleReduction(self):
with self.test_session():
inp = np.random.rand(4, 1).astype("f")
a = constant_op.constant(
[float(x) for x in inp.flatten()], shape=[4, 1], dtype=dtypes.float32)
tiled = array_ops.tile(a, [1, 4])
grad_shape = [4, 4]
grad_inp = np.random.rand(*grad_shape).astype("f")
grad_tensor = constant_op.constant(
[float(x) for x in grad_inp.flatten()], shape=grad_shape)
grad = gradients_impl.gradients([tiled], [a], [grad_tensor])[0]
self.assertShapeEqual(inp, grad)
result = grad.eval()
self.assertAllClose(np.sum(grad_inp, axis=1).reshape(4, 1), result, 1e-3)
def testGradientStridedReduction(self):
with self.test_session():
inp = np.random.rand(4, 2).astype("f")
a = constant_op.constant(
[float(x) for x in inp.flatten()], shape=[4, 2], dtype=dtypes.float32)
tiled = array_ops.tile(a, [1, 2])
grad_shape = [4, 4]
grad_inp = np.random.rand(*grad_shape).astype("f")
grad_tensor = constant_op.constant(
[float(x) for x in grad_inp.flatten()], shape=grad_shape)
grad = gradients_impl.gradients([tiled], [a], [grad_tensor])[0]
self.assertShapeEqual(inp, grad)
result = grad.eval()
expected_shape = [4, 2]
expected = np.zeros(expected_shape)
expected[:, 0] = grad_inp[:, 0] + grad_inp[:, 2]
expected[:, 1] = grad_inp[:, 1] + grad_inp[:, 3]
self.assertTrue((np.abs(expected - result) < 1e-3).all())
def testGradientSimpleReductionOnGPU(self):
with self.test_session(use_gpu=True):
inp = np.random.rand(4, 1).astype("f")
a = constant_op.constant(
[float(x) for x in inp.flatten()], shape=[4, 1], dtype=dtypes.float32)
tiled = array_ops.tile(a, [1, 4])
grad_shape = [4, 4]
grad_inp = np.random.rand(*grad_shape).astype("f")
grad_tensor = constant_op.constant(
[float(x) for x in grad_inp.flatten()], shape=grad_shape)
grad = gradients_impl.gradients([tiled], [a], [grad_tensor])[0]
result = grad.eval()
self.assertAllClose(np.sum(grad_inp, axis=1).reshape(4, 1), result, 1e-3)
def testGradientStridedReductionOnGPU(self):
with self.test_session(use_gpu=True):
inp = np.random.rand(4, 2).astype("f")
a = constant_op.constant(
[float(x) for x in inp.flatten()], shape=[4, 2], dtype=dtypes.float32)
tiled = array_ops.tile(a, [1, 2])
grad_shape = [4, 4]
grad_inp = np.random.rand(*grad_shape).astype("f")
grad_tensor = constant_op.constant(
[float(x) for x in grad_inp.flatten()], shape=grad_shape)
grad = gradients_impl.gradients([tiled], [a], [grad_tensor])[0]
result = grad.eval()
expected_shape = [4, 2]
expected = np.zeros(expected_shape)
expected[:, 0] = grad_inp[:, 0] + grad_inp[:, 2]
expected[:, 1] = grad_inp[:, 1] + grad_inp[:, 3]
self.assertAllClose(expected, result, 1e-3)
def _RunAndVerifyGradientResult(self, input_shape, multiples):
for use_gpu in False, True:
with self.test_session(use_gpu=use_gpu):
# Random values
inp = np.asarray(np.random.rand(*input_shape))
a = constant_op.constant(inp, dtype=dtypes.float64)
tiled = array_ops.tile(a, multiples)
grad_shape = list(np.array(multiples) * np.array(inp.shape))
err = gradient_checker.compute_gradient_error(
a, list(input_shape), tiled, grad_shape, x_init_value=inp)
print("tile(float) error = ", err)
self.assertLess(err, 1e-3)
def testGradientRandomScalar(self):
self._RunAndVerifyGradientResult([], [])
def testGradientRandom(self):
self._RunAndVerifyGradientResult([2, 2, 1, 1, 3], [1, 1, 1, 1, 1])
self._RunAndVerifyGradientResult([2, 2, 1, 1, 3], [1, 2, 1, 3, 1])
self._RunAndVerifyGradientResult([2, 3, 1, 1, 3], [3, 1, 1, 2, 2])
self._RunAndVerifyGradientResult([2, 1, 3, 3, 2], [1, 3, 3, 1, 2])
def testGradientStridedReductionGC(self):
with self.test_session():
inp = np.random.rand(4, 2).astype("f")
a = constant_op.constant(
[float(x) for x in inp.flatten()], shape=[4, 2], dtype=dtypes.float32)
tiled = array_ops.tile(a, [1, 2])
err = gradient_checker.compute_gradient_error(a, [4, 2], tiled, [4, 4])
self.assertLess(err, 1e-3)
def testShapeFunctionEdgeCases(self):
# Unknown multiples shape.
inp = constant_op.constant(0.0, shape=[4, 4, 4, 4])
tiled = array_ops.tile(inp, array_ops.placeholder(dtypes.int32))
self.assertEqual([None, None, None, None], tiled.get_shape().as_list())
# Unknown input shape.
inp = array_ops.placeholder(dtypes.float32)
tiled = array_ops.tile(inp, [2, 2, 2, 2])
self.assertEqual([None, None, None, None], tiled.get_shape().as_list())
# Unknown input and multiples shape.
inp = array_ops.placeholder(dtypes.float32)
tiled = array_ops.tile(inp, array_ops.placeholder(dtypes.int32))
self.assertIs(None, tiled.get_shape().ndims)
# Known input and partially known multiples.
inp = constant_op.constant(0.0, shape=[1, 1])
tiled = array_ops.tile(inp, [array_ops.placeholder(dtypes.int32), 7])
self.assertEqual([None, 7], tiled.get_shape().as_list())
# Mismatched input rank and multiples length.
inp = array_ops.placeholder(dtypes.float32, shape=[None, None])
with self.assertRaises(ValueError):
tiled = array_ops.tile(
inp, array_ops.placeholder(
dtypes.int32, shape=[3]))
if __name__ == "__main__":
test.main()
| apache-2.0 | -3,668,680,388,526,436,000 | 37.765812 | 80 | 0.626422 | false |
ideasman42/nirw-search | _misc/readme_update_helptext.py | 1 | 1880 | #!/usr/bin/env python3
# GPL License, Version 3.0 or later
import os
import subprocess
import textwrap
import re
BASE_DIR = os.path.join(os.path.dirname(__file__), "..")
COMMAND_NAME = 'nirw-search'
def patch_help_text(help_output):
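    """Massage argparse --help output so it renders as reStructuredText."""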
help_output = help_output.replace(
'usage: ' + COMMAND_NAME,
'usage::\n'
'\n'
' ' + COMMAND_NAME,
)
help_output = help_output.replace(
"{auto,always,never}", "<auto,always,never>",
)
return help_output
def main():
p = subprocess.run(
[
'python3',
os.path.join(BASE_DIR, COMMAND_NAME),
'--help',
],
stdout=subprocess.PIPE,
)
help_output = (
p.stdout.decode('utf-8').rstrip() +
'\n\n'
)
# strip trailing space
help_output = re.sub(r'[ \t]+(\n|\Z)', r'\1', help_output)
    help_output = patch_help_text(help_output)
# Try write reStructuredText directly!
# help_output = textwrap.indent(help_output, ' ')
help_output = (
'\nOutput of ``' + COMMAND_NAME + ' --help``\n\n' +
help_output
)
with open('readme.rst', 'r', encoding='utf-8') as f:
data = f.read()
help_begin_text = '.. BEGIN HELP TEXT'
help_end_text = '.. END HELP TEXT'
help_begin_index = data.find(help_begin_text)
help_end_index = data.find(help_end_text, help_begin_index)
if help_begin_index == -1:
print('Error: {!r} not found'.format(help_begin_text))
return
if help_end_index == -1:
print('Error: {!r} not found'.format(help_end_text))
return
help_begin_index += len(help_begin_text) + 1
data_update = data[:help_begin_index] + help_output + data[help_end_index:]
with open('readme.rst', 'w', encoding='utf-8') as f:
f.write(data_update)
if __name__ == "__main__":
main()
| gpl-3.0 | 6,781,015,439,794,729,000 | 23.415584 | 79 | 0.557447 | false |
andresgz/django | tests/migrations/test_graph.py | 99 | 11829 | import warnings
from django.db.migrations.exceptions import (
CircularDependencyError, NodeNotFoundError,
)
from django.db.migrations.graph import RECURSION_DEPTH_WARNING, MigrationGraph
from django.test import SimpleTestCase
from django.utils.encoding import force_text
class GraphTests(SimpleTestCase):
"""
Tests the digraph structure.
"""
def test_simple_graph(self):
"""
Tests a basic dependency graph:
        app_a:  0001 <-- 0002 <--- 0003 <-- 0004
                                 /
        app_b:  0001 <-- 0002 <-/
"""
# Build graph
graph = MigrationGraph()
graph.add_node(("app_a", "0001"), None)
graph.add_node(("app_a", "0002"), None)
graph.add_node(("app_a", "0003"), None)
graph.add_node(("app_a", "0004"), None)
graph.add_node(("app_b", "0001"), None)
graph.add_node(("app_b", "0002"), None)
graph.add_dependency("app_a.0004", ("app_a", "0004"), ("app_a", "0003"))
graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002"))
graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"))
graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_b", "0002"))
graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001"))
# Test root migration case
self.assertEqual(
graph.forwards_plan(("app_a", "0001")),
[('app_a', '0001')],
)
# Test branch B only
self.assertEqual(
graph.forwards_plan(("app_b", "0002")),
[("app_b", "0001"), ("app_b", "0002")],
)
# Test whole graph
self.assertEqual(
graph.forwards_plan(("app_a", "0004")),
[
('app_b', '0001'), ('app_b', '0002'), ('app_a', '0001'),
('app_a', '0002'), ('app_a', '0003'), ('app_a', '0004'),
],
)
# Test reverse to b:0002
self.assertEqual(
graph.backwards_plan(("app_b", "0002")),
[('app_a', '0004'), ('app_a', '0003'), ('app_b', '0002')],
)
# Test roots and leaves
self.assertEqual(
graph.root_nodes(),
[('app_a', '0001'), ('app_b', '0001')],
)
self.assertEqual(
graph.leaf_nodes(),
[('app_a', '0004'), ('app_b', '0002')],
)
def test_complex_graph(self):
"""
Tests a complex dependency graph:
        app_a:  0001 <-- 0002 <--- 0003 <-- 0004
                      \        \ /         /
        app_b:  0001 <-\ 0002 <-X         /
                        \        \       /
        app_c:           \ 0001 <-- 0002 <-
"""
# Build graph
graph = MigrationGraph()
graph.add_node(("app_a", "0001"), None)
graph.add_node(("app_a", "0002"), None)
graph.add_node(("app_a", "0003"), None)
graph.add_node(("app_a", "0004"), None)
graph.add_node(("app_b", "0001"), None)
graph.add_node(("app_b", "0002"), None)
graph.add_node(("app_c", "0001"), None)
graph.add_node(("app_c", "0002"), None)
graph.add_dependency("app_a.0004", ("app_a", "0004"), ("app_a", "0003"))
graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002"))
graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"))
graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_b", "0002"))
graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001"))
graph.add_dependency("app_a.0004", ("app_a", "0004"), ("app_c", "0002"))
graph.add_dependency("app_c.0002", ("app_c", "0002"), ("app_c", "0001"))
graph.add_dependency("app_c.0001", ("app_c", "0001"), ("app_b", "0001"))
graph.add_dependency("app_c.0002", ("app_c", "0002"), ("app_a", "0002"))
# Test branch C only
self.assertEqual(
graph.forwards_plan(("app_c", "0002")),
[('app_b', '0001'), ('app_c', '0001'), ('app_a', '0001'), ('app_a', '0002'), ('app_c', '0002')],
)
# Test whole graph
self.assertEqual(
graph.forwards_plan(("app_a", "0004")),
[
('app_b', '0001'), ('app_c', '0001'), ('app_a', '0001'),
('app_a', '0002'), ('app_c', '0002'), ('app_b', '0002'),
('app_a', '0003'), ('app_a', '0004'),
],
)
# Test reverse to b:0001
self.assertEqual(
graph.backwards_plan(("app_b", "0001")),
[
('app_a', '0004'), ('app_c', '0002'), ('app_c', '0001'),
('app_a', '0003'), ('app_b', '0002'), ('app_b', '0001'),
],
)
# Test roots and leaves
self.assertEqual(
graph.root_nodes(),
[('app_a', '0001'), ('app_b', '0001'), ('app_c', '0001')],
)
self.assertEqual(
graph.leaf_nodes(),
[('app_a', '0004'), ('app_b', '0002'), ('app_c', '0002')],
)
def test_circular_graph(self):
"""
Tests a circular dependency graph.
"""
# Build graph
graph = MigrationGraph()
graph.add_node(("app_a", "0001"), None)
graph.add_node(("app_a", "0002"), None)
graph.add_node(("app_a", "0003"), None)
graph.add_node(("app_b", "0001"), None)
graph.add_node(("app_b", "0002"), None)
graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002"))
graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"))
graph.add_dependency("app_a.0001", ("app_a", "0001"), ("app_b", "0002"))
graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001"))
graph.add_dependency("app_b.0001", ("app_b", "0001"), ("app_a", "0003"))
# Test whole graph
self.assertRaises(
CircularDependencyError,
graph.forwards_plan, ("app_a", "0003"),
)
def test_circular_graph_2(self):
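        """A dependency cycle raises even when the requested node lies
        outside the cycle itself."""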
graph = MigrationGraph()
graph.add_node(('A', '0001'), None)
graph.add_node(('C', '0001'), None)
graph.add_node(('B', '0001'), None)
graph.add_dependency('A.0001', ('A', '0001'), ('B', '0001'))
graph.add_dependency('B.0001', ('B', '0001'), ('A', '0001'))
graph.add_dependency('C.0001', ('C', '0001'), ('B', '0001'))
self.assertRaises(
CircularDependencyError,
graph.forwards_plan, ('C', '0001')
)
def test_graph_recursive(self):
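        """A linear chain of ~750 migrations can be planned in both
        directions without exhausting the recursion limit."""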
graph = MigrationGraph()
root = ("app_a", "1")
graph.add_node(root, None)
expected = [root]
for i in range(2, 750):
parent = ("app_a", str(i - 1))
child = ("app_a", str(i))
graph.add_node(child, None)
graph.add_dependency(str(i), child, parent)
expected.append(child)
leaf = expected[-1]
forwards_plan = graph.forwards_plan(leaf)
self.assertEqual(expected, forwards_plan)
backwards_plan = graph.backwards_plan(root)
self.assertEqual(expected[::-1], backwards_plan)
def test_graph_iterative(self):
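        """A chain deep enough to exceed the recursion depth falls back to
        iterative traversal and emits RECURSION_DEPTH_WARNING."""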
graph = MigrationGraph()
root = ("app_a", "1")
graph.add_node(root, None)
expected = [root]
for i in range(2, 1000):
parent = ("app_a", str(i - 1))
child = ("app_a", str(i))
graph.add_node(child, None)
graph.add_dependency(str(i), child, parent)
expected.append(child)
leaf = expected[-1]
with warnings.catch_warnings(record=True) as w:
forwards_plan = graph.forwards_plan(leaf)
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
self.assertEqual(str(w[-1].message), RECURSION_DEPTH_WARNING)
self.assertEqual(expected, forwards_plan)
with warnings.catch_warnings(record=True) as w:
backwards_plan = graph.backwards_plan(root)
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
self.assertEqual(str(w[-1].message), RECURSION_DEPTH_WARNING)
self.assertEqual(expected[::-1], backwards_plan)
def test_plan_invalid_node(self):
"""
Tests for forwards/backwards_plan of nonexistent node.
"""
graph = MigrationGraph()
message = "Node ('app_b', '0001') not a valid node"
with self.assertRaisesMessage(NodeNotFoundError, message):
graph.forwards_plan(("app_b", "0001"))
with self.assertRaisesMessage(NodeNotFoundError, message):
graph.backwards_plan(("app_b", "0001"))
def test_missing_parent_nodes(self):
"""
Tests for missing parent nodes.
"""
# Build graph
graph = MigrationGraph()
graph.add_node(("app_a", "0001"), None)
graph.add_node(("app_a", "0002"), None)
graph.add_node(("app_a", "0003"), None)
graph.add_node(("app_b", "0001"), None)
graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002"))
graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"))
msg = "Migration app_a.0001 dependencies reference nonexistent parent node ('app_b', '0002')"
with self.assertRaisesMessage(NodeNotFoundError, msg):
graph.add_dependency("app_a.0001", ("app_a", "0001"), ("app_b", "0002"))
def test_missing_child_nodes(self):
"""
Tests for missing child nodes.
"""
# Build graph
graph = MigrationGraph()
graph.add_node(("app_a", "0001"), None)
msg = "Migration app_a.0002 dependencies reference nonexistent child node ('app_a', '0002')"
with self.assertRaisesMessage(NodeNotFoundError, msg):
graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"))
def test_infinite_loop(self):
"""
Tests a complex dependency graph:
        app_a:        0001 <-
                           \
        app_b:       0001 <- x 0002 <-
                      /               \
        app_c:   0001<-  <------------- x 0002
And apply squashing on app_c.
"""
graph = MigrationGraph()
graph.add_node(("app_a", "0001"), None)
graph.add_node(("app_b", "0001"), None)
graph.add_node(("app_b", "0002"), None)
graph.add_node(("app_c", "0001_squashed_0002"), None)
graph.add_dependency("app_b.0001", ("app_b", "0001"), ("app_c", "0001_squashed_0002"))
graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_a", "0001"))
graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001"))
graph.add_dependency("app_c.0001_squashed_0002", ("app_c", "0001_squashed_0002"), ("app_b", "0002"))
with self.assertRaises(CircularDependencyError):
graph.forwards_plan(("app_c", "0001_squashed_0002"))
def test_stringify(self):
graph = MigrationGraph()
self.assertEqual(force_text(graph), "Graph: 0 nodes, 0 edges")
graph.add_node(("app_a", "0001"), None)
graph.add_node(("app_a", "0002"), None)
graph.add_node(("app_a", "0003"), None)
graph.add_node(("app_b", "0001"), None)
graph.add_node(("app_b", "0002"), None)
graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"))
graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002"))
graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_b", "0002"))
self.assertEqual(force_text(graph), "Graph: 5 nodes, 3 edges")
self.assertEqual(repr(graph), "<MigrationGraph: nodes=5, edges=3>")
| bsd-3-clause | 3,032,008,089,554,108,400 | 38.962838 | 108 | 0.507482 | false |
ex1usive-m4d/TemplateDocx | controllers/phpdocx/lib/openoffice/openoffice.org/basis3.4/program/python-core-2.6.1/lib/encodings/unicode_internal.py | 827 | 1196 | """ Python 'unicode-internal' Codec
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
# Note: Binding these as C functions will result in the class not
# converting them to methods. This is intended.
encode = codecs.unicode_internal_encode
decode = codecs.unicode_internal_decode
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.unicode_internal_encode(input, self.errors)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.unicode_internal_decode(input, self.errors)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='unicode-internal',
encode=Codec.encode,
decode=Codec.decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamwriter=StreamWriter,
streamreader=StreamReader,
)
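# A round-trip sketch using the entry returned by getregentry() (an editorial
# illustration against the Python 2.6-era codecs API, not part of this file):
if __name__ == '__main__':
    info = getregentry()
    raw, _ = info.encode(u'abc')
    text, _ = info.decode(raw)
    assert text == u'abc'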
| bsd-3-clause | 5,250,936,364,363,388,000 | 25.577778 | 69 | 0.721572 | false |
galtys/odoo | addons/website/models/ir_actions.py | 363 | 3074 | # -*- coding: utf-8 -*-
import urlparse
from openerp.http import request
from openerp.osv import fields, osv
class actions_server(osv.Model):
""" Add website option in server actions. """
_name = 'ir.actions.server'
_inherit = ['ir.actions.server']
def _compute_website_url(self, cr, uid, id, website_path, xml_id, context=None):
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url', context=context)
link = website_path or xml_id or (id and '%d' % id) or ''
if base_url and link:
path = '%s/%s' % ('/website/action', link)
return '%s' % urlparse.urljoin(base_url, path)
return ''
def _get_website_url(self, cr, uid, ids, name, args, context=None):
res = dict.fromkeys(ids, False)
for action in self.browse(cr, uid, ids, context=context):
if action.state == 'code' and action.website_published:
res[action.id] = self._compute_website_url(cr, uid, action.id, action.website_path, action.xml_id, context=context)
return res
_columns = {
'xml_id': fields.function(
osv.osv.get_xml_id, type='char', string="External ID",
help="ID of the action if defined in a XML file"),
'website_path': fields.char('Website Path'),
'website_url': fields.function(
_get_website_url, type='char', string='Website URL',
help='The full URL to access the server action through the website.'),
'website_published': fields.boolean(
'Available on the Website', copy=False,
            help='A code server action can be executed from the website, using a '
                 'dedicated controller. The address is <base>/website/action/<website_path>. '
                 'Set this field to True to allow users to run this action. If it '
                 'is set to False, the action cannot be run through the website.'),
}
def on_change_website_path(self, cr, uid, ids, website_path, xml_id, context=None):
values = {
'website_url': self._compute_website_url(cr, uid, ids and ids[0] or None, website_path, xml_id, context=context)
}
return {'value': values}
def _get_eval_context(self, cr, uid, action, context=None):
""" Override to add the request object in eval_context. """
eval_context = super(actions_server, self)._get_eval_context(cr, uid, action, context=context)
if action.state == 'code':
eval_context['request'] = request
return eval_context
def run_action_code_multi(self, cr, uid, action, eval_context=None, context=None):
""" Override to allow returning response the same way action is already
returned by the basic server action behavior. Note that response has
priority over action, avoid using both. """
res = super(actions_server, self).run_action_code_multi(cr, uid, action, eval_context, context)
if 'response' in eval_context:
return eval_context['response']
return res
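# How the pieces above compose, in isolation (illustrative host/port and path,
# not taken from the source): _compute_website_url() joins
# '/website/action/<link>' onto the 'web.base.url' system parameter.
#     >>> import urlparse
#     >>> urlparse.urljoin('http://localhost:8069', '/website/action/my-action')
#     'http://localhost:8069/website/action/my-action'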
| agpl-3.0 | -1,722,944,007,512,355,300 | 46.292308 | 131 | 0.619714 | false |
schumi2004/NOT_UPDATED_Sick-Beard-Dutch | lib/requests/compat.py | 289 | 2433 | # -*- coding: utf-8 -*-
"""
pythoncompat
"""
from .packages import charade as chardet
import sys
# -------
# Pythons
# -------
# Syntax sugar.
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
#: Python 3.0.x
is_py30 = (is_py3 and _ver[1] == 0)
#: Python 3.1.x
is_py31 = (is_py3 and _ver[1] == 1)
#: Python 3.2.x
is_py32 = (is_py3 and _ver[1] == 2)
#: Python 3.3.x
is_py33 = (is_py3 and _ver[1] == 3)
#: Python 3.4.x
is_py34 = (is_py3 and _ver[1] == 4)
#: Python 2.7.x
is_py27 = (is_py2 and _ver[1] == 7)
#: Python 2.6.x
is_py26 = (is_py2 and _ver[1] == 6)
#: Python 2.5.x
is_py25 = (is_py2 and _ver[1] == 5)
#: Python 2.4.x
is_py24 = (is_py2 and _ver[1] == 4) # I'm assuming this is not by choice.
# ---------
# Platforms
# ---------
# Syntax sugar.
_ver = sys.version.lower()
is_pypy = ('pypy' in _ver)
is_jython = ('jython' in _ver)
is_ironpython = ('iron' in _ver)
# Assume CPython, if nothing else.
is_cpython = not any((is_pypy, is_jython, is_ironpython))
# Windows-based system.
is_windows = 'win32' in str(sys.platform).lower()
# Standard Linux 2+ system.
is_linux = ('linux' in str(sys.platform).lower())
is_osx = ('darwin' in str(sys.platform).lower())
is_hpux = ('hp-ux' in str(sys.platform).lower())  # sys.platform is e.g. 'hp-ux11'.
is_solaris = ('sunos' in str(sys.platform).lower())  # sys.platform is e.g. 'sunos5'.
try:
import simplejson as json
except ImportError:
import json
# ---------
# Specifics
# ---------
if is_py2:
from urllib import quote, unquote, quote_plus, unquote_plus, urlencode
from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
from urllib2 import parse_http_list
import cookielib
from Cookie import Morsel
from StringIO import StringIO
from .packages.urllib3.packages.ordered_dict import OrderedDict
builtin_str = str
bytes = str
str = unicode
basestring = basestring
numeric_types = (int, long, float)
elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
from urllib.request import parse_http_list
from http import cookiejar as cookielib
from http.cookies import Morsel
from io import StringIO
from collections import OrderedDict
builtin_str = str
str = str
bytes = bytes
basestring = (str, bytes)
numeric_types = (int, float)
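# A sketch of the intended use (editorial illustration, not part of the
# module): downstream code imports these normalized names instead of
# version-checking the interpreter itself.
if __name__ == '__main__':
    parts = urlparse('http://example.com/path?q=1')
    print(builtin_str(parts.netloc))  # 'example.com' on both 2.x and 3.x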
| gpl-3.0 | 277,311,219,310,263,360 | 20.530973 | 132 | 0.633785 | false |
Distrotech/PyQt-x11 | examples/animation/stickman/stickman_rc3.py | 5 | 44472 | # -*- coding: utf-8 -*-
# Resource object code
#
# Created: Wed Mar 20 11:18:27 2013
# by: The Resource Compiler for PyQt (Qt v4.8.4)
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore
qt_resource_data = b"\
\x00\x00\x05\x1c\
\xff\
\xff\xff\xff\x00\x00\x00\x05\x00\x00\x00\x10\x00\x00\x00\x00\x00\
\x00\x00\x00\xc0\x62\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\xc0\x59\x00\x00\x00\x00\x00\x00\xc0\x49\x00\x00\x00\
\x00\x00\x00\xc0\x49\x00\x00\x00\x00\x00\x00\x40\x49\x00\x00\x00\
\x00\x00\x00\xc0\x49\x00\x00\x00\x00\x00\x00\xc0\x39\x00\x00\x00\
\x00\x00\x00\x40\x49\x00\x00\x00\x00\x00\x00\x40\x39\x00\x00\x00\
\x00\x00\x00\x40\x49\x00\x00\x00\x00\x00\x00\xc0\x59\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc0\x5f\x40\x00\x00\
\x00\x00\x00\x40\x49\x00\x00\x00\x00\x00\x00\x40\x59\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x5f\x40\x00\x00\
\x00\x00\x00\x40\x49\x00\x00\x00\x00\x00\x00\xc0\x41\x80\x00\x00\
\x00\x00\x00\x40\x52\xc0\x00\x00\x00\x00\x00\x40\x41\x80\x00\x00\
\x00\x00\x00\x40\x52\xc0\x00\x00\x00\x00\x00\xc0\x39\x00\x00\x00\
\x00\x00\x00\x40\x69\x00\x00\x00\x00\x00\x00\xc0\x3e\x00\x00\x00\
\x00\x00\x00\x40\x72\xc0\x00\x00\x00\x00\x00\x40\x39\x00\x00\x00\
\x00\x00\x00\x40\x69\x00\x00\x00\x00\x00\x00\x40\x3e\x00\x00\x00\
\x00\x00\x00\x40\x72\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x10\xc0\
\x3c\x89\x78\xef\x64\x41\xf9\xc0\x66\xd9\xe8\x90\xe1\x7d\x15\xc0\
\x31\xe3\x24\x6e\x1a\x35\x4b\xc0\x60\xbe\xa8\xcb\xa3\x98\xe8\xc0\
\x4b\x26\x9c\xdd\x08\xb6\xce\xc0\x52\x55\xf3\x5e\x1e\x9a\xcc\x40\
\x45\x5c\xfa\x9b\x0f\x7d\x05\xc0\x58\x63\x42\x06\x4d\xc8\xaa\xc0\
\x17\x5f\xc1\xb2\xd2\x1c\x6d\x40\x31\x9d\xf7\x2f\xd9\x40\x6c\x40\
\x45\x56\x51\xec\xae\x7d\xee\x40\x16\x28\xbe\x3a\x2e\x6a\x36\xc0\
\x55\x8e\x3f\x19\x86\x36\x44\xc0\x24\x7d\xe2\xe5\x56\x5d\x8e\xc0\
\x5d\x38\xe5\xb0\x63\x56\x32\x40\x42\x3f\x2b\xb1\x33\xe6\xab\x40\
\x47\x27\x75\x17\xb2\xff\xe1\xc0\x65\x05\x80\x69\xc9\x45\xe4\x40\
\x46\xa5\x8b\x3a\x6c\xa7\xa8\xc0\x6c\x02\x0f\xa6\x73\xff\xbc\xc0\
\x23\x07\xbb\xb5\xb5\x51\x7d\x40\x46\x24\x27\xde\x37\xae\x65\x40\
\x4d\x34\xca\x20\x80\xfa\x8f\x40\x3b\x64\xbd\x21\x5f\x80\x81\xc0\
\x37\xe9\x56\x97\x59\x09\x15\x40\x65\x1b\x2f\x86\xb4\x04\x4d\xc0\
\x3c\xb5\xb5\xc5\x5a\xd5\x30\x40\x70\xcd\xc0\x3b\x0e\xef\xd6\x40\
\x43\xce\x38\x01\x33\x34\xdd\x40\x62\xec\x1a\xba\x71\x62\x00\x40\
\x3f\x5e\x72\xef\x9e\x8c\x25\x40\x6f\x65\x3a\x4d\xed\xc0\xd9\x00\
\x00\x00\x10\xc0\x3c\x89\xb8\xa7\xeb\xa9\x01\xc0\x66\xda\x0d\x17\
\x53\x31\x17\xc0\x31\xe1\x8c\xf0\xba\xdf\x75\xc0\x60\xbe\xd1\xa6\
\x64\x43\x82\xc0\x4b\x26\x88\x07\x92\xca\xbf\xc0\x52\x56\x94\xaa\
\x30\x5d\x72\x40\x45\x5d\x52\x6d\x57\x9c\xd9\xc0\x58\x62\xcc\x60\
\x74\xfe\x54\xc0\x17\x69\xa4\x0b\x5f\xcf\x0e\x40\x31\xa0\xd0\x0e\
\xd0\x62\xed\x40\x45\x54\xc3\x45\xee\xd2\xcc\x40\x16\x23\xd3\x38\
\xd6\x69\x28\xc0\x55\x8e\x03\x71\x6e\x9d\x31\xc0\x24\x82\xdb\x79\
\xc3\x1b\x6e\xc0\x5d\x38\x8a\xc7\x14\xca\x74\x40\x42\x3e\x16\x8f\
\x10\x50\x33\x40\x47\x27\x55\xb2\x6a\xa5\x2d\xc0\x65\x05\x3e\xa8\
\x00\x01\x15\x40\x46\xa5\x8f\xfd\x66\x67\x23\xc0\x6c\x01\xce\x0e\
\xaa\x42\x46\xc0\x23\x05\x39\x8c\x95\xa5\x0f\x40\x46\x26\xe0\xb5\
\x34\x5e\x5e\x40\x4d\x34\xad\x03\x49\x41\x5e\x40\x3b\x5f\xf9\x28\
\x4b\xa6\x62\xc0\x37\xea\x6c\x93\xfc\xa6\xb3\x40\x65\x1b\xec\xb0\
\x7b\xa8\xfc\xc0\x3c\xb5\xfe\xd2\xbb\xf3\x4b\x40\x70\xce\x1f\x6d\
\x37\x5e\x7d\x40\x43\xce\xcc\x1c\x47\x26\xc2\x40\x62\xeb\x73\x80\
\x3a\x8b\xaf\x40\x3f\x5e\xe2\xb7\xae\x5f\xb6\x40\x6f\x64\x91\x2c\
\x26\xca\x21\x00\x00\x00\x10\xc0\x38\x01\x30\x6b\xba\xdc\x1b\xc0\
\x64\xeb\xbe\xf4\x99\xf2\x9b\xc0\x13\xa3\x04\x1c\x30\xdb\xb6\xc0\
\x5e\x4a\x12\x9f\x80\xef\xb8\xc0\x48\xf6\x8c\x7d\xe9\xbd\xf2\xc0\
\x50\xa8\x10\x7e\x26\x5b\x6d\x40\x48\xcf\x7a\x59\xa1\x24\xc5\xc0\
\x53\x09\x35\x0a\x19\xb4\xbb\xc0\x2f\x0b\x6a\x35\x97\xd3\x7e\x40\
\x3e\x88\x20\x7b\xc1\xb4\x2f\x40\x41\x20\x0b\x54\x51\x2e\x7e\x40\
\x39\xc9\x9e\xf7\xb2\xd8\xc6\xc0\x55\xfe\xc6\x23\xb0\x4f\x25\xc0\
\x1c\x1c\x60\x3d\x66\x92\xe7\xc0\x5d\xba\xce\x5e\x2a\x54\x25\x40\
\x43\xc4\x24\x4f\x00\x90\x62\x40\x58\x0b\xce\x4b\xd9\x22\xe1\xc0\
\x36\xed\xde\xb4\x40\x2c\x73\x40\x5a\xd1\x4e\xef\xc4\x03\x6b\x40\
\x3f\xdc\xbc\xa5\x13\x81\x98\xc0\x37\x1b\xea\x35\xa6\x66\xff\x40\
\x4c\x2e\x2d\x1e\x28\x01\x86\x40\x47\x49\xa3\x1a\xd5\xe2\xf0\x40\
\x48\xdd\x6f\xf6\xf0\x01\x68\xc0\x38\xc1\x32\x29\x64\xfb\x5b\x40\
\x66\xb7\xea\x61\x5e\xb1\x57\xc0\x3d\x45\x4c\x15\xef\x6f\x50\x40\
\x71\x9c\x53\x80\x4b\x64\x0c\x40\x42\x8c\x98\x55\x36\x48\x2a\x40\
\x65\xd8\xb5\x79\xf6\x83\x89\x40\x3d\xa6\xc3\x54\x49\x2b\x98\x40\
\x71\x29\xea\x7e\x10\x5d\xc3\x00\x00\x00\x10\x40\x31\xc2\x13\x60\
\x88\x8b\x4e\xc0\x68\x3c\xec\xdf\xc9\x5c\x2f\x40\x19\x5a\x83\xca\
\x1f\xc9\xc5\xc0\x62\x27\x36\xb4\x9b\x52\x60\xc0\x48\x60\x10\xa3\
\xc6\x74\xb4\xc0\x59\x39\x43\xd2\xf9\x7b\x4b\x40\x49\x55\xa6\x3b\
\x8b\xb1\xc9\xc0\x56\x88\xbb\x33\x01\xdc\x6b\xc0\x41\x53\xb3\x9c\
\x85\xff\x25\x3f\xf3\x70\x84\x8f\xed\xe9\x9b\x40\x2e\x1c\xbe\xb0\
\x48\x52\x79\x40\x1a\x5f\x89\xdb\x5b\x45\xc0\xc0\x50\xc0\xce\xa2\
\xc3\xe5\x44\xc0\x65\x26\x9c\x40\xed\xa0\x1a\xc0\x54\xa2\x72\xde\
\x2f\x48\xc9\xc0\x6b\xdd\x16\xce\x9e\x56\xb3\x40\x55\xdc\x36\x69\
\xb4\x6e\x69\xc0\x3d\xbd\x2f\xf0\x4e\x71\xd7\x40\x5a\x39\x0b\xe0\
\xb9\x21\xc7\x40\x37\x5e\x81\xe7\xe6\x46\x06\xc0\x47\xa4\x74\xb8\
\xaf\xb7\xee\x40\x38\xfe\xaa\x2d\x0e\x97\x7e\x40\x36\x4f\x1c\x98\
\xc9\x38\x7d\x40\x40\x43\x0a\x66\x30\x52\xdc\xc0\x3f\xd3\xf0\xcb\
\x87\x9d\x38\x40\x62\xae\x04\x10\x53\xde\x8c\xc0\x3d\x00\x25\x84\
\x92\xd2\x45\x40\x6f\x30\xbc\x58\x1f\xbc\xdf\x40\x41\x44\xa6\xc0\
\x0b\x29\xd8\x40\x63\xaa\x6a\x7a\x0d\xd9\x34\x40\x3e\xab\x98\x09\
\xd9\x42\x57\x40\x70\x16\x03\x15\xe6\x2f\x0a\
\x00\x00\x01\x0c\
\xff\
\xff\xff\xff\x00\x00\x00\x01\x00\x00\x00\x10\xc0\x72\x75\xd0\xbc\
\x4a\x7e\x3b\x40\x6e\xa8\x3a\x76\xcc\x4a\x4f\xc0\x6e\xc4\x15\x8d\
\xdb\x8a\x81\x40\x6d\x91\x95\x4a\xaf\x90\x71\xc0\x69\x4c\xfe\x50\
\x6c\x77\x6e\x40\x72\x41\xf5\x29\xa3\x44\xca\xc0\x67\xd1\xbb\x6b\
\x66\xf4\xfc\x40\x68\x1a\x7c\x41\xab\x27\xf0\xc0\x59\x09\x41\xf0\
\xd6\x29\x49\x40\x71\x72\x68\x78\x60\x60\x84\xc0\x57\x8e\x4b\x36\
\x5e\x8b\x6f\x40\x6c\xb0\x0b\xce\x10\xc5\x22\xc0\x70\xf5\x12\xcb\
\xa7\x78\x95\x40\x71\x44\x4e\x7a\x50\x85\x9f\xc0\x74\x72\x66\x5a\
\x1e\x1b\xae\x40\x71\x17\xf1\xe4\xf6\x7f\x7e\xc0\x64\x2e\x57\x5b\
\xe3\x6f\x6c\x40\x70\x14\x53\x5e\x5b\x6a\x7a\xc0\x60\x2d\xc9\x35\
\x26\xaf\xd0\x40\x72\xf1\x84\xb6\xf0\xd8\xae\xc0\x53\x20\x21\x2d\
\x64\x5b\xb3\x40\x72\x40\xa0\x97\xbf\x55\x8f\xc0\x51\x0d\xcf\x0f\
\x1b\x48\x3c\x40\x6b\xd1\x05\xa8\x2a\xdf\x62\x40\x44\x77\x7b\xec\
\x05\x04\x13\x40\x6f\x01\xfc\x1b\x65\x69\x69\x40\x61\x21\xf2\x32\
\xa1\xd8\xd9\x40\x71\x41\x25\x0f\x44\xd0\xa8\x40\x4c\x88\xe7\x4e\
\xbb\xeb\xb8\x40\x6c\x7b\xe4\x15\x28\x7a\x0a\x40\x62\x87\x8c\xab\
\xb5\x95\xf1\x40\x70\xd4\x4b\x4c\x03\x9d\x83\
\x00\x00\x19\x6c\
\xff\
\xff\xff\xff\x00\x00\x00\x19\x00\x00\x00\x10\x40\x38\x4c\xca\x3f\
\x4a\x24\xde\xc0\x62\x97\x1b\x32\x01\x5d\xf3\x40\x3a\x05\x25\x01\
\xce\x14\x45\xc0\x58\xb0\x20\xdd\x20\x86\x54\xc0\x40\xc3\x31\xca\
\x2d\xe7\xe8\xc0\x4e\x4e\xc6\x85\x2f\x62\xd8\x40\x50\x0a\x03\xb2\
\x01\xc4\x5d\xc0\x43\x9a\x8e\xca\x3e\xac\x2f\xc0\x3e\x83\x6a\xb1\
\x60\x3b\xb1\x40\x45\x35\x2f\x6e\x30\x18\x5c\x40\x32\x53\x99\xcb\
\xc5\x8d\xfc\x40\x4a\x8f\xc7\x9e\xd3\xec\x98\xc0\x44\x7e\x7d\xc3\
\xb1\x2d\x7e\xc0\x60\x5d\xcb\xb7\xd6\xde\xd5\x40\x22\xa2\xe4\x37\
\x5a\x12\x12\xc0\x63\x69\xe1\x79\x61\x98\x47\x40\x50\x78\x0c\x5a\
\xfa\x18\xc4\xc0\x5b\x79\x6e\x7b\xba\xd3\x77\x40\x3c\x78\x0c\xfd\
\x1b\x24\x7a\xc0\x62\xee\x0e\xa0\x41\x5e\x50\xc0\x46\xd1\x51\x1c\
\xc7\x57\xf6\x40\x50\x2c\x59\x00\x05\x07\x96\x40\x36\xbd\x5a\xba\
\x55\x33\x3c\x40\x53\xeb\xd5\xdc\x95\x84\x02\xc0\x64\x7c\xe6\xa7\
\x82\xd4\x73\x40\x5a\x98\x3a\x9d\x99\xd2\x25\xc0\x67\x9f\x24\xfc\
\xa8\x8b\x88\x40\x69\x6a\x0c\xdc\x04\x07\xad\x40\x61\x88\x18\xa6\
\x50\x33\xdf\x40\x5e\xdc\xf7\xac\x43\x1d\x93\x40\x60\xa1\xe2\xfa\
\x78\x8e\x27\x40\x6b\xea\x33\x3e\x3c\x8e\x55\x00\x00\x00\x10\x40\
\x38\x4c\xcb\x75\x7c\x04\xe4\xc0\x62\x97\x16\xcb\x46\x05\x9e\x40\
\x3a\x04\xf9\x90\xa0\xa1\x90\xc0\x58\xb0\x1c\x43\xe6\x16\xc6\xc0\
\x40\xc3\x37\x29\xb0\xad\x71\xc0\x4e\x4e\xa0\x29\xf8\xaf\x63\x40\
\x50\x0a\x04\xf7\x7a\x75\xf9\xc0\x43\x9a\xa3\xc7\xff\x8e\xe7\xc0\
\x3e\x83\x24\xc7\x88\x06\x31\x40\x45\x35\x0f\x82\x42\x98\x23\x40\
\x32\x53\xbe\x35\xfa\x10\xf1\x40\x4a\x8f\xeb\x86\x7e\xa9\xe7\xc0\
\x44\x7e\x73\x44\xb1\xe1\x51\xc0\x60\x5d\xc2\x5a\x55\xc7\x63\x40\
\x22\xa2\xcb\x69\x59\x50\x4d\xc0\x63\x69\xe0\xb9\x0f\x32\xde\x40\
\x50\x78\x10\xb9\x1c\xd7\xaa\xc0\x5b\x79\x7a\x66\xb3\x79\x6b\x40\
\x3c\x77\xfd\x1b\x3d\x06\x10\xc0\x62\xee\x10\xd4\xaa\xf0\xf0\xc0\
\x46\xd1\x4e\xde\x36\xe2\x04\x40\x50\x2c\x31\x73\xb4\xd5\x4d\x40\
\x36\xbd\x01\xb5\xb5\x00\xbf\x40\x53\xeb\xec\x32\x09\x0f\xf3\xc0\
\x64\x7c\xde\xd4\xf9\xea\x5a\x40\x5a\x98\x39\x7f\xc7\xf1\x43\xc0\
\x67\x9f\x24\x5c\x76\xd5\xf3\x40\x69\x6a\x0a\x70\x9e\x4e\xa4\x40\
\x61\x88\x12\x1f\x05\xdd\x03\x40\x5e\xdc\xf4\xca\xc1\xac\x79\x40\
\x60\xa1\xe3\x0c\x44\xff\x6e\x40\x6b\xea\x32\x47\x27\x58\xb5\x00\
\x00\x00\x10\x40\x38\x4c\xcb\x75\x7c\x04\xe4\xc0\x62\x97\x16\xcb\
\x46\x05\x9e\x40\x3a\x04\xf9\x90\xa0\xa1\x90\xc0\x58\xb0\x1c\x43\
\xe6\x16\xc6\xc0\x40\xc3\x37\x29\xb0\xad\x71\xc0\x4e\x4e\xa0\x29\
\xf8\xaf\x63\x40\x50\x0a\x04\xf7\x7a\x75\xf9\xc0\x43\x9a\xa3\xc7\
\xff\x8e\xe7\xc0\x3e\x83\x24\xc7\x88\x06\x31\x40\x45\x35\x0f\x82\
\x42\x98\x23\x40\x32\x53\xbe\x35\xfa\x10\xf1\x40\x4a\x8f\xeb\x86\
\x7e\xa9\xe7\xc0\x44\x7e\x73\x44\xb1\xe1\x51\xc0\x60\x5d\xc2\x5a\
\x55\xc7\x63\x40\x22\xa2\xcb\x69\x59\x50\x4d\xc0\x63\x69\xe0\xb9\
\x0f\x32\xde\x40\x50\x78\x10\xb9\x1c\xd7\xaa\xc0\x5b\x79\x7a\x66\
\xb3\x79\x6b\x40\x3c\x77\xfd\x1b\x3d\x06\x10\xc0\x62\xee\x10\xd4\
\xaa\xf0\xf0\xc0\x46\xd1\x4e\xde\x36\xe2\x04\x40\x50\x2c\x31\x73\
\xb4\xd5\x4d\x40\x36\xbd\x01\xb5\xb5\x00\xbf\x40\x53\xeb\xec\x32\
\x09\x0f\xf3\xc0\x64\x7c\xde\xd4\xf9\xea\x5a\x40\x5a\x98\x39\x7f\
\xc7\xf1\x43\xc0\x67\x9f\x24\x5c\x76\xd5\xf3\x40\x69\x6a\x0a\x70\
\x9e\x4e\xa4\x40\x61\x88\x12\x1f\x05\xdd\x03\x40\x5e\xdc\xf4\xca\
\xc1\xac\x79\x40\x60\xa1\xe3\x0c\x44\xff\x6e\x40\x6b\xea\x32\x47\
\x27\x58\xb5\x00\x00\x00\x10\x40\x38\x4c\xc9\x02\x38\xef\xcd\xc0\
\x62\x97\x1f\xb1\x2b\xf9\x2a\x40\x3a\x05\x52\xd2\x95\xd7\xab\xc0\
\x58\xb0\x25\xaa\x53\xdb\xfb\xc0\x40\xc3\x2b\xf2\x52\x75\x9c\xc0\
\x4e\x4e\xee\xcf\x9b\x90\x20\x40\x50\x0a\x02\x68\x2b\x0a\x79\xc0\
\x43\x9a\x79\x01\x1a\xe6\xb9\xc0\x3e\x83\xb4\x36\x4e\x91\x1d\x40\
\x45\x35\x51\x17\x06\xbb\x13\x40\x32\x53\x73\x48\x82\xb8\xb3\x40\
\x4a\x8f\xa1\xde\xcc\x91\x1f\xc0\x44\x7e\x88\xc8\x99\xcc\xe3\xc0\
\x60\x5d\xd5\x8c\xe2\xd4\xd6\x40\x22\xa2\xfe\x41\x73\x93\xb3\xc0\
\x63\x69\xe2\x43\x44\x69\xd7\x40\x50\x78\x07\xd5\xb0\x97\xb1\xc0\
\x5b\x79\x62\x25\xed\x42\x8d\x40\x3c\x78\x1d\x6d\x64\xc2\xa2\xc0\
\x62\xee\x0c\x58\x0f\x63\xac\xc0\x46\xd1\x53\xa5\xee\xcf\x4a\x40\
\x50\x2c\x82\x94\x9a\xd5\x79\x40\x36\xbd\xb8\x76\x84\x73\x91\x40\
\x53\xeb\xbe\x59\x7d\x1d\xee\xc0\x64\x7c\xee\xed\xf6\x5d\x09\x40\
\x5a\x98\x3b\xcb\xed\xe6\x6c\xc0\x67\x9f\x25\xa6\x1c\x6e\xe9\x40\
\x69\x6a\x0f\x6b\x3b\xad\xe5\x40\x61\x88\x1f\x86\x72\xb7\xce\x40\
\x5e\xdc\xfa\xb5\x02\x69\xbf\x40\x60\xa1\xe2\xe7\xb9\x5c\x6a\x40\
\x6b\xea\x34\x42\x75\x16\xa6\x00\x00\x00\x10\x40\x38\x4c\xc8\xf1\
\x73\x46\x04\xc0\x62\x97\x1f\xee\x0c\xf5\x37\x40\x3a\x05\x55\x49\
\x9a\x94\xdf\xc0\x58\xb0\x25\xec\x1f\xd6\x66\xc0\x40\xc3\x2b\xa0\
\x8b\xcc\x68\xc0\x4e\x4e\xf0\xf9\x4c\x7d\xc5\x40\x50\x0a\x02\x56\
\xcf\x1b\x74\xc0\x43\x9a\x77\xd7\x9d\x67\xb9\xc0\x3e\x83\xb8\x29\
\x68\x18\xac\x40\x45\x35\x52\xe6\x88\x0c\x08\x40\x32\x53\x71\x35\
\x56\x27\xbe\x40\x4a\x8f\x9f\xd7\xb3\x3a\x64\xc0\x44\x7e\x89\x5f\
\xfb\xd6\xa6\xc0\x60\x5d\xd6\x13\xf6\xcd\x94\x40\x22\xa2\xff\xa7\
\x2c\xa7\x01\xc0\x63\x69\xe2\x4e\x19\xcc\x87\x40\x50\x78\x07\x98\
\x0d\x09\xcb\xc0\x5b\x79\x61\x7d\xbc\xc0\x33\x40\x3c\x78\x1e\x4d\
\x88\xf4\x35\xc0\x62\xee\x0c\x38\xf1\xc4\xe8\xc0\x46\xd1\x53\xca\
\x0b\x08\xfb\x40\x50\x2c\x84\xd0\x5d\x55\x8c\x40\x36\xbd\xbd\x80\
\x5a\xad\x4a\x40\x53\xeb\xbd\x15\xf5\x07\xab\xc0\x64\x7c\xef\x60\
\x1c\x2f\x98\x40\x5a\x98\x3b\xdc\x37\xef\xe9\xc0\x67\x9f\x25\xaf\
\x3d\xbf\xd4\x40\x69\x6a\x0f\x8e\x89\x77\x9f\x40\x61\x88\x1f\xe5\
\x0f\x7e\xd1\x40\x5e\xdc\xfa\xde\xc3\x4f\x44\x40\x60\xa1\xe2\xe6\
\xb7\x5f\xeb\x40\x6b\xea\x34\x50\x71\xf2\x5e\x00\x00\x00\x10\x40\
\x38\x4c\xc8\xe4\xe9\x4a\xe6\xc0\x62\x97\x20\x1b\x90\x2c\x4f\x40\
\x3a\x05\x57\x22\x0c\x24\x7d\xc0\x58\xb0\x26\x1d\x5d\x86\x5d\xc0\
\x40\xc3\x2b\x63\x3b\xcf\x8c\xc0\x4e\x4e\xf2\x97\xc3\x66\xc7\x40\
\x50\x0a\x02\x49\xd6\x8c\x4d\xc0\x43\x9a\x76\xf9\x0d\x28\xdf\xc0\
\x3e\x83\xbb\x1e\x53\x10\x07\x40\x45\x35\x54\x41\x8c\xf4\x10\x40\
\x32\x53\x6f\xa7\x9a\x5e\xd5\x40\x4a\x8f\x9e\x53\x1a\xe6\x20\xc0\
\x44\x7e\x89\xd1\x4c\x49\xec\xc0\x60\x5d\xd6\x79\x12\xe1\x41\x40\
\x22\xa3\x00\xb2\xf0\x65\xf7\xc0\x63\x69\xe2\x56\x35\xbe\x3d\x40\
\x50\x78\x07\x69\xf1\x50\x4e\xc0\x5b\x79\x60\xff\xec\xcb\x67\x40\
\x3c\x78\x1e\xf5\x33\x9c\xb3\xc0\x62\xee\x0c\x21\xab\x29\xe4\xc0\
\x46\xd1\x53\xe5\x27\x99\x97\x40\x50\x2c\x86\x7c\x61\xad\xa6\x40\
\x36\xbd\xc1\x45\xf8\xa0\x3f\x40\x53\xeb\xbc\x23\xbf\xbf\x32\xc0\
\x64\x7c\xef\xb5\x95\x62\x2d\x40\x5a\x98\x3b\xe8\x6a\x6d\xa5\xc0\
\x67\x9f\x25\xb6\x13\xea\x8f\x40\x69\x6a\x0f\xa8\xf8\xea\x0c\x40\
\x61\x88\x20\x2b\xe3\xfb\xb4\x40\x5e\xdc\xfa\xfe\x05\x65\x16\x40\
\x60\xa1\xe2\xe5\xf6\x3c\x86\x40\x6b\xea\x34\x5a\xea\xb2\xfe\x00\
\x00\x00\x10\x40\x38\x4c\xc8\xd4\x84\xf3\x4d\xc0\x62\x97\x20\x57\
\x0f\x3b\xd7\x40\x3a\x05\x59\x8c\x8a\x5e\xce\xc0\x58\xb0\x26\x5d\
\xcd\x2f\x15\xc0\x40\xc3\x2b\x12\xda\xcc\x87\xc0\x4e\x4e\xf4\xb6\
\x3e\xa1\x52\x40\x50\x0a\x02\x38\xe4\x10\x8c\xc0\x43\x9a\x75\xd5\
\xe6\xa1\x5c\xc0\x3e\x83\xbe\xfd\x1d\x6c\x0f\x40\x45\x35\x56\x07\
\xd6\x3e\xb6\x40\x32\x53\x6d\x9e\xdc\x7b\x7c\x40\x4a\x8f\x9c\x56\
\x73\x8e\x7b\xc0\x44\x7e\x8a\x65\x9b\x9a\xc2\xc0\x60\x5d\xd6\xfd\
\x68\xf0\x6a\x40\x22\xa3\x02\x11\x65\xf2\x80\xc0\x63\x69\xe2\x60\
\xd2\xcd\x1f\x40\x50\x78\x07\x2d\xa2\x09\x53\xc0\x5b\x79\x60\x5b\
\x5c\xc5\x6c\x40\x3c\x78\x1f\xd0\x82\x80\xbc\xc0\x62\xee\x0c\x03\
\x39\x4a\xd4\xc0\x46\xd1\x54\x08\xbe\xbb\x9e\x40\x50\x2c\x88\xac\
\xa3\x4c\x7c\x40\x36\xbd\xc6\x35\xfc\x61\x64\x40\x53\xeb\xba\xe6\
\xb1\x05\x48\xc0\x64\x7c\xf0\x25\x7e\x80\x23\x40\x5a\x98\x3b\xf8\
\x62\xb1\x2e\xc0\x67\x9f\x25\xbf\x07\x67\xde\x40\x69\x6a\x0f\xcb\
\x95\x87\xec\x40\x61\x88\x20\x88\x9b\xc5\x2f\x40\x5e\xdc\xfb\x26\
\xf0\x4e\x91\x40\x60\xa1\xe2\xe4\xf9\x69\xcb\x40\x6b\xea\x34\x68\
\x9f\xdc\x0a\x00\x00\x00\x10\x40\x38\x4c\xc8\xcf\x45\x6d\x70\xc0\
\x62\x97\x20\x6a\x1b\x9c\x23\x40\x3a\x05\x5a\x52\xc4\x7b\x1c\xc0\
\x58\xb0\x26\x72\x72\x66\xea\xc0\x40\xc3\x2a\xf9\x10\xed\xb9\xc0\
\x4e\x4e\xf5\x64\x15\x69\xb0\x40\x50\x0a\x02\x33\x77\xa5\x56\xc0\
\x43\x9a\x75\x78\xa3\x77\x6b\xc0\x3e\x83\xc0\x3a\xa2\x9b\x18\x40\
\x45\x35\x56\x99\x6e\xfb\xd2\x40\x32\x53\x6c\xf7\xf3\x6f\x59\x40\
\x4a\x8f\x9b\xb3\x71\xd9\x7e\xc0\x44\x7e\x8a\x95\x22\x08\xc3\xc0\
\x60\x5d\xd7\x27\xd0\xff\x24\x40\x22\xa3\x02\x81\xb3\x6a\x1c\xc0\
\x63\x69\xe2\x64\x39\x77\x1e\x40\x50\x78\x07\x1a\x50\xf1\xd4\xc0\
\x5b\x79\x60\x26\xa7\x76\x2b\x40\x3c\x78\x20\x16\xc0\xae\xfd\xc0\
\x62\xee\x0b\xf9\x78\xf7\x9a\xc0\x46\xd1\x54\x14\x2c\xd4\x30\x40\
\x50\x2c\x89\x60\x2e\x92\x45\x40\x36\xbd\xc7\xcb\x13\x78\x86\x40\
\x53\xeb\xba\x81\x14\xb0\x4f\xc0\x64\x7c\xf0\x49\x5d\x6b\x81\x40\
\x5a\x98\x3b\xfd\x81\x19\xae\xc0\x67\x9f\x25\xc1\xe5\xe5\x65\x40\
\x69\x6a\x0f\xd6\xad\x9f\xef\x40\x61\x88\x20\xa6\x52\x94\x24\x40\
\x5e\xdc\xfb\x34\x0d\x4b\xd1\x40\x60\xa1\xe2\xe4\xa8\x63\x78\x40\
\x6b\xea\x34\x6d\x04\x78\x67\x00\x00\x00\x10\x40\x38\x4c\xc8\xc3\
\x53\xf0\xd7\xc0\x62\x97\x20\x95\x74\xbf\xc7\x40\x3a\x05\x5c\x16\
\x3e\x0f\x2d\xc0\x58\xb0\x26\xa1\x74\xe5\x62\xc0\x40\xc3\x2a\xbe\
\x48\x1a\xc9\xc0\x4e\x4e\xf6\xef\xf7\x35\xd6\x40\x50\x0a\x02\x27\
\x21\x30\x73\xc0\x43\x9a\x74\xa4\x51\xd1\x0d\xc0\x3e\x83\xc3\x0d\
\xc2\x0e\xf4\x40\x45\x35\x57\xe5\x09\x34\x5f\x40\x32\x53\x6b\x7b\
\xc7\x91\x61\x40\x4a\x8f\x9a\x40\x37\x7d\xc1\xc0\x44\x7e\x8b\x01\
\x5c\x12\x71\xc0\x60\x5d\xd7\x88\x63\x00\x6f\x40\x22\xa3\x03\x81\
\x71\x1e\xdd\xc0\x63\x69\xe2\x6b\xf8\x2e\x8c\x40\x50\x78\x06\xee\
\x57\xe8\x39\xc0\x5b\x79\x5f\xae\xab\x35\x22\x40\x3c\x78\x20\xb6\
\xa7\x72\x0b\xc0\x62\xee\x0b\xe3\x46\x4f\xd6\xc0\x46\xd1\x54\x2e\
\x40\x0a\x1b\x40\x50\x2c\x8a\xf9\x12\xa8\xf6\x40\x36\xbd\xcb\x65\
\xa5\xcc\xd1\x40\x53\xeb\xb9\x99\xaa\xcf\x03\xc0\x64\x7c\xf0\x9b\
\x11\xbc\x05\x40\x5a\x98\x3c\x09\x29\xdc\x73\xc0\x67\x9f\x25\xc8\
\x6e\xde\xfd\x40\x69\x6a\x0f\xef\xf2\x9d\x38\x40\x61\x88\x20\xe9\
\xfe\xa9\x62\x40\x5e\xdc\xfb\x51\xea\xa9\x9f\x40\x60\xa1\xe2\xe3\
\xef\xdb\xd6\x40\x6b\xea\x34\x77\x05\xb5\xb1\x00\x00\x00\x10\x40\
\x38\x4c\xc8\xc2\x67\xdd\x57\xc0\x62\x97\x20\x98\xcd\x94\x1a\x40\
\x3a\x05\x5c\x39\x1f\x5d\xdb\xc0\x58\xb0\x26\xa5\x16\x7d\x84\xc0\
\x40\xc3\x2a\xb9\xbc\xba\x60\xc0\x4e\x4e\xf7\x0e\x8c\x4a\xb1\x40\
\x50\x0a\x02\x26\x2d\x64\x0a\xc0\x43\x9a\x74\x93\xeb\xe8\x2d\xc0\
\x3e\x83\xc3\x45\x9f\x4d\x2f\x40\x45\x35\x57\xfe\xa7\x9e\xb3\x40\
\x32\x53\x6b\x5e\x68\x2f\x32\x40\x4a\x8f\x9a\x23\x89\xc3\x22\xc0\
\x44\x7e\x8b\x09\xb8\x5e\x6c\xc0\x60\x5d\xd7\x8f\xd8\xc8\x46\x40\
\x22\xa3\x03\x95\x32\xa9\x80\xc0\x63\x69\xe2\x6c\x91\x58\xc3\x40\
\x50\x78\x06\xea\xf2\x89\xaa\xc0\x5b\x79\x5f\xa5\x67\x05\xe0\x40\
\x3c\x78\x20\xc3\x00\xcb\xb0\xc0\x62\xee\x0b\xe1\x8f\x72\x2a\xc0\
\x46\xd1\x54\x30\x44\x61\xe5\x40\x50\x2c\x8b\x18\xa9\x44\x23\x40\
\x36\xbd\xcb\xac\xeb\xcc\x19\x40\x53\xeb\xb9\x87\xca\x0f\x26\xc0\
\x64\x7c\xf0\xa1\x61\xc8\xea\x40\x5a\x98\x3c\x0a\x10\x78\x9d\xc0\
\x67\x9f\x25\xc8\xf0\x21\x1c\x40\x69\x6a\x0f\xf1\xe6\x6c\xcd\x40\
\x61\x88\x20\xef\x39\x09\x1b\x40\x5e\xdc\xfb\x54\x39\x4e\x8a\x40\
\x60\xa1\xe2\xe3\xe1\x9a\x53\x40\x6b\xea\x34\x77\xcb\x94\x20\x00\
\x00\x00\x10\x40\x38\x4c\xc8\xc1\x32\xe4\xf9\xc0\x62\x97\x20\x9d\
\x2e\xf8\xde\x40\x3a\x05\x5c\x66\xc6\xea\x0b\xc0\x58\xb0\x26\xa9\
\xd7\x32\x7d\xc0\x40\xc3\x2a\xb3\xc9\xdf\x14\xc0\x4e\x4e\xf7\x36\
\x93\x96\x7f\x40\x50\x0a\x02\x24\xee\x51\x65\xc0\x43\x9a\x74\x7e\
\x75\x91\x91\xc0\x3e\x83\xc3\x8e\xbe\x00\x43\x40\x45\x35\x58\x20\
\x2f\xea\x4a\x40\x32\x53\x6b\x37\xf6\x02\x6f\x40\x4a\x8f\x99\xfe\
\x00\x4b\x9f\xc0\x44\x7e\x8b\x14\xa9\xbf\x4b\xc0\x60\x5d\xd7\x99\
\x9c\x71\xc0\x40\x22\xa3\x03\xaf\x0e\x58\x32\xc0\x63\x69\xe2\x6d\
\x59\xd1\xf7\x40\x50\x78\x06\xe6\x80\xb1\x85\xc0\x5b\x79\x5f\x99\
\x46\x45\x3d\x40\x3c\x78\x20\xd3\x2a\x6b\xb5\xc0\x62\xee\x0b\xdf\
\x51\x0d\x21\xc0\x46\xd1\x54\x32\xe8\x5b\x66\x40\x50\x2c\x8b\x42\
\x01\xb4\x84\x40\x36\xbd\xcc\x0a\x35\xc8\xee\x40\x53\xeb\xb9\x70\
\x63\x9a\x1b\xc0\x64\x7c\xf0\xa9\xa5\x0c\xbe\x40\x5a\x98\x3c\x0b\
\x3e\x51\xa0\xc0\x67\x9f\x25\xc9\x99\x51\x18\x40\x69\x6a\x0f\xf4\
\x74\xa1\xea\x40\x61\x88\x20\xf6\x10\xd1\x9d\x40\x5e\xdc\xfb\x57\
\x3e\x65\x10\x40\x60\xa1\xe2\xe3\xce\xf1\x84\x40\x6b\xea\x34\x78\
\xce\x91\x2c\x00\x00\x00\x10\x40\x38\x4c\xc8\xc1\x32\xe4\xf9\xc0\
\x62\x97\x20\x9d\x2e\xf8\xde\x40\x3a\x05\x5c\x66\xc6\xea\x0b\xc0\
\x58\xb0\x26\xa9\xd7\x32\x7d\xc0\x40\xc3\x2a\xb3\xc9\xdf\x14\xc0\
\x4e\x4e\xf7\x36\x93\x96\x7f\x40\x50\x0a\x02\x24\xee\x51\x65\xc0\
\x43\x9a\x74\x7e\x75\x91\x91\xc0\x3e\x83\xc3\x8e\xbe\x00\x43\x40\
\x45\x35\x58\x20\x2f\xea\x4a\x40\x32\x53\x6b\x37\xf6\x02\x6f\x40\
\x4a\x8f\x99\xfe\x00\x4b\x9f\xc0\x44\x7e\x8b\x14\xa9\xbf\x4b\xc0\
\x60\x5d\xd7\x99\x9c\x71\xc0\x40\x22\xa3\x03\xaf\x0e\x58\x32\xc0\
\x63\x69\xe2\x6d\x59\xd1\xf7\x40\x50\x78\x06\xe6\x80\xb1\x85\xc0\
\x5b\x79\x5f\x99\x46\x45\x3d\x40\x3c\x78\x20\xd3\x2a\x6b\xb5\xc0\
\x62\xee\x0b\xdf\x51\x0d\x21\xc0\x46\xd1\x54\x32\xe8\x5b\x66\x40\
\x50\x2c\x8b\x42\x01\xb4\x84\x40\x36\xbd\xcc\x0a\x35\xc8\xee\x40\
\x53\xeb\xb9\x70\x63\x9a\x1b\xc0\x64\x7c\xf0\xa9\xa5\x0c\xbe\x40\
\x5a\x98\x3c\x0b\x3e\x51\xa0\xc0\x67\x9f\x25\xc9\x99\x51\x18\x40\
\x69\x6a\x0f\xf4\x74\xa1\xea\x40\x61\x88\x20\xf6\x10\xd1\x9d\x40\
\x5e\xdc\xfb\x57\x3e\x65\x10\x40\x60\xa1\xe2\xe3\xce\xf1\x84\x40\
\x6b\xea\x34\x78\xce\x91\x2c\x00\x00\x00\x10\x40\x38\x4c\xc8\xbf\
\xcf\xfc\x3c\xc0\x62\x97\x20\xa2\x37\x18\xbc\x40\x3a\x05\x5c\x9b\
\x39\x94\x7a\xc0\x58\xb0\x26\xaf\x4c\xea\x97\xc0\x40\xc3\x2a\xac\
\xf4\x31\x63\xc0\x4e\x4e\xf7\x64\x8f\x9a\x25\x40\x50\x0a\x02\x23\
\x7f\xd0\x06\xc0\x43\x9a\x74\x65\xcd\xff\xd0\xc0\x3e\x83\xc3\xe2\
\xbe\x25\x12\x40\x45\x35\x58\x46\xb5\xb3\x85\x40\x32\x53\x6b\x0b\
\xcb\x0d\x92\x40\x4a\x8f\x99\xd2\xe0\xed\x42\xc0\x44\x7e\x8b\x21\
\x3b\xf0\xf6\xc0\x60\x5d\xd7\xa4\xd4\x07\xaf\x40\x22\xa3\x03\xcc\
\xc2\xf6\xcf\xc0\x63\x69\xe2\x6e\x40\x1f\x33\x40\x50\x78\x06\xe1\
\x65\x9e\x9c\xc0\x5b\x79\x5f\x8b\x57\xc0\xf2\x40\x3c\x78\x20\xe5\
\xbb\x6d\x77\xc0\x62\xee\x0b\xdc\xbd\x3a\x54\xc0\x46\xd1\x54\x35\
\xf1\x1f\xf9\x40\x50\x2c\x8b\x71\x81\x27\x09\x40\x36\xbd\xcc\x75\
\x61\x96\xaf\x40\x53\xeb\xb9\x55\x81\xb9\x30\xc0\x64\x7c\xf0\xb3\
\x23\x22\x31\x40\x5a\x98\x3c\x0c\x99\x17\x47\xc0\x67\x9f\x25\xca\
\x5b\xaf\x44\x40\x69\x6a\x0f\xf7\x64\x34\xea\x40\x61\x88\x20\xfd\
\xed\x47\xed\x40\x5e\xdc\xfb\x5a\xb6\x86\x41\x40\x60\xa1\xe2\xe3\
\xb9\x81\xe1\x40\x6b\xea\x34\x79\xf8\x18\x55\x00\x00\x00\x10\x40\
\x38\x4c\xc8\xc0\x4e\x4f\x3d\xc0\x62\x97\x20\xa0\x6c\x9c\x10\x40\
\x3a\x05\x5c\x88\x8e\x66\x1c\xc0\x58\xb0\x26\xad\x5b\x68\x24\xc0\
\x40\xc3\x2a\xaf\x63\x02\x46\xc0\x4e\x4e\xf7\x54\x31\x68\xc3\x40\
\x50\x0a\x02\x24\x02\x43\xaf\xc0\x43\x9a\x74\x6e\x94\x89\x25\xc0\
\x3e\x83\xc3\xc4\xd7\xce\x09\x40\x45\x35\x58\x38\xff\x69\xa6\x40\
\x32\x53\x6b\x1b\x83\xc1\xd0\x40\x4a\x8f\x99\xe2\x3a\x5a\x44\xc0\
\x44\x7e\x8b\x1c\xc2\x70\xf4\xc0\x60\x5d\xd7\xa0\xd5\xe7\x48\x40\
\x22\xa3\x03\xc2\x30\x23\x5c\xc0\x63\x69\xe2\x6d\xee\x25\xab\x40\
\x50\x78\x06\xe3\x36\xdb\x68\xc0\x5b\x79\x5f\x90\x4d\x36\x90\x40\
\x3c\x78\x20\xdf\x1f\xa6\xa9\xc0\x62\xee\x0b\xdd\xa8\x15\x8e\xc0\
\x46\xd1\x54\x34\xdc\x9c\x16\x40\x50\x2c\x8b\x60\x99\x08\x6c\x40\
\x36\xbd\xcc\x4f\x3b\xe3\xde\x40\x53\xeb\xb9\x5f\x13\x4e\xf5\xc0\
\x64\x7c\xf0\xaf\xc2\x25\x0e\x40\x5a\x98\x3c\x0c\x1d\xa8\x48\xc0\
\x67\x9f\x25\xca\x16\x7f\xd6\x40\x69\x6a\x0f\xf6\x58\xaf\x08\x40\
\x61\x88\x20\xfb\x20\xf3\x53\x40\x5e\xdc\xfb\x59\x7a\x65\xa7\x40\
\x60\xa1\xe2\xe3\xc1\x23\x31\x40\x6b\xea\x34\x79\x8e\x30\xd7\x00\
\x00\x00\x10\x40\x38\x4c\xc8\xc0\x24\xc9\x42\xc0\x62\x97\x20\xa1\
\x03\x51\x04\x40\x3a\x05\x5c\x8e\xb1\x4a\x18\xc0\x58\xb0\x26\xad\
\xfe\xf0\x2a\xc0\x40\xc3\x2a\xae\x96\x4c\x84\xc0\x4e\x4e\xf7\x59\
\x92\xb9\xe9\x40\x50\x0a\x02\x23\xd7\x61\x93\xc0\x43\x9a\x74\x6b\
\xb2\x17\xa4\xc0\x3e\x83\xc3\xce\xab\xca\x54\x40\x45\x35\x58\x3d\
\x81\x3f\x26\x40\x32\x53\x6b\x16\x58\xd6\x2c\x40\x4a\x8f\x99\xdd\
\x2e\xc0\x07\xc0\x44\x7e\x8b\x1e\x3a\xf7\xb4\xc0\x60\x5d\xd7\xa2\
\x25\xe0\x7c\x40\x22\xa3\x03\xc5\xa9\xdf\x80\xc0\x63\x69\xe2\x6e\
\x09\x17\x9f\x40\x50\x78\x06\xe2\x9d\xee\x8d\xc0\x5b\x79\x5f\x8e\
\xab\xef\xf2\x40\x3c\x78\x20\xe1\x4b\xbe\x79\xc0\x62\xee\x0b\xdd\
\x5a\xe2\xd6\xc0\x46\xd1\x54\x35\x37\x7f\x10\x40\x50\x2c\x8b\x66\
\x27\xb0\x54\x40\x36\xbd\xcc\x5b\xc5\xe0\xc4\x40\x53\xeb\xb9\x5b\
\xee\x21\x36\xc0\x64\x7c\xf0\xb0\xde\x77\x49\x40\x5a\x98\x3c\x0c\
\x46\x3a\xd9\xc0\x67\x9f\x25\xca\x2d\x3d\x92\x40\x69\x6a\x0f\xf6\
\xb0\x9e\x4b\x40\x61\x88\x20\xfc\x0c\x68\x9b\x40\x5e\xdc\xfb\x59\
\xe2\x4e\xeb\x40\x60\xa1\xe2\xe3\xbe\xa1\x23\x40\x6b\xea\x34\x79\
\xb1\x00\x66\x00\x00\x00\x10\x40\x38\x4c\xc8\xbf\xee\x70\x14\xc0\
\x62\x97\x20\xa1\xc8\x92\x26\x40\x3a\x05\x5c\x96\xb9\x6c\xbd\xc0\
\x58\xb0\x26\xae\xd4\xfb\x26\xc0\x40\xc3\x2a\xad\x8a\x57\xfb\xc0\
\x4e\x4e\xf7\x60\x9d\x78\x70\x40\x50\x0a\x02\x23\x9f\x41\xd1\xc0\
\x43\x9a\x74\x67\xeb\x91\x0b\xc0\x3e\x83\xc3\xdb\x88\xea\x29\x40\
\x45\x35\x58\x43\x67\x7a\x47\x40\x32\x53\x6b\x0f\x95\x49\xb5\x40\
\x4a\x8f\x99\xd6\x94\x32\x19\xc0\x44\x7e\x8b\x20\x27\xcb\x9d\xc0\
\x60\x5d\xd7\xa3\xdd\xa0\x33\x40\x22\xa3\x03\xca\x36\x6d\xdb\xc0\
\x63\x69\xe2\x6e\x2c\x5c\x37\x40\x50\x78\x06\xe1\xd5\xc5\xef\xc0\
\x5b\x79\x5f\x8c\x89\xc7\x6e\x40\x3c\x78\x20\xe4\x23\x98\x6b\xc0\
\x62\xee\x0b\xdc\xf5\xd8\x14\xc0\x46\xd1\x54\x35\xae\x76\x39\x40\
\x50\x2c\x8b\x6d\x6d\xc6\x31\x40\x36\xbd\xcc\x6c\x2f\x5e\xd3\x40\
\x53\xeb\xb9\x57\xd0\x3e\xef\xc0\x64\x7c\xf0\xb2\x52\x9c\x24\x40\
\x5a\x98\x3c\x0c\x7b\x55\xad\xc0\x67\x9f\x25\xca\x4b\x01\x8e\x40\
\x69\x6a\x0f\xf7\x23\xb6\xf6\x40\x61\x88\x20\xfd\x40\x98\x5e\x40\
\x5e\xdc\xfb\x5a\x6a\x50\xcc\x40\x60\xa1\xe2\xe3\xbb\x58\xc4\x40\
\x6b\xea\x34\x79\xde\x90\x91\x00\x00\x00\x10\x40\x38\x4c\xc8\xbf\
\xdd\x05\x9e\xc0\x62\x97\x20\xa2\x07\xc7\xe2\x40\x3a\x05\x5c\x99\
\x4c\x59\xcf\xc0\x58\xb0\x26\xaf\x19\x92\x90\xc0\x40\xc3\x2a\xad\
\x34\x79\x79\xc0\x4e\x4e\xf7\x62\xdf\x2a\xa1\x40\x50\x0a\x02\x23\
\x8d\x45\xef\xc0\x43\x9a\x74\x66\xb5\xd7\xe9\xc0\x3e\x83\xc3\xdf\
\xa8\x34\xb2\x40\x45\x35\x58\x45\x4b\x6f\x42\x40\x32\x53\x6b\x0d\
\x6a\x68\xe4\x40\x4a\x8f\x99\xd4\x76\x73\x36\xc0\x44\x7e\x8b\x20\
\xc5\xb9\x45\xc0\x60\x5d\xd7\xa4\x6a\x8b\x7a\x40\x22\xa3\x03\xcb\
\xab\x9d\x74\xc0\x63\x69\xe2\x6e\x37\xa9\x75\x40\x50\x78\x06\xe1\
\x95\xa1\xdb\xc0\x5b\x79\x5f\x8b\xda\xc3\x2a\x40\x3c\x78\x20\xe5\
\x0c\xd5\xed\xc0\x62\xee\x0b\xdc\xd5\x77\x11\xc0\x46\xd1\x54\x35\
\xd4\x96\x1e\x40\x50\x2c\x8b\x6f\xc2\x7c\x13\x40\x36\xbd\xcc\x71\
\x71\xc0\xb2\x40\x53\xeb\xb9\x56\x7e\x86\x5a\xc0\x64\x7c\xf0\xb2\
\xc9\xdd\x72\x40\x5a\x98\x3c\x0c\x8c\x5a\x33\xc0\x67\x9f\x25\xca\
\x54\x8b\x69\x40\x69\x6a\x0f\xf7\x48\x99\x09\x40\x61\x88\x20\xfd\
\xa3\x5a\xc4\x40\x5e\xdc\xfb\x5a\x95\xe6\x4c\x40\x60\xa1\xe2\xe3\
\xba\x4b\x77\x40\x6b\xea\x34\x79\xed\x2a\x63\x00\x00\x00\x10\x40\
\x38\x4c\xc8\xbf\xa9\x3c\xf7\xc0\x62\x97\x20\xa2\xc3\xba\x2f\x40\
\x3a\x05\x5c\xa0\xf4\xb7\xf1\xc0\x58\xb0\x26\xaf\xe5\x96\xe4\xc0\
\x40\xc3\x2a\xac\x34\xe6\xf4\xc0\x4e\x4e\xf7\x69\x95\xca\xcd\x40\
\x50\x0a\x02\x23\x57\xdb\x46\xc0\x43\x9a\x74\x63\x1c\x4c\x49\xc0\
\x3e\x83\xc3\xeb\xeb\x58\x7a\x40\x45\x35\x58\x4a\xeb\x0b\xde\x40\
\x32\x53\x6b\x06\xf7\xe1\xda\x40\x4a\x8f\x99\xce\x2a\xed\x2d\xc0\
\x44\x7e\x8b\x22\x9b\x7b\xcb\xc0\x60\x5d\xd7\xa6\x0d\xc7\x6c\x40\
\x22\xa3\x03\xd0\x01\xd2\x5f\xc0\x63\x69\xe2\x6e\x59\x48\xae\x40\
\x50\x78\x06\xe0\xd6\xe9\xc6\xc0\x5b\x79\x5f\x89\xd1\xfd\x18\x40\
\x3c\x78\x20\xe7\xc2\xd5\xed\xc0\x62\xee\x0b\xdc\x75\x1f\x54\xc0\
\x46\xd1\x54\x36\x45\xdf\xf2\x40\x50\x2c\x8b\x76\xb1\x88\xd7\x40\
\x36\xbd\xcc\x81\x16\xe5\x42\x40\x53\xeb\xb9\x52\x91\xe6\x77\xc0\
\x64\x7c\xf0\xb4\x2c\x9e\xd1\x40\x5a\x98\x3c\x0c\xbe\x99\x62\xc0\
\x67\x9f\x25\xca\x70\xda\x68\x40\x69\x6a\x0f\xf7\xb6\x0f\x69\x40\
\x61\x88\x20\xfe\xc9\x33\xea\x40\x5e\xdc\xfb\x5b\x17\x3b\x26\x40\
\x60\xa1\xe2\xe3\xb7\x2f\x1f\x40\x6b\xea\x34\x7a\x18\x57\x7f\x00\
\x00\x00\x10\x40\x39\x3a\xfb\x80\x37\xf3\x20\xc0\x61\xfc\xcb\xa3\
\xcd\x26\xb3\x40\x36\xe1\xc0\x3b\x54\x6a\xc7\xc0\x57\x7d\x1f\x83\
\x86\xf3\x1c\xc0\x42\x8a\xc6\x53\xae\xe3\x90\xc0\x4c\x3d\xf4\x90\
\xee\x6c\x3f\x40\x4e\x2d\x2a\x66\xb7\xf4\x4a\xc0\x40\xfe\x98\xb5\
\x59\x0c\x29\xc0\x41\x9c\x26\x3c\x27\x7a\x60\x40\x47\x49\xd1\x47\
\xd2\xac\x49\x40\x2a\xff\x46\x2d\x2a\xf5\xdd\x40\x4c\xe9\x81\x90\
\xd0\x9c\x7c\xc0\x43\xec\x12\xf4\xe3\xc3\x79\xc0\x5f\xc9\x03\x0f\
\xff\x7b\x76\x40\x21\x4c\x2e\x53\x8d\xf5\xb6\xc0\x63\x5e\x70\x2e\
\xf1\x12\x4a\x40\x60\x35\xaf\x9d\x23\xb9\xb3\xc0\x47\xf5\xbd\xee\
\x88\x2b\x80\x40\x5f\x27\xde\x05\x0a\xe0\xcd\xc0\x59\xe5\xec\x76\
\x6e\x20\x9e\xc0\x49\x4b\x2f\xeb\x9c\x1a\x49\x40\x51\x2b\xee\x40\
\x71\xae\x98\x40\x31\x9e\x54\x8b\xa5\x0f\x9e\x40\x55\x1b\xb6\xcb\
\xa1\xcb\x36\xc0\x65\x42\x19\x74\x05\x9a\xf9\x40\x5a\xb0\xf1\x49\
\x6b\xdf\x29\xc0\x67\xab\x60\x4e\xa0\x19\x85\x40\x69\xa0\x71\xdc\
\x2e\x28\xe1\x40\x61\x1f\xc5\xf2\x50\xd7\x8a\x40\x5e\xb5\x77\xa5\
\x7e\x2d\xd8\x40\x60\xa2\xa4\x00\xc9\xca\x1b\x40\x6b\xdc\x49\x66\
\x6f\x08\x2d\x00\x00\x00\x10\x40\x39\x6d\xdd\x13\x57\xa1\xd5\xc0\
\x61\x88\xae\xf7\xb1\x07\xb9\x40\x3d\xcf\xf4\x8d\x6a\x17\x82\xc0\
\x56\x9d\xa9\x7a\xdd\x4a\xee\xc0\x3f\xd1\x0f\xe0\xc8\xca\x1b\xc0\
\x4b\xe5\xb8\x99\x8a\x88\x81\x40\x50\x1e\xc0\x45\x2f\x34\x10\xc0\
\x3c\xd5\xaf\x28\x25\x54\xfa\xc0\x41\x59\xfd\x10\xd6\x49\xbe\x40\
\x47\x9e\x2c\x2b\x96\x3e\x8e\x40\x2a\xe2\xd5\x8c\x54\xd6\x15\x40\
\x4e\x5c\xbb\xef\xb1\x92\x66\xc0\x43\xbf\xfb\xa6\x99\x4b\x21\xc0\
\x5f\x85\x8a\xa8\x9e\x7f\x25\x40\x20\xe3\xcd\x5f\xf2\x68\xe2\xc0\
\x63\x5a\x9f\x77\xe2\x62\x79\x40\x60\xd6\xe3\xac\xcc\x5f\x90\xc0\
\x34\xa5\x13\xa5\x48\xae\xc2\x40\x64\xe3\x80\x6b\x21\x27\x96\xc0\
\x50\x8d\x0b\x23\xa9\xc3\xe4\xc0\x49\x89\xcb\x5b\xbe\x67\xd3\x40\
\x51\x26\xf3\x40\xd2\x0d\xc1\x40\x30\x53\x3d\xec\x2b\xcb\xe3\x40\
\x55\xdf\xb6\x87\x83\x72\xc9\xc0\x65\x50\x90\x7a\x79\xec\x90\x40\
\x5a\xb3\x5d\xfd\x75\x1b\xbb\xc0\x67\xac\x23\x85\x4c\x3e\xc1\x40\
\x69\xa4\x50\xfd\xc1\x1a\xbe\x40\x61\x15\x08\x2c\x04\x80\x35\x40\
\x5e\xb2\x16\xd8\x69\xeb\x99\x40\x60\xa2\xb0\x72\x32\x30\x48\x40\
\x6b\xdb\x00\x4c\xb4\x72\x34\x00\x00\x00\x10\x40\x39\x79\xe6\xd8\
\x96\x75\x3a\xc0\x61\xc0\x16\x90\x87\x0e\xed\x40\x38\x8f\xec\xd9\
\xc7\x51\xb7\xc0\x57\x00\xb8\x99\x9b\x0c\x77\xc0\x42\x03\xf3\x87\
\x9c\x0b\x95\xc0\x4b\xc8\x06\x2f\x17\xcb\x6f\x40\x4e\x81\x5b\x7b\
\x07\x5f\xac\xc0\x3f\x6b\x1b\x49\x9f\xef\xa8\xc0\x41\xf5\x1a\xbf\
\x81\x76\xdc\x40\x47\xc1\xb0\x33\x3e\x15\x87\x40\x29\x35\xe9\x42\
\xe0\x6f\x26\x40\x4d\xcb\x37\x6d\xa6\xc4\xbd\xc0\x43\xc2\xd2\x31\
\x5b\xf1\xae\xc0\x5f\x8b\xf9\x65\x4b\xb3\xb9\x40\x20\xf2\x38\xea\
\x61\xb5\x68\xc0\x63\x5b\x25\xb1\x29\x73\x44\x40\x60\x74\xb0\xf1\
\xaf\x14\x87\xc0\x3c\x2b\x5b\x9f\xd5\xcc\x3a\x40\x5f\xd1\xff\x1a\
\xb3\xb4\x69\xc0\x54\xf9\x9f\x72\x33\xf9\x81\xc0\x49\xd3\xfa\x57\
\xad\xf2\x84\x40\x51\x56\xe6\x30\xad\x36\xf7\x40\x30\x45\x7a\x8e\
\xd7\x55\xf9\x40\x55\x90\xcf\xcc\x04\x2a\x2d\xc0\x65\x6a\x17\x30\
\x58\x3c\xd3\x40\x5a\xb7\x3e\x31\xfb\xc0\xbc\xc0\x67\xad\x5a\x2e\
\xcc\x97\xbb\x40\x69\xaa\xd2\x12\x76\xcf\xc6\x40\x61\x08\x27\xfd\
\xad\x8d\xa9\x40\x5e\xad\xed\x6b\x24\xb9\x5c\x40\x60\xa2\xbe\x9a\
\x9d\xff\x01\x40\x6b\xd9\x5b\x1b\x78\x99\x99\x00\x00\x00\x10\x40\
\x3a\x1c\xba\xdc\xfa\x1a\x5d\xc0\x60\xf5\xc8\x41\xf4\x13\xe6\x40\
\x41\xfa\x03\xbb\xf4\x1d\x4a\xc0\x55\xaa\x26\xe9\xec\x4c\xf0\xc0\
\x3b\x21\x4c\xc6\x54\x3f\x5c\xc0\x4b\x5b\x50\x0c\x7c\x8f\x98\x40\
\x50\xf9\x7f\xcb\x19\x0b\xae\xc0\x37\x93\x4b\x4f\x3b\xf3\x58\xc0\
\x41\x41\x68\xf8\x0c\x13\x54\x40\x48\x0c\x99\x85\x2c\x7f\x15\x40\
\x2a\x01\x64\x6f\x7e\xaa\x70\x40\x4f\xd5\xa4\x26\x6b\xe0\x61\xc0\
\x43\x74\x8b\x1b\xb1\x98\xf2\xc0\x5f\x1b\xe4\x8e\x02\x7c\x6d\x40\
\x20\x42\xe5\x35\xba\xbe\x4c\xc0\x63\x54\x64\xe8\xa8\x03\x5e\x40\
\x61\x53\x48\x12\x1c\xc4\xf0\xc0\x38\x89\xd7\xe7\xcb\xc4\x43\x40\
\x65\x76\x6e\x3f\xaf\x20\x22\xc0\x51\x65\xa7\xda\xaa\x26\x75\xc0\
\x49\xe6\x46\x82\xd0\x24\xb9\x40\x51\x2f\x67\xdc\xf2\x10\xf5\x40\
\x2d\x70\x6e\xe1\xf3\xcb\xfa\x40\x56\xa2\x91\x0e\xe0\x3f\x9d\xc0\
\x65\x68\x72\x72\x08\xfb\x25\x40\x5a\xb7\x12\xd7\xa9\xc0\x26\xc0\
\x67\xad\x49\xf8\x95\x22\x31\x40\x69\xaa\x71\xed\xc5\x2f\x89\x40\
\x60\xfd\xd6\x05\x90\x17\x3d\x40\x5e\xab\x02\x86\x36\xeb\xc5\x40\
\x60\xa2\xc7\x56\x35\x46\xe9\x40\x6b\xd8\x35\x4f\xe4\xc4\x52\x00\
\x00\x00\x10\x40\x3a\x22\xa0\xc0\xc8\x8c\x68\xc0\x61\x7a\xe1\x71\
\x57\xe2\xd0\x40\x37\x88\x57\x2b\xb4\x27\x65\xc0\x56\x7a\x19\x92\
\x2d\x24\x6c\xc0\x42\xaa\x2f\xed\x9b\x21\x34\xc0\x4a\xf4\xa5\x85\
\xef\xfb\x8d\x40\x4d\xc3\xc6\xac\x45\x1a\x79\xc0\x3d\x0b\xa3\xc1\
\xb3\x70\xbf\xc0\x42\xfd\x76\x03\x11\x38\x1a\x40\x48\x95\x81\x53\
\xec\x54\xe6\x40\x24\xe6\xbc\x4c\xf9\xb4\xf7\x40\x4e\xcc\x4f\xe2\
\x80\x6a\x4e\xc0\x43\x7b\x0d\x6e\x7d\x0a\xc0\xc0\x5f\x26\x9b\xe0\
\x90\x2d\x70\x40\x20\x57\xdc\x86\xe0\xd3\xec\xc0\x63\x55\x29\x47\
\x39\xb3\x08\x40\x60\x47\xa0\x6e\xe2\x7b\xe5\xc0\x3d\x8e\x03\x79\
\xb3\xc5\xd8\x40\x59\xf4\xd9\x6e\xa3\x7d\xca\xc0\x53\xb4\xae\x8b\
\xb8\xe6\x2d\xc0\x4a\xf0\xb2\x63\x73\x25\x8f\x40\x51\xb9\x89\xae\
\x4e\xeb\xcd\x40\x2b\xd9\x2c\xa9\xd0\xb1\x70\x40\x56\x13\x03\x84\
\xe7\x17\x50\xc0\x65\xbe\x60\xdc\xb9\x87\x4b\x40\x5a\xc4\x9a\x9e\
\x26\x0d\xb2\xc0\x67\xb0\xd3\x88\x9b\xee\x09\x40\x69\xbf\x4a\x43\
\x04\x51\xc2\x40\x60\xd2\x70\x29\x96\x40\xc4\x40\x5e\x9e\x5d\xed\
\x9c\x03\x41\x40\x60\xa2\xe4\x5f\xbf\xef\x51\x40\x6b\xd2\xd4\x01\
\xf5\xda\x8f\x00\x00\x00\x10\x40\x39\x93\x07\x18\x7c\x00\x57\xc0\
\x61\xed\x38\x48\x04\xf6\x05\x40\x39\xce\xd4\x3d\x16\x4f\xe1\xc0\
\x57\x5a\x79\x80\xd1\xee\x90\xc0\x41\x5a\xb0\x5b\xd9\x84\xa7\xc0\
\x4c\x6b\x4b\xa6\xc5\x05\x67\x40\x4f\x31\x11\x79\x69\xca\xc4\xc0\
\x40\x72\xd8\x87\x3f\x4a\xdb\xc0\x41\x30\x33\x06\x0e\xbd\x6d\x40\
\x47\x1e\x92\xf6\x5f\x87\x9d\x40\x2c\x56\xb7\x92\x4c\x0f\x54\x40\
\x4d\x1a\xcc\x86\x22\xf3\x62\xc0\x43\xed\x68\x86\x6d\xf1\xf7\xc0\
\x5f\xd7\x21\x49\x55\x4f\x8d\x40\x21\x78\xc7\xca\xdd\xae\x33\xc0\
\x63\x60\x08\x68\x37\x94\x7d\x40\x55\x61\x16\x46\xec\xb7\xbc\xc0\
\x58\xed\xdd\x9f\xdd\x31\xf6\x40\x4d\x34\xd5\x5d\xce\x73\x18\xc0\
\x62\x93\x74\x7f\x87\x8c\xe0\xc0\x49\x09\x16\x63\x43\xbe\xc4\x40\
\x51\x07\x88\xc1\x11\x6b\x6e\x40\x31\xe4\xaf\x64\x08\xd2\xab\x40\
\x55\x38\x17\x72\x80\xb5\xac\xc0\x65\x2c\x52\x57\x7f\x38\xd6\x40\
\x5a\xad\x44\x85\x31\xfe\xd5\xc0\x67\xaa\x28\xdf\x87\x91\xd0\x40\
\x69\x9a\x80\x20\x4d\x49\x84\x40\x61\x2c\x4a\x92\x0c\x64\x0a\x40\
\x5e\xba\x92\xb8\xd5\x32\x10\x40\x60\xa2\x8e\x79\x47\xf5\xf0\x40\
\x6b\xde\x52\x7b\xcd\xff\x76\x00\x00\x00\x10\x40\x39\xa0\x87\x98\
\xb8\xe0\xec\xc0\x62\x2e\x9b\xf4\x0c\x9a\x1f\x40\x38\x3b\x99\x1e\
\x7f\x21\x57\xc0\x57\xde\x76\x78\x0a\x75\x02\xc0\x41\xdb\xc9\xce\
\x7f\xdb\x4c\xc0\x4c\xfd\x56\x17\x6b\xc4\x02\x40\x4e\xdd\x63\x67\
\xe8\x83\x36\xc0\x41\xc3\x56\x92\x55\x6f\xcc\xc0\x40\xe7\x7e\x05\
\xfc\x43\x82\x40\x46\x8a\x57\x00\x41\xd6\xd1\x40\x2d\xd4\x62\x54\
\xe0\x04\x59\x40\x4c\x27\x56\xc2\xcd\x8b\x08\xc0\x42\x5a\xf0\x4d\
\x5a\xcb\xe6\xc0\x60\x15\xda\x3c\x46\xaf\x80\x40\x2e\xe6\x21\xc4\
\x8c\x1a\x04\xc0\x62\x99\x50\xe9\xf4\x73\xa0\x40\x50\x8a\xee\x4b\
\xfe\x05\x2c\xc0\x5a\x86\x37\xad\x7c\x15\x19\x40\x3e\x0c\x8f\x47\
\x89\x42\x31\xc0\x62\x98\x5b\xa9\x88\xc3\x3a\xc0\x48\x95\x4f\x39\
\x7f\x7d\xf0\x40\x50\xcc\x9d\xf9\xdf\xe9\xd6\x40\x33\x0b\xd4\x0c\
\x5e\xaf\x8e\x40\x54\xba\x84\x35\x41\xd8\x1e\xc0\x65\x06\xae\x6d\
\x56\x5a\x3b\x40\x5a\xa7\x7d\x7e\x97\xc5\xb4\xc0\x67\xa7\xfe\xa3\
\xda\x60\x69\x40\x69\x90\x32\x32\xe0\xf7\x43\x40\x61\x3b\xaa\x06\
\x78\xb7\xc1\x40\x5e\xc0\x47\x72\x5e\x23\xb6\x40\x60\xa2\x75\x46\
\x84\x56\xc3\x40\x6b\xe0\x78\xd5\xcc\x8c\x07\
\x00\x00\x09\x2c\
\xff\
\xff\xff\xff\x00\x00\x00\x09\x00\x00\x00\x10\xc0\x36\xc1\x31\xf7\
\x71\x07\xcd\xc0\x62\xc4\x31\x65\x15\x96\x34\xc0\x22\x59\x75\x1b\
\x5e\x3f\xf0\xc0\x59\x80\x9a\x15\x13\xe9\x09\xc0\x47\x52\xc9\x1c\
\x2f\x1a\x7a\xc0\x45\x06\x29\x2a\x67\x31\x79\x40\x49\x64\xe4\x36\
\x8c\xfe\x06\xc0\x50\x22\x71\x64\xe5\x3f\xcb\x3f\xc9\x7e\x46\xc2\
\x82\x74\x41\x40\x48\xe2\x7c\x67\xa7\x02\xf2\x40\x48\x75\xb8\xda\
\x6c\xe4\xac\x40\x43\x43\x8c\xb1\x63\x97\xf6\xc0\x59\x94\x32\xd7\
\x73\x76\xf3\x3f\xf8\xd1\x11\x6b\x2c\x82\x25\xc0\x5f\x82\xbf\x09\
\xb5\x16\xad\x40\x4a\x15\x6d\x1b\x87\x1c\x19\x40\x55\x32\x73\xaa\
\x94\xec\x77\xc0\x04\x4e\x49\x68\xe6\x95\xd3\x40\x60\xe5\xde\x62\
\x15\x6c\x95\xc0\x3a\xbb\x5f\xa9\x86\x06\x7e\xc0\x0f\x68\xca\xdb\
\xdc\xdd\x36\x40\x53\x18\x3a\x74\x13\x56\x51\x40\x50\x12\x0e\xb5\
\x6f\x61\x5c\x40\x4e\x52\x58\x0d\xd3\x0c\x6d\xc0\x38\xbe\x69\x34\
\x34\x0d\xd7\x40\x69\x01\x3e\xaa\x3a\x4e\xf3\xc0\x3d\xff\xa9\xa5\
\x4e\x81\xa7\x40\x72\xc0\x69\xcc\x0a\xb2\x0e\x40\x3f\xa2\xaf\xee\
\xb2\x7a\xa3\x40\x66\xb7\x06\x0c\x87\xf6\x2d\x40\x3d\xae\x9a\x40\
\x3c\x67\x56\x40\x71\x9d\x34\xa4\xbb\x47\x7d\x00\x00\x00\x10\xc0\
\x30\xaf\xc1\x69\x97\x5a\x95\xc0\x63\x49\x63\x4e\x8e\x9f\xdc\xc0\
\x1e\xaf\xb7\x04\x39\xf0\x0c\xc0\x5a\x46\x95\xdc\xfb\x24\x8e\xc0\
\x49\x9c\xd2\x5c\x93\x47\x4c\xc0\x48\xb5\xcf\xe8\x99\x9c\xdd\x40\
\x48\x02\x8f\xa5\xd6\xdc\x31\xc0\x4e\xc7\x1a\x4e\x1b\x4a\x1c\xc0\
\x2c\x94\xbf\x13\x77\x29\x41\x40\x47\x68\xf0\xdd\xb4\xba\x6a\x40\
\x41\xac\x5d\x06\x4f\x13\x18\x40\x44\x62\x29\x5f\x79\x2c\x69\xc0\
\x58\x78\xf9\x10\x42\x39\x8c\x40\x0d\x9e\x34\x27\x91\xbd\x70\xc0\
\x63\x03\xb5\x2e\x0b\xf6\xb5\x40\x31\x49\xc5\x70\x34\x57\xdc\x40\
\x55\x55\x0b\xb2\x96\x65\xb2\xbf\xf7\xd0\xc3\xa3\x37\x60\x82\x40\
\x60\xe6\x74\x90\x7c\xd3\xd5\xc0\x3a\xbd\xb7\xf4\x49\xd6\x6d\xc0\
\x35\x33\x14\x59\x01\x4b\xa9\x40\x52\x36\xaa\xf8\xcd\xd9\x51\x40\
\x48\x24\x95\xbf\x77\x92\xaf\x40\x50\x19\x5d\x72\x39\xa4\xee\xc0\
\x38\x3c\x43\x48\x09\x4c\x70\x40\x68\xc7\x21\x6e\x01\xfb\xd3\xc0\
\x3d\xe6\x09\x44\x84\xd2\x4f\x40\x72\xa2\xff\xdd\x04\x92\x32\x40\
\x3d\x30\xda\x3a\xb4\x2e\xfe\x40\x67\x8a\xdf\x98\x83\xb8\xf6\x40\
\x3d\xa4\x89\x7f\x23\xcd\xd5\x40\x72\x07\x6b\x4d\x22\x90\x7e\x00\
\x00\x00\x10\x3f\xe8\xdc\xd8\x6c\x4e\xe2\x7a\xc0\x63\x6c\xe1\x17\
\xf2\xa0\xbf\xc0\x1c\xa0\x1f\x76\xa3\x67\x0e\xc0\x5a\x82\x4b\xa2\
\xc3\x07\x44\xc0\x4e\x6a\xa4\x92\xae\xdd\x89\xc0\x4e\x00\xb1\x06\
\xb8\xfd\xe4\x40\x43\x6f\xe2\x4f\x4e\xd0\x7e\xc0\x4a\x2d\xf6\xac\
\x0f\xbe\x52\xc0\x45\xc6\xbd\x2e\xcf\xf5\x06\x40\x44\xce\x84\x52\
\x4a\x5a\x37\x40\x19\x34\x31\x96\xfb\x44\x7f\x40\x46\xb7\xe1\xb0\
\xec\xbd\xa9\xc0\x58\xfd\xf7\xfd\x56\xdc\x11\xbf\xf1\xca\x67\x3c\
\x06\x01\x31\xc0\x63\x1e\x02\x33\x3c\x22\x49\xc0\x32\xfa\x9d\x13\
\x66\xaf\x41\x40\x54\x9e\xa9\xbe\x7c\x61\xe3\x40\x0a\x72\xb1\x55\
\x98\x28\x33\x40\x61\x32\xb3\x01\xb0\xbc\x2d\xc0\x18\x63\x1b\xa0\
\x7e\x49\xee\xc0\x4b\xb7\xac\x9e\xf7\xef\xea\x40\x50\x71\xa3\x37\
\xe4\x2e\xa7\x40\x2c\xb8\x60\xb8\xfe\xc6\xc2\x40\x51\xc8\x31\x75\
\xbd\x97\x6c\xc0\x3b\x25\x5d\xe3\x55\xb6\xc3\x40\x67\x7e\x28\x3a\
\x2f\xcf\x04\xc0\x3d\x79\xfb\x74\x2a\xf4\x86\x40\x72\x00\xa4\xae\
\xf2\x81\x95\x40\x3c\xfa\x8b\x66\xc9\x22\x30\x40\x68\x75\x84\x1e\
\x7d\x6d\xd4\x40\x3d\xb5\xf8\x8b\xe7\x75\x6d\x40\x72\x7c\xb6\xc6\
\x9a\xe9\x9a\x00\x00\x00\x10\x3f\xe4\x9d\xa9\x42\x5e\xe7\x43\xc0\
\x63\x56\x3a\x89\xf9\xf5\x03\xc0\x1b\xb9\xb9\x7a\xc0\xda\x37\xc0\
\x5a\x51\x6d\x68\xd6\x38\x91\xc0\x4f\xf0\xb8\xf8\x76\x60\x54\xc0\
\x4f\xad\xc6\x15\x51\x43\xe4\x40\x41\x7d\xdd\x1f\xa6\x7e\x97\xc0\
\x48\x29\x8c\x8c\x88\xc5\xd5\xc0\x4b\x19\x1c\xf8\xef\x04\xe9\x40\
\x43\xa1\x52\x38\x86\xcc\xe1\xc0\x13\x11\x86\xaf\xfa\x18\xd1\x40\
\x47\x64\x45\xcd\x84\x52\xf0\xc0\x5d\x6b\xc7\x4d\xed\xcd\x82\xc0\
\x31\x79\xa5\x95\x16\x3c\x6c\xc0\x64\x78\xba\xa8\x85\x58\x74\xc0\
\x48\x8e\x36\x95\x37\x4b\xa5\x40\x54\xcc\x34\x76\x69\xe8\x4e\x40\
\x0b\x3e\xdf\x5f\x9e\x8d\xa3\x40\x60\x65\x54\x53\xc4\x38\x71\x40\
\x40\x0c\xb3\x84\x9f\xd2\x9c\xc0\x50\xf6\x2c\x1c\x95\xbd\x9f\x40\
\x4f\x3b\xb0\x51\x91\x96\x07\x3f\xf5\xb6\xbb\x4c\x89\xfb\xa7\x40\
\x52\x40\x4d\xcb\x0f\x40\xae\xc0\x3e\x45\x9a\x76\x56\xfb\xac\x40\
\x66\xc3\x4b\x9c\xe2\xcb\x0b\xc0\x3d\x73\xb5\x3b\x21\x32\xb6\x40\
\x71\xa3\x97\xbc\x79\xdf\x84\x40\x3c\xd5\xf8\x27\x62\x64\x9d\x40\
\x68\x6b\x6d\x0c\x69\x1b\x7e\x40\x3d\xb5\x37\x81\x76\xb1\xee\x40\
\x72\x77\xa6\xa6\x21\xce\x5f\x00\x00\x00\x10\xbf\xe3\x54\xf9\xd1\
\xc3\xbb\x96\xc0\x62\x39\x5d\x48\x40\x62\x61\xc0\x13\xa4\x40\xfe\
\x23\x41\x66\xc0\x57\xfe\x9e\x7a\x5a\x8c\x65\xc0\x51\x53\x87\x2f\
\x98\x9f\x2f\xc0\x50\xb1\x13\x00\xa7\xbf\x57\x40\x38\x4d\x1d\xa7\
\x39\x9a\x03\xc0\x3f\x95\x6d\x6a\x72\xf6\x4d\xc0\x54\x45\x8b\x2f\
\x57\x8e\x27\x40\x41\xd1\x53\x04\xab\x6e\xf7\xc0\x41\x24\x47\xc5\
\x1a\x30\x9b\x40\x4a\x9d\x0a\xaa\x84\xfc\x0d\xc0\x61\x26\xda\xae\
\x5a\xbe\x0d\xc0\x47\x87\xad\x31\xe6\x56\x49\xc0\x65\xf1\xab\xa2\
\x9d\x71\x7a\xc0\x55\xef\x9f\x23\x26\xbf\xfd\x40\x44\x02\xa4\x34\
\x49\x99\xb4\x40\x42\xad\xc5\x39\xfc\xc3\xc9\x40\x54\x77\x1a\x63\
\x55\xdd\x55\x3f\xd2\x3b\x7b\x22\x6a\xd6\x1f\xc0\x58\xcf\x8d\xc1\
\xb5\x07\x54\x40\x4b\xc2\x62\x63\xab\x19\x62\xc0\x40\xdb\xfa\x46\
\x22\x87\x89\x40\x54\x09\x64\xf2\x9a\x80\x2a\xc0\x44\xeb\x29\x9a\
\x2f\x94\x49\x40\x64\xe0\x36\x35\x4c\x3c\x43\xc0\x3e\x7e\x4a\x3b\
\x7b\x99\xfd\x40\x70\xa7\xca\x20\xaa\x48\xd9\x40\x39\x90\x46\xa5\
\x32\x46\xfc\x40\x67\xd4\xc0\x6c\x37\x15\xaa\x40\x3d\x93\xe0\x30\
\xd5\xa0\xe6\x40\x72\x2b\x16\x2e\xda\x91\xd2\x00\x00\x00\x10\xbf\
\xd7\x9d\xe7\xa1\x97\x39\xc0\xc0\x62\xbc\xd5\xa4\x38\x2b\x33\xc0\
\x2b\x24\x07\xa2\xf7\xbc\x3a\xc0\x59\x6b\x39\x1d\x48\x8d\x2d\xc0\
\x50\xfc\x22\x5d\x98\xd8\xe0\xc0\x4c\x3b\x80\x46\xaf\xdc\x40\x40\
\x3f\xa3\xe1\x10\x37\xbc\x7c\xc0\x47\xa7\x33\xb0\x1d\xd6\x38\xc0\
\x4a\x1a\x00\x9f\xcb\xe1\x48\x40\x46\xb3\xbc\xc6\xce\x1c\xfe\xc0\
\x03\x4e\xc7\x08\x76\xc7\x1c\x40\x48\xfd\xf5\x33\x7e\x4b\x81\xc0\
\x60\x62\xd0\x37\xd0\x3e\x25\xc0\x38\xa5\xfa\x53\xfc\x7a\x7d\xc0\
\x64\x52\x8c\x8e\xde\xf0\x6d\xc0\x51\xb5\x7b\xf0\xe3\x85\x21\x40\
\x52\x61\x8e\x3b\xeb\xd1\xde\x40\x23\x54\x72\x4d\x7e\x66\xb6\x40\
\x5a\xc6\x30\x18\x78\x65\x5e\xc0\x41\x84\x35\xad\x81\xc8\x84\xc0\
\x50\x1c\xe1\x3c\xef\x41\x96\x40\x51\x58\x7f\x41\x27\xa7\x22\x40\
\x15\x04\xa4\x11\x8b\xa5\x4c\x40\x52\xf2\xe2\x10\x1d\xbe\x94\xc0\
\x3a\xad\xdb\xf6\xe0\xcd\x29\x40\x67\x9e\xa8\x6e\xa4\x7f\x43\xc0\
\x3d\xa4\xca\x4b\x1f\x0f\x46\x40\x72\x10\xa0\x16\xce\x0d\xd1\x40\
\x3d\xed\xb9\x7b\x1d\xf5\x8a\x40\x68\xd7\xc6\xd6\xaf\x31\x50\x40\
\x3d\xb4\xa6\x32\xb3\x72\x09\x40\x72\xad\xe2\x15\x3d\xe8\xd8\x00\
\x00\x00\x10\xc0\x34\xf7\x01\x4a\xe8\x31\x37\xc0\x63\x08\xe7\x3c\
\xd7\x52\x6e\xc0\x32\x87\x01\x16\xce\x22\x54\xc0\x59\x95\x8a\xc3\
\xc9\x1c\xb8\xc0\x51\x35\xc9\xdc\xb0\xdf\xb7\xc0\x4a\x53\xba\xaa\
\x59\xe9\x6d\x40\x3f\x27\xe3\x21\x77\x13\xbc\xc0\x4a\x03\x47\xa6\
\x3c\x27\xd1\xc0\x46\x3c\x28\x27\x7d\x72\x7b\x40\x47\xbf\xcc\xb2\
\x9b\x75\xf7\x40\x16\x1c\xbe\x7d\x45\x21\x89\x40\x47\xe9\x2b\xf5\
\x69\xa9\x7a\xc0\x5e\xfa\xe4\x52\xe2\xee\x3d\xc0\x20\x9f\xc3\x12\
\xc5\x03\x2a\xc0\x63\x2d\x67\xd4\x68\xd9\xd5\xc0\x4b\xe6\x04\xf7\
\xc6\xd3\xb2\x40\x56\x9a\x17\x7f\xf0\x8a\x61\xc0\x2a\xde\xdc\xce\
\x99\x86\xc2\x40\x5d\x35\xe2\x20\x89\xba\x10\xc0\x4f\x58\x95\x85\
\x01\x84\xd4\xc0\x4b\x51\x42\xfa\x2b\xa5\xef\x40\x52\x1b\x84\x34\
\xad\x36\xdf\x40\x2e\xba\x76\xa5\xb1\xb8\x45\x40\x52\x39\x02\x49\
\xaa\x30\x3e\xc0\x37\xb9\x6f\x87\xfd\xd0\x77\x40\x68\x3e\xb5\xb9\
\xe8\xcd\x8a\xc0\x3d\xdf\x2f\x40\x7a\xc8\xd0\x40\x72\x5e\x54\xd0\
\xa5\x34\x44\x40\x3d\x8f\xfd\xe1\x90\x52\x8a\x40\x68\xaf\x88\xff\
\xd8\xfc\x30\x40\x3d\xb4\x96\xe1\xc9\x47\x14\x40\x72\x99\xc3\xc3\
\x16\xad\x07\x00\x00\x00\x10\xc0\x44\x1f\xc5\xb0\x8d\xff\xf8\xc0\
\x62\xd7\xcc\xdb\x51\x37\xe9\xc0\x38\x70\x1f\x26\x3f\xdd\xd1\xc0\
\x59\xd3\xca\x12\xd1\x6d\x66\xc0\x50\x9b\x5f\xdf\x54\x98\x82\xc0\
\x47\x35\x38\xde\x3e\x59\xd1\x40\x40\x3a\x4b\xde\xb1\x11\x5e\xc0\
\x4e\xa8\xe3\x73\xc2\x1e\x04\xc0\x3a\xcd\xa4\x3c\x9c\xa3\x4a\x40\
\x48\x5e\xe9\xc8\x41\x4a\x06\x40\x36\xa3\x67\x5c\xdf\x00\x72\x40\
\x44\xa5\x11\xd4\x30\xbb\xbe\xc0\x58\xc8\x06\xa1\x52\x29\xa4\x40\
\x30\x47\xe1\xaa\x31\x64\x3e\xc0\x63\x5d\xb2\xe9\xcc\x7a\xe2\x40\
\x29\xe9\x00\x97\x5b\x70\xaa\x40\x58\x50\x0c\x6f\xef\x08\x06\xc0\
\x40\x80\xf5\x5f\x55\x35\x51\x40\x60\xea\x26\xcc\x19\xc3\x62\xc0\
\x52\x7c\x87\x94\x54\x11\xcd\xc0\x40\x7b\x99\x9a\x58\x7d\x4e\x40\
\x52\xbc\xf9\x47\x74\x9c\xd3\x40\x42\x20\x54\x4b\xe3\x87\xa0\x40\
\x50\x21\x47\x81\x50\x58\x25\xc0\x35\xd8\x1b\x0b\x45\x8c\xdd\x40\
\x68\xfb\x74\x42\x1b\x4c\x50\xc0\x3e\x4d\x9f\x09\xae\x3a\xcb\x40\
\x72\xb9\xff\x8f\xad\x92\x5a\x40\x3d\x6b\xf8\x3a\xfe\x5b\xcc\x40\
\x67\xb7\x76\x0a\x83\x1d\x2b\x40\x3d\xaf\x2e\x02\x24\x00\xa5\x40\
\x72\x1d\xb9\x4a\x83\x5a\x36\x00\x00\x00\x10\xc0\x56\x21\x7b\x1a\
\x2d\x27\x74\xc0\x5d\x7f\x81\xf7\x72\x97\xeb\xc0\x45\x6a\xce\x4b\
\x4a\x59\x0e\xc0\x58\x6b\x98\xca\x1a\xce\x5b\xc0\x50\x79\x21\x8d\
\x02\xed\x0a\xc0\x3e\xd5\xf2\x5c\x01\x40\xa0\x40\x38\x02\xd4\x2c\
\x79\x7b\xb7\xc0\x52\xa7\xde\x66\xb0\xaa\x42\x3f\xd7\xb5\xe2\x7b\
\x89\xa4\xca\x40\x48\x0f\x83\x05\x1a\x5b\x33\x40\x46\xa9\x42\x70\
\x82\xf2\xfc\x40\x3a\x3a\x42\xb9\x85\x37\x29\xc0\x54\xe2\xc5\x27\
\x1d\x51\x6a\x40\x42\xd1\x7b\x2a\xa1\x08\x15\xc0\x61\x0b\x0a\x8b\
\x23\x7a\xc0\x40\x33\x47\x9a\x49\x82\xe6\x81\x40\x57\xac\xeb\x9b\
\x79\x30\xfa\xc0\x53\x13\x15\xe5\x3e\x1d\x05\x40\x60\x43\xab\x51\
\xbf\x87\xef\xc0\x5d\xe3\x5c\x69\xf3\x2d\x89\x40\x02\x9d\x80\x74\
\x60\xb9\xea\x40\x52\xbe\x74\x1e\x0d\x17\x2d\x40\x50\x50\x9b\xe2\
\xa5\xba\x4e\x40\x46\x29\x92\xd8\xeb\x51\xb2\xc0\x32\x67\x07\xb9\
\x5f\xcb\x99\x40\x68\xd4\xcc\x9f\x63\x2f\x21\xc0\x3e\x08\x79\x67\
\x07\x43\x5b\x40\x72\xa1\x8d\xaf\x2d\x86\xe8\x40\x43\x38\x54\x39\
\x89\x5f\xf6\x40\x64\xda\x52\xe8\xbe\xbd\x10\x40\x3e\xaa\x65\xda\
\xb3\xd9\xcf\x40\x70\xaa\x52\xf4\x99\xff\x0c\
"
qt_resource_name = b"\
\x00\x0a\
\x03\x88\x10\x53\
\x00\x61\
\x00\x6e\x00\x69\x00\x6d\x00\x61\x00\x74\x00\x69\x00\x6f\x00\x6e\x00\x73\
\x00\x07\
\x01\xc4\x70\xa7\
\x00\x6a\
\x00\x75\x00\x6d\x00\x70\x00\x69\x00\x6e\x00\x67\
\x00\x04\
\x00\x06\xab\x74\
\x00\x64\
\x00\x65\x00\x61\x00\x64\
\x00\x08\
\x0f\x03\x25\x67\
\x00\x63\
\x00\x68\x00\x69\x00\x6c\x00\x6c\x00\x69\x00\x6e\x00\x67\
\x00\x07\
\x0a\x84\xa0\x87\
\x00\x64\
\x00\x61\x00\x6e\x00\x63\x00\x69\x00\x6e\x00\x67\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00\x02\
\x00\x00\x00\x2e\x00\x00\x00\x00\x00\x01\x00\x00\x05\x20\
\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x52\x00\x00\x00\x00\x00\x01\x00\x00\x1f\xa0\
\x00\x00\x00\x3c\x00\x00\x00\x00\x00\x01\x00\x00\x06\x30\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
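# Once qInitResources() has run, the embedded keyframe data is reachable via
# Qt's resource scheme; the alias names ('animations' with 'jumping', 'dead',
# 'chilling', 'dancing') decode from qt_resource_name above. Illustrative
# sketch, not part of the generated file:
#     f = QtCore.QFile(':/animations/dancing')
#     if f.open(QtCore.QIODevice.ReadOnly):
#         data = f.readAll()
#         f.close()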
| gpl-2.0 | 656,845,548,716,472,400 | 61.285714 | 96 | 0.726322 | false |
AICP/external_chromium_org | tools/cr/cr/commands/init.py | 59 | 5608 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module for the init command."""
import os
import cr
# The set of variables to store in the per output configuration.
OUT_CONFIG_VARS = [
'CR_VERSION',
cr.Platform.SELECTOR, cr.BuildType.SELECTOR, cr.Arch.SELECTOR,
'CR_OUT_BASE', 'CR_OUT_FULL',
]
class InitCommand(cr.Command):
"""The implementation of the init command.
The init command builds or updates an output directory.
It then uses the Prepare and Select commands to get that directory
ready to use.
"""
def __init__(self):
super(InitCommand, self).__init__()
self.requires_build_dir = False
self.help = 'Create and configure an output directory'
self.description = ("""
If the .cr directory is not present, build it and add
the specified configuration.
            If it already exists, update the configuration with any
additional settings.
""")
self._settings = []
def AddArguments(self, subparsers):
"""Overridden from cr.Command."""
parser = super(InitCommand, self).AddArguments(subparsers)
cr.Platform.AddArguments(parser)
cr.BuildType.AddArguments(parser)
cr.Arch.AddArguments(parser)
cr.SelectCommand.AddPrepareArguments(parser)
parser.add_argument(
'-s', '--set', dest='_settings', metavar='settings',
action='append',
help='Configuration overrides.'
)
return parser
def EarlyArgProcessing(self):
base_settings = getattr(cr.context.args, '_settings', None)
if base_settings:
self._settings.extend(base_settings)
    # Do not call the superclass's early processing; we do not want to
    # apply the output arg here...
out = cr.base.client.GetOutArgument()
if out:
      # Output directory is fully specified.
      # We need to deduce the other settings from its name.
base, buildtype = os.path.split(out)
if not (base and buildtype):
print 'Specified output directory must be two levels'
exit(1)
if not cr.BuildType.FindPlugin(buildtype):
print 'Specified build type', buildtype, 'is not valid'
print 'Must be one of', ','.join(p.name for p in cr.BuildType.Plugins())
exit(1)
if (cr.context.args.CR_BUILDTYPE and
cr.context.args.CR_BUILDTYPE != buildtype):
print 'If --type and --out are both specified, they must match'
print 'Got', cr.context.args.CR_BUILDTYPE, 'and', buildtype
exit(1)
platform = cr.context.args.CR_PLATFORM
if not platform:
# Try to guess platform based on output name
platforms = [p.name for p in cr.Platform.AllPlugins()]
matches = [p for p in platforms if p in base]
if len(matches) != 1:
print 'Platform is not set, and could not be guessed from', base
print 'Should be one of', ','.join(platforms)
if len(matches) > 1:
print 'Matched all of', ','.join(matches)
exit(1)
platform = matches[0]
cr.context.derived.Set(
CR_OUT_FULL=out,
CR_OUT_BASE=base,
CR_PLATFORM=platform,
CR_BUILDTYPE=buildtype,
)
if not 'CR_OUT_BASE' in cr.context:
cr.context.derived['CR_OUT_BASE'] = 'out_{CR_PLATFORM}'
if not 'CR_OUT_FULL' in cr.context:
cr.context.derived['CR_OUT_FULL'] = os.path.join(
'{CR_OUT_BASE}', '{CR_BUILDTYPE}')
def Run(self):
"""Overridden from cr.Command."""
src_path = cr.context.Get('CR_SRC')
if not os.path.isdir(src_path):
print cr.context.Substitute('Path {CR_SRC} is not a valid client')
exit(1)
# Ensure we have an output directory override ready to fill in
# This will only be missing if we are creating a brand new output
# directory
build_package = cr.auto.build
# Collect the old version (and float convert)
old_version = cr.context.Find('CR_VERSION')
try:
old_version = float(old_version)
except (ValueError, TypeError):
old_version = 0.0
is_new = not hasattr(build_package, 'config')
if is_new:
class FakeModule(object):
OVERRIDES = cr.Config('OVERRIDES')
def __init__(self):
self.__name__ = 'config'
old_version = None
config = FakeModule()
setattr(build_package, 'config', config)
cr.plugin.ChainModuleConfigs(config)
# Force override the version
build_package.config.OVERRIDES.Set(CR_VERSION=cr.base.client.VERSION)
# Add all the variables that we always want to have
for name in OUT_CONFIG_VARS:
value = cr.context.Find(name)
build_package.config.OVERRIDES[name] = value
# Apply the settings from the command line
for setting in self._settings:
name, separator, value = setting.partition('=')
name = name.strip()
if not separator:
value = True
else:
value = cr.Config.ParseValue(value.strip())
build_package.config.OVERRIDES[name] = value
# Run all the output directory init hooks
for hook in cr.InitHook.Plugins():
hook.Run(old_version, build_package.config)
# Redo activations, they might have changed
cr.plugin.Activate()
# Write out the new configuration, and select it as the default
cr.base.client.WriteConfig(cr.context.Get('CR_BUILD_DIR'),
build_package.config.OVERRIDES.exported)
# Prepare the platform in here, using the updated config
cr.Platform.Prepare()
cr.SelectCommand.Select()
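# cr discovers Command subclasses as plugins, so InitCommand needs no explicit
# registration. A typical invocation might look like the following (the
# platform/type flag names come from the Platform and BuildType plugins and
# are assumed here; the -s setting is purely illustrative):
#     cr init --platform linux --type Debug -s NAME=value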
| bsd-3-clause | 7,889,578,943,912,553,000 | 34.27044 | 80 | 0.644793 | false |
jve/rabbittop | rabbittop/terminal.py | 1 | 5872 | """
The MIT License (MIT)
Copyright (c) 2014 Jozef van Eenbergen
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import curses
class Terminal(object):
def __init__(self, scrn=None):
        self._screen = scrn if scrn else curses.initscr()
curses.noecho()
curses.cbreak()
# curses.curs_set(0)
self._screen.keypad(1)
self._refresh_rate = 3
self._screen.timeout(self._refresh_rate * 1000)
self.selected_row = None
self.start_row = 0
curses.start_color()
curses.init_pair(1, curses.COLOR_WHITE, curses.COLOR_BLACK)
curses.init_pair(2, curses.COLOR_WHITE, curses.COLOR_RED)
curses.init_pair(3, curses.COLOR_WHITE, curses.COLOR_GREEN)
curses.init_pair(4, curses.COLOR_WHITE, curses.COLOR_BLUE)
curses.init_pair(5, curses.COLOR_WHITE, curses.COLOR_MAGENTA)
curses.init_pair(6, curses.COLOR_RED, curses.COLOR_BLACK)
curses.init_pair(7, curses.COLOR_GREEN, curses.COLOR_BLACK)
curses.init_pair(8, curses.COLOR_BLUE, curses.COLOR_BLACK)
curses.init_pair(9, curses.COLOR_MAGENTA, curses.COLOR_BLACK)
curses.init_pair(10, curses.COLOR_BLACK, curses.COLOR_YELLOW)
curses.init_pair(11, curses.COLOR_BLACK, curses.COLOR_WHITE)
self._colors_list = {
'DEFAULT': curses.color_pair(1),
'UNDERLINE': curses.A_UNDERLINE,
'BOLD': curses.A_BOLD,
'SORT': curses.A_BOLD,
'OK': curses.color_pair(7),
'TITLE': curses.A_BOLD,
'PROCESS': curses.color_pair(7),
'STATUS': curses.color_pair(7),
'NICE': curses.color_pair(9),
'CAREFUL': curses.color_pair(8),
'WARNING': curses.color_pair(9),
'CRITICAL': curses.color_pair(6),
'OK_LOG': curses.color_pair(3),
'CAREFUL_LOG': curses.color_pair(4),
'WARNING_LOG': curses.color_pair(5),
'CRITICAL_LOG': curses.color_pair(2),
'SEPARATOR': curses.color_pair(10),
'REVERSE': curses.color_pair(11),
}
self._panels = {}
self._windows = {}
@property
def colors(self):
return self._colors_list
@property
def panels(self):
return self._panels
@property
def windows(self):
return self._windows
def getch(self):
return self._screen.getch()
def refresh(self):
return self._screen.refresh()
def get_size(self):
return self._screen.getmaxyx()
def stop(self):
curses.nocbreak()
self._screen.keypad(0)
curses.echo()
curses.endwin()
def create_window(self, name, height, width, top, left):
panel = Window(height, width, top, left, self)
self._windows[name] = panel
return panel
def create_panel(self, name, height, width):
panel = Panel(height, width)
self._panels[name] = panel
return panel
def add_line(self, text, top, left, color=None):
self._screen.addstr(top, left, text, color)
def up(self):
self.selected_row -= 1
def down(self):
self.selected_row += 1
class Window(object):
def __init__(self, height, width, top, left, parent):
self._panel = parent._screen.subwin(height, width, top, left)
self._parent = parent
self._panel.scrollok(1)
self._panel.idlok(1)
self._panel.touchwin()
def add_line(self, text, top, left, color=None):
self._panel.addstr(top, left, text, color)
def refresh(self):
return self._panel.refresh()
class Panel(object):
""" Wrapped newpad object
"""
def __init__(self, height, width):
self._panel = curses.newpad(height, width)
self.selected_row = 0
self.ptopy = 0
self.ptopx = 0
self.stopy = 0
self.stopx = 0
self.sbottomy = 0
self.sbottomx = 0
self.max = height
def set_max(self, value):
self.max = value
def add_line(self, text, top, left, color=None):
self._panel.addstr(top, left, text, color)
def refresh(self, ptopy, ptopx, stopy, stopx, sbottomy, sbottomx):
self.ptopx = ptopx
self.ptopy = ptopy
self.stopy = stopy
self.stopx = stopx
self.sbottomy = sbottomy
self.sbottomx = sbottomx
return self._panel.refresh(self.ptopy, self.ptopx, self.stopy, self.stopx, self.sbottomy, self.sbottomx)
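    # Argument semantics follow curses.newpad.refresh; with hypothetical
    # numbers, refresh(5, 0, 1, 0, 20, 79) paints the pad starting at pad
    # row 5, column 0 into the screen rectangle with top-left corner
    # (1, 0) and bottom-right corner (20, 79).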
def getch(self):
return self._panel.getch()
def scroll_up(self):
self.refresh(max(self.ptopy - 1, 0), self.ptopx, self.stopy, self.stopx, self.sbottomy, self.sbottomx)
def scroll_down(self):
        self.refresh(min(self.ptopy+1, self.max), self.ptopx, self.stopy, self.stopx, self.sbottomy, self.sbottomx)
| mit | -8,038,196,374,112,639,000 | 31.627778 | 115 | 0.626192 | false |
manasapte/pants | src/python/pants/java/nailgun_client.py | 7 | 6693 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
import os
import signal
import socket
import sys
from pants.java.nailgun_io import NailgunStreamReader
from pants.java.nailgun_protocol import ChunkType, NailgunProtocol
from pants.util.socket import RecvBufferedSocket
logger = logging.getLogger(__name__)
class NailgunClientSession(NailgunProtocol):
"""Handles a single nailgun client session."""
def __init__(self, sock, in_fd, out_fd, err_fd):
self._sock = sock
self._input_reader = NailgunStreamReader(in_fd, self._sock) if in_fd else None
self._stdout = out_fd
self._stderr = err_fd
self.remote_pid = None
def _maybe_start_input_reader(self):
if self._input_reader:
self._input_reader.start()
def _maybe_stop_input_reader(self):
if self._input_reader:
self._input_reader.stop()
def _process_session(self):
"""Process the outputs of the nailgun session."""
try:
for chunk_type, payload in self.iter_chunks(self._sock, return_bytes=True):
if chunk_type == ChunkType.STDOUT:
self._stdout.write(payload)
self._stdout.flush()
elif chunk_type == ChunkType.STDERR:
self._stderr.write(payload)
self._stderr.flush()
elif chunk_type == ChunkType.EXIT:
self._stdout.flush()
self._stderr.flush()
return int(payload)
elif chunk_type == ChunkType.PID:
self.remote_pid = int(payload)
elif chunk_type == ChunkType.START_READING_INPUT:
self._maybe_start_input_reader()
else:
raise self.ProtocolError('received unexpected chunk {} -> {}'.format(chunk_type, payload))
finally:
# Bad chunk types received from the server can throw NailgunProtocol.ProtocolError in
# NailgunProtocol.iter_chunks(). This ensures the NailgunStreamReader is always stopped.
self._maybe_stop_input_reader()
def execute(self, working_dir, main_class, *arguments, **environment):
# Send the nailgun request.
self.send_request(self._sock, working_dir, main_class, *arguments, **environment)
# Process the remainder of the nailgun session.
return self._process_session()
class NailgunClient(object):
"""A python nailgun client (see http://martiansoftware.com/nailgun for more info)."""
class NailgunError(Exception):
"""Indicates an error interacting with a nailgun server."""
class NailgunConnectionError(NailgunError):
"""Indicates an error upon initial connect to the nailgun server."""
# For backwards compatibility with nails expecting the ng c client special env vars.
ENV_DEFAULTS = dict(NAILGUN_FILESEPARATOR=os.sep, NAILGUN_PATHSEPARATOR=os.pathsep)
DEFAULT_NG_HOST = '127.0.0.1'
DEFAULT_NG_PORT = 2113
def __init__(self, host=DEFAULT_NG_HOST, port=DEFAULT_NG_PORT, ins=sys.stdin, out=None, err=None,
workdir=None):
"""Creates a nailgun client that can be used to issue zero or more nailgun commands.
:param string host: the nailgun server to contact (defaults to '127.0.0.1')
:param int port: the port the nailgun server is listening on (defaults to the default nailgun
port: 2113)
:param file ins: a file to read command standard input from (defaults to stdin) - can be None
in which case no input is read
:param file out: a stream to write command standard output to (defaults to stdout)
:param file err: a stream to write command standard error to (defaults to stderr)
:param string workdir: the default working directory for all nailgun commands (defaults to CWD)
"""
self._host = host
self._port = port
self._stdin = ins
self._stdout = out or sys.stdout
self._stderr = err or sys.stderr
self._workdir = workdir or os.path.abspath(os.path.curdir)
self._session = None
def try_connect(self):
"""Creates a socket, connects it to the nailgun and returns the connected socket.
:returns: a connected `socket.socket`.
:raises: `NailgunClient.NailgunConnectionError` on failure to connect.
"""
sock = RecvBufferedSocket(socket.socket(socket.AF_INET, socket.SOCK_STREAM))
try:
sock.connect((self._host, self._port))
except (socket.error, socket.gaierror) as e:
logger.debug('Encountered socket exception {!r} when attempting connect to nailgun'.format(e))
sock.close()
raise self.NailgunConnectionError(
'Problem connecting to nailgun server at {}:{}: {!r}'.format(self._host, self._port, e))
else:
return sock
def send_control_c(self):
"""Sends SIGINT to a nailgun server using pid information from the active session."""
if self._session and self._session.remote_pid is not None:
os.kill(self._session.remote_pid, signal.SIGINT)
def execute(self, main_class, cwd=None, *args, **environment):
"""Executes the given main_class with any supplied args in the given environment.
:param string main_class: the fully qualified class name of the main entrypoint
:param string cwd: Set the working directory for this command
:param list args: any arguments to pass to the main entrypoint
:param dict environment: an env mapping made available to native nails via the nail context
:returns: the exit code of the main_class.
"""
environment = dict(self.ENV_DEFAULTS.items() + environment.items())
cwd = cwd or self._workdir
# N.B. This can throw NailgunConnectionError (catchable via NailgunError).
sock = self.try_connect()
self._session = NailgunClientSession(sock, self._stdin, self._stdout, self._stderr)
try:
return self._session.execute(cwd, main_class, *args, **environment)
except socket.error as e:
raise self.NailgunError('Problem communicating with nailgun server at {}:{}: {!r}'
.format(self._host, self._port, e))
except NailgunProtocol.ProtocolError as e:
raise self.NailgunError('Problem in nailgun protocol with nailgun server at {}:{}: {!r}'
.format(self._host, self._port, e))
finally:
sock.close()
self._session = None
def __repr__(self):
return 'NailgunClient(host={!r}, port={!r}, workdir={!r})'.format(self._host,
self._port,
self._workdir)
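# Minimal usage sketch. The main class name and argument below are
# hypothetical, and a nailgun server must already be listening on the
# default host/port for this to succeed:
if __name__ == '__main__':
  client = NailgunClient()
  exit_code = client.execute('com.example.Main', None, '--help')
  print('nailgun exited with code {}'.format(exit_code))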
| apache-2.0 | 1,414,510,970,747,774,000 | 40.571429 | 100 | 0.666368 | false |
detrout/pykolab | pykolab/cli/cmd_remove_mailaddress.py | 1 | 3308 | # -*- coding: utf-8 -*-
# Copyright 2010-2012 Kolab Systems AG (http://www.kolabsys.com)
#
# Jeroen van Meeuwen (Kolab Systems) <vanmeeuwen a kolabsys.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 or, at your option, any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
import sys
import commands
import pykolab
from pykolab.auth import Auth
from pykolab import utils
from pykolab.translate import _
log = pykolab.getLogger('pykolab.cli')
conf = pykolab.getConf()
def __init__():
commands.register('remove_mail', execute, description=description())
def description():
return """Remove a recipient's mail address."""
def execute(*args, **kw):
try:
email_address = conf.cli_args.pop(0)
except IndexError, errmsg:
email_address = utils.ask_question("Email address to remove")
# Get the domain from the email address
if len(email_address.split('@')) > 1:
domain = email_address.split('@')[1]
else:
log.error(_("Invalid or unqualified email address."))
sys.exit(1)
auth = Auth()
auth.connect(domain=domain)
recipients = auth.find_recipient(email_address)
if len(recipients) == 0:
log.error(_("No recipient found for email address %r") % (email_address))
sys.exit(1)
log.debug(_("Found the following recipient(s): %r") % (recipients), level=8)
mail_attributes = conf.get_list(domain, 'mail_attributes')
if mail_attributes == None or len(mail_attributes) < 1:
mail_attributes = conf.get_list(conf.get('kolab', 'auth_mechanism'), 'mail_attributes')
log.debug(_("Using the following mail attributes: %r") % (mail_attributes), level=8)
if isinstance(recipients, basestring):
recipient = recipients
# Only a single recipient found, remove the address
attributes = auth.get_entry_attributes(domain, recipient, mail_attributes)
# See which attribute holds the value we're trying to remove
for attribute in attributes.keys():
if isinstance(attributes[attribute], list):
if email_address in attributes[attribute]:
attributes[attribute].pop(attributes[attribute].index(email_address))
replace_attributes = {
attribute: attributes[attribute]
}
auth.set_entry_attributes(domain, recipient, replace_attributes)
else:
if email_address == attributes[attribute]:
auth.set_entry_attributes(domain, recipient, {attribute: None})
else:
print >> sys.stderr, _("Found the following recipients:")
for recipient in recipients:
print recipient
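# Worked example of the rewrite above (entry values hypothetical): for a
# recipient whose entry carries mail='john@example.org' and
# alias=['j@example.org', 'john@example.org'], removing 'john@example.org'
# pops it from the alias list and writes the shortened list back, while a
# single-valued attribute equal to the address is cleared by setting it
# to None.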
| gpl-3.0 | 4,002,420,812,717,639,000 | 34.569892 | 95 | 0.66052 | false |
klonage/nlt-gcs | packages/IronPython.StdLib.2.7.4/content/Lib/pickle.py | 42 | 46516 | """Create portable serialized representations of Python objects.
See module cPickle for a (much) faster implementation.
See module copy_reg for a mechanism for registering custom picklers.
See module pickletools source for extensive comments.
Classes:
Pickler
Unpickler
Functions:
dump(object, file)
dumps(object) -> string
load(file) -> object
loads(string) -> object
Misc variables:
__version__
format_version
compatible_formats
"""
__version__ = "$Revision$" # Code version
from types import *
from copy_reg import dispatch_table
from copy_reg import _extension_registry, _inverted_registry, _extension_cache
import marshal
import sys
import struct
import re
__all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler",
"Unpickler", "dump", "dumps", "load", "loads"]
# These are purely informational; no code uses these.
format_version = "2.0" # File format version we write
compatible_formats = ["1.0", # Original protocol 0
"1.1", # Protocol 0 with INST added
"1.2", # Original protocol 1
"1.3", # Protocol 1 with BINFLOAT added
"2.0", # Protocol 2
] # Old format versions we can read
# Keep in synch with cPickle. This is the highest protocol number we
# know how to read.
HIGHEST_PROTOCOL = 2
# Why use struct.pack() for pickling but marshal.loads() for
# unpickling? struct.pack() is 40% faster than marshal.dumps(), but
# marshal.loads() is twice as fast as struct.unpack()!
mloads = marshal.loads
class PickleError(Exception):
"""A common base class for the other pickling exceptions."""
pass
class PicklingError(PickleError):
"""This exception is raised when an unpicklable object is passed to the
dump() method.
"""
pass
class UnpicklingError(PickleError):
"""This exception is raised when there is a problem unpickling an object,
such as a security violation.
Note that other exceptions may also be raised during unpickling, including
(but not necessarily limited to) AttributeError, EOFError, ImportError,
and IndexError.
"""
pass
# An instance of _Stop is raised by Unpickler.load_stop() in response to
# the STOP opcode, passing the object that is the result of unpickling.
class _Stop(Exception):
def __init__(self, value):
self.value = value
# Jython has PyStringMap; it's a dict subclass with string keys
try:
from org.python.core import PyStringMap
except ImportError:
PyStringMap = None
# UnicodeType may or may not be exported (normally imported from types)
try:
UnicodeType
except NameError:
UnicodeType = None
# Pickle opcodes. See pickletools.py for extensive docs. The listing
# here is in kind-of alphabetical order of 1-character pickle code.
# pickletools groups them by purpose.
MARK = '(' # push special markobject on stack
STOP = '.' # every pickle ends with STOP
POP = '0' # discard topmost stack item
POP_MARK = '1' # discard stack top through topmost markobject
DUP = '2' # duplicate top stack item
FLOAT = 'F' # push float object; decimal string argument
INT = 'I' # push integer or bool; decimal string argument
BININT = 'J' # push four-byte signed int
BININT1 = 'K' # push 1-byte unsigned int
LONG = 'L' # push long; decimal string argument
BININT2 = 'M' # push 2-byte unsigned int
NONE = 'N' # push None
PERSID = 'P' # push persistent object; id is taken from string arg
BINPERSID = 'Q' # " " " ; " " " " stack
REDUCE = 'R' # apply callable to argtuple, both on stack
STRING = 'S' # push string; NL-terminated string argument
BINSTRING = 'T' # push string; counted binary string argument
SHORT_BINSTRING = 'U' # " " ; " " " " < 256 bytes
UNICODE = 'V' # push Unicode string; raw-unicode-escaped'd argument
BINUNICODE = 'X' # " " " ; counted UTF-8 string argument
APPEND = 'a' # append stack top to list below it
BUILD = 'b' # call __setstate__ or __dict__.update()
GLOBAL = 'c' # push self.find_class(modname, name); 2 string args
DICT = 'd' # build a dict from stack items
EMPTY_DICT = '}' # push empty dict
APPENDS = 'e' # extend list on stack by topmost stack slice
GET = 'g' # push item from memo on stack; index is string arg
BINGET = 'h' # " " " " " " ; " " 1-byte arg
INST = 'i' # build & push class instance
LONG_BINGET = 'j' # push item from memo on stack; index is 4-byte arg
LIST = 'l' # build list from topmost stack items
EMPTY_LIST = ']' # push empty list
OBJ = 'o' # build & push class instance
PUT = 'p' # store stack top in memo; index is string arg
BINPUT = 'q' # " " " " " ; " " 1-byte arg
LONG_BINPUT = 'r' # " " " " " ; " " 4-byte arg
SETITEM = 's' # add key+value pair to dict
TUPLE = 't' # build tuple from topmost stack items
EMPTY_TUPLE = ')' # push empty tuple
SETITEMS = 'u' # modify dict by adding topmost key+value pairs
BINFLOAT = 'G' # push float; arg is 8-byte float encoding
TRUE = 'I01\n' # not an opcode; see INT docs in pickletools.py
FALSE = 'I00\n' # not an opcode; see INT docs in pickletools.py
# Protocol 2
PROTO = '\x80' # identify pickle protocol
NEWOBJ = '\x81' # build object by applying cls.__new__ to argtuple
EXT1 = '\x82' # push object from extension registry; 1-byte index
EXT2 = '\x83' # ditto, but 2-byte index
EXT4 = '\x84' # ditto, but 4-byte index
TUPLE1 = '\x85' # build 1-tuple from stack top
TUPLE2 = '\x86' # build 2-tuple from two topmost stack items
TUPLE3 = '\x87' # build 3-tuple from three topmost stack items
NEWTRUE = '\x88' # push True
NEWFALSE = '\x89' # push False
LONG1 = '\x8a' # push long from < 256 bytes
LONG4 = '\x8b' # push really big long
_tuplesize2code = [EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3]
__all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$",x)])
del x
# Pickling machinery
class Pickler:
def __init__(self, file, protocol=None):
"""This takes a file-like object for writing a pickle data stream.
The optional protocol argument tells the pickler to use the
given protocol; supported protocols are 0, 1, 2. The default
protocol is 0, to be backwards compatible. (Protocol 0 is the
only protocol that can be written to a file opened in text
mode and read back successfully. When using a protocol higher
than 0, make sure the file is opened in binary mode, both when
pickling and unpickling.)
Protocol 1 is more efficient than protocol 0; protocol 2 is
more efficient than protocol 1.
Specifying a negative protocol version selects the highest
protocol version supported. The higher the protocol used, the
more recent the version of Python needed to read the pickle
produced.
The file parameter must have a write() method that accepts a single
string argument. It can thus be an open file object, a StringIO
object, or any other custom object that meets this interface.
"""
if protocol is None:
protocol = 0
if protocol < 0:
protocol = HIGHEST_PROTOCOL
elif not 0 <= protocol <= HIGHEST_PROTOCOL:
raise ValueError("pickle protocol must be <= %d" % HIGHEST_PROTOCOL)
self.write = file.write
self.memo = {}
self.proto = int(protocol)
self.bin = protocol >= 1
self.fast = 0
def clear_memo(self):
"""Clears the pickler's "memo".
The memo is the data structure that remembers which objects the
pickler has already seen, so that shared or recursive objects are
pickled by reference and not by value. This method is useful when
re-using picklers.
"""
self.memo.clear()
def dump(self, obj):
"""Write a pickled representation of obj to the open file."""
if self.proto >= 2:
self.write(PROTO + chr(self.proto))
self.save(obj)
self.write(STOP)
def memoize(self, obj):
"""Store an object in the memo."""
# The Pickler memo is a dictionary mapping object ids to 2-tuples
# that contain the Unpickler memo key and the object being memoized.
# The memo key is written to the pickle and will become
# the key in the Unpickler's memo. The object is stored in the
# Pickler memo so that transient objects are kept alive during
# pickling.
# The use of the Unpickler memo length as the memo key is just a
# convention. The only requirement is that the memo values be unique.
# But there appears no advantage to any other scheme, and this
# scheme allows the Unpickler memo to be implemented as a plain (but
# growable) array, indexed by memo key.
if self.fast:
return
assert id(obj) not in self.memo
memo_len = len(self.memo)
self.write(self.put(memo_len))
self.memo[id(obj)] = memo_len, obj
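    # Concrete effect of the memo: pickling a list such as [x, x] saves x
    # once followed by a PUT of a fresh memo key; the second reference to
    # x is written as a GET of that key, so unpickling yields a list with
    # two references to a single object.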
# Return a PUT (BINPUT, LONG_BINPUT) opcode string, with argument i.
def put(self, i, pack=struct.pack):
if self.bin:
if i < 256:
return BINPUT + chr(i)
else:
return LONG_BINPUT + pack("<i", i)
return PUT + repr(i) + '\n'
# Return a GET (BINGET, LONG_BINGET) opcode string, with argument i.
def get(self, i, pack=struct.pack):
if self.bin:
if i < 256:
return BINGET + chr(i)
else:
return LONG_BINGET + pack("<i", i)
return GET + repr(i) + '\n'
def save(self, obj):
# Check for persistent id (defined by a subclass)
pid = self.persistent_id(obj)
if pid:
self.save_pers(pid)
return
# Check the memo
x = self.memo.get(id(obj))
if x:
self.write(self.get(x[0]))
return
# Check the type dispatch table
t = type(obj)
f = self.dispatch.get(t)
if f:
f(self, obj) # Call unbound method with explicit self
return
# Check for a class with a custom metaclass; treat as regular class
try:
issc = issubclass(t, TypeType)
except TypeError: # t is not a class (old Boost; see SF #502085)
issc = 0
if issc:
self.save_global(obj)
return
# Check copy_reg.dispatch_table
reduce = dispatch_table.get(t)
if reduce:
rv = reduce(obj)
else:
# Check for a __reduce_ex__ method, fall back to __reduce__
reduce = getattr(obj, "__reduce_ex__", None)
if reduce:
rv = reduce(self.proto)
else:
reduce = getattr(obj, "__reduce__", None)
if reduce:
rv = reduce()
else:
raise PicklingError("Can't pickle %r object: %r" %
(t.__name__, obj))
# Check for string returned by reduce(), meaning "save as global"
if type(rv) is StringType:
self.save_global(obj, rv)
return
# Assert that reduce() returned a tuple
if type(rv) is not TupleType:
raise PicklingError("%s must return string or tuple" % reduce)
# Assert that it returned an appropriately sized tuple
l = len(rv)
if not (2 <= l <= 5):
raise PicklingError("Tuple returned by %s must have "
"two to five elements" % reduce)
# Save the reduce() output and finally memoize the object
self.save_reduce(obj=obj, *rv)
def persistent_id(self, obj):
# This exists so a subclass can override it
return None
def save_pers(self, pid):
# Save a persistent id reference
if self.bin:
self.save(pid)
self.write(BINPERSID)
else:
self.write(PERSID + str(pid) + '\n')
def save_reduce(self, func, args, state=None,
listitems=None, dictitems=None, obj=None):
# This API is called by some subclasses
# Assert that args is a tuple or None
if not isinstance(args, TupleType):
raise PicklingError("args from reduce() should be a tuple")
# Assert that func is callable
if not hasattr(func, '__call__'):
raise PicklingError("func from reduce should be callable")
save = self.save
write = self.write
# Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
# A __reduce__ implementation can direct protocol 2 to
# use the more efficient NEWOBJ opcode, while still
# allowing protocol 0 and 1 to work normally. For this to
# work, the function returned by __reduce__ should be
# called __newobj__, and its first argument should be a
# new-style class. The implementation for __newobj__
# should be as follows, although pickle has no way to
# verify this:
#
# def __newobj__(cls, *args):
# return cls.__new__(cls, *args)
#
# Protocols 0 and 1 will pickle a reference to __newobj__,
# while protocol 2 (and above) will pickle a reference to
# cls, the remaining args tuple, and the NEWOBJ code,
# which calls cls.__new__(cls, *args) at unpickling time
# (see load_newobj below). If __reduce__ returns a
# three-tuple, the state from the third tuple item will be
# pickled regardless of the protocol, calling __setstate__
# at unpickling time (see load_build below).
#
# Note that no standard __newobj__ implementation exists;
# you have to provide your own. This is to enforce
# compatibility with Python 2.2 (pickles written using
# protocol 0 or 1 in Python 2.3 should be unpicklable by
# Python 2.2).
cls = args[0]
if not hasattr(cls, "__new__"):
raise PicklingError(
"args[0] from __newobj__ args has no __new__")
if obj is not None and cls is not obj.__class__:
raise PicklingError(
"args[0] from __newobj__ args has the wrong class")
args = args[1:]
save(cls)
save(args)
write(NEWOBJ)
else:
save(func)
save(args)
write(REDUCE)
if obj is not None:
self.memoize(obj)
# More new special cases (that work with older protocols as
# well): when __reduce__ returns a tuple with 4 or 5 items,
# the 4th and 5th item should be iterators that provide list
# items and dict items (as (key, value) tuples), or None.
if listitems is not None:
self._batch_appends(listitems)
if dictitems is not None:
self._batch_setitems(dictitems)
if state is not None:
save(state)
write(BUILD)
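    # Sketch of a class opting in to the __newobj__ protocol described
    # above (names hypothetical; copy_reg supplies a suitable
    # __newobj__):
    #
    #     import copy_reg
    #     class Point(object):
    #         def __init__(self, x, y):
    #             self.x, self.y = x, y
    #         def __reduce__(self):
    #             return (copy_reg.__newobj__, (Point,),
    #                     {'x': self.x, 'y': self.y})
    #
    # Protocol 2 then emits NEWOBJ for Point instances, while protocols 0
    # and 1 pickle a reference to copy_reg.__newobj__ and use REDUCE.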
# Methods below this point are dispatched through the dispatch table
dispatch = {}
def save_none(self, obj):
self.write(NONE)
dispatch[NoneType] = save_none
def save_bool(self, obj):
if self.proto >= 2:
self.write(obj and NEWTRUE or NEWFALSE)
else:
self.write(obj and TRUE or FALSE)
dispatch[bool] = save_bool
def save_int(self, obj, pack=struct.pack):
if self.bin:
# If the int is small enough to fit in a signed 4-byte 2's-comp
# format, we can store it more efficiently than the general
# case.
# First one- and two-byte unsigned ints:
if obj >= 0:
if obj <= 0xff:
self.write(BININT1 + chr(obj))
return
if obj <= 0xffff:
self.write("%c%c%c" % (BININT2, obj&0xff, obj>>8))
return
# Next check for 4-byte signed ints:
high_bits = obj >> 31 # note that Python shift sign-extends
if high_bits == 0 or high_bits == -1:
# All high bits are copies of bit 2**31, so the value
# fits in a 4-byte signed int.
self.write(BININT + pack("<i", obj))
return
# Text pickle, or int too big to fit in signed 4-byte format.
self.write(INT + repr(obj) + '\n')
dispatch[IntType] = save_int
def save_long(self, obj, pack=struct.pack):
if self.proto >= 2:
bytes = encode_long(obj)
n = len(bytes)
if n < 256:
self.write(LONG1 + chr(n) + bytes)
else:
self.write(LONG4 + pack("<i", n) + bytes)
return
self.write(LONG + repr(obj) + '\n')
dispatch[LongType] = save_long
def save_float(self, obj, pack=struct.pack):
if self.bin:
self.write(BINFLOAT + pack('>d', obj))
else:
self.write(FLOAT + repr(obj) + '\n')
dispatch[FloatType] = save_float
def save_string(self, obj, pack=struct.pack):
if self.bin:
n = len(obj)
if n < 256:
self.write(SHORT_BINSTRING + chr(n) + obj)
else:
self.write(BINSTRING + pack("<i", n) + obj)
else:
self.write(STRING + repr(obj) + '\n')
self.memoize(obj)
dispatch[StringType] = save_string
def save_unicode(self, obj, pack=struct.pack):
if self.bin:
encoding = obj.encode('utf-8')
n = len(encoding)
self.write(BINUNICODE + pack("<i", n) + encoding)
else:
obj = obj.replace("\\", "\\u005c")
obj = obj.replace("\n", "\\u000a")
self.write(UNICODE + obj.encode('raw-unicode-escape') + '\n')
self.memoize(obj)
dispatch[UnicodeType] = save_unicode
if StringType is UnicodeType:
# This is true for Jython
def save_string(self, obj, pack=struct.pack):
unicode = obj.isunicode()
if self.bin:
if unicode:
obj = obj.encode("utf-8")
l = len(obj)
if l < 256 and not unicode:
self.write(SHORT_BINSTRING + chr(l) + obj)
else:
s = pack("<i", l)
if unicode:
self.write(BINUNICODE + s + obj)
else:
self.write(BINSTRING + s + obj)
else:
if unicode:
obj = obj.replace("\\", "\\u005c")
obj = obj.replace("\n", "\\u000a")
obj = obj.encode('raw-unicode-escape')
self.write(UNICODE + obj + '\n')
else:
self.write(STRING + repr(obj) + '\n')
self.memoize(obj)
dispatch[StringType] = save_string
def save_tuple(self, obj):
write = self.write
proto = self.proto
n = len(obj)
if n == 0:
if proto:
write(EMPTY_TUPLE)
else:
write(MARK + TUPLE)
return
save = self.save
memo = self.memo
if n <= 3 and proto >= 2:
for element in obj:
save(element)
# Subtle. Same as in the big comment below.
if id(obj) in memo:
get = self.get(memo[id(obj)][0])
write(POP * n + get)
else:
write(_tuplesize2code[n])
self.memoize(obj)
return
# proto 0 or proto 1 and tuple isn't empty, or proto > 1 and tuple
# has more than 3 elements.
write(MARK)
for element in obj:
save(element)
if id(obj) in memo:
# Subtle. d was not in memo when we entered save_tuple(), so
# the process of saving the tuple's elements must have saved
# the tuple itself: the tuple is recursive. The proper action
# now is to throw away everything we put on the stack, and
# simply GET the tuple (it's already constructed). This check
# could have been done in the "for element" loop instead, but
# recursive tuples are a rare thing.
get = self.get(memo[id(obj)][0])
if proto:
write(POP_MARK + get)
else: # proto 0 -- POP_MARK not available
write(POP * (n+1) + get)
return
# No recursion.
self.write(TUPLE)
self.memoize(obj)
dispatch[TupleType] = save_tuple
# save_empty_tuple() isn't used by anything in Python 2.3. However, I
# found a Pickler subclass in Zope3 that calls it, so it's not harmless
# to remove it.
def save_empty_tuple(self, obj):
self.write(EMPTY_TUPLE)
def save_list(self, obj):
write = self.write
if self.bin:
write(EMPTY_LIST)
else: # proto 0 -- can't use EMPTY_LIST
write(MARK + LIST)
self.memoize(obj)
self._batch_appends(iter(obj))
dispatch[ListType] = save_list
# Keep in synch with cPickle's BATCHSIZE. Nothing will break if it gets
# out of synch, though.
_BATCHSIZE = 1000
def _batch_appends(self, items):
# Helper to batch up APPENDS sequences
save = self.save
write = self.write
if not self.bin:
for x in items:
save(x)
write(APPEND)
return
r = xrange(self._BATCHSIZE)
while items is not None:
tmp = []
for i in r:
try:
x = items.next()
tmp.append(x)
except StopIteration:
items = None
break
n = len(tmp)
if n > 1:
write(MARK)
for x in tmp:
save(x)
write(APPENDS)
elif n:
save(tmp[0])
write(APPEND)
# else tmp is empty, and we're done
def save_dict(self, obj):
write = self.write
if self.bin:
write(EMPTY_DICT)
else: # proto 0 -- can't use EMPTY_DICT
write(MARK + DICT)
self.memoize(obj)
self._batch_setitems(obj.iteritems())
dispatch[DictionaryType] = save_dict
if not PyStringMap is None:
dispatch[PyStringMap] = save_dict
def _batch_setitems(self, items):
# Helper to batch up SETITEMS sequences; proto >= 1 only
save = self.save
write = self.write
if not self.bin:
for k, v in items:
save(k)
save(v)
write(SETITEM)
return
r = xrange(self._BATCHSIZE)
while items is not None:
tmp = []
for i in r:
try:
tmp.append(items.next())
except StopIteration:
items = None
break
n = len(tmp)
if n > 1:
write(MARK)
for k, v in tmp:
save(k)
save(v)
write(SETITEMS)
elif n:
k, v = tmp[0]
save(k)
save(v)
write(SETITEM)
# else tmp is empty, and we're done
def save_inst(self, obj):
cls = obj.__class__
memo = self.memo
write = self.write
save = self.save
if hasattr(obj, '__getinitargs__'):
args = obj.__getinitargs__()
len(args) # XXX Assert it's a sequence
_keep_alive(args, memo)
else:
args = ()
write(MARK)
if self.bin:
save(cls)
for arg in args:
save(arg)
write(OBJ)
else:
for arg in args:
save(arg)
write(INST + cls.__module__ + '\n' + cls.__name__ + '\n')
self.memoize(obj)
try:
getstate = obj.__getstate__
except AttributeError:
stuff = obj.__dict__
else:
stuff = getstate()
_keep_alive(stuff, memo)
save(stuff)
write(BUILD)
dispatch[InstanceType] = save_inst
def save_global(self, obj, name=None, pack=struct.pack):
write = self.write
memo = self.memo
if name is None:
name = obj.__name__
module = getattr(obj, "__module__", None)
if module is None:
module = whichmodule(obj, name)
try:
__import__(module)
mod = sys.modules[module]
klass = getattr(mod, name)
except (ImportError, KeyError, AttributeError):
raise PicklingError(
"Can't pickle %r: it's not found as %s.%s" %
(obj, module, name))
else:
if klass is not obj:
raise PicklingError(
"Can't pickle %r: it's not the same object as %s.%s" %
(obj, module, name))
if self.proto >= 2:
code = _extension_registry.get((module, name))
if code:
assert code > 0
if code <= 0xff:
write(EXT1 + chr(code))
elif code <= 0xffff:
write("%c%c%c" % (EXT2, code&0xff, code>>8))
else:
write(EXT4 + pack("<i", code))
return
write(GLOBAL + module + '\n' + name + '\n')
self.memoize(obj)
dispatch[ClassType] = save_global
dispatch[FunctionType] = save_global
dispatch[BuiltinFunctionType] = save_global
dispatch[TypeType] = save_global
# Pickling helpers
def _keep_alive(x, memo):
"""Keeps a reference to the object x in the memo.
Because we remember objects by their id, we have
to assure that possibly temporary objects are kept
alive by referencing them.
We store a reference at the id of the memo, which should
normally not be used unless someone tries to deepcopy
the memo itself...
"""
try:
memo[id(memo)].append(x)
except KeyError:
# aha, this is the first one :-)
memo[id(memo)]=[x]
# A cache for whichmodule(), mapping a function object to the name of
# the module in which the function was found.
classmap = {} # called classmap for backwards compatibility
def whichmodule(func, funcname):
"""Figure out the module in which a function occurs.
Search sys.modules for the module.
Cache in classmap.
Return a module name.
If the function cannot be found, return "__main__".
"""
# Python functions should always get an __module__ from their globals.
mod = getattr(func, "__module__", None)
if mod is not None:
return mod
if func in classmap:
return classmap[func]
for name, module in sys.modules.items():
if module is None:
continue # skip dummy package entries
if name != '__main__' and getattr(module, funcname, None) is func:
break
else:
name = '__main__'
classmap[func] = name
return name
# Unpickling machinery
class Unpickler:
def __init__(self, file):
"""This takes a file-like object for reading a pickle data stream.
The protocol version of the pickle is detected automatically, so no
proto argument is needed.
The file-like object must have two methods, a read() method that
takes an integer argument, and a readline() method that requires no
arguments. Both methods should return a string. Thus file-like
object can be a file object opened for reading, a StringIO object,
or any other custom object that meets this interface.
"""
self.readline = file.readline
self.read = file.read
self.memo = {}
def load(self):
"""Read a pickled object representation from the open file.
Return the reconstituted object hierarchy specified in the file.
"""
self.mark = object() # any new unique object
self.stack = []
self.append = self.stack.append
read = self.read
dispatch = self.dispatch
try:
while 1:
key = read(1)
dispatch[key](self)
except _Stop, stopinst:
return stopinst.value
# Return largest index k such that self.stack[k] is self.mark.
# If the stack doesn't contain a mark, eventually raises IndexError.
    # This could be sped up by maintaining another stack of indices at which
# the mark appears. For that matter, the latter stack would suffice,
# and we wouldn't need to push mark objects on self.stack at all.
# Doing so is probably a good thing, though, since if the pickle is
# corrupt (or hostile) we may get a clue from finding self.mark embedded
# in unpickled objects.
def marker(self):
stack = self.stack
mark = self.mark
k = len(stack)-1
while stack[k] is not mark: k = k-1
return k
dispatch = {}
def load_eof(self):
raise EOFError
dispatch[''] = load_eof
def load_proto(self):
proto = ord(self.read(1))
if not 0 <= proto <= 2:
raise ValueError, "unsupported pickle protocol: %d" % proto
dispatch[PROTO] = load_proto
def load_persid(self):
pid = self.readline()[:-1]
self.append(self.persistent_load(pid))
dispatch[PERSID] = load_persid
def load_binpersid(self):
pid = self.stack.pop()
self.append(self.persistent_load(pid))
dispatch[BINPERSID] = load_binpersid
def load_none(self):
self.append(None)
dispatch[NONE] = load_none
def load_false(self):
self.append(False)
dispatch[NEWFALSE] = load_false
def load_true(self):
self.append(True)
dispatch[NEWTRUE] = load_true
def load_int(self):
data = self.readline()
if data == FALSE[1:]:
val = False
elif data == TRUE[1:]:
val = True
else:
try:
val = int(data)
except ValueError:
val = long(data)
self.append(val)
dispatch[INT] = load_int
def load_binint(self):
self.append(mloads('i' + self.read(4)))
dispatch[BININT] = load_binint
def load_binint1(self):
self.append(ord(self.read(1)))
dispatch[BININT1] = load_binint1
def load_binint2(self):
self.append(mloads('i' + self.read(2) + '\000\000'))
dispatch[BININT2] = load_binint2
def load_long(self):
self.append(long(self.readline()[:-1], 0))
dispatch[LONG] = load_long
def load_long1(self):
n = ord(self.read(1))
bytes = self.read(n)
self.append(decode_long(bytes))
dispatch[LONG1] = load_long1
def load_long4(self):
n = mloads('i' + self.read(4))
bytes = self.read(n)
self.append(decode_long(bytes))
dispatch[LONG4] = load_long4
def load_float(self):
self.append(float(self.readline()[:-1]))
dispatch[FLOAT] = load_float
def load_binfloat(self, unpack=struct.unpack):
self.append(unpack('>d', self.read(8))[0])
dispatch[BINFLOAT] = load_binfloat
def load_string(self):
rep = self.readline()[:-1]
for q in "\"'": # double or single quote
if rep.startswith(q):
if not rep.endswith(q):
raise ValueError, "insecure string pickle"
rep = rep[len(q):-len(q)]
break
else:
raise ValueError, "insecure string pickle"
self.append(rep.decode("string-escape"))
dispatch[STRING] = load_string
def load_binstring(self):
len = mloads('i' + self.read(4))
self.append(self.read(len))
dispatch[BINSTRING] = load_binstring
def load_unicode(self):
self.append(unicode(self.readline()[:-1],'raw-unicode-escape'))
dispatch[UNICODE] = load_unicode
def load_binunicode(self):
len = mloads('i' + self.read(4))
self.append(unicode(self.read(len),'utf-8'))
dispatch[BINUNICODE] = load_binunicode
def load_short_binstring(self):
len = ord(self.read(1))
self.append(self.read(len))
dispatch[SHORT_BINSTRING] = load_short_binstring
def load_tuple(self):
k = self.marker()
self.stack[k:] = [tuple(self.stack[k+1:])]
dispatch[TUPLE] = load_tuple
def load_empty_tuple(self):
self.stack.append(())
dispatch[EMPTY_TUPLE] = load_empty_tuple
def load_tuple1(self):
self.stack[-1] = (self.stack[-1],)
dispatch[TUPLE1] = load_tuple1
def load_tuple2(self):
self.stack[-2:] = [(self.stack[-2], self.stack[-1])]
dispatch[TUPLE2] = load_tuple2
def load_tuple3(self):
self.stack[-3:] = [(self.stack[-3], self.stack[-2], self.stack[-1])]
dispatch[TUPLE3] = load_tuple3
def load_empty_list(self):
self.stack.append([])
dispatch[EMPTY_LIST] = load_empty_list
def load_empty_dictionary(self):
self.stack.append({})
dispatch[EMPTY_DICT] = load_empty_dictionary
def load_list(self):
k = self.marker()
self.stack[k:] = [self.stack[k+1:]]
dispatch[LIST] = load_list
def load_dict(self):
k = self.marker()
d = {}
items = self.stack[k+1:]
for i in range(0, len(items), 2):
key = items[i]
value = items[i+1]
d[key] = value
self.stack[k:] = [d]
dispatch[DICT] = load_dict
# INST and OBJ differ only in how they get a class object. It's not
# only sensible to do the rest in a common routine, the two routines
# previously diverged and grew different bugs.
# klass is the class to instantiate, and k points to the topmost mark
# object, following which are the arguments for klass.__init__.
def _instantiate(self, klass, k):
args = tuple(self.stack[k+1:])
del self.stack[k:]
instantiated = 0
if (not args and
type(klass) is ClassType and
not hasattr(klass, "__getinitargs__")):
try:
value = _EmptyClass()
value.__class__ = klass
instantiated = 1
except RuntimeError:
# In restricted execution, assignment to inst.__class__ is
# prohibited
pass
if not instantiated:
try:
value = klass(*args)
except TypeError, err:
raise TypeError, "in constructor for %s: %s" % (
klass.__name__, str(err)), sys.exc_info()[2]
self.append(value)
def load_inst(self):
module = self.readline()[:-1]
name = self.readline()[:-1]
klass = self.find_class(module, name)
self._instantiate(klass, self.marker())
dispatch[INST] = load_inst
def load_obj(self):
# Stack is ... markobject classobject arg1 arg2 ...
k = self.marker()
klass = self.stack.pop(k+1)
self._instantiate(klass, k)
dispatch[OBJ] = load_obj
def load_newobj(self):
args = self.stack.pop()
cls = self.stack[-1]
obj = cls.__new__(cls, *args)
self.stack[-1] = obj
dispatch[NEWOBJ] = load_newobj
def load_global(self):
module = self.readline()[:-1]
name = self.readline()[:-1]
klass = self.find_class(module, name)
self.append(klass)
dispatch[GLOBAL] = load_global
def load_ext1(self):
code = ord(self.read(1))
self.get_extension(code)
dispatch[EXT1] = load_ext1
def load_ext2(self):
code = mloads('i' + self.read(2) + '\000\000')
self.get_extension(code)
dispatch[EXT2] = load_ext2
def load_ext4(self):
code = mloads('i' + self.read(4))
self.get_extension(code)
dispatch[EXT4] = load_ext4
def get_extension(self, code):
nil = []
obj = _extension_cache.get(code, nil)
if obj is not nil:
self.append(obj)
return
key = _inverted_registry.get(code)
if not key:
raise ValueError("unregistered extension code %d" % code)
obj = self.find_class(*key)
_extension_cache[code] = obj
self.append(obj)
def find_class(self, module, name):
# Subclasses may override this
__import__(module)
mod = sys.modules[module]
klass = getattr(mod, name)
return klass
def load_reduce(self):
stack = self.stack
args = stack.pop()
func = stack[-1]
value = func(*args)
stack[-1] = value
dispatch[REDUCE] = load_reduce
def load_pop(self):
del self.stack[-1]
dispatch[POP] = load_pop
def load_pop_mark(self):
k = self.marker()
del self.stack[k:]
dispatch[POP_MARK] = load_pop_mark
def load_dup(self):
self.append(self.stack[-1])
dispatch[DUP] = load_dup
def load_get(self):
self.append(self.memo[self.readline()[:-1]])
dispatch[GET] = load_get
def load_binget(self):
i = ord(self.read(1))
self.append(self.memo[repr(i)])
dispatch[BINGET] = load_binget
def load_long_binget(self):
i = mloads('i' + self.read(4))
self.append(self.memo[repr(i)])
dispatch[LONG_BINGET] = load_long_binget
def load_put(self):
self.memo[self.readline()[:-1]] = self.stack[-1]
dispatch[PUT] = load_put
def load_binput(self):
i = ord(self.read(1))
self.memo[repr(i)] = self.stack[-1]
dispatch[BINPUT] = load_binput
def load_long_binput(self):
i = mloads('i' + self.read(4))
self.memo[repr(i)] = self.stack[-1]
dispatch[LONG_BINPUT] = load_long_binput
def load_append(self):
stack = self.stack
value = stack.pop()
list = stack[-1]
list.append(value)
dispatch[APPEND] = load_append
def load_appends(self):
stack = self.stack
mark = self.marker()
list = stack[mark - 1]
list.extend(stack[mark + 1:])
del stack[mark:]
dispatch[APPENDS] = load_appends
def load_setitem(self):
stack = self.stack
value = stack.pop()
key = stack.pop()
dict = stack[-1]
dict[key] = value
dispatch[SETITEM] = load_setitem
def load_setitems(self):
stack = self.stack
mark = self.marker()
dict = stack[mark - 1]
for i in range(mark + 1, len(stack), 2):
dict[stack[i]] = stack[i + 1]
del stack[mark:]
dispatch[SETITEMS] = load_setitems
def load_build(self):
stack = self.stack
state = stack.pop()
inst = stack[-1]
setstate = getattr(inst, "__setstate__", None)
if setstate:
setstate(state)
return
slotstate = None
if isinstance(state, tuple) and len(state) == 2:
state, slotstate = state
if state:
try:
d = inst.__dict__
try:
for k, v in state.iteritems():
d[intern(k)] = v
# keys in state don't have to be strings
# don't blow up, but don't go out of our way
except TypeError:
d.update(state)
except RuntimeError:
# XXX In restricted execution, the instance's __dict__
# is not accessible. Use the old way of unpickling
# the instance variables. This is a semantic
# difference when unpickling in restricted
# vs. unrestricted modes.
# Note, however, that cPickle has never tried to do the
# .update() business, and always uses
# PyObject_SetItem(inst.__dict__, key, value) in a
# loop over state.items().
for k, v in state.items():
setattr(inst, k, v)
if slotstate:
for k, v in slotstate.items():
setattr(inst, k, v)
dispatch[BUILD] = load_build
def load_mark(self):
self.append(self.mark)
dispatch[MARK] = load_mark
def load_stop(self):
value = self.stack.pop()
raise _Stop(value)
dispatch[STOP] = load_stop
# Helper class for load_inst/load_obj
class _EmptyClass:
pass
# Encode/decode longs in linear time.
import binascii as _binascii
def encode_long(x):
r"""Encode a long to a two's complement little-endian binary string.
Note that 0L is a special case, returning an empty string, to save a
byte in the LONG1 pickling context.
# bug 24549
#>>> encode_long(0L)
#''
#>>> encode_long(255L)
#'\xff\x00'
#>>> encode_long(32767L)
#'\xff\x7f'
#>>> encode_long(-256L)
#'\x00\xff'
#>>> encode_long(-32768L)
#'\x00\x80'
#>>> encode_long(-128L)
#'\x80'
#>>> encode_long(127L)
#'\x7f'
#>>>
"""
if x == 0:
return ''
if x > 0:
ashex = hex(x)
assert ashex.startswith("0x")
njunkchars = 2 + ashex.endswith('L')
nibbles = len(ashex) - njunkchars
if nibbles & 1:
# need an even # of nibbles for unhexlify
ashex = "0x0" + ashex[2:]
elif int(ashex[2], 16) >= 8:
# "looks negative", so need a byte of sign bits
ashex = "0x00" + ashex[2:]
else:
# Build the 256's-complement: (1L << nbytes) + x. The trick is
# to find the number of bytes in linear time (although that should
# really be a constant-time task).
ashex = hex(-x)
assert ashex.startswith("0x")
njunkchars = 2 + ashex.endswith('L')
nibbles = len(ashex) - njunkchars
if nibbles & 1:
# Extend to a full byte.
nibbles += 1
nbits = nibbles * 4
x += 1L << nbits
assert x > 0
ashex = hex(x)
njunkchars = 2 + ashex.endswith('L')
newnibbles = len(ashex) - njunkchars
if newnibbles < nibbles:
ashex = "0x" + "0" * (nibbles - newnibbles) + ashex[2:]
if int(ashex[2], 16) < 8:
# "looks positive", so need a byte of sign bits
ashex = "0xff" + ashex[2:]
if ashex.endswith('L'):
ashex = ashex[2:-1]
else:
ashex = ashex[2:]
assert len(ashex) & 1 == 0, (x, ashex)
binary = _binascii.unhexlify(ashex)
return binary[::-1]
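# Although the doctests above are disabled, the intended behaviour is
# unchanged: decode_long(encode_long(n)) == n for any long n, with 0L
# encoding to the empty string.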
def decode_long(data):
r"""Decode a long from a two's complement little-endian binary string.
>>> decode_long('')
0L
>>> decode_long("\xff\x00")
255L
>>> decode_long("\xff\x7f")
32767L
>>> decode_long("\x00\xff")
-256L
>>> decode_long("\x00\x80")
-32768L
>>> decode_long("\x80")
-128L
>>> decode_long("\x7f")
127L
"""
nbytes = len(data)
if nbytes == 0:
return 0L
ashex = _binascii.hexlify(data[::-1])
n = long(ashex, 16) # quadratic time before Python 2.3; linear now
if data[-1] >= '\x80':
n -= 1L << (nbytes * 8)
return n
# Shorthands
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
def dump(obj, file, protocol=None):
Pickler(file, protocol).dump(obj)
def dumps(obj, protocol=None):
file = StringIO()
Pickler(file, protocol).dump(obj)
return file.getvalue()
def load(file):
return Unpickler(file).load()
def loads(str):
file = StringIO(str)
return Unpickler(file).load()
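# Round-trip sketch using the shorthands (protocol 2 shown; any supported
# protocol behaves the same):
#
#     s = dumps({'spam': 1}, 2)
#     assert loads(s) == {'spam': 1}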
# Doctest
def _test():
import doctest
return doctest.testmod()
if __name__ == "__main__":
_test()
| gpl-3.0 | 2,992,326,597,203,099,600 | 31.44069 | 80 | 0.529882 | false |
jjmleiro/hue | desktop/core/ext-py/cffi-1.5.2/demo/pyobj.py | 13 | 3399 |
referents = [] # list "object descriptor -> python object"
freelist = None
def store(x):
"Store the object 'x' and returns a new object descriptor for it."
global freelist
p = freelist
if p is None:
p = len(referents)
referents.append(x)
else:
freelist = referents[p]
referents[p] = x
return p
def discard(p):
"""Discard (i.e. close) the object descriptor 'p'.
Return the original object that was attached to 'p'."""
global freelist
x = referents[p]
referents[p] = freelist
freelist = p
return x
class Ref(object):
"""For use in 'with Ref(x) as ob': open an object descriptor
    and return it in 'ob', closing it automatically when the
    'with' statement finishes."""
def __init__(self, x):
self.x = x
def __enter__(self):
self.p = p = store(self.x)
return p
def __exit__(self, *args):
discard(self.p)
def count_pyobj_alive():
result = len(referents)
p = freelist
while p is not None:
assert result > 0
result -= 1
p = referents[p]
return result
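# Usage sketch for the plain-Python side: p = store(x) hands out an
# integer descriptor, referents[p] resolves it, and discard(p) both
# returns x and recycles p through the freelist.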
# ------------------------------------------------------------
if __name__ == '__main__':
import api
ffi = api.PythonFFI()
ffi.cdef("""
typedef int pyobj_t;
int sum_integers(pyobj_t p_list);
pyobj_t sum_objects(pyobj_t p_list, pyobj_t p_initial);
""")
@ffi.pyexport("int(pyobj_t)")
def length(p_list):
list = referents[p_list]
return len(list)
@ffi.pyexport("int(pyobj_t, int)")
def getitem(p_list, index):
list = referents[p_list]
return list[index]
@ffi.pyexport("pyobj_t(pyobj_t)")
def pyobj_dup(p):
return store(referents[p])
@ffi.pyexport("void(pyobj_t)")
def pyobj_close(p):
discard(p)
@ffi.pyexport("pyobj_t(pyobj_t, int)")
def pyobj_getitem(p_list, index):
list = referents[p_list]
return store(list[index])
@ffi.pyexport("pyobj_t(pyobj_t, pyobj_t)")
def pyobj_add(p1, p2):
return store(referents[p1] + referents[p2])
lib = ffi.verify("""
typedef int pyobj_t; /* an "object descriptor" number */
int sum_integers(pyobj_t p_list) {
/* this a demo function written in C, using the API
defined above: length() and getitem(). */
int i, result = 0;
int count = length(p_list);
for (i=0; i<count; i++) {
int n = getitem(p_list, i);
result += n;
}
return result;
}
pyobj_t sum_objects(pyobj_t p_list, pyobj_t p_initial) {
/* same as above, but keeps all additions as Python objects */
int i;
int count = length(p_list);
pyobj_t p1 = pyobj_dup(p_initial);
for (i=0; i<count; i++) {
pyobj_t p2 = pyobj_getitem(p_list, i);
pyobj_t p3 = pyobj_add(p1, p2);
pyobj_close(p2);
pyobj_close(p1);
p1 = p3;
}
return p1;
}
""")
with Ref([10, 20, 30, 40]) as p_list:
print lib.sum_integers(p_list)
with Ref(5) as p_initial:
result = discard(lib.sum_objects(p_list, p_initial))
print result
assert count_pyobj_alive() == 0
| apache-2.0 | -8,240,845,674,043,694,000 | 26.41129 | 74 | 0.523095 | false |
ActiveState/code | recipes/Python/579037_How_execute_x86_64bit_assembly_code_directly/recipe-579037.py | 1 | 1537 | #!/usr/bin/env python
import subprocess, os, tempfile
from ctypes import *
PAGE_SIZE = 4096
class AssemblerFunction(object):
def __init__(self, code, ret_type, *arg_types):
# Run Nasm
fd, source = tempfile.mkstemp(".S", "assembly", os.getcwd())
os.write(fd, code)
os.close(fd)
target = os.path.splitext(source)[0]
subprocess.check_call(["nasm",source])
os.unlink(source)
binary = file(target,"rb").read()
os.unlink(target)
bin_len = len(binary)
        # Align our code on a page boundary.
self.code_buffer = create_string_buffer(PAGE_SIZE*2+bin_len)
addr = (addressof(self.code_buffer) + PAGE_SIZE) & (~(PAGE_SIZE-1))
memmove(addr, binary, bin_len)
# Change memory protection
self.mprotect = cdll.LoadLibrary("libc.so.6").mprotect
mp_ret = self.mprotect(addr, bin_len, 4) # execute only.
if mp_ret: raise OSError("Unable to change memory protection")
self.func = CFUNCTYPE(ret_type, *arg_types)(addr)
self.addr = addr
self.bin_len = bin_len
def __call__(self, *args):
return self.func(*args)
def __del__(self):
# Revert memory protection
if hasattr(self,"mprotect"):
self.mprotect(self.addr, self.bin_len, 3)
if __name__ == "__main__":
add_func = """ BITS 64
mov rax, rdi ; Move the first parameter
add rax, rsi ; add the second parameter
ret ; rax will be returned
"""
Add = AssemblerFunction(add_func, c_int, c_int, c_int)
print Add(1, 2)
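    # The same machinery extends to any System V AMD64 integer signature:
    # the first six integer arguments arrive in rdi, rsi, rdx, rcx, r8
    # and r9, so a hypothetical three-argument add would also read rdx.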
| mit | 4,346,553,609,545,770,500 | 26.446429 | 71 | 0.612882 | false |
lkhomenk/integration_tests | cfme/tests/optimize/test_bottlenecks.py | 5 | 7933 | # -*- coding: utf-8 -*-
import fauxfactory
import pytest
from datetime import timedelta
from cfme.optimize.bottlenecks import Bottlenecks
from cfme.utils import conf
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.blockers import BZ
from cfme.utils.timeutil import parsetime
from cfme.utils.ssh import SSHClient
pytestmark = pytest.mark.uncollectif(lambda appliance: appliance.is_pod)
@pytest.fixture(scope="module")
def temp_appliance_extended_db(temp_appliance_preconfig):
app = temp_appliance_preconfig
app.evmserverd.stop()
app.db.extend_partition()
app.start_evm_service()
return app
@pytest.fixture(scope="module")
def db_tbl(temp_appliance_extended_db):
app = temp_appliance_extended_db
return app.db.client['bottleneck_events']
@pytest.fixture(scope="module")
def db_events(temp_appliance_extended_db, db_tbl):
app = temp_appliance_extended_db
return app.db.client.session.query(db_tbl.timestamp,
db_tbl.resource_type, db_tbl.resource_name, db_tbl.event_type, db_tbl.severity, db_tbl.message)
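# Each row produced by this query is a tuple of (timestamp, resource_type,
# resource_name, event_type, severity, message); the tests below compare
# these values and counts against the Bottlenecks UI.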
@pytest.fixture(scope="module")
def db_restore(temp_appliance_extended_db):
app = temp_appliance_extended_db
app.evmserverd.stop()
app.db.drop()
db_storage_hostname = conf.cfme_data['bottlenecks']['hostname']
db_storage_ssh = SSHClient(hostname=db_storage_hostname, **conf.credentials['bottlenecks'])
with db_storage_ssh as ssh_client:
# Different files for different versions
ver = "_58" if temp_appliance_extended_db.version < '5.9' else "_59"
rand_filename = "/tmp/v2_key_{}".format(fauxfactory.gen_alphanumeric())
ssh_client.get_file("/home/backups/otsuman_db_bottlenecks/v2_key{}".format(ver),
rand_filename)
dump_filename = "/tmp/db_dump_{}".format(fauxfactory.gen_alphanumeric())
ssh_client.get_file("/home/backups/otsuman_db_bottlenecks/db.backup{}".format(ver),
dump_filename)
region_filename = "/tmp/REGION_{}".format(fauxfactory.gen_alphanumeric())
ssh_client.get_file("/home/backups/otsuman_db_bottlenecks/REGION{}".format(ver),
region_filename)
guid_filename = "/tmp/GUID_{}".format(fauxfactory.gen_alphanumeric())
ssh_client.get_file("/home/backups/otsuman_db_bottlenecks/GUID{}".format(ver),
guid_filename)
with app.ssh_client as ssh_client:
ssh_client.put_file(rand_filename, "/var/www/miq/vmdb/certs/v2_key")
ssh_client.put_file(dump_filename, "/tmp/evm_db.backup")
ssh_client.put_file(region_filename, "/var/www/miq/vmdb/REGION")
ssh_client.put_file(guid_filename, "/var/www/miq/vmdb/GUID")
app.db.restore()
app.start_evm_service()
app.wait_for_web_ui()
@pytest.mark.tier(2)
def test_bottlenecks_report_event_groups(temp_appliance_extended_db, db_restore, db_tbl, db_events):
""" Checks event_groups selectbox in report tab. It should filter events by type """
with temp_appliance_extended_db:
view = navigate_to(Bottlenecks, 'All')
# Enabling this option to show all possible values
view.report.show_host_events.fill(True)
view.report.event_groups.fill('Capacity')
rows = view.report.event_details.rows()
# Compare number of rows in bottleneck's table with number of rows in db
assert sum(1 for row in rows) == db_events.filter(db_tbl.event_type == 'DiskUsage').count()
view.report.event_groups.fill('Utilization')
rows = view.report.event_details.rows()
assert sum(1 for row in rows) == db_events.filter(db_tbl.event_type != 'DiskUsage').count()
@pytest.mark.tier(2)
def test_bottlenecks_report_show_host_events(temp_appliance_extended_db, db_restore, db_events):
""" Checks host_events checkbox in report tab. It should show or not host events """
with temp_appliance_extended_db:
view = navigate_to(Bottlenecks, 'All')
view.report.show_host_events.fill(False)
rows = view.report.event_details.rows(type='Host / Node')
# Checking that rows with value 'Host / Node' absent in table
assert not sum(1 for row in rows)
view.report.show_host_events.fill(True)
rows = view.report.event_details.rows()
# Compare number of rows in bottleneck's table with number of rows in db
assert sum(1 for row in rows) == db_events.count()
@pytest.mark.tier(2)
def test_bottlenecks_report_time_zone(temp_appliance_extended_db, db_restore, db_tbl, db_events):
""" Checks time zone selectbox in report tab. It should change time zone of events in table """
with temp_appliance_extended_db:
view = navigate_to(Bottlenecks, 'All')
row = view.report.event_details[0]
# Selecting row by uniq value
db_row = db_events.filter(db_tbl.message == row[5].text)
# Compare bottleneck's table timestamp with db
assert row[0].text == db_row[0][0].strftime(parsetime.american_with_utc_format)
# Changing time zone
view.report.time_zone.fill('(GMT-04:00) La Paz')
row = view.report.event_details[0]
assert row[0].text == (db_row[0][0] - timedelta(hours=4)).strftime("%m/%d/%y %H:%M:%S -04")
@pytest.mark.meta(blockers=[BZ(1507565, forced_streams=["5.8"])])
@pytest.mark.tier(2)
def test_bottlenecks_summary_event_groups(temp_appliance_extended_db, db_restore, db_tbl,
db_events):
""" Checks event_groups selectbox in summary tab. It should filter events by type """
with temp_appliance_extended_db:
view = navigate_to(Bottlenecks, 'All')
# Enabling this option to show all possible values
view.summary.show_host_events.fill(True)
view.summary.event_groups.fill('Capacity')
events = view.summary.chart.get_events()
# Compare number of events in chart with number of rows in db
assert len(events) == db_events.filter(db_tbl.event_type == 'DiskUsage').count()
view.summary.event_groups.fill('Utilization')
events = view.summary.chart.get_events()
assert len(events) == db_events.filter(db_tbl.event_type != 'DiskUsage').count()
@pytest.mark.tier(2)
def test_bottlenecks_summary_show_host_events(temp_appliance_extended_db, db_restore, db_events):
""" Checks host_events checkbox in summary tab. It should show or not host events """
with temp_appliance_extended_db:
view = navigate_to(Bottlenecks, 'All')
view.summary.show_host_events.fill(False)
# Checking that events with value 'Host / Node' absent in table
events = view.summary.chart.get_events()
assert not sum(1 for event in events if event.type == 'Host')
view.summary.show_host_events.fill(True)
events = view.summary.chart.get_events()
# Compare number of events in chart with number of rows in db
assert len(events) == db_events.count()
@pytest.mark.tier(2)
def test_bottlenecks_summary_time_zone(temp_appliance_extended_db, db_restore, db_tbl, db_events):
""" Checks time zone selectbox in summary tab. It should change time zone of events in chart """
with temp_appliance_extended_db:
view = navigate_to(Bottlenecks, 'All')
events = view.summary.chart.get_events()
# Selecting row by uniq value
db_row = db_events.filter(db_tbl.message == events[0].message)
# Compare event timestamp with db
assert events[0].time_stamp == db_row[0][0].strftime(parsetime.iso_with_utc_format)
# Changing time zone
view.summary.time_zone.fill('(GMT-04:00) La Paz')
events = view.summary.chart.get_events()
assert events[0].time_stamp == (db_row[0][0] - timedelta(hours=4)).strftime("%Y-%m-%d "
"%H:%M:%S -04")
| gpl-2.0 | -5,769,613,551,466,446,000 | 46.789157 | 100 | 0.662801 | false |
gdreich/geonode | geonode/services/models.py | 6 | 6197 | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import logging
from django.conf import settings
from django.db import models
from geoserver.catalog import FailedRequestError, Catalog
from geonode.base.models import ResourceBase
from geonode.services.enumerations import SERVICE_TYPES, SERVICE_METHODS, GXP_PTYPES
from geonode.layers.models import Layer
from django.utils.translation import ugettext_lazy as _
from django.db.models import signals
from geonode.people.enumerations import ROLE_VALUES
from geonode.security.models import remove_object_permissions
STATUS_VALUES = [
'pending',
'failed',
'process'
]
logger = logging.getLogger("geonode.services")
"""
geonode.services
"""
class Service(ResourceBase):
"""
Service Class to represent remote Geo Web Services
"""
type = models.CharField(max_length=4, choices=SERVICE_TYPES)
method = models.CharField(max_length=1, choices=SERVICE_METHODS)
# with service, version and request etc stripped off
base_url = models.URLField(unique=True, db_index=True)
version = models.CharField(max_length=10, null=True, blank=True)
# Should force to slug?
name = models.CharField(max_length=255, unique=True, db_index=True)
description = models.CharField(max_length=255, null=True, blank=True)
online_resource = models.URLField(False, null=True, blank=True)
fees = models.CharField(max_length=1000, null=True, blank=True)
access_constraints = models.CharField(max_length=255, null=True, blank=True)
connection_params = models.TextField(null=True, blank=True)
username = models.CharField(max_length=50, null=True, blank=True)
password = models.CharField(max_length=50, null=True, blank=True)
api_key = models.CharField(max_length=255, null=True, blank=True)
workspace_ref = models.URLField(False, null=True, blank=True)
store_ref = models.URLField(null=True, blank=True)
resources_ref = models.URLField(null=True, blank=True)
profiles = models.ManyToManyField(
settings.AUTH_USER_MODEL, through='ServiceProfileRole')
created = models.DateTimeField(auto_now_add=True)
last_updated = models.DateTimeField(auto_now=True)
first_noanswer = models.DateTimeField(null=True, blank=True)
noanswer_retries = models.PositiveIntegerField(null=True, blank=True)
external_id = models.IntegerField(null=True, blank=True)
parent = models.ForeignKey(
'services.Service', null=True, blank=True, related_name='service_set')
# Supported Capabilities
def __unicode__(self):
return self.name
@property
def ptype(self):
# Return the gxp ptype that should be used to display layers
return GXP_PTYPES[self.type]
def get_absolute_url(self):
return '/services/%i' % self.id
class Meta(ResourceBase.Meta):
pass
class ServiceProfileRole(models.Model):
"""
ServiceProfileRole is an intermediate model to bind Profiles and Services and apply roles.
"""
profiles = models.ForeignKey(settings.AUTH_USER_MODEL)
service = models.ForeignKey(Service)
role = models.CharField(choices=ROLE_VALUES, max_length=255, help_text=_(
'function performed by the responsible party'))
class ServiceLayer(models.Model):
service = models.ForeignKey(Service)
layer = models.ForeignKey(Layer, null=True)
typename = models.CharField(_("Layer Name"), max_length=255)
title = models.CharField(_("Layer Title"), max_length=512)
description = models.TextField(_("Layer Description"), null=True)
styles = models.TextField(_("Layer Styles"), null=True)
class WebServiceHarvestLayersJob(models.Model):
service = models.OneToOneField(Service, blank=False, null=False)
status = models.CharField(choices=[(
x, x) for x in STATUS_VALUES], max_length=10, blank=False, null=False, default='pending')
class WebServiceRegistrationJob(models.Model):
base_url = models.URLField(unique=True)
type = models.CharField(max_length=4, choices=SERVICE_TYPES)
status = models.CharField(choices=[(
x, x) for x in STATUS_VALUES], max_length=10, blank=False, null=False, default='pending')
def post_save_service(instance, sender, created, **kwargs):
if created:
instance.set_default_permissions()
def pre_delete_service(instance, sender, **kwargs):
for layer in instance.layer_set.all():
layer.delete()
# if instance.method == 'H':
# gn = Layer.objects.gn_catalog
# gn.control_harvesting_task('stop', [instance.external_id])
# gn.control_harvesting_task('remove', [instance.external_id])
if instance.method == 'C':
try:
_user = settings.OGC_SERVER['default']['USER']
_password = settings.OGC_SERVER['default']['PASSWORD']
gs = Catalog(settings.OGC_SERVER['default']['LOCATION'] + "rest",
_user, _password)
cascade_store = gs.get_store(
instance.name, settings.CASCADE_WORKSPACE)
gs.delete(cascade_store, recurse=True)
except FailedRequestError:
logger.error(
"Could not delete cascading WMS Store for %s - maybe already gone" % instance.name)
remove_object_permissions(instance.get_self_resource())
signals.pre_delete.connect(pre_delete_service, sender=Service)
signals.post_save.connect(post_save_service, sender=Service)
| gpl-3.0 | -1,489,865,166,235,389,400 | 37.974843 | 99 | 0.688882 | false |
neumerance/cloudloon2 | .venv/lib/python2.7/site-packages/lxml/html/soupparser.py | 53 | 4360 | __doc__ = """External interface to the BeautifulSoup HTML parser.
"""
__all__ = ["fromstring", "parse", "convert_tree"]
from lxml import etree, html
from BeautifulSoup import \
BeautifulSoup, Tag, Comment, ProcessingInstruction, NavigableString
def fromstring(data, beautifulsoup=None, makeelement=None, **bsargs):
"""Parse a string of HTML data into an Element tree using the
BeautifulSoup parser.
Returns the root ``<html>`` Element of the tree.
You can pass a different BeautifulSoup parser through the
    `beautifulsoup` keyword, and a different Element factory function
through the `makeelement` keyword. By default, the standard
``BeautifulSoup`` class and the default factory of `lxml.html` are
used.
"""
return _parse(data, beautifulsoup, makeelement, **bsargs)
def parse(file, beautifulsoup=None, makeelement=None, **bsargs):
"""Parse a file into an ElemenTree using the BeautifulSoup parser.
You can pass a different BeautifulSoup parser through the
    `beautifulsoup` keyword, and a different Element factory function
through the `makeelement` keyword. By default, the standard
``BeautifulSoup`` class and the default factory of `lxml.html` are
used.
"""
if not hasattr(file, 'read'):
file = open(file)
root = _parse(file, beautifulsoup, makeelement, **bsargs)
return etree.ElementTree(root)
def convert_tree(beautiful_soup_tree, makeelement=None):
"""Convert a BeautifulSoup tree to a list of Element trees.
Returns a list instead of a single root Element to support
HTML-like soup with more than one root element.
You can pass a different Element factory through the `makeelement`
keyword.
"""
if makeelement is None:
makeelement = html.html_parser.makeelement
root = _convert_tree(beautiful_soup_tree, makeelement)
children = root.getchildren()
for child in children:
root.remove(child)
return children
# helpers
def _parse(source, beautifulsoup, makeelement, **bsargs):
if beautifulsoup is None:
beautifulsoup = BeautifulSoup
if makeelement is None:
makeelement = html.html_parser.makeelement
if 'convertEntities' not in bsargs:
bsargs['convertEntities'] = 'html'
tree = beautifulsoup(source, **bsargs)
root = _convert_tree(tree, makeelement)
# from ET: wrap the document in a html root element, if necessary
if len(root) == 1 and root[0].tag == "html":
return root[0]
root.tag = "html"
return root
def _convert_tree(beautiful_soup_tree, makeelement):
root = makeelement(beautiful_soup_tree.name,
attrib=dict(beautiful_soup_tree.attrs))
_convert_children(root, beautiful_soup_tree, makeelement)
return root
def _convert_children(parent, beautiful_soup_tree, makeelement):
SubElement = etree.SubElement
et_child = None
for child in beautiful_soup_tree:
if isinstance(child, Tag):
et_child = SubElement(parent, child.name, attrib=dict(
[(k, unescape(v)) for (k,v) in child.attrs]))
_convert_children(et_child, child, makeelement)
elif type(child) is NavigableString:
_append_text(parent, et_child, unescape(child))
else:
if isinstance(child, Comment):
parent.append(etree.Comment(child))
elif isinstance(child, ProcessingInstruction):
parent.append(etree.ProcessingInstruction(
*child.split(' ', 1)))
else: # CData
_append_text(parent, et_child, unescape(child))
def _append_text(parent, element, text):
if element is None:
parent.text = (parent.text or '') + text
else:
element.tail = (element.tail or '') + text
# copied from ET's ElementSoup
try:
from html.entities import name2codepoint # Python 3
except ImportError:
from htmlentitydefs import name2codepoint
import re
handle_entities = re.compile("&(\w+);").sub
def unescape(string):
if not string:
return ''
# work around oddities in BeautifulSoup's entity handling
def unescape_entity(m):
try:
return unichr(name2codepoint[m.group(1)])
except KeyError:
return m.group(0) # use as is
return handle_entities(unescape_entity, string)
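# --- Illustrative usage sketch (added; not part of the original module). ---
# Assumes BeautifulSoup 3 is importable; the HTML string below is hypothetical.
if __name__ == '__main__':
    root = fromstring("<p>unclosed <b>soup")   # root <html> Element
    print root.tag                             # -> 'html'
    print len(root.findall(".//b"))            # the parser repaired the <b> tag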
| apache-2.0 | 2,575,153,873,812,638,700 | 33.88 | 72 | 0.668578 | false |
MyAOSP/external_chromium_org | tools/json_schema_compiler/dart_generator_test.py | 25 | 2388 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import unittest
from compiler import GenerateSchema
# If --rebase is passed to this test, this is set to True, indicating the test
# output should be re-generated for each test (rather than running the tests
# themselves).
REBASE_MODE = False
# The directory containing the input and expected output files corresponding
# to each test name.
TESTS_DIR = 'dart_test'
class DartTest(unittest.TestCase):
def _RunTest(self, test_filename):
'''Given the name of a test, runs compiler.py on the file:
TESTS_DIR/test_filename.idl
and compares it to the output in the file:
TESTS_DIR/test_filename.dart
'''
file_rel = os.path.join(TESTS_DIR, test_filename)
output_dir = None
if REBASE_MODE:
output_dir = TESTS_DIR
output_code = GenerateSchema('dart', ['%s.idl' % file_rel], TESTS_DIR,
output_dir, None, None)
if not REBASE_MODE:
with open('%s.dart' % file_rel) as f:
expected_output = f.read()
# Remove the first line of the output code (as it contains the filename).
# Also remove all blank lines, ignoring them from the comparison.
# Compare with lists instead of strings for clearer diffs (especially with
# whitespace) when a test fails.
self.assertEqual([l for l in expected_output.split('\n') if l],
[l for l in output_code.split('\n')[1:] if l])
def setUp(self):
# Increase the maximum diff amount to see the full diff on a failed test.
self.maxDiff = 2000
def testComments(self):
self._RunTest('comments')
def testDictionaries(self):
self._RunTest('dictionaries')
def testEmptyNamespace(self):
self._RunTest('empty_namespace')
def testEmptyType(self):
self._RunTest('empty_type')
def testEvents(self):
self._RunTest('events')
def testBasicFunction(self):
self._RunTest('functions')
  def testOperatableType(self):
self._RunTest('operatable_type')
def testTags(self):
self._RunTest('tags')
if __name__ == '__main__':
if '--rebase' in sys.argv:
print "Running in rebase mode."
REBASE_MODE = True
sys.argv.remove('--rebase')
unittest.main()
| bsd-3-clause | -2,191,838,094,733,130,500 | 28.481481 | 80 | 0.668342 | false |
NL66278/odoo | addons/account_cancel/__openerp__.py | 52 | 1667 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Cancel Journal Entries',
'version': '1.1',
'author': 'OpenERP SA',
'category': 'Accounting & Finance',
'description': """
Allows canceling accounting entries.
====================================
This module adds 'Allow Canceling Entries' field on form view of account journal.
If set to true it allows user to cancel entries & invoices.
""",
'website': 'https://www.odoo.com/page/accounting',
'images': ['images/account_cancel.jpeg'],
'depends' : ['account'],
'data': ['account_cancel_view.xml' ],
'demo': [],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 3,223,729,447,985,070,000 | 37.767442 | 81 | 0.602879 | false |
samthor/intellij-community | python/lib/Lib/site-packages/django/contrib/gis/geos/prototypes/errcheck.py | 623 | 3522 | """
Error checking functions for GEOS ctypes prototype functions.
"""
import os
from ctypes import c_void_p, string_at, CDLL
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.libgeos import GEOS_VERSION
from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc
# Getting the `free` routine used to free the memory allocated for
# string pointers returned by GEOS.
if GEOS_VERSION >= (3, 1, 1):
# In versions 3.1.1 and above, `GEOSFree` was added to the C API
# because `free` isn't always available on all platforms.
free = GEOSFunc('GEOSFree')
free.argtypes = [c_void_p]
free.restype = None
else:
# Getting the `free` routine from the C library of the platform.
if os.name == 'nt':
# On NT, use the MS C library.
libc = CDLL('msvcrt')
else:
# On POSIX platforms C library is obtained by passing None into `CDLL`.
libc = CDLL(None)
free = libc.free
### ctypes error checking routines ###
def last_arg_byref(args):
"Returns the last C argument's value by reference."
return args[-1]._obj.value
def check_dbl(result, func, cargs):
"Checks the status code and returns the double value passed in by reference."
# Checking the status code
if result != 1: return None
# Double passed in by reference, return its value.
return last_arg_byref(cargs)
def check_geom(result, func, cargs):
"Error checking on routines that return Geometries."
if not result:
raise GEOSException('Error encountered checking Geometry returned from GEOS C function "%s".' % func.__name__)
return result
def check_minus_one(result, func, cargs):
"Error checking on routines that should not return -1."
if result == -1:
raise GEOSException('Error encountered in GEOS C function "%s".' % func.__name__)
else:
return result
def check_predicate(result, func, cargs):
"Error checking for unary/binary predicate functions."
val = ord(result) # getting the ordinal from the character
if val == 1: return True
elif val == 0: return False
else:
raise GEOSException('Error encountered on GEOS C predicate function "%s".' % func.__name__)
def check_sized_string(result, func, cargs):
"""
Error checking for routines that return explicitly sized strings.
This frees the memory allocated by GEOS at the result pointer.
"""
if not result:
raise GEOSException('Invalid string pointer returned by GEOS C function "%s"' % func.__name__)
# A c_size_t object is passed in by reference for the second
# argument on these routines, and its needed to determine the
# correct size.
s = string_at(result, last_arg_byref(cargs))
# Freeing the memory allocated within GEOS
free(result)
return s
def check_string(result, func, cargs):
"""
Error checking for routines that return strings.
This frees the memory allocated by GEOS at the result pointer.
"""
if not result: raise GEOSException('Error encountered checking string return value in GEOS C function "%s".' % func.__name__)
# Getting the string value at the pointer address.
s = string_at(result)
# Freeing the memory allocated within GEOS
free(result)
return s
def check_zero(result, func, cargs):
"Error checking on routines that should not return 0."
if result == 0:
raise GEOSException('Error encountered in GEOS C function "%s".' % func.__name__)
else:
return result
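# --- Illustrative wiring sketch (added; not part of the original module). ---
# GEOSArea is a real GEOS C API routine; the argtypes here are assumptions.
# This shows how the checkers above are attached as ctypes errcheck hooks.
if __name__ == '__main__':
    from ctypes import c_int, c_double, POINTER
    geos_area = GEOSFunc('GEOSArea')
    geos_area.argtypes = [c_void_p, POINTER(c_double)]
    geos_area.restype = c_int
    geos_area.errcheck = check_dbl  # unwraps the by-reference double on success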
| apache-2.0 | -7,250,681,628,632,668,000 | 36.073684 | 129 | 0.684838 | false |
dimid/ansible-modules-extras | cloud/amazon/ec2_group_facts.py | 12 | 5047 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_group_facts
short_description: Gather facts about ec2 security groups in AWS.
description:
- Gather facts about ec2 security groups in AWS.
version_added: "2.3"
author: "Henrique Rodrigues (github.com/Sodki)"
options:
filters:
description:
- A dict of filters to apply. Each dict item consists of a filter key and a filter value. See \
U(https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSecurityGroups.html) for \
possible filters. Filter names and values are case sensitive. You can also use underscores (_) \
instead of dashes (-) in the filter keys, which will take precedence in case of conflict.
required: false
default: {}
notes:
- By default, the module will return all security groups. To limit results use the appropriate filters.
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Gather facts about all security groups
- ec2_group_facts:
# Gather facts about all security groups in a specific VPC
- ec2_group_facts:
filters:
vpc-id: vpc-12345678
# Gather facts about a security group
- ec2_group_facts:
filters:
group-name: example-1
# Gather facts about a security group by id
- ec2_group_facts:
filters:
group-id: sg-12345678
# Gather facts about a security group with multiple filters, also mixing the use of underscores as filter keys
- ec2_group_facts:
filters:
group_id: sg-12345678
vpc-id: vpc-12345678
# Gather facts about various security groups
- ec2_group_facts:
filters:
group-name:
- example-1
- example-2
- example-3
# Gather facts about any security group with a tag key Name and value Example. The quotes around 'tag:Name' are important because of the colon in the key
- ec2_group_facts:
filters:
"tag:Name": Example
'''
RETURN = '''
security_groups:
description: Security groups that match the provided filters. Each element consists of a dict with all the information related to that security group.
type: list
sample:
'''
try:
import boto3
from botocore.exceptions import ClientError
HAS_BOTO3 = True
except ImportError:
    HAS_BOTO3 = False
import traceback
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
filters=dict(default={}, type='dict')
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO3:
module.fail_json(msg='boto3 required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
if region:
connection = boto3_conn(
module,
conn_type='client',
resource='ec2',
region=region,
endpoint=ec2_url,
**aws_connect_params
)
else:
module.fail_json(msg="region must be specified")
# Replace filter key underscores with dashes, for compatibility, except if we're dealing with tags
sanitized_filters = module.params.get("filters")
    # iterate over a copy of the keys (a list in Python 2) so the dict can be
    # mutated safely while iterating
    for key in sanitized_filters.keys():
if not key.startswith("tag:"):
sanitized_filters[key.replace("_", "-")] = sanitized_filters.pop(key)
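    # For example (hypothetical input), {'group_id': 'sg-12345678', 'tag:Name': 'web'}
    # becomes {'group-id': 'sg-12345678', 'tag:Name': 'web'} before being converted
    # into the boto3 Filters list by ansible_dict_to_boto3_filter_list().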
try:
security_groups = connection.describe_security_groups(
Filters=ansible_dict_to_boto3_filter_list(sanitized_filters)
)
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(e))
# Turn the boto3 result in to ansible_friendly_snaked_names
snaked_security_groups = []
for security_group in security_groups['SecurityGroups']:
snaked_security_groups.append(camel_dict_to_snake_dict(security_group))
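    # e.g. (hypothetical) {'GroupId': 'sg-12345678', 'IpPermissions': [...]}
    # becomes {'group_id': 'sg-12345678', 'ip_permissions': [...]}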
# Turn the boto3 result in to ansible friendly tag dictionary
for security_group in snaked_security_groups:
if 'tags' in security_group:
security_group['tags'] = boto3_tag_list_to_ansible_dict(security_group['tags'])
module.exit_json(security_groups=snaked_security_groups)
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
| gpl-3.0 | -4,173,176,529,530,374,000 | 29.96319 | 155 | 0.690113 | false |
sodexis/odoo | addons/account_analytic_plans/wizard/analytic_plan_create_model.py | 384 | 2829 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
class analytic_plan_create_model(osv.osv_memory):
_name = "analytic.plan.create.model"
_description = "analytic.plan.create.model"
def activate(self, cr, uid, ids, context=None):
plan_obj = self.pool.get('account.analytic.plan.instance')
mod_obj = self.pool.get('ir.model.data')
anlytic_plan_obj = self.pool.get('account.analytic.plan')
if context is None:
context = {}
if 'active_id' in context and context['active_id']:
plan = plan_obj.browse(cr, uid, context['active_id'], context=context)
if (not plan.name) or (not plan.code):
raise osv.except_osv(_('Error!'), _('Please put a name and a code before saving the model.'))
pids = anlytic_plan_obj.search(cr, uid, [], context=context)
if not pids:
raise osv.except_osv(_('Error!'), _('There is no analytic plan defined.'))
plan_obj.write(cr, uid, [context['active_id']], {'plan_id':pids[0]}, context=context)
model_data_ids = mod_obj.search(cr, uid, [('model', '=', 'ir.ui.view'),('name', '=', 'view_analytic_plan_create_model')], context=context)
resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id']
return {
'name': _('Distribution Model Saved'),
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'analytic.plan.create.model',
'views': [(resource_id,'form')],
'type': 'ir.actions.act_window',
'target': 'new',
}
else:
return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 4,207,221,363,995,383,300 | 46.949153 | 150 | 0.57865 | false |
isidorn/test2 | test/rdb_workloads/stress.py | 1 | 12326 | #!/usr/bin/python
import sys, os, time, signal, random, string, subprocess
from tempfile import NamedTemporaryFile
from optparse import OptionParser
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'drivers', 'python')))
import rethinkdb as r
client_script = os.path.join(os.path.dirname(__file__), "stress_client.py")
parser = OptionParser()
parser.add_option("--table", dest="db_table", metavar="DB.TABLE", default="", type="string")
parser.add_option("--timeout", dest="timeout", metavar="SECONDS", default=60, type="int")
parser.add_option("--clients", dest="clients", metavar="CLIENTS", default=64, type="int")
parser.add_option("--batch-size", dest="batch_size", metavar="BATCH_SIZE", default=100, type="int")
parser.add_option("--value-size", dest="value_size", metavar="VALUE_SIZE", default=4, type="int")
parser.add_option("--workload", dest="workload", metavar="WRITES/DELETES/READS/SINDEX_READS/UPDATES/NON_ATOMIC_UPDATES", default="3/2/5/0/1/1", type="string")
parser.add_option("--host", dest="hosts", metavar="HOST:PORT", action="append", default=[], type="string")
parser.add_option("--add-sindex", dest="sindexes", metavar="constant | simple | complex | long", action="append", default=[], type="string")
(options, args) = parser.parse_args()
if len(args) != 0:
raise RuntimeError("No positional arguments supported")
# Parse out host/port pairs
hosts = [ ]
for host_port in options.hosts:
(host, port) = host_port.split(":")
hosts.append((host, int(port)))
if len(hosts) == 0:
raise RuntimeError("No rethinkdb host specified")
# Parse out and verify secondary indexes
sindexes = [ ]
for sindex in options.sindexes:
if sindex not in ["constant", "simple", "complex", "long"]:
raise RuntimeError("sindex type not recognized: " + sindex)
sindexes.append(sindex)
# Parse out workload info - probably an easier way to do this
workload = { }
workload_defaults = [("--writes", 3),
("--deletes", 2),
("--reads", 5),
("--sindex-reads", 0),
("--updates", 1),
("--non-atomic-updates", 1)]
workload_types = [item[0] for item in workload_defaults]
workload_values = options.workload.split("/")
if len(workload_values) > len(workload_types):
raise RuntimeError("Too many workload values specified")
workload_values.extend([0 for i in range(len(workload_types) - len(workload_values))])
for op, value in zip(workload_types, workload_values):
workload[op] = str(value)
for op, value in workload_defaults:
if op not in workload.keys():
workload[op] = str(value)
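# Example (hypothetical): --workload 3/2/5 yields
#   {'--writes': '3', '--deletes': '2', '--reads': '5',
#    '--sindex-reads': '0', '--updates': '0', '--non-atomic-updates': '0'}
# because missing trailing values are zero-filled before the defaults are consulted.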
clients = [ ]
output_files = [ ]
def collect_and_print_results():
global output_files
# Read in each file so that we have a per-client array containing a
# dict of timestamps to dicts of op-names: op-counts
# Format is "<time>[,<op_type>,<op_count>,<op_errs>,<avg_duration>]...
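    # e.g. a hypothetical line: "1360000000.5,write,100,2,0.004,read,250,0,0.001"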
results_per_client = [ ]
errors = { }
for f in output_files:
file_data = { }
for line in f:
split_line = line.strip().split(",")
op_counts = { }
op_durations = { }
if split_line[0] == "ERROR":
key = split_line[1].strip()
errors[key] = errors.get(key, 0) + 1
else:
timestamp = float(split_line[0])
for (op_name, op_count, err_count, avg_dur) in zip(split_line[1::4], split_line[2::4], split_line[3::4], split_line[4::4]):
op_counts[op_name] = (int(op_count), int(err_count))
op_durations[op_name] = int(float(avg_dur) * 1000)
file_data[timestamp] = op_counts
results_per_client.append(file_data)
# Until we do some real analysis on the results, just get ops/sec for each client
total_per_client = [ ]
averages = [ ]
durations = [ ]
ignored_results = 0
for results in results_per_client:
if len(results) < 2:
ignored_results += 1
else:
keys = sorted(results.keys())
duration = keys[-1] - keys[0]
accumulator = { }
for (timestamp, counts) in results.items():
accumulator = dict((op, map(sum, zip(accumulator.get(op, (0, 0)), counts.get(op, (0, 0))))) for op in set(accumulator) | set(counts))
total_per_client.append(accumulator)
averages.append(dict((op, accumulator.get(op, (0, 0))[0] / (duration)) for op in accumulator.keys()))
durations.append(duration)
if ignored_results > 0:
print "Ignored %d client results due to insufficient data" % ignored_results
# Get the total number of ops of each type
total_op_counts = { }
for client_data in total_per_client:
total_op_counts = dict((op, map(sum, zip(total_op_counts.get(op, (0, 0)), client_data.get(op, (0, 0))))) for op in set(client_data) | set(total_op_counts))
# Add up all the client averages for the total ops/sec
total = { }
min_ops_per_sec = { }
max_ops_per_sec = { }
for average in averages:
total = dict((op, total.get(op, 0) + average.get(op, 0)) for op in set(total) | set(average))
# Get the lowest and highest per-client ops/sec
min_ops_per_sec = dict((op, min(min_ops_per_sec.get(op, 10000000), average.get(op))) for op in set(min_ops_per_sec) | set(average))
max_ops_per_sec = dict((op, max(max_ops_per_sec.get(op, 0), average.get(op))) for op in set(max_ops_per_sec) | set(average))
if len(durations) < 1:
print "Not enough data for results"
else:
print "Duration: " + str(int(max(durations))) + " seconds"
print "\nOperations data: "
table = [["op type", "successes", "per sec min", "per sec max", "per sec total", "errors", "avg duration"]]
for op in total.keys():
table.append([op, str(total_op_counts[op][0]), str(int(min_ops_per_sec[op])), str(int(max_ops_per_sec[op])), str(int(total[op])), str(total_op_counts[op][1]), "-"])
column_widths = []
for i in range(len(table[0])):
column_widths.append(max([len(row[i]) + 2 for row in table]))
format_str = ("{:<%d}" + ("{:>%d}" * (len(column_widths) - 1))) % tuple(column_widths)
for row in table:
print format_str.format(*row)
if len(errors) != 0:
print "\nErrors encountered:"
for error in errors:
print "%s: %s" % (error, errors[error])
def finish_stress():
global clients
print "Stopping client processes..."
[client.send_signal(signal.SIGINT) for client in clients if client.poll() is None]
# Wait up to 5s for clients to exit
end_time = time.time() + 5
while len(clients) > 0 and time.time() < end_time:
time.sleep(0.1)
clients = [client for client in clients if client.poll() is None]
# Kill any remaining clients
[client.terminate() for client in clients]
collect_and_print_results()
def interrupt_handler(signal, frame):
print "Interrupted"
finish_stress()
exit(0)
def complex_sindex_fn(row, db, table):
return r.expr([row["value"]]).concat_map(lambda item: [item, item, item, item]) \
.concat_map(lambda item: [item, item, item, item]) \
.concat_map(lambda item: [item, item, item, item]) \
.concat_map(lambda item: [item, item, item, item]) \
.concat_map(lambda item: [item, item, item, item]) \
.concat_map(lambda item: [item, item, item, item]) \
.reduce(lambda acc, val: acc + val, 0)
def long_sindex_fn(row):
result = []
for i in range(32):
denom = 2 ** i
result.insert(0, r.branch(((row["value"] / denom) % 2) == 0, "zero", "one"))
return result
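# e.g. (hypothetical) a row value of 5 maps to 29 "zero" entries followed by
# "one", "zero", "one" -- the 32-bit binary expansion, most-significant bit first.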
def initialize_sindexes(sindexes, connection, db, table):
# This assumes sindexes are never deleted
# if they are and a table is loaded, there could be problems
sindex_count = len(r.db(db).table(table).index_list().run(connection))
for sindex in sindexes:
# Sindexes are named as their type of sindex (below) plus a unique number
sindex_name = sindex + str(sindex_count)
sindex_count += 1
sindex_fn = None
if sindex == "constant":
sindex_fn = lambda x: 1
elif sindex == "simple":
sindex_fn = lambda x: r.branch(x["value"] % 2 == 0, "odd", "even")
elif sindex == "complex":
sindex_fn = lambda x: complex_sindex_fn(x, db, table)
elif sindex == "long":
sindex_fn = long_sindex_fn
else:
raise RuntimeError("Unknown sindex type")
print "Adding sindex '%s'..." % sindex_name
r.db(db).table(table).index_create(sindex_name, sindex_fn).run(connection)
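# e.g. (hypothetical) "--add-sindex constant --add-sindex simple" on a fresh
# table creates secondary indexes named "constant0" and "simple1".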
# Get table name, and make sure it exists on the server
if len(options.db_table) == 0:
print "Creating table..."
random.seed()
table = "stress_" + "".join(random.sample(string.letters + string.digits, 10))
db = "test"
with r.connect(hosts[0][0], hosts[0][1]) as connection:
if db not in r.db_list().run(connection):
r.db_create(db).run(connection)
while table in r.db(db).table_list().run(connection):
table = "stress_" + "".join(random.sample(string.letters + string.digits, 10))
r.db(db).table_create(table).run(connection)
initialize_sindexes(sindexes, connection, db, table)
else:
# User-specified table
if "." not in options.db_table:
raise RuntimeError("Incorrect db.table format in --table option")
(db, table) = options.db_table.split(".")
with r.connect(hosts[0][0], hosts[0][1]) as connection:
if db not in r.db_list().run(connection):
r.db_create(db).run(connection)
if table not in r.db(db).table_list().run(connection):
r.db(db).table_create(table).run(connection)
initialize_sindexes(sindexes, connection, db, table)
# TODO: load existing keys, distribute them among clients
# TODO: fill out keys so that all keys are contiguous (for use by clients) - may be tricky
# Build up arg list for client processes
client_args = [client_script]
for (op, value) in workload.items():
client_args.extend([op, value])
for sindex in sindexes:
client_args.extend(["--sindex", sindex])
client_args.extend(["--value-size", str(options.value_size)])
client_args.extend(["--batch-size", str(options.batch_size)])
client_args.extend(["--table", db + "." + table])
# Register interrupt, now that we're spawning client processes
signal.signal(signal.SIGINT, interrupt_handler)
print "Launching client processes..."
count_width = len(str(options.clients))
progress_format = "\r[%%%dd/%%%dd]" % (count_width, count_width)
done_format = "\r[%%%ds]" % (count_width * 2 + 1) % "DONE"
# Launch all the client processes
for i in range(options.clients):
print (progress_format % (i, options.clients)),
sys.stdout.flush()
output_file = NamedTemporaryFile()
host, port = hosts[i % len(hosts)]
current_args = list(client_args)
current_args.extend(["--host", host + ":" + str(port)])
current_args.extend(["--output", output_file.name])
client = subprocess.Popen(current_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
clients.append(client)
output_files.append(output_file)
print done_format
print "Waiting for clients to connect..."
for i in range(options.clients):
print (progress_format % (i, options.clients)),
sys.stdout.flush()
if clients[i].stdout.readline().strip() != "ready":
raise RuntimeError("unexpected client output")
print done_format
print "Running traffic..."
for client in clients:
client.stdin.write("go\n")
client.stdin.flush()
# Wait for timeout or interrupt
end_time = time.time() + options.timeout
while time.time() < end_time:
time.sleep(1)
# Check to see if all the clients have exited (perhaps due to the cluster going down)
if not any([client.poll() == None for client in clients]):
print "All clients have exited prematurely"
break
finish_stress()
| agpl-3.0 | -8,497,769,647,382,522,000 | 40.362416 | 176 | 0.612689 | false |
zlsun/XX-Net | code/default/python27/1.0/lib/win32/cryptography/hazmat/primitives/kdf/concatkdf.py | 58 | 4109 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import struct
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.backends.interfaces import HashBackend
from cryptography.hazmat.primitives import constant_time, hashes, hmac
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
def _int_to_u32be(n):
return struct.pack('>I', n)
def _common_args_checks(algorithm, length, otherinfo):
max_length = algorithm.digest_size * (2 ** 32 - 1)
if length > max_length:
raise ValueError(
"Can not derive keys larger than {0} bits.".format(
max_length
))
if not (otherinfo is None or isinstance(otherinfo, bytes)):
raise TypeError("otherinfo must be bytes.")
def _concatkdf_derive(key_material, length, auxfn, otherinfo):
if not isinstance(key_material, bytes):
raise TypeError("key_material must be bytes.")
output = [b""]
outlen = 0
counter = 1
while (length > outlen):
h = auxfn()
h.update(_int_to_u32be(counter))
h.update(key_material)
h.update(otherinfo)
output.append(h.finalize())
outlen += len(output[-1])
counter += 1
return b"".join(output)[:length]
@utils.register_interface(KeyDerivationFunction)
class ConcatKDFHash(object):
def __init__(self, algorithm, length, otherinfo, backend):
_common_args_checks(algorithm, length, otherinfo)
self._algorithm = algorithm
self._length = length
self._otherinfo = otherinfo
if self._otherinfo is None:
self._otherinfo = b""
if not isinstance(backend, HashBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HashBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
self._backend = backend
self._used = False
def _hash(self):
return hashes.Hash(self._algorithm, self._backend)
def derive(self, key_material):
if self._used:
raise AlreadyFinalized
self._used = True
return _concatkdf_derive(key_material, self._length,
self._hash, self._otherinfo)
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
raise InvalidKey
@utils.register_interface(KeyDerivationFunction)
class ConcatKDFHMAC(object):
def __init__(self, algorithm, length, salt, otherinfo, backend):
_common_args_checks(algorithm, length, otherinfo)
self._algorithm = algorithm
self._length = length
self._otherinfo = otherinfo
if self._otherinfo is None:
self._otherinfo = b""
if not (salt is None or isinstance(salt, bytes)):
raise TypeError("salt must be bytes.")
if salt is None:
salt = b"\x00" * algorithm.block_size
self._salt = salt
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
self._backend = backend
self._used = False
def _hmac(self):
return hmac.HMAC(self._salt, self._algorithm, self._backend)
def derive(self, key_material):
if self._used:
raise AlreadyFinalized
self._used = True
return _concatkdf_derive(key_material, self._length,
self._hmac, self._otherinfo)
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
raise InvalidKey
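# --- Illustrative usage sketch (added; not part of the original module). ---
# The key material and otherinfo below are hypothetical placeholders.
if __name__ == "__main__":
    from cryptography.hazmat.backends import default_backend
    ckdf = ConcatKDFHash(hashes.SHA256(), length=32, otherinfo=b"context",
                         backend=default_backend())
    derived = ckdf.derive(b"shared secret from key agreement")
    assert len(derived) == 32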
| bsd-2-clause | -6,185,853,059,870,201,000 | 31.872 | 79 | 0.634218 | false |
RackHD-Mirror/RackHD | test/tests/api/v2_0/schema_tests.py | 16 | 2321 | from config.api2_0_config import *
from modules.logger import Log
from on_http_api2_0 import ApiApi as api20
from on_http_api2_0 import rest
from proboscis.asserts import assert_equal
from proboscis.asserts import assert_not_equal
from proboscis.asserts import assert_true
from proboscis.asserts import fail
from proboscis import test
from json import loads,dumps
LOG = Log(__name__)
@test(groups=['schemas_api2.tests'])
class SchemaTests(object):
def __init__(self):
self.__client = config.api_client
self.__schemaList = None
def __get_data(self):
return loads(self.__client.last_response.data)
@test(groups=['2.0.list_schemas'])
def test_list_schemas(self):
""" Testing GET /api/2.0/schemas """
api20().schemas_get()
schemas = self.__get_data()
LOG.debug(schemas,json=True)
assert_not_equal(0, len(schemas), message='Schema list was empty')
self.__schemaList = schemas
@test(groups=['2.0.get_schema'], depends_on_groups=['2.0.list_schemas'])
def test_get_schema(self):
""" Testing GET /api/2.0/schemas/{identifier} """
assert_not_equal(None, self.__schemaList)
for member in self.__schemaList:
assert_not_equal(None,member)
dataId = member.split('/api/2.0/schemas/')[1]
api20().schemas_id_get(dataId)
schema_ref = self.__get_data()
LOG.debug(schema_ref,json=True)
id = schema_ref.get('title')
assert_true('title' in schema_ref.keys(), message='title not found in schema')
assert_true('definitions' in schema_ref.keys(), message='definitions not found in schema')
@test(groups=['2.0.get_schema_invalid'], depends_on_groups=['2.0.list_schemas'])
def test_get_schema_invalid(self):
""" Testing GET /api/2.0/schemas/{identifier} 404s properly """
assert_not_equal(None, self.__schemaList)
for member in self.__schemaList:
assert_not_equal(None,member)
try:
api20().schemas_id_get(member + '-invalid')
fail(message='did not raise exception')
except rest.ApiException as e:
assert_equal(404, e.status, message='unexpected response {0}, expected 404'.format(e.status))
break
| apache-2.0 | 7,457,309,283,560,259,000 | 39.017241 | 109 | 0.628608 | false |
sayan801/indivo_server | indivo/migrations/0016_auto__del_field_document_type.py | 3 | 52062 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Document.type'
db.delete_column('indivo_document', 'type_id')
def backwards(self, orm):
# Adding field 'Document.type'
db.add_column('indivo_document', 'type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['indivo.DocumentSchema'], null=True), keep_default=False)
models = {
'indivo.accesstoken': {
'Meta': {'object_name': 'AccessToken', '_ormbases': ['indivo.Principal']},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Account']", 'null': 'True'}),
'carenet': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Carenet']", 'null': 'True'}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'principal_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Principal']", 'unique': 'True', 'primary_key': 'True'}),
'share': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.PHAShare']"}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'token_secret': ('django.db.models.fields.CharField', [], {'max_length': '60'})
},
'indivo.account': {
'Meta': {'object_name': 'Account', '_ormbases': ['indivo.Principal']},
'account': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Principal']", 'unique': 'True', 'primary_key': 'True'}),
'contact_email': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'failed_login_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'last_failed_login_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'last_login_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'last_state_change': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'primary_secret': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True'}),
'secondary_secret': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'default': "'uninitialized'", 'max_length': '50'}),
'total_login_count': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'indivo.accountauthsystem': {
'Meta': {'unique_together': "(('auth_system', 'account'), ('auth_system', 'username'))", 'object_name': 'AccountAuthSystem'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'auth_systems'", 'to': "orm['indivo.Account']"}),
'auth_parameters': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True'}),
'auth_system': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.AuthSystem']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'accountauthsystem_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'user_attributes': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
'indivo.accountfullshare': {
'Meta': {'unique_together': "(('record', 'with_account'),)", 'object_name': 'AccountFullShare'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'accountfullshare_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'record': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fullshares'", 'to': "orm['indivo.Record']"}),
'role_label': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'with_account': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fullshares_to'", 'to': "orm['indivo.Account']"})
},
'indivo.allergy': {
'Meta': {'object_name': 'Allergy', '_ormbases': ['indivo.Fact']},
'allergen_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'allergen_name_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'allergen_name_type': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'allergen_name_value': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'allergen_type': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'allergen_type_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'allergen_type_type': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'allergen_type_value': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'date_diagnosed': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'diagnosed_by': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'fact_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Fact']", 'unique': 'True', 'primary_key': 'True'}),
'reaction': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'specifics': ('django.db.models.fields.TextField', [], {'null': 'True'})
},
'indivo.audit': {
'Meta': {'object_name': 'Audit'},
'carenet_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {}),
'document_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'effective_principal_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message_id': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'pha_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'proxied_by_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'record_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'req_domain': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'req_headers': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'req_ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True'}),
'req_method': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
'req_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'request_successful': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'resp_code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'resp_headers': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'view_func': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'})
},
'indivo.authsystem': {
'Meta': {'object_name': 'AuthSystem'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'authsystem_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'internal_p': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'indivo.carenet': {
'Meta': {'unique_together': "(('name', 'record'),)", 'object_name': 'Carenet'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'carenet_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'record': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Record']"})
},
'indivo.carenetaccount': {
'Meta': {'unique_together': "(('carenet', 'account'),)", 'object_name': 'CarenetAccount'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Account']"}),
'can_write': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'carenet': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Carenet']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'carenetaccount_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'})
},
'indivo.carenetautoshare': {
'Meta': {'unique_together': "(('carenet', 'record', 'type'),)", 'object_name': 'CarenetAutoshare'},
'carenet': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Carenet']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'carenetautoshare_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'record': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Record']"}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.DocumentSchema']", 'null': 'True'})
},
'indivo.carenetdocument': {
'Meta': {'unique_together': "(('carenet', 'document'),)", 'object_name': 'CarenetDocument'},
'carenet': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Carenet']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'carenetdocument_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Document']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'share_p': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'indivo.carenetpha': {
'Meta': {'unique_together': "(('carenet', 'pha'),)", 'object_name': 'CarenetPHA'},
'carenet': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Carenet']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'carenetpha_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'pha': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.PHA']"})
},
'indivo.document': {
'Meta': {'unique_together': "(('record', 'external_id'),)", 'object_name': 'Document'},
'content': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'content_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'document_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'digest': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'fqn': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'mime_type': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'nevershare': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'original': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'document_thread'", 'null': 'True', 'to': "orm['indivo.Document']"}),
'pha': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pha_document'", 'null': 'True', 'to': "orm['indivo.PHA']"}),
'processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'record': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'documents'", 'null': 'True', 'to': "orm['indivo.Record']"}),
'replaced_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'document_replaced'", 'null': 'True', 'to': "orm['indivo.Document']"}),
'replaces': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Document']", 'null': 'True'}),
'size': ('django.db.models.fields.IntegerField', [], {}),
'status': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': "orm['indivo.StatusName']"}),
'suppressed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'suppressed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Principal']", 'null': 'True'})
},
'indivo.documentprocessing': {
'Meta': {'object_name': 'DocumentProcessing'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'documentprocessing_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'processed_doc'", 'null': 'True', 'to': "orm['indivo.Document']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'})
},
'indivo.documentrels': {
'Meta': {'object_name': 'DocumentRels'},
'document_0': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'rels_as_doc_0'", 'to': "orm['indivo.Document']"}),
'document_1': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'rels_as_doc_1'", 'to': "orm['indivo.Document']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'relationship': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.DocumentSchema']"})
},
'indivo.documentschema': {
'Meta': {'object_name': 'DocumentSchema'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'documentschema_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'internal_p': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'stylesheet': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stylesheet'", 'null': 'True', 'to': "orm['indivo.Document']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '500'})
},
'indivo.documentstatushistory': {
'Meta': {'object_name': 'DocumentStatusHistory'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'documentstatushistory_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'document': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'effective_principal': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'proxied_by_principal': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'reason': ('django.db.models.fields.TextField', [], {}),
'record': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.StatusName']"})
},
'indivo.equipment': {
'Meta': {'object_name': 'Equipment', '_ormbases': ['indivo.Fact']},
'date_started': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'date_stopped': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'fact_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Fact']", 'unique': 'True', 'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'vendor': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'})
},
'indivo.fact': {
'Meta': {'object_name': 'Fact'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'allergy'", 'null': 'True', 'to': "orm['indivo.Document']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'record': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'allergy'", 'null': 'True', 'to': "orm['indivo.Record']"})
},
'indivo.immunization': {
'Meta': {'object_name': 'Immunization', '_ormbases': ['indivo.Fact']},
'administered_by': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'adverse_event': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'anatomic_surface': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'anatomic_surface_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'anatomic_surface_type': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True'}),
'anatomic_surface_value': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'date_administered': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'fact_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Fact']", 'unique': 'True', 'primary_key': 'True'}),
'sequence': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'vaccine_expiration': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'vaccine_lot': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'vaccine_manufacturer': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'vaccine_type': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'vaccine_type_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'vaccine_type_type': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True'}),
'vaccine_type_value': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'})
},
'indivo.lab': {
'Meta': {'object_name': 'Lab', '_ormbases': ['indivo.Fact']},
'date_measured': ('django.db.models.fields.DateTimeField', [], {}),
'fact_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Fact']", 'unique': 'True', 'primary_key': 'True'}),
'first_lab_test_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'first_lab_test_value': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'first_panel_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'lab_address': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'lab_comments': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'lab_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'lab_type': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'non_critical_range_maximum': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'non_critical_range_minimum': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'normal_range_maximum': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'normal_range_minimum': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'})
},
'indivo.machineapp': {
'Meta': {'object_name': 'MachineApp'},
'app_type': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'consumer_key': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'principal_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Principal']", 'unique': 'True', 'primary_key': 'True'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '60'})
},
'indivo.measurement': {
'Meta': {'object_name': 'Measurement', '_ormbases': ['indivo.Fact']},
'datetime': ('django.db.models.fields.DateTimeField', [], {}),
'fact_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Fact']", 'unique': 'True', 'primary_key': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '24'}),
'unit': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'value': ('django.db.models.fields.FloatField', [], {})
},
'indivo.medication': {
'Meta': {'object_name': 'Medication', '_ormbases': ['indivo.Fact']},
'brand_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'brand_name_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'brand_name_type': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'brand_name_value': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'date_started': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'date_stopped': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'dispense_as_written': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'dose_textvalue': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'dose_unit': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'dose_unit_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'dose_unit_type': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'dose_unit_value': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'dose_value': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'fact_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Fact']", 'unique': 'True', 'primary_key': 'True'}),
'frequency': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'frequency_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'frequency_type': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'frequency_value': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'name_type': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'name_value': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'prescribed_by_institution': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'prescribed_by_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'prescribed_on': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'prescribed_stop_on': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'prescription_duration': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'prescription_instructions': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'prescription_refill_info': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'route': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'route_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'route_type': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'route_value': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'strength_textvalue': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'strength_unit': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'strength_unit_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'strength_unit_type': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'strength_unit_value': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'strength_value': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'})
},
'indivo.message': {
'Meta': {'unique_together': "(('account', 'external_identifier', 'sender'),)", 'object_name': 'Message'},
'about_record': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Record']", 'null': 'True'}),
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Account']"}),
'archived_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'body': ('django.db.models.fields.TextField', [], {}),
'body_type': ('django.db.models.fields.CharField', [], {'default': "'plaintext'", 'max_length': '100'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'message_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'external_identifier': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'num_attachments': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'read_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'received_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'recipient': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'message_as_recipient'", 'to': "orm['indivo.Principal']"}),
'response_to': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'message_responses'", 'null': 'True', 'to': "orm['indivo.Message']"}),
'sender': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'message_as_sender'", 'to': "orm['indivo.Principal']"}),
'severity': ('django.db.models.fields.CharField', [], {'default': "'low'", 'max_length': '100'}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'indivo.messageattachment': {
'Meta': {'unique_together': "(('message', 'attachment_num'),)", 'object_name': 'MessageAttachment'},
'attachment_num': ('django.db.models.fields.IntegerField', [], {}),
'content': ('django.db.models.fields.TextField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'messageattachment_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'message': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Message']"}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'saved_to_document': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Document']", 'null': 'True'}),
'size': ('django.db.models.fields.IntegerField', [], {}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
'indivo.nonce': {
'Meta': {'unique_together': "(('nonce', 'oauth_type'),)", 'object_name': 'Nonce'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nonce': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'oauth_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
},
'indivo.notification': {
'Meta': {'object_name': 'Notification'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Account']"}),
'app_url': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True'}),
'content': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Document']", 'null': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'record': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Record']", 'null': 'True'}),
'sender': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notifications_sent_by'", 'to': "orm['indivo.Principal']"})
},
'indivo.nouser': {
'Meta': {'object_name': 'NoUser', '_ormbases': ['indivo.Principal']},
'principal_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Principal']", 'unique': 'True', 'primary_key': 'True'})
},
'indivo.pha': {
'Meta': {'object_name': 'PHA'},
'autonomous_reason': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'callback_url': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'consumer_key': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True'}),
'frameable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'has_ui': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_autonomous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'principal_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Principal']", 'unique': 'True'}),
'privacy_tou': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'schema': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.DocumentSchema']", 'null': 'True'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'start_url_template': ('django.db.models.fields.CharField', [], {'max_length': '500'})
},
'indivo.phashare': {
'Meta': {'unique_together': "(('record', 'with_pha'),)", 'object_name': 'PHAShare'},
'authorized_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'authorized_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shares_authorized_by'", 'null': 'True', 'to': "orm['indivo.Account']"}),
'carenet': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Carenet']", 'null': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'phashare_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'record': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pha_shares'", 'to': "orm['indivo.Record']"}),
'with_pha': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pha_shares_to'", 'to': "orm['indivo.PHA']"})
},
'indivo.principal': {
'Meta': {'object_name': 'Principal'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'principal_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'email': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'indivo.problem': {
'Meta': {'object_name': 'Problem', '_ormbases': ['indivo.Fact']},
'comments': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'date_onset': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date_resolution': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'diagnosed_by': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'fact_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Fact']", 'unique': 'True', 'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'name_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '24', 'null': 'True'}),
'name_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'name_value': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'})
},
'indivo.procedure': {
'Meta': {'object_name': 'Procedure', '_ormbases': ['indivo.Fact']},
'comments': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'date_performed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'fact_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Fact']", 'unique': 'True', 'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'name_type': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True'}),
'name_value': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'provider_institution': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'provider_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'})
},
'indivo.record': {
'Meta': {'object_name': 'Record'},
'contact': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'the_record_for_contact'", 'null': 'True', 'to': "orm['indivo.Document']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'record_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'demographics': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'the_record_for_demographics'", 'null': 'True', 'to': "orm['indivo.Document']"}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'records_owned_by'", 'null': 'True', 'to': "orm['indivo.Principal']"})
},
'indivo.recordnotificationroute': {
'Meta': {'unique_together': "(('account', 'record'),)", 'object_name': 'RecordNotificationRoute'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Account']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recordnotificationroute_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'record': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_routes'", 'to': "orm['indivo.Record']"})
},
'indivo.reqtoken': {
'Meta': {'object_name': 'ReqToken', '_ormbases': ['indivo.Principal']},
'authorized_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'authorized_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Account']", 'null': 'True'}),
'carenet': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Carenet']", 'null': 'True'}),
'oauth_callback': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
'pha': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.PHA']"}),
'principal_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Principal']", 'unique': 'True', 'primary_key': 'True'}),
'record': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Record']", 'null': 'True'}),
'share': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.PHAShare']", 'null': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'token_secret': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'verifier': ('django.db.models.fields.CharField', [], {'max_length': '60'})
},
'indivo.sessionrequesttoken': {
'Meta': {'object_name': 'SessionRequestToken'},
'approved_p': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sessionrequesttoken_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Account']", 'null': 'True'})
},
'indivo.sessiontoken': {
'Meta': {'object_name': 'SessionToken'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sessiontoken_created_by'", 'null': 'True', 'to': "orm['indivo.Principal']"}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['indivo.Account']", 'null': 'True'})
},
'indivo.simpleclinicalnote': {
'Meta': {'object_name': 'SimpleClinicalNote', '_ormbases': ['indivo.Fact']},
'chief_complaint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'date_of_visit': ('django.db.models.fields.DateTimeField', [], {}),
'fact_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Fact']", 'unique': 'True', 'primary_key': 'True'}),
'finalized_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'provider_institution': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'provider_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'signed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'specialty': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'specialty_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'specialty_type': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True'}),
'specialty_value': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'visit_location': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'visit_type': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'visit_type_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'visit_type_type': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True'}),
'visit_type_value': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'})
},
'indivo.statusname': {
'Meta': {'object_name': 'StatusName'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '24'})
},
'indivo.vitals': {
'Meta': {'object_name': 'Vitals', '_ormbases': ['indivo.Fact']},
'comments': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'date_measured': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'fact_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['indivo.Fact']", 'unique': 'True', 'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'name_type': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True'}),
'name_value': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'position': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'unit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'unit_abbrev': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'unit_type': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True'}),
'unit_value': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'value': ('django.db.models.fields.FloatField', [], {})
}
}
complete_apps = ['indivo']
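# Note: the `models` dict above is South's frozen snapshot of the schema at
# the time this migration was written; forwards()/backwards() reach the
# tables through the `orm[...]` accessor rather than through the live models.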
| gpl-3.0 | 2,188,460,089,523,783,000 | 92.636691 | 181 | 0.55359 | false |
petesburgh/or-tools | examples/python/labeled_dice.py | 34 | 4106 | # Copyright 2010 Hakan Kjellerstrand [email protected]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Labeled dice problem in Google CP Solver.
From Jim Orlin 'Colored letters, labeled dice: a logic puzzle'
http://jimorlin.wordpress.com/2009/02/17/colored-letters-labeled-dice-a-logic-puzzle/
'''
My daughter Jenn bought a puzzle book, and showed me a cute puzzle. There
are 13 words as follows: BUOY, CAVE, CELT, FLUB, FORK, HEMP, JUDY,
JUNK, LIMN, QUIP, SWAG, VISA, WISH.
There are 24 different letters that appear in the 13 words. The question
is: can one assign the 24 letters to 4 different cubes so that the
four letters of each word appear on different cubes. (There is one
letter from each word on each cube.) It might be fun for you to try
it. I'll give a small hint at the end of this post. The puzzle was
created by Humphrey Dudley.
'''
Jim Orlin's followup 'Update on Logic Puzzle':
http://jimorlin.wordpress.com/2009/02/21/update-on-logic-puzzle/
Compare with the following models:
* ECLiPSe: http://hakank.org/eclipse/labeled_dice.ecl
* Comet : http://www.hakank.org/comet/labeled_dice.co
* Gecode : http://hakank.org/gecode/labeled_dice.cpp
* SICStus: http://hakank.org/sicstus/labeled_dice.pl
* Zinc : http://hakank.org/minizinc/labeled_dice.zinc
This model was created by Hakan Kjellerstrand ([email protected])
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
from ortools.constraint_solver import pywrapcp
def main():
# Create the solver.
solver = pywrapcp.Solver("Labeled dice")
#
# data
#
n = 4
m = 24
A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, Y = (
range(m))
letters = ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M",
"N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "Y"]
num_words = 13
words = [
[B, U, O, Y],
[C, A, V, E],
[C, E, L, T],
[F, L, U, B],
[F, O, R, K],
[H, E, M, P],
[J, U, D, Y],
[J, U, N, K],
[L, I, M, N],
[Q, U, I, P],
[S, W, A, G],
[V, I, S, A],
[W, I, S, H]
]
#
# declare variables
#
dice = [solver.IntVar(0, n - 1, "dice[%i]" % i) for i in range(m)]
#
# constraints
#
# the letters in a word must be on a different die
for i in range(num_words):
solver.Add(solver.AllDifferent([dice[words[i][j]] for j in range(n)]))
# there must be exactly 6 letters of each die
for i in range(n):
b = [solver.IsEqualCstVar(dice[j], i) for j in range(m)]
solver.Add(solver.Sum(b) == 6)
#
# solution and search
#
solution = solver.Assignment()
solution.Add(dice)
db = solver.Phase(dice,
solver.CHOOSE_FIRST_UNBOUND,
solver.ASSIGN_MIN_VALUE)
#
# result
#
solver.NewSearch(db)
num_solutions = 0
while solver.NextSolution():
num_solutions += 1
# print "dice:", [(letters[i],dice[i].Value()) for i in range(m)]
for d in range(n):
print "die %i:" % d,
for i in range(m):
if dice[i].Value() == d:
print letters[i],
print
print "The words with the cube label:"
for i in range(num_words):
for j in range(n):
print "%s (%i)" % (letters[words[i][j]], dice[words[i][j]].Value()),
print
print
solver.EndSearch()
print
print "num_solutions:", num_solutions
print "failures:", solver.Failures()
print "branches:", solver.Branches()
print "WallTime:", solver.WallTime()
if __name__ == "__main__":
main()
| apache-2.0 | 5,448,742,784,316,329,000 | 27.123288 | 87 | 0.613492 | false |
KarimAllah/celery | docs/conf.py | 17 | 3240 | # -*- coding: utf-8 -*-
import sys
import os
# eventlet/gevent should not monkey patch anything.
os.environ["GEVENT_NOPATCH"] = "yes"
os.environ["EVENTLET_NOPATCH"] = "yes"
os.environ["CELERY_LOADER"] = "default"
this = os.path.dirname(os.path.abspath(__file__))
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
sys.path.append(os.path.join(os.pardir, "tests"))
sys.path.append(os.path.join(this, "_ext"))
import celery
# use app loader
from celery import Celery
app = Celery(set_as_current=True)
app.conf.update(BROKER_TRANSPORT="memory",
CELERY_RESULT_BACKEND="cache",
CELERY_CACHE_BACKEND="memory",
CELERYD_HIJACK_ROOT_LOGGER=False,
CELERYD_LOG_COLOR=False)
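# The in-memory broker and result/cache backends configured above let Sphinx
# import the celery modules for autodoc without a real broker running.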
# General configuration
# ---------------------
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.pngmath',
'sphinx.ext.intersphinx',
'sphinxcontrib.issuetracker',
'celerydocs']
html_show_sphinx = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Celery'
copyright = u'2009-2011, Ask Solem & Contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ".".join(map(str, celery.VERSION[0:2]))
# The full version, including alpha/beta/rc tags.
release = celery.__version__
exclude_trees = ['.build']
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
intersphinx_mapping = {
"http://docs.python.org/dev": None,
"http://kombu.readthedocs.org/en/latest/": None,
"http://django-celery.readthedocs.org/en/latest": None,
}
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'trac'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['.static']
html_use_smartypants = True
# If false, no module index is generated.
html_use_modindex = True
# If false, no index is generated.
html_use_index = True
latex_documents = [
('index', 'Celery.tex', ur'Celery Documentation',
ur'Ask Solem & Contributors', 'manual'),
]
html_theme = "celery"
html_theme_path = ["_theme"]
html_sidebars = {
'index': ['sidebarintro.html', 'sourcelink.html', 'searchbox.html'],
'**': ['sidebarlogo.html', 'relations.html',
'sourcelink.html', 'searchbox.html'],
}
### Issuetracker
if False: #not os.environ.get("SKIP_ISSUES"):
# Issue tracker is not working, just hangs
issuetracker = "github"
issuetracker_project = "ask/celery"
issuetracker_issue_pattern = r'[Ii]ssue #(\d+)'
| bsd-3-clause | 6,859,345,410,117,625,000 | 28.454545 | 78 | 0.675926 | false |
xtuml/pyxtuml | examples/list_bp_enums.py | 2 | 1385 | #!/usr/bin/env python
# encoding: utf-8
# Copyright (C) 2017 John Törnblom
#
# This file is part of pyxtuml.
#
# pyxtuml is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
#
# pyxtuml is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with pyxtuml. If not, see <http://www.gnu.org/licenses/>.
import sys
from xtuml import navigate_one as one
from bridgepoint import ooaofooa
if len(sys.argv) < 2:
print('')
print(' usage: %s <path to bridgepoint model folder>' % sys.argv[0])
print('')
sys.exit(1)
m = ooaofooa.load_metamodel(sys.argv[1])
get_name = lambda inst: one(inst).S_DT[17]().Name
for s_edt in sorted(m.select_many('S_EDT'), key=get_name):
print(get_name(s_edt))
is_first = lambda inst: not one(inst).S_ENUM[56, 'succeeds']()
s_enum = one(s_edt).S_ENUM[27](is_first)
while s_enum:
print(' %s' % s_enum.Name)
s_enum = one(s_enum).S_ENUM[56, 'precedes']()
| lgpl-3.0 | -8,952,911,738,587,387,000 | 29.755556 | 73 | 0.690029 | false |
rancher/python-agent | cattle/agent/handler.py | 3 | 3291 | import re
import logging
from cattle import utils
from cattle.lock import lock
from cattle.utils import JsonObject
log = logging.getLogger("agent")
class BaseHandler(object):
def __init__(self):
pass
def events(self):
ret = []
for i in utils.events_from_methods(self):
ret.append(".".join([self._get_handler_category(None), i]))
return ret
def supports(self, req):
method = self._get_method_for(req)
if method is None:
return False
return self._check_supports(req)
def execute(self, req):
method = self._get_method_for(req)
if method is None:
return None
else:
return method(req=req, **req.data.__dict__)
    def _get_method_for(self, req):
        # Resolve an event name '<category>.<action>[;suffix]' to the handler
        # method '<action>', with dots in the action replaced by underscores.
        prefix = ''
category = self._get_handler_category(req)
if len(category) > 0:
prefix = category + '.'
if len(req.name) <= len(prefix):
return None
name = req.name[len(prefix):].replace('.', '_')
idx = name.find(';')
if idx != -1:
name = name[0:idx]
try:
return getattr(self, name)
except:
return None
def _reply(self, req, response_data):
if req is None:
return None
resp = utils.reply(req)
resp.data = JsonObject(response_data)
return resp
    def _do(self, req=None, check=None, result=None, lock_obj=None,
            action=None, post_check=True):
        # Double-checked locking: reply immediately if the desired state
        # already holds; otherwise take the lock, re-check, run the action
        # and (optionally) verify the state changed before replying.
        if check():
            return self._reply(req, result())
        with lock(lock_obj):
            if check():
                return self._reply(req, result())
            action()
            data = result()
            if post_check and not check():
                raise Exception("Operation failed")
            return self._reply(req, data)
def _get_response_data(self, req, obj):
resource_type = req.get("resourceType")
type = obj.get("type")
if type is not None:
inner_name = re.sub("([A-Z])", r'_\1', type)
method_name = "_get_{0}_data".format(inner_name).lower()
method = None
try:
method = getattr(self, method_name)
except AttributeError:
pass
if method is not None:
return {resource_type: method(obj)}
return {}
def _check_supports(self, req):
raise Exception("Not implemented")
def _get_handler_category(self, req):
return ''
class KindBasedMixin(object):
CHECK_PATHS = [
["imageStoragePoolMap", "storagePool", "kind"],
["instanceHostMap", "host", "kind"],
["instanceForceStop", "kind"],
["instanceInspect", "kind"],
["instancePull", "kind"]
]
def __init__(self, kind=None):
super(KindBasedMixin, self).__init__()
self._kind = kind
def _check_supports(self, req):
for check in KindBasedMixin.CHECK_PATHS:
val = req.data
try:
for part in check:
val = val[part]
if val == self._kind:
return True
except KeyError:
pass
return False
| apache-2.0 | -3,908,730,040,892,918,300 | 24.315385 | 71 | 0.517168 | false |
steven-cutting/icsisumm | icsisumm-primary-sys34_v1/nltk/nltk-0.9.2/nltk/inference/inference.py | 9 | 3449 | # Natural Language Toolkit: Interface to Theorem Provers
#
# Author: Dan Garrette <[email protected]>
# Ewan Klein <[email protected]>
#
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
from nltk.sem.logic import ApplicationExpression, Operator, LogicParser
import tableau
import prover9
import mace
"""
A wrapper module that calls theorem provers and model builders.
"""
def get_prover(goal=None, assumptions=[], prover_name='Prover9'):
"""
@param goal: Input expression to prove
@type goal: L{logic.Expression}
@param assumptions: Input expressions to use as assumptions in the proof
@type assumptions: L{list} of logic.Expression objects
"""
if prover_name.lower() == 'tableau':
prover_module = tableau.Tableau
elif prover_name.lower() == 'prover9':
prover_module = prover9.Prover9
return prover_module(goal, assumptions)
def get_model_builder(goal=None, assumptions=[], model_builder_name='Mace'):
"""
@param goal: Input expression to prove
@type goal: L{logic.Expression}
@param assumptions: Input expressions to use as assumptions in the proof
@type assumptions: L{list} of logic.Expression objects
"""
if model_builder_name.lower() == 'mace':
builder_module = mace.Mace
return builder_module(goal, assumptions)
def demo_drt_glue_remove_duplicates(show_example=-1):
from nltk_contrib.gluesemantics import drt_glue
examples = ['David sees Mary',
'David eats a sandwich',
'every man chases a dog',
'John chases himself',
'John likes a cat',
'John likes every cat',
'he likes a dog',
'a dog walks and he leaves']
example_num = 0
hit = False
for sentence in examples:
if example_num==show_example or show_example==-1:
print '[[[Example %s]]] %s' % (example_num, sentence)
readings = drt_glue.parse_to_meaning(sentence, True)
for j in range(len(readings)):
reading = readings[j].simplify().resolve_anaphora()
print reading
print ''
hit = True
example_num += 1
if not hit:
print 'example not found'
def demo():
from nltk_contrib.drt import DRT
DRT.testTp_equals()
print '\n'
lp = LogicParser()
a = lp.parse(r'some x.((man x) and (walks x))')
b = lp.parse(r'some x.((walks x) and (man x))')
bicond = ApplicationExpression(ApplicationExpression(Operator('iff'), a), b)
print "Trying to prove:\n '%s <-> %s'" % (a.infixify(), b.infixify())
print 'tableau: %s' % get_prover(bicond, prover_name='tableau').prove()
print 'Prover9: %s' % get_prover(bicond, prover_name='Prover9').prove()
print '\n'
demo_drt_glue_remove_duplicates()
lp = LogicParser()
a = lp.parse(r'all x.((man x) implies (mortal x))')
b = lp.parse(r'(man socrates)')
c1 = lp.parse(r'(mortal socrates)')
c2 = lp.parse(r'(not (mortal socrates))')
print get_prover(c1, [a,b], 'prover9').prove()
print get_prover(c2, [a,b], 'prover9').prove()
print get_model_builder(c1, [a,b], 'mace').build_model()
print get_model_builder(c2, [a,b], 'mace').build_model()
if __name__ == '__main__':
demo()
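# Note: the Prover9 and Mace back-ends wrap external binaries that must be
# installed separately; the tableau prover is implemented in pure Python.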
| gpl-3.0 | -5,746,815,121,541,323,000 | 32.838384 | 80 | 0.596405 | false |
Intel-tensorflow/tensorflow | tensorflow/python/eager/monitoring_test.py | 14 | 4979 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for monitoring."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
from tensorflow.python.eager import monitoring
from tensorflow.python.eager import test
from tensorflow.python.framework import errors
from tensorflow.python.framework import test_util
class MonitoringTest(test_util.TensorFlowTestCase):
def test_counter(self):
counter = monitoring.Counter('test/counter', 'test counter')
counter.get_cell().increase_by(1)
self.assertEqual(counter.get_cell().value(), 1)
counter.get_cell().increase_by(5)
self.assertEqual(counter.get_cell().value(), 6)
def test_multiple_counters(self):
counter1 = monitoring.Counter('test/counter1', 'test counter', 'label1')
counter1.get_cell('foo').increase_by(1)
self.assertEqual(counter1.get_cell('foo').value(), 1)
counter2 = monitoring.Counter('test/counter2', 'test counter', 'label1',
'label2')
counter2.get_cell('foo', 'bar').increase_by(5)
self.assertEqual(counter2.get_cell('foo', 'bar').value(), 5)
def test_same_counter(self):
counter1 = monitoring.Counter('test/same_counter', 'test counter') # pylint: disable=unused-variable
with self.assertRaises(errors.AlreadyExistsError):
counter2 = monitoring.Counter('test/same_counter', 'test counter') # pylint: disable=unused-variable
def test_int_gauge(self):
gauge = monitoring.IntGauge('test/gauge', 'test gauge')
gauge.get_cell().set(1)
self.assertEqual(gauge.get_cell().value(), 1)
gauge.get_cell().set(5)
self.assertEqual(gauge.get_cell().value(), 5)
gauge1 = monitoring.IntGauge('test/gauge1', 'test gauge1', 'label1')
gauge1.get_cell('foo').set(2)
self.assertEqual(gauge1.get_cell('foo').value(), 2)
def test_string_gauge(self):
gauge = monitoring.StringGauge('test/gauge', 'test gauge')
gauge.get_cell().set('left')
self.assertEqual(gauge.get_cell().value(), 'left')
gauge.get_cell().set('right')
self.assertEqual(gauge.get_cell().value(), 'right')
gauge1 = monitoring.StringGauge('test/gauge1', 'test gauge1', 'label1')
gauge1.get_cell('foo').set('start')
self.assertEqual(gauge1.get_cell('foo').value(), 'start')
def test_bool_gauge(self):
gauge = monitoring.BoolGauge('test/gauge', 'test gauge')
gauge.get_cell().set(True)
self.assertTrue(gauge.get_cell().value())
gauge.get_cell().set(False)
self.assertFalse(gauge.get_cell().value())
gauge1 = monitoring.BoolGauge('test/gauge1', 'test gauge1', 'label1')
gauge1.get_cell('foo').set(True)
self.assertTrue(gauge1.get_cell('foo').value())
def test_sampler(self):
buckets = monitoring.ExponentialBuckets(1.0, 2.0, 2)
sampler = monitoring.Sampler('test/sampler', buckets, 'test sampler')
sampler.get_cell().add(1.0)
sampler.get_cell().add(5.0)
histogram_proto = sampler.get_cell().value()
self.assertEqual(histogram_proto.min, 1.0)
self.assertEqual(histogram_proto.num, 2.0)
self.assertEqual(histogram_proto.sum, 6.0)
sampler1 = monitoring.Sampler('test/sampler1', buckets, 'test sampler',
'label1')
sampler1.get_cell('foo').add(2.0)
sampler1.get_cell('foo').add(4.0)
sampler1.get_cell('bar').add(8.0)
histogram_proto1 = sampler1.get_cell('foo').value()
self.assertEqual(histogram_proto1.max, 4.0)
self.assertEqual(histogram_proto1.num, 2.0)
self.assertEqual(histogram_proto1.sum, 6.0)
def test_context_manager(self):
counter = monitoring.Counter('test/ctxmgr', 'test context manager', 'slot')
with monitoring.MonitoredTimer(counter.get_cell('long')):
time.sleep(0.01)
with monitoring.MonitoredTimer(counter.get_cell('short')):
time.sleep(0.01)
self.assertGreater(
counter.get_cell('long').value(),
counter.get_cell('short').value())
def test_function_decorator(self):
counter = monitoring.Counter('test/funcdecorator', 'test func decorator')
@monitoring.monitored_timer(counter.get_cell())
def timed_function(seconds):
time.sleep(seconds)
timed_function(0.001)
self.assertGreater(counter.get_cell().value(), 1000)
if __name__ == '__main__':
test.main()
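# Typical (non-test) usage of the counter API exercised above (names are
# hypothetical):
#   counter = monitoring.Counter('test/myapp/requests', 'request count', 'method')
#   counter.get_cell('GET').increase_by(1)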
| apache-2.0 | -5,664,138,620,009,820,000 | 38.204724 | 107 | 0.675236 | false |
Bluscream/Discord-Selfbot | cogs/debugger.py | 1 | 17971 | import pkg_resources
import contextlib
import sys
import inspect
import os
import shutil
import glob
import math
import textwrap
from PythonGists import PythonGists
from discord.ext import commands
from io import StringIO
from traceback import format_exc
from cogs.utils.checks import *
from contextlib import redirect_stdout
# Common imports that can be used by the debugger.
import requests
import json
import gc
import datetime
import time
import traceback
import prettytable
import re
import io
import asyncio
import discord
import random
import subprocess
from bs4 import BeautifulSoup
import urllib
import psutil
'''Module for the Python interpreter as well as saving, loading, and viewing the cmds/scripts run with the interpreter.'''
class Debugger:
def __init__(self, bot):
self.bot = bot
self.stream = io.StringIO()
self.channel = None
self._last_result = None
def cleanup_code(self, content):
"""Automatically removes code blocks from the code."""
# remove ```py\n```
if content.startswith('```') and content.endswith('```'):
return '\n'.join(content.split('\n')[1:-1])
# remove `foo`
return content.strip('` \n')
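    # Example: cleanup_code("```py\nprint(1)\n```") returns 'print(1)'.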
    # Executes/evaluates code. Pretty much the same as Rapptz's implementation for RoboDanny, with slight variations.
async def interpreter(self, env, code, ctx):
body = self.cleanup_code(code)
stdout = io.StringIO()
os.chdir(os.getcwd())
with open('%s/cogs/utils/temp.txt' % os.getcwd(), 'w') as temp:
temp.write(body)
        # Wrap the body in a coroutine so top-level `await` and `return` work.
        to_compile = 'async def func():\n{}'.format(textwrap.indent(body, " "))
try:
exec(to_compile, env)
except Exception as e:
return await ctx.send('```\n{}: {}\n```'.format(e.__class__.__name__, e))
func = env['func']
try:
with redirect_stdout(stdout):
ret = await func()
except Exception as e:
value = stdout.getvalue()
await ctx.send('```\n{}{}\n```'.format(value, traceback.format_exc()))
else:
value = stdout.getvalue()
result = None
if ret is None:
if value:
result = '```\n{}\n```'.format(value)
else:
try:
result = '```\n{}\n```'.format(repr(eval(body, env)))
except:
pass
else:
self._last_result = ret
result = '```\n{}{}\n```'.format(value, ret)
if result:
if len(str(result)) > 1950:
url = PythonGists.Gist(description='Py output', content=str(result).strip("`"), name='output.txt')
result = self.bot.bot_prefix + 'Large output. Posted to Gist: %s' % url
await ctx.send(result)
else:
await ctx.send(result)
else:
await ctx.send("```\n```")
@commands.command(pass_context=True)
async def debug(self, ctx, *, option: str = None):
"""Shows useful informations to people that try to help you."""
try:
if embed_perms(ctx.message):
                em = discord.Embed(color=0xad2929, title='🤖 Appu\'s Discord Selfbot Debug Infos')
system = ''
if sys.platform == 'linux':
system = subprocess.run(['uname', '-a'], stdout=subprocess.PIPE).stdout.decode('utf-8').strip()
if 'ubuntu' in system.lower():
system += '\n'+subprocess.run(['lsb_release', '-a'], stdout=subprocess.PIPE).stdout.decode('utf-8').strip()
elif sys.platform == 'win32':
try: platform
except: import platform
system = '%s %s (%s)'%(platform.system(), platform.version(), sys.platform)
else:
system = sys.platform
em.add_field(name='Operating System', value='%s' % system, inline=False)
try:
foo = subprocess.run("pip show discord.py", stdout=subprocess.PIPE)
_ver = re.search(r'Version: (\d+.\d+.\w+)', str(foo.stdout)).group(1)
except: _ver = discord.__version__
em.add_field(name='Discord.py Version', value='%s'%_ver)
em.add_field(name='PIP Version', value='%s'%pkg_resources.get_distribution('pip').version)
if os.path.exists('.git'):
try: em.add_field(name='Bot version', value='%s' % os.popen('git rev-parse --verify HEAD').read()[:7])
except: pass
em.add_field(name='Python Version', value='%s (%s)'%(sys.version,sys.api_version), inline=False)
if option and 'deps' in option.lower():
dependencies = ''
dep_file = sorted(open('%s/requirements.txt' % os.getcwd()).read().split("\n"), key=str.lower)
for dep in dep_file:
if not '==' in dep: continue
dep = dep.split('==')
cur = pkg_resources.get_distribution(dep[0]).version
if cur == dep[1]: dependencies += '\✅ %s: %s\n'%(dep[0], cur)
else: dependencies += '\❌ %s: %s / %s\n'%(dep[0], cur, dep[1])
em.add_field(name='Dependencies', value='%s' % dependencies)
cog_list = ["cogs." + os.path.splitext(f)[0] for f in [os.path.basename(f) for f in glob.glob("cogs/*.py")]]
loaded_cogs = [x.__module__.split(".")[1] for x in self.bot.cogs.values()]
unloaded_cogs = [c.split(".")[1] for c in cog_list if c.split(".")[1] not in loaded_cogs]
if option and 'cogs' in option.lower():
if len(loaded_cogs) > 0: em.add_field(name='Loaded Cogs ({})'.format(len(loaded_cogs)), value='\n'.join(sorted(loaded_cogs)), inline=True)
if len(unloaded_cogs) > 0: em.add_field(name='Unloaded Cogs ({})'.format(len(unloaded_cogs)), value='\n'.join(sorted(unloaded_cogs)), inline=True)
else: em.add_field(name='Cogs', value='{} loaded.\n{} unloaded'.format(len(loaded_cogs), len(unloaded_cogs)), inline=True)
if option and 'path' in option.lower():
paths = "\n".join(sys.path).strip()
if len(paths) > 300:
url = PythonGists.Gist(description='sys.path', content=str(paths), name='syspath.txt')
em.add_field(name='Import Paths', value=paths[:300]+' [(Show more)](%s)'%url)
else:
em.add_field(name='Import Paths', value=paths)
user = subprocess.run(['whoami'], stdout=subprocess.PIPE).stdout.decode('utf-8').strip()
if sys.platform == 'linux':
user += '@'+subprocess.run(['hostname'], stdout=subprocess.PIPE).stdout.decode('utf-8').strip()
em.set_footer(text='Generated at {:%Y-%m-%d %H:%M:%S} by {}'.format(datetime.datetime.now(), user))
try: await ctx.send(content=None, embed=em)
except discord.HTTPException as e:
                    await ctx.send(self.bot.bot_prefix + 'Failed to send the debug embed: `%s`' % e)
else:
await ctx.send('No permissions to embed debug info.')
except:
await ctx.send('``` %s ```'%format_exc())
@commands.group(pass_context=True, invoke_without_command=True)
async def py(self, ctx, *, msg):
"""Python interpreter. See the wiki for more info."""
if ctx.invoked_subcommand is None:
env = {
'bot': self.bot,
'ctx': ctx,
'channel': ctx.channel,
'author': ctx.author,
'guild': ctx.guild,
'server': ctx.guild,
'message': ctx.message,
'_': self._last_result
}
env.update(globals())
await self.interpreter(env, msg, ctx)
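    # Usage sketch (assuming '>' is the configured command prefix; the real
    # prefix comes from the bot's config, which is not part of this file):
    #   >py 2 ** 10     -> replies with 1024
    #   >py guild.name  -> 'guild' is one of the names bound in env above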
# Save last [p]py cmd/script.
@py.command(pass_context=True)
async def save(self, ctx, *, msg):
"""Save the code you last ran. Ex: [p]py save stuff"""
msg = msg.strip()[:-4] if msg.strip().endswith('.txt') else msg.strip()
os.chdir(os.getcwd())
if not os.path.exists('%s/cogs/utils/temp.txt' % os.getcwd()):
return await ctx.send(self.bot.bot_prefix + 'Nothing to save. Run a ``>py`` cmd/script first.')
if not os.path.isdir('%s/cogs/utils/save/' % os.getcwd()):
os.makedirs('%s/cogs/utils/save/' % os.getcwd())
if os.path.exists('%s/cogs/utils/save/%s.txt' % (os.getcwd(), msg)):
await ctx.send(self.bot.bot_prefix + '``%s.txt`` already exists. Overwrite? ``y/n``.' % msg)
reply = await self.bot.wait_for('message', check=lambda m: m.author == ctx.message.author and (m.content.lower() == 'y' or m.content.lower() == 'n'))
if reply.content.lower().strip() != 'y':
return await ctx.send(self.bot.bot_prefix + 'Cancelled.')
if os.path.exists('%s/cogs/utils/save/%s.txt' % (os.getcwd(), msg)):
os.remove('%s/cogs/utils/save/%s.txt' % (os.getcwd(), msg))
try:
shutil.move('%s/cogs/utils/temp.txt' % os.getcwd(), '%s/cogs/utils/save/%s.txt' % (os.getcwd(), msg))
await ctx.send(self.bot.bot_prefix + 'Saved last run cmd/script as ``%s.txt``' % msg)
except:
await ctx.send(self.bot.bot_prefix + 'Error saving file as ``%s.txt``' % msg)
# Load a cmd/script saved with the [p]save cmd
@py.command(aliases=['start'], pass_context=True)
async def run(self, ctx, *, msg):
"""Run code that you saved with the save commmand. Ex: [p]py run stuff parameter1 parameter2"""
# Like in unix, the first parameter is the script name
parameters = msg.split()
save_file = parameters[0] # Force scope
if save_file.endswith('.txt'):
save_file = save_file[:-(len('.txt'))] # Temptation to put '.txt' in a constant increases
else:
parameters[0] += '.txt' # The script name is always full
if not os.path.exists('%s/cogs/utils/save/%s.txt' % (os.getcwd(), save_file)):
return await ctx.send(self.bot.bot_prefix + 'Could not find file ``%s.txt``' % save_file)
script = open('%s/cogs/utils/save/%s.txt' % (os.getcwd(), save_file)).read()
env = {
'bot': self.bot,
'ctx': ctx,
'channel': ctx.channel,
'author': ctx.author,
'guild': ctx.guild,
'server': ctx.guild,
'message': ctx.message,
'_': self._last_result,
'argv': parameters
}
env.update(globals())
await self.interpreter(env, script, ctx)
# List saved cmd/scripts
@py.command(aliases=['ls'], pass_context=True)
async def list(self, ctx, txt: str = None):
"""List all saved scripts. Ex: [p]py list or [p]py ls"""
try:
if txt:
numb = txt.strip()
if numb.isdigit():
numb = int(numb)
else:
                    return await ctx.send(self.bot.bot_prefix + 'Invalid syntax. Ex: ``>py list 1``')
else:
numb = 1
filelist = glob.glob('cogs/utils/save/*.txt')
if len(filelist) == 0:
return await ctx.send(self.bot.bot_prefix + 'No saved cmd/scripts.')
filelist.sort()
msg = ''
pages = int(math.ceil(len(filelist) / 10))
if numb < 1:
numb = 1
elif numb > pages:
numb = pages
for i in range(10):
try:
msg += filelist[i + (10 * (numb-1))][16:] + '\n'
except:
break
await ctx.send(self.bot.bot_prefix + 'List of saved cmd/scripts. Page ``%s of %s`` ```%s```' % (numb, pages, msg))
except Exception as e:
await ctx.send(self.bot.bot_prefix + 'Error, something went wrong: ``%s``' % e)
# View a saved cmd/script
@py.group(aliases=['vi', 'vim'], pass_context=True)
async def view(self, ctx, *, msg: str):
"""View a saved script's contents. Ex: [p]py view stuff"""
msg = msg.strip()[:-4] if msg.strip().endswith('.txt') else msg.strip()
try:
if os.path.isfile('cogs/utils/save/%s.txt' % msg):
f = open('cogs/utils/save/%s.txt' % msg, 'r').read()
await ctx.send(self.bot.bot_prefix + 'Viewing ``%s.txt``: ```py\n%s```' % (msg, f.strip('` ')))
else:
await ctx.send(self.bot.bot_prefix + '``%s.txt`` does not exist.' % msg)
except Exception as e:
await ctx.send(self.bot.bot_prefix + 'Error, something went wrong: ``%s``' % e)
# Delete a saved cmd/script
@py.group(aliases=['rm'], pass_context=True)
async def delete(self, ctx, *, msg: str):
"""Delete a saved script. Ex: [p]py delete stuff"""
msg = msg.strip()[:-4] if msg.strip().endswith('.txt') else msg.strip()
try:
if os.path.exists('cogs/utils/save/%s.txt' % msg):
os.remove('cogs/utils/save/%s.txt' % msg)
await ctx.send(self.bot.bot_prefix + 'Deleted ``%s.txt`` from saves.' % msg)
else:
await ctx.send(self.bot.bot_prefix + '``%s.txt`` does not exist.' % msg)
except Exception as e:
await ctx.send(self.bot.bot_prefix + 'Error, something went wrong: ``%s``' % e)
@commands.command(pass_context=True)
async def load(self, ctx, *, msg):
"""Load a module."""
await ctx.message.delete()
try:
if os.path.exists("custom_cogs/{}.py".format(msg)):
self.bot.load_extension("custom_cogs.{}".format(msg))
elif os.path.exists("cogs/{}.py".format(msg)):
self.bot.load_extension("cogs.{}".format(msg))
else:
raise ImportError("No module named '{}'".format(msg))
except Exception as e:
await ctx.send(self.bot.bot_prefix + 'Failed to load module: `{}.py`'.format(msg))
await ctx.send(self.bot.bot_prefix + '{}: {}'.format(type(e).__name__, e))
else:
await ctx.send(self.bot.bot_prefix + 'Loaded module: `{}.py`'.format(msg))
@commands.command(pass_context=True)
async def unload(self, ctx, *, msg):
"""Unload a module"""
await ctx.message.delete()
try:
if os.path.exists("cogs/{}.py".format(msg)):
self.bot.unload_extension("cogs.{}".format(msg))
elif os.path.exists("custom_cogs/{}.py".format(msg)):
self.bot.unload_extension("custom_cogs.{}".format(msg))
else:
raise ImportError("No module named '{}'".format(msg))
except Exception as e:
await ctx.send(self.bot.bot_prefix + 'Failed to unload module: `{}.py`'.format(msg))
await ctx.send(self.bot.bot_prefix + '{}: {}'.format(type(e).__name__, e))
else:
await ctx.send(self.bot.bot_prefix + 'Unloaded module: `{}.py`'.format(msg))
@commands.command(pass_context=True)
async def loadall(self, ctx):
"""Loads all core modules"""
await ctx.message.delete()
errors = ""
for cog in os.listdir("cogs"):
if ".py" in cog:
cog = cog.replace('.py', '')
try:
self.bot.load_extension("cogs.{}".format(cog))
except Exception as e:
errors += 'Failed to load module: `{}.py` due to `{}: {}`\n'.format(cog, type(e).__name__, e)
if not errors:
await ctx.send(self.bot.bot_prefix + "All core modules loaded")
else:
await ctx.send(self.bot.bot_prefix + errors)
@commands.command(pass_context=True)
async def redirect(self, ctx):
"""Redirect STDOUT and STDERR to a channel for debugging purposes."""
sys.stdout = self.stream
sys.stderr = self.stream
self.channel = ctx.message.channel
await ctx.send(self.bot.bot_prefix + "Successfully redirected STDOUT and STDERR to the current channel!")
@commands.command(pass_context=True)
async def unredirect(self, ctx):
"""Redirect STDOUT and STDERR back to the console for debugging purposes."""
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
self.channel = None
await ctx.send(self.bot.bot_prefix + "Successfully redirected STDOUT and STDERR back to the console!")
async def redirection_clock(self):
await self.bot.wait_until_ready()
while self is self.bot.get_cog("Debugger"):
await asyncio.sleep(0.2)
stream_content = self.stream.getvalue()
if stream_content and self.channel:
await self.channel.send("```" + stream_content + "```")
self.stream = io.StringIO()
sys.stdout = self.stream
sys.stderr = self.stream
def setup(bot):
debug_cog = Debugger(bot)
loop = asyncio.get_event_loop()
loop.create_task(debug_cog.redirection_clock())
bot.add_cog(debug_cog)
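# Loading sketch (assuming this cog lives at cogs/debugger.py, a guess based
# on the bot.get_cog("Debugger") lookup above):
#   bot.load_extension('cogs.debugger')  # runs setup() and starts the clock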
| gpl-3.0 | 6,991,569,216,241,980,000 | 43.601523 | 166 | 0.523014 | false |
albertomurillo/ansible | lib/ansible/modules/web_infrastructure/ansible_tower/tower_inventory_source.py | 38 | 11320 | #!/usr/bin/python
# coding: utf-8 -*-
# Copyright: (c) 2018, Adrien Fleury <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: tower_inventory_source
author: "Adrien Fleury (@fleu42)"
version_added: "2.7"
short_description: create, update, or destroy Ansible Tower inventory source.
description:
    - Create, update, or destroy Ansible Tower inventory sources. See
U(https://www.ansible.com/tower) for an overview.
options:
name:
description:
- The name to use for the inventory source.
required: True
description:
description:
- The description to use for the inventory source.
inventory:
description:
- The inventory the source is linked to.
required: True
source:
description:
        - The type of the inventory source.
choices:
- file
- scm
- ec2
- gce
- azure
- azure_rm
- vmware
- satellite6
- cloudforms
- openstack
- rhv
- tower
- custom
required: True
credential:
description:
- Credential to use to retrieve the inventory from.
source_vars:
description:
- >-
          The source_vars allow you to override variables found in the source config
          file. For example, with OpenStack, specifying *private: false* would
change the output of the openstack.py script. It has to be YAML or
JSON.
timeout:
description:
- Number in seconds after which the Tower API methods will time out.
source_project:
description:
- Use a *project* as a source for the *inventory*.
source_path:
description:
- Path to the file to use as a source in the selected *project*.
update_on_project_update:
description:
- >-
          This parameter will sync the inventory when the project is synced. It
          can only be used with an SCM source.
type: bool
source_regions:
description:
- >-
          List of regions for your cloud provider. You can include multiple
          regions or all regions. Only hosts associated with the selected regions will be
updated. Refer to Ansible Tower documentation for more detail.
instance_filters:
description:
- >-
Provide a comma-separated list of filter expressions. Hosts are
imported when all of the filters match. Refer to Ansible Tower
documentation for more detail.
group_by:
description:
- >-
Specify which groups to create automatically. Group names will be
created similar to the options selected. If blank, all groups above
are created. Refer to Ansible Tower documentation for more detail.
source_script:
description:
- >-
The source custom script to use to build the inventory. It needs to
exist.
overwrite:
description:
- >-
If set, any hosts and groups that were previously present on the
external source but are now removed will be removed from the Tower
inventory. Hosts and groups that were not managed by the inventory
source will be promoted to the next manually created group or if
there is no manually created group to promote them into, they will be
left in the "all" default group for the inventory. When not checked,
local child hosts and groups not found on the external source will
remain untouched by the inventory update process.
type: bool
overwrite_vars:
description:
- >-
If set, all variables for child groups and hosts will be removed
and replaced by those found on the external source. When not checked,
a merge will be performed, combining local variables with those found
on the external source.
type: bool
update_on_launch:
description:
- >-
Each time a job runs using this inventory, refresh the inventory from
the selected source before executing job tasks.
type: bool
update_cache_timeout:
description:
- >-
Time in seconds to consider an inventory sync to be current. During
job runs and callbacks the task system will evaluate the timestamp of
the latest sync. If it is older than Cache Timeout, it is not
considered current, and a new inventory sync will be performed.
state:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
validate_certs:
description:
- Tower option to avoid certificates check.
type: bool
aliases: [ tower_verify_ssl ]
extends_documentation_fragment: tower
'''
EXAMPLES = '''
- name: Add tower inventory source
tower_inventory_source:
name: Inventory source
description: My Inventory source
inventory: My inventory
credential: Devstack_credential
source: openstack
update_on_launch: true
overwrite: true
source_vars: '{ private: false }'
state: present
validate_certs: false
'''
RETURN = ''' # '''
from ansible.module_utils.ansible_tower import TowerModule, tower_auth_config, tower_check_mode
try:
import tower_cli
import tower_cli.exceptions as exc
from tower_cli.conf import settings
except ImportError:
pass
SOURCE_CHOICES = {
'file': 'Directory or Script',
'scm': 'Sourced from a Project',
'ec2': 'Amazon EC2',
'gce': 'Google Compute Engine',
'azure': 'Microsoft Azure',
'azure_rm': 'Microsoft Azure Resource Manager',
'vmware': 'VMware vCenter',
'satellite6': 'Red Hat Satellite 6',
'cloudforms': 'Red Hat CloudForms',
'openstack': 'OpenStack',
'rhv': 'Red Hat Virtualization',
'tower': 'Ansible Tower',
'custom': 'Custom Script',
}
def main():
argument_spec = dict(
name=dict(required=True),
description=dict(required=False),
inventory=dict(required=True),
source=dict(required=True,
choices=SOURCE_CHOICES.keys()),
credential=dict(required=False),
source_vars=dict(required=False),
timeout=dict(type='int', required=False),
source_project=dict(required=False),
source_path=dict(required=False),
update_on_project_update=dict(type='bool', required=False),
source_regions=dict(required=False),
instance_filters=dict(required=False),
group_by=dict(required=False),
source_script=dict(required=False),
overwrite=dict(type='bool', required=False),
overwrite_vars=dict(type='bool', required=False),
update_on_launch=dict(type='bool', required=False),
update_cache_timeout=dict(type='int', required=False),
state=dict(choices=['present', 'absent'], default='present'),
)
module = TowerModule(argument_spec=argument_spec, supports_check_mode=True)
name = module.params.get('name')
inventory = module.params.get('inventory')
source = module.params.get('source')
state = module.params.get('state')
json_output = {'inventory_source': name, 'state': state}
tower_auth = tower_auth_config(module)
with settings.runtime_values(**tower_auth):
tower_check_mode(module)
inventory_source = tower_cli.get_resource('inventory_source')
try:
params = {}
params['name'] = name
params['source'] = source
if module.params.get('description'):
params['description'] = module.params.get('description')
if module.params.get('credential'):
credential_res = tower_cli.get_resource('credential')
try:
credential = credential_res.get(
name=module.params.get('credential'))
params['credential'] = credential['id']
except (exc.NotFound) as excinfo:
module.fail_json(
                        msg='Failed to update credential source, '
                            'credential not found: {0}'.format(excinfo),
changed=False
)
if module.params.get('source_project'):
source_project_res = tower_cli.get_resource('project')
try:
source_project = source_project_res.get(
name=module.params.get('source_project'))
params['source_project'] = source_project['id']
except (exc.NotFound) as excinfo:
module.fail_json(
                        msg='Failed to update source project, '
                            'project not found: {0}'.format(excinfo),
changed=False
)
if module.params.get('source_script'):
source_script_res = tower_cli.get_resource('inventory_script')
try:
script = source_script_res.get(
name=module.params.get('source_script'))
params['source_script'] = script['id']
except (exc.NotFound) as excinfo:
module.fail_json(
                        msg='Failed to update source script, '
                            'script not found: {0}'.format(excinfo),
changed=False
)
try:
inventory_res = tower_cli.get_resource('inventory')
params['inventory'] = inventory_res.get(name=inventory)['id']
except (exc.NotFound) as excinfo:
module.fail_json(
msg='Failed to update inventory source, '
'inventory not found: {0}'.format(excinfo),
changed=False
)
for key in ('source_vars', 'timeout', 'source_path',
'update_on_project_update', 'source_regions',
'instance_filters', 'group_by', 'overwrite',
'overwrite_vars', 'update_on_launch',
'update_cache_timeout'):
if module.params.get(key) is not None:
params[key] = module.params.get(key)
if state == 'present':
params['create_on_missing'] = True
result = inventory_source.modify(**params)
json_output['id'] = result['id']
elif state == 'absent':
params['fail_on_missing'] = False
result = inventory_source.delete(**params)
except (exc.ConnectionError, exc.BadRequest, exc.AuthError) as excinfo:
            module.fail_json(msg='Failed to update inventory source: '
                                 '{0}'.format(excinfo), changed=False)
json_output['changed'] = result['changed']
module.exit_json(**json_output)
if __name__ == '__main__':
main()
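# Removal sketch (hypothetical resource names; note that `source` stays
# required by the argument_spec above even when state is absent):
#
# - name: Remove tower inventory source
#   tower_inventory_source:
#     name: Inventory source
#     inventory: My inventory
#     source: openstack
#     state: absent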
| gpl-3.0 | 9,069,161,357,011,588,000 | 34.936508 | 95 | 0.587633 | false |
ahsquared/arc | arc-assets/themes/ut-thehill/node_modules/gulp-sass/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/MSVSUtil.py | 566 | 9386 | # Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions shared amongst the Windows generators."""
import copy
import os
_TARGET_TYPE_EXT = {
'executable': '.exe',
'loadable_module': '.dll',
'shared_library': '.dll',
}
def _GetLargePdbShimCcPath():
"""Returns the path of the large_pdb_shim.cc file."""
this_dir = os.path.abspath(os.path.dirname(__file__))
src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
win_data_dir = os.path.join(src_dir, 'data', 'win')
large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
return large_pdb_shim_cc
def _DeepCopySomeKeys(in_dict, keys):
"""Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
Arguments:
in_dict: The dictionary to copy.
keys: The keys to be copied. If a key is in this list and doesn't exist in
|in_dict| this is not an error.
Returns:
The partially deep-copied dictionary.
"""
d = {}
for key in keys:
if key not in in_dict:
continue
d[key] = copy.deepcopy(in_dict[key])
return d
def _SuffixName(name, suffix):
"""Add a suffix to the end of a target.
Arguments:
name: name of the target (foo#target)
suffix: the suffix to be added
Returns:
Target name with suffix added (foo_suffix#target)
"""
parts = name.rsplit('#', 1)
parts[0] = '%s_%s' % (parts[0], suffix)
return '#'.join(parts)
def _ShardName(name, number):
"""Add a shard number to the end of a target.
Arguments:
name: name of the target (foo#target)
number: shard number
Returns:
Target name with shard added (foo_1#target)
"""
return _SuffixName(name, str(number))
def ShardTargets(target_list, target_dicts):
"""Shard some targets apart to work around the linkers limits.
Arguments:
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
Returns:
Tuple of the new sharded versions of the inputs.
"""
# Gather the targets to shard, and how many pieces.
targets_to_shard = {}
for t in target_dicts:
shards = int(target_dicts[t].get('msvs_shard', 0))
if shards:
targets_to_shard[t] = shards
# Shard target_list.
new_target_list = []
for t in target_list:
if t in targets_to_shard:
for i in range(targets_to_shard[t]):
new_target_list.append(_ShardName(t, i))
else:
new_target_list.append(t)
# Shard target_dict.
new_target_dicts = {}
for t in target_dicts:
if t in targets_to_shard:
for i in range(targets_to_shard[t]):
name = _ShardName(t, i)
new_target_dicts[name] = copy.copy(target_dicts[t])
new_target_dicts[name]['target_name'] = _ShardName(
new_target_dicts[name]['target_name'], i)
sources = new_target_dicts[name].get('sources', [])
new_sources = []
for pos in range(i, len(sources), targets_to_shard[t]):
new_sources.append(sources[pos])
new_target_dicts[name]['sources'] = new_sources
else:
new_target_dicts[t] = target_dicts[t]
# Shard dependencies.
for t in new_target_dicts:
dependencies = copy.copy(new_target_dicts[t].get('dependencies', []))
new_dependencies = []
for d in dependencies:
if d in targets_to_shard:
for i in range(targets_to_shard[d]):
new_dependencies.append(_ShardName(d, i))
else:
new_dependencies.append(d)
new_target_dicts[t]['dependencies'] = new_dependencies
return (new_target_list, new_target_dicts)
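# Behaviour sketch (hypothetical target names): given
#   target_dicts['base.gyp:base#target'] = {'msvs_shard': 2, 'sources': [...]}
# ShardTargets() produces 'base.gyp:base_0#target' and 'base.gyp:base_1#target',
# splits 'sources' round-robin between the shards, and rewrites dependencies
# on the original target to point at every shard.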
def _GetPdbPath(target_dict, config_name, vars):
"""Returns the path to the PDB file that will be generated by a given
configuration.
The lookup proceeds as follows:
- Look for an explicit path in the VCLinkerTool configuration block.
- Look for an 'msvs_large_pdb_path' variable.
- Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
specified.
- Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
Arguments:
target_dict: The target dictionary to be searched.
config_name: The name of the configuration of interest.
vars: A dictionary of common GYP variables with generator-specific values.
Returns:
The path of the corresponding PDB file.
"""
config = target_dict['configurations'][config_name]
msvs = config.setdefault('msvs_settings', {})
linker = msvs.get('VCLinkerTool', {})
pdb_path = linker.get('ProgramDatabaseFile')
if pdb_path:
return pdb_path
variables = target_dict.get('variables', {})
pdb_path = variables.get('msvs_large_pdb_path', None)
if pdb_path:
return pdb_path
pdb_base = target_dict.get('product_name', target_dict['target_name'])
pdb_base = '%s%s.pdb' % (pdb_base, _TARGET_TYPE_EXT[target_dict['type']])
pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base
return pdb_path
def InsertLargePdbShims(target_list, target_dicts, vars):
"""Insert a shim target that forces the linker to use 4KB pagesize PDBs.
This is a workaround for targets with PDBs greater than 1GB in size, the
limit for the 1KB pagesize PDBs created by the linker by default.
Arguments:
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
vars: A dictionary of common GYP variables with generator-specific values.
Returns:
Tuple of the shimmed version of the inputs.
"""
# Determine which targets need shimming.
targets_to_shim = []
for t in target_dicts:
target_dict = target_dicts[t]
# We only want to shim targets that have msvs_large_pdb enabled.
if not int(target_dict.get('msvs_large_pdb', 0)):
continue
# This is intended for executable, shared_library and loadable_module
# targets where every configuration is set up to produce a PDB output.
# If any of these conditions is not true then the shim logic will fail
# below.
targets_to_shim.append(t)
large_pdb_shim_cc = _GetLargePdbShimCcPath()
for t in targets_to_shim:
target_dict = target_dicts[t]
target_name = target_dict.get('target_name')
base_dict = _DeepCopySomeKeys(target_dict,
['configurations', 'default_configuration', 'toolset'])
# This is the dict for copying the source file (part of the GYP tree)
# to the intermediate directory of the project. This is necessary because
# we can't always build a relative path to the shim source file (on Windows
# GYP and the project may be on different drives), and Ninja hates absolute
# paths (it ends up generating the .obj and .obj.d alongside the source
# file, polluting GYPs tree).
copy_suffix = 'large_pdb_copy'
copy_target_name = target_name + '_' + copy_suffix
full_copy_target_name = _SuffixName(t, copy_suffix)
shim_cc_basename = os.path.basename(large_pdb_shim_cc)
shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
copy_dict = copy.deepcopy(base_dict)
copy_dict['target_name'] = copy_target_name
copy_dict['type'] = 'none'
copy_dict['sources'] = [ large_pdb_shim_cc ]
copy_dict['copies'] = [{
'destination': shim_cc_dir,
'files': [ large_pdb_shim_cc ]
}]
# This is the dict for the PDB generating shim target. It depends on the
# copy target.
shim_suffix = 'large_pdb_shim'
shim_target_name = target_name + '_' + shim_suffix
full_shim_target_name = _SuffixName(t, shim_suffix)
shim_dict = copy.deepcopy(base_dict)
shim_dict['target_name'] = shim_target_name
shim_dict['type'] = 'static_library'
shim_dict['sources'] = [ shim_cc_path ]
shim_dict['dependencies'] = [ full_copy_target_name ]
# Set up the shim to output its PDB to the same location as the final linker
# target.
for config_name, config in shim_dict.get('configurations').iteritems():
pdb_path = _GetPdbPath(target_dict, config_name, vars)
# A few keys that we don't want to propagate.
for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']:
config.pop(key, None)
msvs = config.setdefault('msvs_settings', {})
# Update the compiler directives in the shim target.
compiler = msvs.setdefault('VCCLCompilerTool', {})
compiler['DebugInformationFormat'] = '3'
compiler['ProgramDataBaseFileName'] = pdb_path
# Set the explicit PDB path in the appropriate configuration of the
# original target.
config = target_dict['configurations'][config_name]
msvs = config.setdefault('msvs_settings', {})
linker = msvs.setdefault('VCLinkerTool', {})
linker['GenerateDebugInformation'] = 'true'
linker['ProgramDatabaseFile'] = pdb_path
# Add the new targets. They must go to the beginning of the list so that
# the dependency generation works as expected in ninja.
target_list.insert(0, full_copy_target_name)
target_list.insert(0, full_shim_target_name)
target_dicts[full_copy_target_name] = copy_dict
target_dicts[full_shim_target_name] = shim_dict
# Update the original target to depend on the shim target.
target_dict.setdefault('dependencies', []).append(full_shim_target_name)
return (target_list, target_dicts) | gpl-2.0 | 2,459,139,348,113,441,300 | 34.157303 | 80 | 0.667697 | false |
wenqvip/pytuto | proj3/pagemaker.py | 1 | 1061 | __author__ = 'Spencer'
from xml.sax.handler import ContentHandler
from xml.sax import parse
class PageMaker(ContentHandler):
passthrough = False
def startElement(self, name, attrs):
if name == 'page':
self.passthrough = True
self.out = open(attrs['name'] + '.html', 'w')
self.out.write('<html><head>\n')
self.out.write('<title>%s</title>\n' % attrs['title'])
self.out.write('</head><body>\n')
elif self.passthrough:
self.out.write('<' + name)
for key, val in attrs.items():
self.out.write(' %s="%s"' % (key, val))
self.out.write('>')
def endElement(self, name):
if name == 'page':
self.passthrough = False
self.out.write('\n</body></html>\n')
self.out.close()
elif self.passthrough:
self.out.write('</%s>' % name)
def characters(self, content):
if self.passthrough:
self.out.write(content)
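# Input sketch (hypothetical content for website.xml): every <page> element
# becomes one HTML file named after its 'name' attribute, e.g.
#   <website>
#     <page name="index" title="Home Page">
#       <h1>Welcome!</h1>
#     </page>
#   </website>
# writes index.html wrapping the inner markup in <html>/<head>/<body> tags.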
parse('website.xml', PageMaker()) | gpl-3.0 | -1,734,967,306,656,178,200 | 26.947368 | 66 | 0.533459 | false |
areitz/pants | src/python/pants/backend/jvm/tasks/jvm_compile/jvm_dependency_analyzer.py | 4 | 13065 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from collections import defaultdict
from twitter.common.collections import OrderedSet
from pants.backend.jvm.targets.jar_library import JarLibrary
from pants.backend.jvm.targets.jvm_target import JvmTarget
from pants.backend.jvm.targets.scala_library import ScalaLibrary
from pants.backend.jvm.tasks.ivy_task_mixin import IvyTaskMixin
from pants.base.build_environment import get_buildroot
from pants.base.build_graph import sort_targets
from pants.base.exceptions import TaskError
class JvmDependencyAnalyzer(object):
def __init__(self,
context,
check_missing_deps,
check_missing_direct_deps,
check_unnecessary_deps,
target_whitelist):
self._context = context
self._check_missing_deps = check_missing_deps
self._check_missing_direct_deps = check_missing_direct_deps
self._check_unnecessary_deps = check_unnecessary_deps
# These targets we will not report as having any dependency issues even if they do.
self._target_whitelist = OrderedSet(target_whitelist)
@classmethod
  def prepare(cls, options, round_manager):
round_manager.require_data('ivy_jar_products')
round_manager.require_data('ivy_resolve_symlink_map')
def _compute_targets_by_file(self):
"""Returns a map from abs path of source, class or jar file to an OrderedSet of targets.
The value is usually a singleton, because a source or class file belongs to a single target.
However a single jar may be provided (transitively or intransitively) by multiple JarLibrary
targets. But if there is a JarLibrary target that depends on a jar directly, then that
"canonical" target will be the first one in the list of targets.
"""
targets_by_file = defaultdict(OrderedSet)
# Multiple JarLibrary targets can provide the same (org, name).
jarlibs_by_id = defaultdict(set)
# Compute src -> target.
with self._context.new_workunit(name='map_sources'):
buildroot = get_buildroot()
# Look at all targets in-play for this pants run. Does not include synthetic targets,
for target in self._context.targets():
if isinstance(target, JvmTarget):
for src in target.sources_relative_to_buildroot():
targets_by_file[os.path.join(buildroot, src)].add(target)
elif isinstance(target, JarLibrary):
for jardep in target.jar_dependencies:
jarlibs_by_id[(jardep.org, jardep.name)].add(target)
# TODO(Tejal Desai): pantsbuild/pants/65: Remove java_sources attribute for ScalaLibrary
if isinstance(target, ScalaLibrary):
for java_source in target.java_sources:
for src in java_source.sources_relative_to_buildroot():
targets_by_file[os.path.join(buildroot, src)].add(target)
# Compute class -> target.
with self._context.new_workunit(name='map_classes'):
classes_by_target = self._context.products.get_data('classes_by_target')
for tgt, target_products in classes_by_target.items():
for _, classes in target_products.abs_paths():
for cls in classes:
targets_by_file[cls].add(tgt)
# Compute jar -> target.
with self._context.new_workunit(name='map_jars'):
with IvyTaskMixin.symlink_map_lock:
all_symlinks_map = self._context.products.get_data('ivy_resolve_symlink_map').copy()
# We make a copy, so it's safe to use outside the lock.
def register_transitive_jars_for_ref(ivyinfo, ref):
deps_by_ref_memo = {}
def get_transitive_jars_by_ref(ref1):
def create_collection(current_ref):
return {ivyinfo.modules_by_ref[current_ref].artifact}
return ivyinfo.traverse_dependency_graph(ref1, create_collection, memo=deps_by_ref_memo)
target_key = (ref.org, ref.name)
if target_key in jarlibs_by_id:
# These targets provide all the jars in ref, and all the jars ref transitively depends on.
jarlib_targets = jarlibs_by_id[target_key]
for jar_path in get_transitive_jars_by_ref(ref):
# Register that each jarlib_target provides jar (via all its symlinks).
symlink = all_symlinks_map.get(os.path.realpath(jar_path), None)
if symlink:
for jarlib_target in jarlib_targets:
targets_by_file[symlink].add(jarlib_target)
ivy_products = self._context.products.get_data('ivy_jar_products')
if ivy_products:
for ivyinfos in ivy_products.values():
for ivyinfo in ivyinfos:
for ref in ivyinfo.modules_by_ref:
register_transitive_jars_for_ref(ivyinfo, ref)
return targets_by_file
def _compute_transitive_deps_by_target(self):
"""Map from target to all the targets it depends on, transitively."""
# Sort from least to most dependent.
sorted_targets = reversed(sort_targets(self._context.targets()))
transitive_deps_by_target = defaultdict(set)
# Iterate in dep order, to accumulate the transitive deps for each target.
for target in sorted_targets:
transitive_deps = set()
for dep in target.dependencies:
transitive_deps.update(transitive_deps_by_target.get(dep, []))
transitive_deps.add(dep)
# Need to handle the case where a java_sources target has dependencies.
# In particular if it depends back on the original target.
if hasattr(target, 'java_sources'):
for java_source_target in target.java_sources:
for transitive_dep in java_source_target.dependencies:
transitive_deps_by_target[java_source_target].add(transitive_dep)
transitive_deps_by_target[target] = transitive_deps
return transitive_deps_by_target
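  # Shape sketch (hypothetical targets): for a dependency chain a -> b -> c,
  # iterating from least to most dependent accumulates
  #   {c: set(), b: {c}, a: {b, c}}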
def check(self, srcs, actual_deps):
"""Check for missing deps.
See docstring for _compute_missing_deps for details.
"""
if self._check_missing_deps or self._check_missing_direct_deps or self._check_unnecessary_deps:
missing_file_deps, missing_tgt_deps, missing_direct_tgt_deps = \
self._compute_missing_deps(srcs, actual_deps)
buildroot = get_buildroot()
def shorten(path): # Make the output easier to read.
if path.startswith(buildroot):
return os.path.relpath(path, buildroot)
return path
def filter_whitelisted(missing_deps):
# Removing any targets that exist in the whitelist from the list of dependency issues.
return [(tgt_pair, evidence) for (tgt_pair, evidence) in missing_deps
if tgt_pair[0].address.reference() not in self._target_whitelist]
missing_tgt_deps = filter_whitelisted(missing_tgt_deps)
if self._check_missing_deps and (missing_file_deps or missing_tgt_deps):
for (tgt_pair, evidence) in missing_tgt_deps:
evidence_str = '\n'.join([' {} uses {}'.format(shorten(e[0]), shorten(e[1]))
for e in evidence])
self._context.log.error(
'Missing BUILD dependency {} -> {} because:\n{}'
.format(tgt_pair[0].address.reference(), tgt_pair[1].address.reference(), evidence_str))
for (src_tgt, dep) in missing_file_deps:
self._context.log.error('Missing BUILD dependency {} -> {}'
.format(src_tgt.address.reference(), shorten(dep)))
if self._check_missing_deps == 'fatal':
raise TaskError('Missing deps.')
missing_direct_tgt_deps = filter_whitelisted(missing_direct_tgt_deps)
if self._check_missing_direct_deps and missing_direct_tgt_deps:
for (tgt_pair, evidence) in missing_direct_tgt_deps:
evidence_str = '\n'.join([' {} uses {}'.format(shorten(e[0]), shorten(e[1]))
for e in evidence])
self._context.log.warn('Missing direct BUILD dependency {} -> {} because:\n{}'
.format(tgt_pair[0].address, tgt_pair[1].address, evidence_str))
if self._check_missing_direct_deps == 'fatal':
raise TaskError('Missing direct deps.')
if self._check_unnecessary_deps:
raise TaskError('Unnecessary dep warnings not implemented yet.')
def _compute_missing_deps(self, srcs, actual_deps):
"""Computes deps that are used by the compiler but not specified in a BUILD file.
These deps are bugs waiting to happen: the code may happen to compile because the dep was
brought in some other way (e.g., by some other root target), but that is obviously fragile.
Note that in practice we're OK with reliance on indirect deps that are only brought in
transitively. E.g., in Scala type inference can bring in such a dep subtly. Fortunately these
cases aren't as fragile as a completely missing dependency. It's still a good idea to have
explicit direct deps where relevant, so we optionally warn about indirect deps, to make them
easy to find and reason about.
- actual_deps: a map src -> list of actual deps (source, class or jar file) as noted by the
compiler.
Returns a triple (missing_file_deps, missing_tgt_deps, missing_direct_tgt_deps) where:
- missing_file_deps: a list of pairs (src_tgt, dep_file) where src_tgt requires dep_file, and
we're unable to map to a target (because its target isn't in the total set of targets in play,
and we don't want to parse every BUILD file in the workspace just to find it).
- missing_tgt_deps: a list of pairs (src_tgt, dep_tgt) where src_tgt is missing a necessary
transitive dependency on dep_tgt.
- missing_direct_tgt_deps: a list of pairs (src_tgt, dep_tgt) where src_tgt is missing a direct
dependency on dep_tgt but has a transitive dep on it.
All paths in the input and output are absolute.
"""
def must_be_explicit_dep(dep):
# We don't require explicit deps on the java runtime, so we shouldn't consider that
# a missing dep.
return not dep.startswith(self._context.java_home)
def target_or_java_dep_in_targets(target, targets):
# We want to check if the target is in the targets collection
#
# However, for the special case of scala_library that has a java_sources
# reference we're ok if that exists in targets even if the scala_library does not.
if target in targets:
return True
elif target.is_scala:
return any(t in targets for t in target.java_sources)
else:
return False
# TODO: If recomputing these every time becomes a performance issue, memoize for
# already-seen targets and incrementally compute for new targets not seen in a previous
# partition, in this or a previous chunk.
targets_by_file = self._compute_targets_by_file()
transitive_deps_by_target = self._compute_transitive_deps_by_target()
# Find deps that are actual but not specified.
with self._context.new_workunit(name='scan_deps'):
missing_file_deps = OrderedSet() # (src, src).
missing_tgt_deps_map = defaultdict(list) # (tgt, tgt) -> a list of (src, src) as evidence.
missing_direct_tgt_deps_map = defaultdict(list) # The same, but for direct deps.
buildroot = get_buildroot()
abs_srcs = [os.path.join(buildroot, src) for src in srcs]
for src in abs_srcs:
        src_tgts = targets_by_file.get(src)
        src_tgt = next(iter(src_tgts)) if src_tgts else None
if src_tgt is not None:
for actual_dep in filter(must_be_explicit_dep, actual_deps.get(src, [])):
actual_dep_tgts = targets_by_file.get(actual_dep)
# actual_dep_tgts is usually a singleton. If it's not, we only need one of these
# to be in our declared deps to be OK.
if actual_dep_tgts is None:
missing_file_deps.add((src_tgt, actual_dep))
elif not target_or_java_dep_in_targets(src_tgt, actual_dep_tgts):
# Obviously intra-target deps are fine.
canonical_actual_dep_tgt = next(iter(actual_dep_tgts))
if actual_dep_tgts.isdisjoint(transitive_deps_by_target.get(src_tgt, [])):
missing_tgt_deps_map[(src_tgt, canonical_actual_dep_tgt)].append((src, actual_dep))
elif canonical_actual_dep_tgt not in src_tgt.dependencies:
# The canonical dep is the only one a direct dependency makes sense on.
missing_direct_tgt_deps_map[(src_tgt, canonical_actual_dep_tgt)].append(
(src, actual_dep))
else:
raise TaskError('Requested dep info for unknown source file: {}'.format(src))
return (list(missing_file_deps),
missing_tgt_deps_map.items(),
missing_direct_tgt_deps_map.items())
| apache-2.0 | -4,437,477,147,543,314,400 | 46.682482 | 102 | 0.661692 | false |
CuriousLearner/kivy | examples/canvas/fbo_canvas.py | 59 | 2544 | '''
FBO Canvas
==========
This demonstrates a layout using an FBO (framebuffer object), an
off-screen render target, instead of a plain canvas. You should see a
black canvas with a button labelled 'FBO' in the bottom left corner.
Clicking it animates the button back and forth between the left and
right edges.
'''
__all__ = ('FboFloatLayout', )
from kivy.graphics import Color, Rectangle, Canvas, ClearBuffers, ClearColor
from kivy.graphics.fbo import Fbo
from kivy.uix.floatlayout import FloatLayout
from kivy.properties import ObjectProperty, NumericProperty
from kivy.app import App
from kivy.core.window import Window
from kivy.animation import Animation
from kivy.factory import Factory
class FboFloatLayout(FloatLayout):
texture = ObjectProperty(None, allownone=True)
alpha = NumericProperty(1)
def __init__(self, **kwargs):
self.canvas = Canvas()
with self.canvas:
self.fbo = Fbo(size=self.size)
self.fbo_color = Color(1, 1, 1, 1)
self.fbo_rect = Rectangle()
with self.fbo:
ClearColor(0, 0, 0, 0)
ClearBuffers()
# wait that all the instructions are in the canvas to set texture
self.texture = self.fbo.texture
super(FboFloatLayout, self).__init__(**kwargs)
def add_widget(self, *largs):
# trick to attach graphics instruction to fbo instead of canvas
canvas = self.canvas
self.canvas = self.fbo
ret = super(FboFloatLayout, self).add_widget(*largs)
self.canvas = canvas
return ret
def remove_widget(self, *largs):
canvas = self.canvas
self.canvas = self.fbo
super(FboFloatLayout, self).remove_widget(*largs)
self.canvas = canvas
def on_size(self, instance, value):
self.fbo.size = value
self.texture = self.fbo.texture
self.fbo_rect.size = value
def on_pos(self, instance, value):
self.fbo_rect.pos = value
def on_texture(self, instance, value):
self.fbo_rect.texture = value
def on_alpha(self, instance, value):
self.fbo_color.rgba = (1, 1, 1, value)
class ScreenLayerApp(App):
def build(self):
f = FboFloatLayout()
b = Factory.Button(text="FBO", size_hint=(None, None))
f.add_widget(b)
def anim_btn(*args):
if b.pos[0] == 0:
Animation(x=f.width - b.width).start(b)
else:
Animation(x=0).start(b)
b.bind(on_press=anim_btn)
return f
if __name__ == "__main__":
ScreenLayerApp().run()
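# Design note: swapping self.canvas for self.fbo around the super() calls in
# add_widget/remove_widget makes children draw into the off-screen FBO; the
# visible canvas only draws fbo_rect, textured with self.fbo.texture.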
| mit | -1,158,806,525,977,715,000 | 26.956044 | 76 | 0.625 | false |
pgiraud/georchestra | extractorapp/jsbuild/util/gen-go-jstools.py | 24 | 1441 | #!/usr/bin/env python
"""Generate go-jstools.py"""
import sys
import textwrap
import virtualenv
filename = 'go-jstools.py'
after_install = """\
import os, subprocess
def after_install(options, home_dir):
etc = join(home_dir, 'etc')
## TODO: this should all come from distutils
## like distutils.sysconfig.get_python_inc()
if sys.platform == 'win32':
lib_dir = join(home_dir, 'Lib')
bin_dir = join(home_dir, 'Scripts')
elif is_jython:
lib_dir = join(home_dir, 'Lib')
bin_dir = join(home_dir, 'bin')
else:
lib_dir = join(home_dir, 'lib', py_version)
bin_dir = join(home_dir, 'bin')
if not os.path.exists(etc):
os.makedirs(etc)
subprocess.call([join(bin_dir, 'easy_install'), 'JSTools==%s'])
"""
def generate(filename, version):
# what's commented out below comes from go-pylons.py
#path = version
#if '==' in version:
# path = version[:version.find('==')]
#output = virtualenv.create_bootstrap_script(
# textwrap.dedent(after_install % (path, version)))
output = virtualenv.create_bootstrap_script(
textwrap.dedent(after_install % version))
fp = open(filename, 'w')
fp.write(output)
fp.close()
def main():
if len(sys.argv) != 2:
print >> sys.stderr, 'usage: %s version' % sys.argv[0]
sys.exit(1)
generate(filename, sys.argv[1])
if __name__ == '__main__':
main()
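# Usage sketch: "python gen-go-jstools.py 0.6" (version number illustrative)
# writes go-jstools.py, a virtualenv bootstrap whose after_install hook
# easy_installs JSTools==0.6 into the freshly created environment.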
| gpl-3.0 | 2,323,279,601,125,958,000 | 25.2 | 67 | 0.603747 | false |
boompieman/iim_project | project_python2/lib/python2.7/site-packages/pattern/server/cherrypy/cherrypy/lib/gctools.py | 40 | 7396 | import gc
import inspect
import os
import sys
import time
try:
import objgraph
except ImportError:
objgraph = None
import cherrypy
from cherrypy import _cprequest, _cpwsgi
from cherrypy.process.plugins import SimplePlugin
class ReferrerTree(object):
"""An object which gathers all referrers of an object to a given depth."""
peek_length = 40
def __init__(self, ignore=None, maxdepth=2, maxparents=10):
self.ignore = ignore or []
self.ignore.append(inspect.currentframe().f_back)
self.maxdepth = maxdepth
self.maxparents = maxparents
def ascend(self, obj, depth=1):
"""Return a nested list containing referrers of the given object."""
depth += 1
parents = []
# Gather all referrers in one step to minimize
# cascading references due to repr() logic.
refs = gc.get_referrers(obj)
self.ignore.append(refs)
if len(refs) > self.maxparents:
return [("[%s referrers]" % len(refs), [])]
try:
ascendcode = self.ascend.__code__
except AttributeError:
ascendcode = self.ascend.im_func.func_code
for parent in refs:
if inspect.isframe(parent) and parent.f_code is ascendcode:
continue
if parent in self.ignore:
continue
if depth <= self.maxdepth:
parents.append((parent, self.ascend(parent, depth)))
else:
parents.append((parent, []))
return parents
def peek(self, s):
"""Return s, restricted to a sane length."""
if len(s) > (self.peek_length + 3):
half = self.peek_length // 2
return s[:half] + '...' + s[-half:]
else:
return s
def _format(self, obj, descend=True):
"""Return a string representation of a single object."""
if inspect.isframe(obj):
filename, lineno, func, context, index = inspect.getframeinfo(obj)
return "<frame of function '%s'>" % func
if not descend:
return self.peek(repr(obj))
if isinstance(obj, dict):
return "{" + ", ".join(["%s: %s" % (self._format(k, descend=False),
self._format(v, descend=False))
for k, v in obj.items()]) + "}"
elif isinstance(obj, list):
return "[" + ", ".join([self._format(item, descend=False)
for item in obj]) + "]"
elif isinstance(obj, tuple):
return "(" + ", ".join([self._format(item, descend=False)
for item in obj]) + ")"
r = self.peek(repr(obj))
if isinstance(obj, (str, int, float)):
return r
return "%s: %s" % (type(obj), r)
def format(self, tree):
"""Return a list of string reprs from a nested list of referrers."""
output = []
def ascend(branch, depth=1):
for parent, grandparents in branch:
output.append((" " * depth) + self._format(parent))
if grandparents:
ascend(grandparents, depth + 1)
ascend(tree)
return output
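# Usage sketch (any live object can be inspected):
#   t = ReferrerTree(maxdepth=2)
#   print("\n".join(t.format(t.ascend(some_obj))))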
def get_instances(cls):
return [x for x in gc.get_objects() if isinstance(x, cls)]
class RequestCounter(SimplePlugin):
def start(self):
self.count = 0
def before_request(self):
self.count += 1
def after_request(self):
        self.count -= 1
request_counter = RequestCounter(cherrypy.engine)
request_counter.subscribe()
def get_context(obj):
if isinstance(obj, _cprequest.Request):
return "path=%s;stage=%s" % (obj.path_info, obj.stage)
elif isinstance(obj, _cprequest.Response):
return "status=%s" % obj.status
elif isinstance(obj, _cpwsgi.AppResponse):
return "PATH_INFO=%s" % obj.environ.get('PATH_INFO', '')
elif hasattr(obj, "tb_lineno"):
return "tb_lineno=%s" % obj.tb_lineno
return ""
class GCRoot(object):
"""A CherryPy page handler for testing reference leaks."""
classes = [(_cprequest.Request, 2, 2,
"Should be 1 in this request thread and 1 in the main thread."),
(_cprequest.Response, 2, 2,
"Should be 1 in this request thread and 1 in the main thread."),
(_cpwsgi.AppResponse, 1, 1,
"Should be 1 in this request thread only."),
]
def index(self):
return "Hello, world!"
index.exposed = True
def stats(self):
output = ["Statistics:"]
for trial in range(10):
if request_counter.count > 0:
break
time.sleep(0.5)
else:
output.append("\nNot all requests closed properly.")
# gc_collect isn't perfectly synchronous, because it may
# break reference cycles that then take time to fully
# finalize. Call it thrice and hope for the best.
gc.collect()
gc.collect()
unreachable = gc.collect()
if unreachable:
if objgraph is not None:
final = objgraph.by_type('Nondestructible')
if final:
objgraph.show_backrefs(final, filename='finalizers.png')
trash = {}
for x in gc.garbage:
trash[type(x)] = trash.get(type(x), 0) + 1
if trash:
output.insert(0, "\n%s unreachable objects:" % unreachable)
trash = [(v, k) for k, v in trash.items()]
trash.sort()
for pair in trash:
output.append(" " + repr(pair))
# Check declared classes to verify uncollected instances.
# These don't have to be part of a cycle; they can be
# any objects that have unanticipated referrers that keep
# them from being collected.
allobjs = {}
for cls, minobj, maxobj, msg in self.classes:
allobjs[cls] = get_instances(cls)
for cls, minobj, maxobj, msg in self.classes:
objs = allobjs[cls]
lenobj = len(objs)
if lenobj < minobj or lenobj > maxobj:
if minobj == maxobj:
output.append(
"\nExpected %s %r references, got %s." %
(minobj, cls, lenobj))
else:
output.append(
"\nExpected %s to %s %r references, got %s." %
(minobj, maxobj, cls, lenobj))
for obj in objs:
if objgraph is not None:
ig = [id(objs), id(inspect.currentframe())]
fname = "graph_%s_%s.png" % (cls.__name__, id(obj))
objgraph.show_backrefs(
obj, extra_ignore=ig, max_depth=4, too_many=20,
filename=fname, extra_info=get_context)
output.append("\nReferrers for %s (refcount=%s):" %
(repr(obj), sys.getrefcount(obj)))
t = ReferrerTree(ignore=[objs], maxdepth=3)
tree = t.ascend(obj)
output.extend(t.format(tree))
return "\n".join(output)
stats.exposed = True
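# Mounting sketch (hypothetical path): expose the handler with
#   cherrypy.tree.mount(GCRoot(), '/gcroot')
# then request /gcroot/stats after exercising the app to list leaked objects.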
| gpl-3.0 | 8,837,314,561,475,576,000 | 33.560748 | 80 | 0.527312 | false |
mohitsethi/packstack | packstack/installer/exceptions.py | 13 | 1622 | # -*- coding: utf-8 -*-
__all__ = (
'PackStackError',
'InstallError',
'FlagValidationError',
'MissingRequirements',
'PluginError',
'ParamProcessingError',
'ParamValidationError',
'NetworkError',
'ScriptRuntimeError',
)
class PackStackError(Exception):
"""Default Exception class for packstack installer."""
def __init__(self, *args, **kwargs):
super(PackStackError, self).__init__(*args)
self.stdout = kwargs.get('stdout', None)
self.stderr = kwargs.get('stderr', None)
class PuppetError(Exception):
"""Raised when Puppet will have some problems."""
class MissingRequirements(PackStackError):
"""Raised when minimum install requirements are not met."""
pass
class InstallError(PackStackError):
"""Exception for generic errors during setup run."""
pass
class FlagValidationError(InstallError):
"""Raised when single flag validation fails."""
pass
class ParamValidationError(InstallError):
"""Raised when parameter value validation fails."""
pass
class PluginError(PackStackError):
pass
class ParamProcessingError(PluginError):
pass
class NetworkError(PackStackError):
"""Should be used for packstack's network failures."""
pass
class ScriptRuntimeError(PackStackError):
"""
Raised when utils.ScriptRunner.execute does not end successfully.
"""
pass
class ExecuteRuntimeError(PackStackError):
"""Raised when utils.execute does not end successfully."""
class SequenceError(PackStackError):
"""Exception for errors during setup sequence run."""
pass
| apache-2.0 | 1,240,357,896,755,142,400 | 20.064935 | 69 | 0.689889 | false |
texcaltech/windmilltownhomes-old | django/core/mail/backends/base.py | 660 | 1164 | """Base email backend class."""
class BaseEmailBackend(object):
"""
Base class for email backend implementations.
Subclasses must at least overwrite send_messages().
"""
def __init__(self, fail_silently=False, **kwargs):
self.fail_silently = fail_silently
def open(self):
"""Open a network connection.
This method can be overwritten by backend implementations to
open a network connection.
It's up to the backend implementation to track the status of
a network connection if it's needed by the backend.
This method can be called by applications to force a single
network connection to be used when sending mails. See the
send_messages() method of the SMTP backend for a reference
implementation.
The default implementation does nothing.
"""
pass
def close(self):
"""Close a network connection."""
pass
def send_messages(self, email_messages):
"""
Sends one or more EmailMessage objects and returns the number of email
messages sent.
"""
raise NotImplementedError
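# Subclass sketch (a minimal, hypothetical backend; not part of this file):
#
# class ConsoleEmailBackend(BaseEmailBackend):
#     def send_messages(self, email_messages):
#         for message in email_messages:
#             print(message.message().as_string())
#         return len(email_messages)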
| bsd-3-clause | -5,029,092,739,704,117,000 | 28.846154 | 78 | 0.64433 | false |
talos/jsongit | jsongit/wrappers.py | 1 | 9151 | # -*- coding: utf-8 -*-
"""
jsongit.wrappers
These classes provide limited interfaces to pygit2 and json_diff constructs.
"""
import json_diff
import itertools
import copy
class Commit(object):
"""A wrapper around :class:`pygit2.Commit` linking to a single key in the
repo.
"""
def __init__(self, repo, key, data, pygit2_commit):
self._commit = pygit2_commit
self._repo = repo
self._key = key
self._data = data
def __eq__(self, other):
return self.oid == other.oid
def __str__(self):
return "'%s'='%s'@%s" % (self.key, self.data, self.hex[0:10])
def __repr__(self):
return "%s(%s,message=%s,author=%s)" % (type(self).__name__,
self.__str__(), self.message,
self.author)
@property
def data(self):
"""
:returns: the data associated with this commit.
:rtype: Boolean, Number, None, String, Dict, or List
"""
return self._data
@property
def key(self):
"""
:returns: the key associated with this commit.
:rtype: string
"""
return self._key
@property
def oid(self):
"""
:returns: The unique 20-byte ID of this Commit.
:rtype: string
"""
return self._commit.oid
@property
def hex(self):
"""
:returns: The unique 40-character hex representation of this commit's ID.
:rtype: string
"""
return self._commit.hex
@property
def message(self):
"""
:returns: The message associated with this commit.
:rtype: string
"""
return self._commit.message
@property
def author(self):
"""
:returns: The author of this commit.
:rtype: :class:`pygit2.Signature`
"""
return self._commit.author
@property
def committer(self):
"""
:returns: The committer of this commit.
:rtype: :class:`pygit2.Signature`
"""
return self._commit.committer
@property
def time(self):
"""
:returns: The time of this commit.
:rtype: long
"""
return self._commit.commit_time
@property
def repo(self):
"""
:returns: The repository of this commit.
:rtype: :class:`Repository <jsongit.models.Repository>`
"""
return self._repo
class DiffWrapper(object):
"""An internal wrapper for :mod:`json_diff`.
"""
def __init__(self, diff):
if Diff.is_json_diff(diff):
# wrap recursive updates
if Diff.UPDATE in diff:
update = diff[Diff.UPDATE]
for k, v in update.iteritems():
update[k] = DiffWrapper(v)
self._replace = None
else:
self._replace = diff
diff = {} if diff is None else diff
self._diff = diff
def __str__(self):
return self._diff.__str__()
def __repr__(self):
return "%s(%s)" % (type(self).__name__, self._diff.__repr__())
def __getitem__(self, k):
return self._diff[k]
def __eq__(self, other):
return self._diff == other
@property
def remove(self):
"""A dict of removed keys and their values.
"""
return self._diff.get(Diff.REMOVE)
@property
def update(self):
"""A DiffWrapper
"""
return self._diff.get(Diff.UPDATE)
@property
def append(self):
"""A dict of appended keys and their values.
"""
return self._diff.get(Diff.APPEND)
@property
def replace(self):
"""The diff is simply to replace wholesale.
"""
return self._replace
def apply(self, original):
"""Return an object modified with the changes in this diff.
:param original: the object to apply the diff to.
:type original: list, dict, number, or string
:returns: the modified object
:rtype: list, dict, number, or string
"""
if self.replace:
return self.replace
else:
obj = copy.copy(original)
for k, v in (self.remove or {}).iteritems():
obj.pop(k)
for k, v in (self.update or {}).iteritems():
# Recursive application
obj[k] = v.apply(obj[k])
for k, v in (self.append or {}).iteritems():
if hasattr(obj, 'insert'):
obj.insert(k, v)
else:
obj[k] = v
return obj
class Diff(DiffWrapper):
"""A class to encapsulate differences between two JSON git objects.
"""
APPEND = '_append'
REMOVE = '_remove'
UPDATE = '_update'
@classmethod
def is_json_diff(cls, obj):
"""Determine whether a dict was produced by JSON diff.
"""
if isinstance(obj, dict):
return any(k in obj for k in [cls.APPEND, cls.REMOVE, cls.UPDATE])
else:
return False
def __init__(self, obj1, obj2):
if isinstance(obj2, obj1.__class__):
c = json_diff.Comparator()
c.obj1 = obj1
c.obj2 = obj2
diff = c._compare_elements(obj1, obj2)
super(Diff, self).__init__(diff)
else:
# if types differ we just replace
super(Diff, self).__init__(obj2)
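# Usage sketch (plain Python values; assumes json_diff's standard
# _append/_remove/_update output format):
#   d = Diff({'roses': 'red'}, {'roses': 'red', 'violets': 'blue'})
#   d.append                   # -> {'violets': 'blue'}
#   d.apply({'roses': 'red'})  # -> {'roses': 'red', 'violets': 'blue'}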
class Conflict(object):
"""A class wrapper for the conflict between two diffs.
"""
def __init__(self, diff1, diff2):
self._conflict = {}
if diff1.replace or diff2.replace:
if diff1.replace != diff2.replace:
self._conflict = {'replace': (diff1.replace, diff2.replace)}
else:
for verb1, verb2 in itertools.product(['append', 'update', 'remove'],
repeat=2):
mod1 = getattr(diff1, verb1) or {}
mod2 = getattr(diff2, verb2) or {}
# Isolate simultaneously modified keys
for k in (k for k in mod1 if k in mod2):
self._conflict.setdefault(verb1, {})
# If verbs were the same, it's OK unless mod was different.
if verb1 == verb2 and mod1[k] != mod2[k]:
self._conflict[verb1][k] = (mod1[k], mod2[k])
# Otherwise, it's a conflict no matter what
else:
self._conflict[verb1][k] = (mod1[k], None)
self._conflict.setdefault(verb2, {})
self._conflict[verb2][k] = (None, mod2[k])
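    # Conflict sketch: two diffs updating the same key to different values,
    #   Conflict(Diff({'k': 1}, {'k': 2}), Diff({'k': 1}, {'k': 3}))
    # is truthy and records the competing wrapped values under .update['k'].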
def __nonzero__(self):
return len(self._conflict) != 0
def __str__(self):
return self._conflict.__str__()
def __repr__(self):
return "%s(%s)" % (type(self).__name__, self._conflict.__repr__())
@property
def remove(self):
"""A dict of key removal conflict tuples.
"""
return self._conflict.get('remove')
@property
def update(self):
"""A dict of key update conflict tuples.
"""
return self._conflict.get('update')
@property
def append(self):
"""A dict of key append conflict tuples.
"""
return self._conflict.get('append')
@property
def replace(self):
"""A tuple of the two diffs.
"""
return self._conflict.get('replace')
class Merge(object):
"""A class wrapper for the results of a merge operation.
"""
def __init__(self, success, original, merged, message, result=None,
conflict=None):
self._success = success
self._message = message
self._original = original
self._merged = merged
self._conflict = conflict
self._result = result
def __str__(self):
return self.message
def __repr__(self):
return "%s(success=%s,message=%s,conflict=%s,original=%s,merged=%s)" % (
type(self).__name__, self.success, self.message, self.conflict,
self.original, self.merged)
def __nonzero__(self):
return self.success
@property
def result(self):
"""
:returns:
the object resulting from this merge, or None if there was
a conflict.
"""
return self._result
@property
def success(self):
"""Whether the merge was a success.
"""
return self._success
@property
def original(self):
"""The original object.
"""
return self._original
@property
def merged(self):
"""The object that was merged in.
"""
return self._merged
@property
def conflict(self):
"""The :class:`Conflict <jsongit.wrappers.Conflict>`, if the merge
was not a success.
"""
return self._conflict
@property
def message(self):
"""The message associated with this merge.
"""
return self._message
| bsd-3-clause | 5,856,023,118,903,913,000 | 25.914706 | 81 | 0.518413 | false |
lindareijnhoudt/resync | resync/test/test_client_utils.py | 2 | 2589 | import unittest
from resync.client_utils import count_true_args,parse_links,parse_link,parse_capabilities,parse_capability_lists
from resync.client import ClientFatalError
class TestClientUtils(unittest.TestCase):
def test01_count_true_args(self):
self.assertEqual( count_true_args(), 0 )
self.assertEqual( count_true_args(True), 1 )
self.assertEqual( count_true_args(False), 0 )
self.assertEqual( count_true_args(0,1,2,3), 3 )
def test02_parse_links(self):
self.assertEqual( parse_links( [] ), [] )
self.assertEqual( parse_links( ['u,h'] ), [{'href': 'h', 'rel': 'u'}] )
self.assertEqual( parse_links( ['u,h','v,i'] ), [{'href': 'h', 'rel': 'u'},{'href': 'i', 'rel': 'v'}] )
self.assertRaises( ClientFatalError, parse_links, 'xx' )
self.assertRaises( ClientFatalError, parse_links, ['u'] )
self.assertRaises( ClientFatalError, parse_links, ['u,h','u'] )
def test03_parse_link(self):
# Input string of the form: rel,href,att1=val1,att2=val2
self.assertEqual( parse_link('u,h'), {'href': 'h', 'rel': 'u'} )
self.assertEqual( parse_link('u,h,a=b'), {'a': 'b', 'href': 'h', 'rel': 'u'} )
self.assertEqual( parse_link('u,h,a=b,c=d'), {'a': 'b', 'c': 'd', 'href': 'h', 'rel': 'u'} )
self.assertEqual( parse_link('u,h,a=b,a=d'), {'a': 'd', 'href': 'h', 'rel': 'u'} ) # desired??
self.assertRaises( ClientFatalError, parse_link, '' )
self.assertRaises( ClientFatalError, parse_link, 'u' )
self.assertRaises( ClientFatalError, parse_link, 'u,' )
self.assertRaises( ClientFatalError, parse_link, 'u,h,,' )
self.assertRaises( ClientFatalError, parse_link, 'u,h,a' )
self.assertRaises( ClientFatalError, parse_link, 'u,h,a=' )
self.assertRaises( ClientFatalError, parse_link, 'u,h,a=b,=c' )
def test04_parse_capabilities(self):
# Input string of the form: cap_name=uri,cap_name=uri
self.assertRaises( ClientFatalError, parse_capabilities, 'a' )
#self.assertRaises( ClientFatalError, parse_capabilities, 'a=' )
self.assertRaises( ClientFatalError, parse_capabilities, 'a=b,' )
#self.assertRaises( ClientFatalError, parse_capabilities, 'a=b,c=' )
def test05_parse_capability_lists(self):
# Input string of the form: uri,uri
self.assertEqual( parse_capability_lists('a,b'), ['a','b'] )
if __name__ == '__main__':
suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestClientUtils)
unittest.TextTestRunner(verbosity=2).run(suite)
| apache-2.0 | -2,733,318,485,685,817,000 | 52.9375 | 112 | 0.62611 | false |
junkoda/fs2 | doc/conf.py | 1 | 10050 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# fs documentation build configuration file, created by
# sphinx-quickstart on Sun Jun 5 18:48:51 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.mathjax',
'sphinx.ext.githubpages',
'sphinx.ext.autodoc', 'sphinx.ext.napoleon',
]
# Add any Sphinx extension module names here, as strings
# Napoleon settings
napoleon_google_docstring = True
napoleon_numpy_docstring = False
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
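# Illustration (editor's addition): with the Napoleon settings above, a
# Google-style docstring such as the commented sample below is translated to
# reST at build time. The function name is purely hypothetical.
# def frobnicate(x):
#     """Scale a value.
#     Args:
#         x (int): value to scale.
#     Returns:
#         int: the scaled value.
#     """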
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'fs'
copyright = '2016, Jun Koda'
author = 'Jun Koda'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0'
# The full version, including alpha/beta/rc tags.
release = '0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'fs v0.0'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'fsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'fs.tex', 'fs Documentation',
'Jun Koda', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'fs', 'fs Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'fs', 'fs Documentation',
author, 'fs', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
| gpl-3.0 | 4,489,397,436,459,284,000 | 27.551136 | 80 | 0.693532 | false |
blackzw/openwrt_sdk_dev1 | staging_dir/host/lib/python2.7/json/decoder.py | 65 | 13785 | """Implementation of JSONDecoder
"""
import re
import sys
import struct
from json import scanner
try:
from _json import scanstring as c_scanstring
except ImportError:
c_scanstring = None
__all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
_BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
if sys.byteorder != 'big':
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
nan, inf = struct.unpack('dd', _BYTES)
return nan, inf, -inf
NaN, PosInf, NegInf = _floatconstants()
def linecol(doc, pos):
lineno = doc.count('\n', 0, pos) + 1
if lineno == 1:
colno = pos
else:
colno = pos - doc.rindex('\n', 0, pos)
return lineno, colno
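# Example (editor's addition): linecol() maps a character offset to a
# (line, column) pair; note that the column is the raw offset on line 1.
#   linecol('ab\ncd', 0) -> (1, 0)
#   linecol('ab\ncd', 3) -> (2, 1)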
def errmsg(msg, doc, pos, end=None):
# Note that this function is called from _json
lineno, colno = linecol(doc, pos)
if end is None:
fmt = '{0}: line {1} column {2} (char {3})'
return fmt.format(msg, lineno, colno, pos)
#fmt = '%s: line %d column %d (char %d)'
#return fmt % (msg, lineno, colno, pos)
endlineno, endcolno = linecol(doc, end)
fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})'
return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end)
#fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
#return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
_CONSTANTS = {
'-Infinity': NegInf,
'Infinity': PosInf,
'NaN': NaN,
}
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
BACKSLASH = {
'"': u'"', '\\': u'\\', '/': u'/',
'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True,
_b=BACKSLASH, _m=STRINGCHUNK.match):
"""Scan the string s for a JSON string. End is the index of the
character in s after the quote that started the JSON string.
Unescapes all valid JSON string escape sequences and raises ValueError
on attempt to decode an invalid string. If strict is False then literal
control characters are allowed in the string.
Returns a tuple of the decoded string and the index of the character in s
after the end quote."""
if encoding is None:
encoding = DEFAULT_ENCODING
chunks = []
_append = chunks.append
begin = end - 1
while 1:
chunk = _m(s, end)
if chunk is None:
raise ValueError(
errmsg("Unterminated string starting at", s, begin))
end = chunk.end()
content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
if content:
if not isinstance(content, unicode):
content = unicode(content, encoding)
_append(content)
# Terminator is the end of string, a literal control character,
# or a backslash denoting that an escape sequence follows
if terminator == '"':
break
elif terminator != '\\':
if strict:
#msg = "Invalid control character %r at" % (terminator,)
msg = "Invalid control character {0!r} at".format(terminator)
raise ValueError(errmsg(msg, s, end))
else:
_append(terminator)
continue
try:
esc = s[end]
except IndexError:
raise ValueError(
errmsg("Unterminated string starting at", s, begin))
# If not a unicode escape sequence, must be in the lookup table
if esc != 'u':
try:
char = _b[esc]
except KeyError:
msg = "Invalid \\escape: " + repr(esc)
raise ValueError(errmsg(msg, s, end))
end += 1
else:
# Unicode escape sequence
esc = s[end + 1:end + 5]
next_end = end + 5
if len(esc) != 4:
msg = "Invalid \\uXXXX escape"
raise ValueError(errmsg(msg, s, end))
uni = int(esc, 16)
# Check for surrogate pair on UCS-4 systems
if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
if not s[end + 5:end + 7] == '\\u':
raise ValueError(errmsg(msg, s, end))
esc2 = s[end + 7:end + 11]
if len(esc2) != 4:
raise ValueError(errmsg(msg, s, end))
uni2 = int(esc2, 16)
uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
next_end += 6
char = unichr(uni)
end = next_end
# Append the unescaped character
_append(char)
return u''.join(chunks), end
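# Example (editor's addition): the index passed in is the position just after
# the opening quote, and the returned index is just past the closing quote.
#   py_scanstring('"abc"', 1) -> (u'abc', 5)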
# Use speedup if available
scanstring = c_scanstring or py_scanstring
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject(s_and_end, encoding, strict, scan_once, object_hook,
object_pairs_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
s, end = s_and_end
pairs = []
pairs_append = pairs.append
# Use a slice to prevent IndexError from being raised, the following
# check will raise a more specific ValueError if the string is empty
nextchar = s[end:end + 1]
# Normally we expect nextchar == '"'
if nextchar != '"':
if nextchar in _ws:
end = _w(s, end).end()
nextchar = s[end:end + 1]
# Trivial empty object
if nextchar == '}':
if object_pairs_hook is not None:
result = object_pairs_hook(pairs)
return result, end
pairs = {}
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end + 1
elif nextchar != '"':
raise ValueError(errmsg("Expecting property name", s, end))
end += 1
while True:
key, end = scanstring(s, end, encoding, strict)
# To skip some function call overhead we optimize the fast paths where
# the JSON key separator is ": " or just ":".
if s[end:end + 1] != ':':
end = _w(s, end).end()
if s[end:end + 1] != ':':
raise ValueError(errmsg("Expecting : delimiter", s, end))
end += 1
try:
if s[end] in _ws:
end += 1
if s[end] in _ws:
end = _w(s, end + 1).end()
except IndexError:
pass
try:
value, end = scan_once(s, end)
except StopIteration:
raise ValueError(errmsg("Expecting object", s, end))
pairs_append((key, value))
try:
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar == '}':
break
elif nextchar != ',':
raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
try:
nextchar = s[end]
if nextchar in _ws:
end += 1
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar != '"':
raise ValueError(errmsg("Expecting property name", s, end - 1))
if object_pairs_hook is not None:
result = object_pairs_hook(pairs)
return result, end
pairs = dict(pairs)
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end
def JSONArray(s_and_end, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
s, end = s_and_end
values = []
nextchar = s[end:end + 1]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end:end + 1]
# Look-ahead for trivial empty array
if nextchar == ']':
return values, end + 1
_append = values.append
while True:
try:
value, end = scan_once(s, end)
except StopIteration:
raise ValueError(errmsg("Expecting object", s, end))
_append(value)
nextchar = s[end:end + 1]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end:end + 1]
end += 1
if nextchar == ']':
break
elif nextchar != ',':
raise ValueError(errmsg("Expecting , delimiter", s, end))
try:
if s[end] in _ws:
end += 1
if s[end] in _ws:
end = _w(s, end + 1).end()
except IndexError:
pass
return values, end
class JSONDecoder(object):
"""Simple JSON <http://json.org> decoder
Performs the following translations in decoding by default:
+---------------+-------------------+
| JSON | Python |
+===============+===================+
| object | dict |
+---------------+-------------------+
| array | list |
+---------------+-------------------+
| string | unicode |
+---------------+-------------------+
| number (int) | int, long |
+---------------+-------------------+
| number (real) | float |
+---------------+-------------------+
| true | True |
+---------------+-------------------+
| false | False |
+---------------+-------------------+
| null | None |
+---------------+-------------------+
It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
their corresponding ``float`` values, which is outside the JSON spec.
"""
def __init__(self, encoding=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, strict=True,
object_pairs_hook=None):
"""``encoding`` determines the encoding used to interpret any ``str``
objects decoded by this instance (utf-8 by default). It has no
effect when decoding ``unicode`` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as ``unicode``.
``object_hook``, if specified, will be called with the result
of every JSON object decoded and its return value will be used in
place of the given ``dict``. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
``object_pairs_hook``, if specified will be called with the result of
every JSON object decoded with an ordered list of pairs. The return
value of ``object_pairs_hook`` will be used instead of the ``dict``.
This feature can be used to implement custom decoders that rely on the
order that the key and value pairs are decoded (for example,
collections.OrderedDict will remember the order of insertion). If
``object_hook`` is also defined, the ``object_pairs_hook`` takes
priority.
``parse_float``, if specified, will be called with the string
of every JSON float to be decoded. By default this is equivalent to
float(num_str). This can be used to use another datatype or parser
for JSON floats (e.g. decimal.Decimal).
``parse_int``, if specified, will be called with the string
of every JSON int to be decoded. By default this is equivalent to
int(num_str). This can be used to use another datatype or parser
for JSON integers (e.g. float).
``parse_constant``, if specified, will be called with one of the
following strings: -Infinity, Infinity, NaN.
This can be used to raise an exception if invalid JSON numbers
are encountered.
If ``strict`` is false (true is the default), then control
characters will be allowed inside strings. Control characters in
this context are those with character codes in the 0-31 range,
including ``'\\t'`` (tab), ``'\\n'``, ``'\\r'`` and ``'\\0'``.
"""
self.encoding = encoding
self.object_hook = object_hook
self.object_pairs_hook = object_pairs_hook
self.parse_float = parse_float or float
self.parse_int = parse_int or int
self.parse_constant = parse_constant or _CONSTANTS.__getitem__
self.strict = strict
self.parse_object = JSONObject
self.parse_array = JSONArray
self.parse_string = scanstring
self.scan_once = scanner.make_scanner(self)
def decode(self, s, _w=WHITESPACE.match):
"""Return the Python representation of ``s`` (a ``str`` or ``unicode``
instance containing a JSON document)
"""
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
end = _w(s, end).end()
if end != len(s):
raise ValueError(errmsg("Extra data", s, end, len(s)))
return obj
def raw_decode(self, s, idx=0):
"""Decode a JSON document from ``s`` (a ``str`` or ``unicode``
beginning with a JSON document) and return a 2-tuple of the Python
representation and the index in ``s`` where the document ended.
This can be used to decode a JSON document from a string that may
have extraneous data at the end.
"""
try:
obj, end = self.scan_once(s, idx)
except StopIteration:
raise ValueError("No JSON object could be decoded")
return obj, end
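# Usage sketch (editor's addition): decode() insists on consuming the whole
# document, while raw_decode() reports where parsing stopped.
#   JSONDecoder().decode('{"a": 1}')          -> {u'a': 1}
#   JSONDecoder().raw_decode('{"a": 1} junk') -> ({u'a': 1}, 8)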
| gpl-2.0 | -4,720,612,494,095,835,000 | 34.805195 | 78 | 0.532028 | false |
vFense/vFenseAgent-nix | agent/deps/rpm6/Python-2.7.5/lib/python2.7/bsddb/test/test_misc.py | 68 | 4803 | """Miscellaneous bsddb module test cases
"""
import os, sys
import unittest
from test_all import db, dbshelve, hashopen, test_support, get_new_environment_path, get_new_database_path
#----------------------------------------------------------------------
class MiscTestCase(unittest.TestCase):
def setUp(self):
self.filename = get_new_database_path()
self.homeDir = get_new_environment_path()
def tearDown(self):
test_support.unlink(self.filename)
test_support.rmtree(self.homeDir)
def test01_badpointer(self):
dbs = dbshelve.open(self.filename)
dbs.close()
self.assertRaises(db.DBError, dbs.get, "foo")
def test02_db_home(self):
env = db.DBEnv()
# check for crash fixed when db_home is used before open()
self.assertTrue(env.db_home is None)
env.open(self.homeDir, db.DB_CREATE)
if sys.version_info[0] < 3 :
self.assertEqual(self.homeDir, env.db_home)
else :
self.assertEqual(bytes(self.homeDir, "ascii"), env.db_home)
def test03_repr_closed_db(self):
db = hashopen(self.filename)
db.close()
rp = repr(db)
self.assertEqual(rp, "{}")
def test04_repr_db(self) :
db = hashopen(self.filename)
d = {}
for i in xrange(100) :
db[repr(i)] = repr(100*i)
d[repr(i)] = repr(100*i)
db.close()
db = hashopen(self.filename)
rp = repr(db)
self.assertEqual(rp, repr(d))
db.close()
# http://sourceforge.net/tracker/index.php?func=detail&aid=1708868&group_id=13900&atid=313900
#
# See the bug report for details.
#
# The problem was that make_key_dbt() was not allocating a copy of
# string keys but FREE_DBT() was always being told to free it when the
# database was opened with DB_THREAD.
def test05_double_free_make_key_dbt(self):
try:
db1 = db.DB()
db1.open(self.filename, None, db.DB_BTREE,
db.DB_CREATE | db.DB_THREAD)
curs = db1.cursor()
t = curs.get("/foo", db.DB_SET)
# double free happened during exit from DBC_get
finally:
db1.close()
test_support.unlink(self.filename)
def test06_key_with_null_bytes(self):
try:
db1 = db.DB()
db1.open(self.filename, None, db.DB_HASH, db.DB_CREATE)
db1['a'] = 'eh?'
db1['a\x00'] = 'eh zed.'
db1['a\x00a'] = 'eh zed eh?'
db1['aaa'] = 'eh eh eh!'
keys = db1.keys()
keys.sort()
self.assertEqual(['a', 'a\x00', 'a\x00a', 'aaa'], keys)
self.assertEqual(db1['a'], 'eh?')
self.assertEqual(db1['a\x00'], 'eh zed.')
self.assertEqual(db1['a\x00a'], 'eh zed eh?')
self.assertEqual(db1['aaa'], 'eh eh eh!')
finally:
db1.close()
test_support.unlink(self.filename)
def test07_DB_set_flags_persists(self):
try:
db1 = db.DB()
db1.set_flags(db.DB_DUPSORT)
db1.open(self.filename, db.DB_HASH, db.DB_CREATE)
db1['a'] = 'eh'
db1['a'] = 'A'
self.assertEqual([('a', 'A')], db1.items())
db1.put('a', 'Aa')
self.assertEqual([('a', 'A'), ('a', 'Aa')], db1.items())
db1.close()
db1 = db.DB()
# no set_flags call, we're testing that it reads and obeys
# the flags on open.
db1.open(self.filename, db.DB_HASH)
self.assertEqual([('a', 'A'), ('a', 'Aa')], db1.items())
# if it read the flags right this will replace all values
# for key 'a' instead of adding a new one. (as a dict should)
db1['a'] = 'new A'
self.assertEqual([('a', 'new A')], db1.items())
finally:
db1.close()
test_support.unlink(self.filename)
def test08_ExceptionTypes(self) :
self.assertTrue(issubclass(db.DBError, Exception))
for i, j in db.__dict__.items() :
if i.startswith("DB") and i.endswith("Error") :
self.assertTrue(issubclass(j, db.DBError), msg=i)
if i not in ("DBKeyEmptyError", "DBNotFoundError") :
self.assertFalse(issubclass(j, KeyError), msg=i)
# This two exceptions have two bases
self.assertTrue(issubclass(db.DBKeyEmptyError, KeyError))
self.assertTrue(issubclass(db.DBNotFoundError, KeyError))
#----------------------------------------------------------------------
def test_suite():
return unittest.makeSuite(MiscTestCase)
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
| lgpl-3.0 | 5,788,092,869,546,380,000 | 33.804348 | 106 | 0.532792 | false |
IT-Department-Projects/OOAD-Project | Flask_App/oakcrest/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py | 356 | 1555 | from __future__ import absolute_import, division, unicode_literals
from genshi.core import QName, Attrs
from genshi.core import START, END, TEXT, COMMENT, DOCTYPE
def to_genshi(walker):
text = []
for token in walker:
type = token["type"]
if type in ("Characters", "SpaceCharacters"):
text.append(token["data"])
elif text:
yield TEXT, "".join(text), (None, -1, -1)
text = []
if type in ("StartTag", "EmptyTag"):
if token["namespace"]:
name = "{%s}%s" % (token["namespace"], token["name"])
else:
name = token["name"]
attrs = Attrs([(QName("{%s}%s" % attr if attr[0] is not None else attr[1]), value)
for attr, value in token["data"].items()])
yield (START, (QName(name), attrs), (None, -1, -1))
if type == "EmptyTag":
type = "EndTag"
if type == "EndTag":
if token["namespace"]:
name = "{%s}%s" % (token["namespace"], token["name"])
else:
name = token["name"]
yield END, QName(name), (None, -1, -1)
elif type == "Comment":
yield COMMENT, token["data"], (None, -1, -1)
elif type == "Doctype":
yield DOCTYPE, (token["name"], token["publicId"],
token["systemId"]), (None, -1, -1)
else:
pass # FIXME: What to do?
if text:
yield TEXT, "".join(text), (None, -1, -1)
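# Usage sketch (editor's addition, assuming html5lib is installed alongside
# genshi): convert a parsed document into a genshi event stream.
#   import html5lib
#   tree = html5lib.parse('<p>hi</p>')
#   walker = html5lib.getTreeWalker('etree')
#   events = list(to_genshi(walker(tree)))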
| mit | -9,223,351,895,964,839,000 | 32.085106 | 94 | 0.47717 | false |
jylaxp/django | tests/template_tests/syntax_tests/test_list_index.py | 521 | 2694 | from django.test import SimpleTestCase
from ..utils import setup
class ListIndexTests(SimpleTestCase):
@setup({'list-index01': '{{ var.1 }}'})
def test_list_index01(self):
"""
List-index syntax allows a template to access a certain item of a
subscriptable object.
"""
output = self.engine.render_to_string('list-index01', {'var': ['first item', 'second item']})
self.assertEqual(output, 'second item')
@setup({'list-index02': '{{ var.5 }}'})
def test_list_index02(self):
"""
Fail silently when the list index is out of range.
"""
output = self.engine.render_to_string('list-index02', {'var': ['first item', 'second item']})
if self.engine.string_if_invalid:
self.assertEqual(output, 'INVALID')
else:
self.assertEqual(output, '')
@setup({'list-index03': '{{ var.1 }}'})
def test_list_index03(self):
"""
Fail silently when the list index is out of range.
"""
output = self.engine.render_to_string('list-index03', {'var': None})
if self.engine.string_if_invalid:
self.assertEqual(output, 'INVALID')
else:
self.assertEqual(output, '')
@setup({'list-index04': '{{ var.1 }}'})
def test_list_index04(self):
"""
Fail silently when variable is a dict without the specified key.
"""
output = self.engine.render_to_string('list-index04', {'var': {}})
if self.engine.string_if_invalid:
self.assertEqual(output, 'INVALID')
else:
self.assertEqual(output, '')
@setup({'list-index05': '{{ var.1 }}'})
def test_list_index05(self):
"""
Dictionary lookup wins out when dict's key is a string.
"""
output = self.engine.render_to_string('list-index05', {'var': {'1': "hello"}})
self.assertEqual(output, 'hello')
@setup({'list-index06': '{{ var.1 }}'})
def test_list_index06(self):
"""
But list-index lookup wins out when dict's key is an int, which
behind the scenes is really a dictionary lookup (for a dict)
after converting the key to an int.
"""
output = self.engine.render_to_string('list-index06', {"var": {1: "hello"}})
self.assertEqual(output, 'hello')
@setup({'list-index07': '{{ var.1 }}'})
def test_list_index07(self):
"""
Dictionary lookup wins out when there is a string and int version
of the key.
"""
output = self.engine.render_to_string('list-index07', {"var": {'1': "hello", 1: "world"}})
self.assertEqual(output, 'hello')
| bsd-3-clause | 3,240,438,636,950,674,400 | 34.92 | 101 | 0.571641 | false |
thedep2/CouchPotatoServer | couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/moevideo.py | 15 | 3685 | # coding: utf-8
from __future__ import unicode_literals
import json
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urllib_request,
)
from ..utils import (
ExtractorError,
int_or_none,
)
class MoeVideoIE(InfoExtractor):
IE_DESC = 'LetitBit video services: moevideo.net, playreplay.net and videochart.net'
_VALID_URL = r'''(?x)
https?://(?P<host>(?:www\.)?
(?:(?:moevideo|playreplay|videochart)\.net))/
(?:video|framevideo)/(?P<id>[0-9]+\.[0-9A-Za-z]+)'''
_API_URL = 'http://api.letitbit.net/'
_API_KEY = 'tVL0gjqo5'
_TESTS = [
{
'url': 'http://moevideo.net/video/00297.0036103fe3d513ef27915216fd29',
'md5': '129f5ae1f6585d0e9bb4f38e774ffb3a',
'info_dict': {
'id': '00297.0036103fe3d513ef27915216fd29',
'ext': 'flv',
'title': 'Sink cut out machine',
'description': 'md5:f29ff97b663aefa760bf7ca63c8ca8a8',
'thumbnail': 're:^https?://.*\.jpg$',
'width': 540,
'height': 360,
'duration': 179,
'filesize': 17822500,
}
},
{
'url': 'http://playreplay.net/video/77107.7f325710a627383d40540d8e991a',
'md5': '74f0a014d5b661f0f0e2361300d1620e',
'info_dict': {
'id': '77107.7f325710a627383d40540d8e991a',
'ext': 'flv',
'title': 'Operacion Condor.',
'description': 'md5:7e68cb2fcda66833d5081c542491a9a3',
'thumbnail': 're:^https?://.*\.jpg$',
'width': 480,
'height': 296,
'duration': 6027,
'filesize': 588257923,
}
},
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage = self._download_webpage(
'http://%s/video/%s' % (mobj.group('host'), video_id),
video_id, 'Downloading webpage')
title = self._og_search_title(webpage)
thumbnail = self._og_search_thumbnail(webpage)
description = self._og_search_description(webpage)
r = [
self._API_KEY,
[
'preview/flv_link',
{
'uid': video_id,
},
],
]
r_json = json.dumps(r)
post = compat_urllib_parse.urlencode({'r': r_json})
req = compat_urllib_request.Request(self._API_URL, post)
req.add_header('Content-type', 'application/x-www-form-urlencoded')
response = self._download_json(req, video_id)
if response['status'] != 'OK':
raise ExtractorError(
'%s returned error: %s' % (self.IE_NAME, response['data']),
expected=True
)
item = response['data'][0]
video_url = item['link']
duration = int_or_none(item['length'])
width = int_or_none(item['width'])
height = int_or_none(item['height'])
filesize = int_or_none(item['convert_size'])
formats = [{
'format_id': 'sd',
'http_headers': {'Range': 'bytes=0-'}, # Required to download
'url': video_url,
'width': width,
'height': height,
'filesize': filesize,
}]
return {
'id': video_id,
'title': title,
'thumbnail': thumbnail,
'description': description,
'duration': duration,
'formats': formats,
}
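# Note (editor's addition): the extractor above posts r=<json> to the letitbit
# API, where the payload has the shape
#   ["tVL0gjqo5", ["preview/flv_link", {"uid": "<video id>"}]]
# and a successful response carries {"status": "OK", "data": [{...}]}.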
| gpl-3.0 | 4,911,426,098,837,324,000 | 31.324561 | 88 | 0.498507 | false |
romain-dartigues/ansible | lib/ansible/modules/storage/netapp/na_elementsw_volume.py | 7 | 13498 | #!/usr/bin/python
# (c) 2017, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""Element OS Software Volume Manager"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
module: na_elementsw_volume
short_description: NetApp Element Software Manage Volumes
extends_documentation_fragment:
- netapp.solidfire
version_added: '2.7'
author: NetApp Ansible Team (@carchi8py) <[email protected]>
description:
- Create, destroy, or update volumes on ElementSW
options:
state:
description:
- Whether the specified volume should exist or not.
required: true
choices: ['present', 'absent']
name:
description:
- The name of the volume to manage.
- It accepts volume_name or volume_id
required: true
account_id:
description:
- Account ID for the owner of this volume.
- It accepts Account_id or Account_name
required: true
enable512e:
description:
- Required when C(state=present)
- Should the volume provide 512-byte sector emulation?
type: bool
aliases:
- 512emulation
qos:
description: Initial quality of service settings for this volume. Configure as dict in playbooks.
attributes:
description: A YAML dictionary of attributes that you would like to apply on this volume.
size:
description:
- The size of the volume in (size_unit).
- Required when C(state = present).
size_unit:
description:
- The unit used to interpret the size parameter.
choices: ['bytes', 'b', 'kb', 'mb', 'gb', 'tb', 'pb', 'eb', 'zb', 'yb']
default: 'gb'
access:
description:
- Access allowed for the volume.
- readOnly Only read operations are allowed.
- readWrite Reads and writes are allowed.
- locked No reads or writes are allowed.
- replicationTarget Identify a volume as the target volume for a paired set of volumes.
- If the volume is not paired, the access status is locked.
- If unspecified, the access settings of the clone will be the same as the source.
choices: ['readOnly', 'readWrite', 'locked', 'replicationTarget']
password:
description:
- ElementSW access account password
aliases:
- pass
username:
description:
- ElementSW access account user-name
aliases:
- user
'''
EXAMPLES = """
- name: Create Volume
na_elementsw_volume:
hostname: "{{ elementsw_hostname }}"
username: "{{ elementsw_username }}"
password: "{{ elementsw_password }}"
state: present
name: AnsibleVol
qos: {minIOPS: 1000, maxIOPS: 20000, burstIOPS: 50000}
account_id: 3
enable512e: False
size: 1
size_unit: gb
- name: Update Volume
na_elementsw_volume:
hostname: "{{ elementsw_hostname }}"
username: "{{ elementsw_username }}"
password: "{{ elementsw_password }}"
state: present
name: AnsibleVol
account_id: 3
access: readWrite
- name: Delete Volume
na_elementsw_volume:
hostname: "{{ elementsw_hostname }}"
username: "{{ elementsw_username }}"
password: "{{ elementsw_password }}"
state: absent
name: AnsibleVol
account_id: 2
"""
RETURN = """
msg:
description: Success message
returned: success
type: string
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
from ansible.module_utils.netapp_elementsw_module import NaElementSWModule
HAS_SF_SDK = netapp_utils.has_sf_sdk()
try:
import solidfire.common
except ImportError:
HAS_SF_SDK = False
class ElementOSVolume(object):
"""
Contains methods to parse arguments,
derive details of ElementSW objects
and send requests to ElementOS via
the ElementSW SDK
"""
def __init__(self):
"""
Parse arguments, setup state variables,
        check parameters and ensure the SDK is installed
"""
self._size_unit_map = netapp_utils.SF_BYTE_MAP
self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
self.argument_spec.update(dict(
state=dict(required=True, choices=['present', 'absent']),
name=dict(required=True, type='str'),
account_id=dict(required=True),
enable512e=dict(type='bool', aliases=['512emulation']),
qos=dict(required=False, type='dict', default=None),
attributes=dict(required=False, type='dict', default=None),
size=dict(type='int'),
size_unit=dict(default='gb',
choices=['bytes', 'b', 'kb', 'mb', 'gb', 'tb',
'pb', 'eb', 'zb', 'yb'], type='str'),
access=dict(required=False, type='str', default=None, choices=['readOnly', 'readWrite',
'locked', 'replicationTarget']),
))
self.module = AnsibleModule(
argument_spec=self.argument_spec,
required_if=[
('state', 'present', ['size', 'enable512e'])
],
supports_check_mode=True
)
param = self.module.params
# set up state variables
self.state = param['state']
self.name = param['name']
self.account_id = param['account_id']
self.enable512e = param['enable512e']
self.qos = param['qos']
self.attributes = param['attributes']
self.access = param['access']
self.size_unit = param['size_unit']
if param['size'] is not None:
self.size = param['size'] * self._size_unit_map[self.size_unit]
else:
self.size = None
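        # For example (editor's note): size=1 with size_unit='gb' becomes
        # 1 * SF_BYTE_MAP['gb'] bytes; the exact multiplier is defined by
        # netapp_utils.SF_BYTE_MAP.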
if HAS_SF_SDK is False:
self.module.fail_json(msg="Unable to import the ElementSW Python SDK")
else:
try:
self.sfe = netapp_utils.create_sf_connection(module=self.module)
except solidfire.common.ApiServerError:
self.module.fail_json(msg="Unable to create the connection")
self.elementsw_helper = NaElementSWModule(self.sfe)
# add telemetry attributes
if self.attributes is not None:
self.attributes.update(self.elementsw_helper.set_element_attributes(source='na_elementsw_volume'))
else:
self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_volume')
def get_account_id(self):
"""
Return account id if found
"""
try:
# Update and return self.account_id
self.account_id = self.elementsw_helper.account_exists(self.account_id)
return self.account_id
except Exception as err:
self.module.fail_json(msg="Error: account_id %s does not exist" % self.account_id, exception=to_native(err))
def get_volume(self):
"""
Return volume details if found
"""
# Get volume details
volume_id = self.elementsw_helper.volume_exists(self.name, self.account_id)
if volume_id is not None:
# Return volume_details
volume_details = self.elementsw_helper.get_volume(volume_id)
if volume_details is not None:
return volume_details
return None
def create_volume(self):
"""
Create Volume
:return: True if created, False if fails
"""
try:
self.sfe.create_volume(name=self.name,
account_id=self.account_id,
total_size=self.size,
enable512e=self.enable512e,
qos=self.qos,
attributes=self.attributes)
except Exception as err:
self.module.fail_json(msg="Error provisioning volume %s of size %s" % (self.name, self.size),
exception=to_native(err))
def delete_volume(self, volume_id):
"""
Delete and purge the volume using volume id
:return: Success : True , Failed : False
"""
try:
self.sfe.delete_volume(volume_id=volume_id)
self.sfe.purge_deleted_volume(volume_id=volume_id)
# Delete method will delete and also purge the volume instead of moving the volume state to inactive.
except Exception as err:
# Throwing the exact error message instead of generic error message
self.module.fail_json(msg=err.message,
exception=to_native(err))
def update_volume(self, volume_id):
"""
Update the volume with the specified param
:return: Success : True, Failed : False
"""
try:
self.sfe.modify_volume(volume_id,
account_id=self.account_id,
access=self.access,
qos=self.qos,
total_size=self.size,
attributes=self.attributes)
except Exception as err:
# Throwing the exact error message instead of generic error message
self.module.fail_json(msg=err.message,
exception=to_native(err))
def apply(self):
# Perform pre-checks, call functions and exit
changed = False
volume_exists = False
update_volume = False
self.get_account_id()
volume_detail = self.get_volume()
if volume_detail:
volume_exists = True
volume_id = volume_detail.volume_id
if self.state == 'absent':
# Checking for state change(s) here, and applying it later in the code allows us to support
# check_mode
changed = True
elif self.state == 'present':
# Checking all the params for update operation
if volume_detail.access is not None and self.access is not None and volume_detail.access != self.access:
update_volume = True
changed = True
elif volume_detail.account_id is not None and self.account_id is not None \
and volume_detail.account_id != self.account_id:
update_volume = True
changed = True
elif volume_detail.qos is not None and self.qos is not None:
"""
Actual volume_detail.qos has ['burst_iops', 'burst_time', 'curve', 'max_iops', 'min_iops'] keys.
                As only minIOPS, maxIOPS and burstIOPS are important to consider,
                we check only these values.
"""
volume_qos = volume_detail.qos.__dict__
if volume_qos['min_iops'] != self.qos['minIOPS'] or volume_qos['max_iops'] != self.qos['maxIOPS'] \
or volume_qos['burst_iops'] != self.qos['burstIOPS']:
update_volume = True
changed = True
else:
# If check fails, do nothing
pass
if volume_detail.total_size is not None and volume_detail.total_size != self.size:
size_difference = abs(float(volume_detail.total_size - self.size))
# Change size only if difference is bigger than 0.001
if size_difference / self.size > 0.001:
update_volume = True
changed = True
else:
# If check fails, do nothing
pass
if volume_detail.attributes is not None and self.attributes is not None and \
volume_detail.attributes != self.attributes:
update_volume = True
changed = True
else:
if self.state == 'present':
changed = True
result_message = ""
if changed:
if self.module.check_mode:
result_message = "Check mode, skipping changes"
else:
if self.state == 'present':
if not volume_exists:
self.create_volume()
result_message = "Volume created"
elif update_volume:
self.update_volume(volume_id)
result_message = "Volume updated"
elif self.state == 'absent':
self.delete_volume(volume_id)
result_message = "Volume deleted"
self.module.exit_json(changed=changed, msg=result_message)
def main():
# Create object and call apply
na_elementsw_volume = ElementOSVolume()
na_elementsw_volume.apply()
if __name__ == '__main__':
main()
| gpl-3.0 | 7,505,285,887,136,647,000 | 33.085859 | 120 | 0.552897 | false |
x3ro/RIOT | tests/eepreg/tests/01-run.py | 30 | 1057 | #!/usr/bin/env python3
# Copyright (C) 2018 Acutam Automation, LLC
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
from testrunner import run
def testfunc(child):
child.expect_exact("EEPROM registry (eepreg) test routine")
child.expect_exact("Testing new registry creation: reset check [SUCCESS]")
child.expect_exact("Testing writing and reading entries: add write add read [SUCCESS]")
child.expect_exact("Testing detection of conflicting size: add [SUCCESS]")
child.expect_exact("Testing calculation of lengths: len len [SUCCESS]")
child.expect_exact("Testing of successful data move after rm: rm read data [SUCCESS]")
child.expect_exact("Testing of free space change after write: free add free [SUCCESS]")
child.expect_exact("Testing of iteration over registry: iter bar foo [SUCCESS]")
child.expect_exact("Tests complete!")
if __name__ == "__main__":
sys.exit(run(testfunc))
| lgpl-2.1 | -3,500,785,719,814,803,500 | 39.653846 | 91 | 0.730369 | false |
littlstar/chromium.src | third_party/closure_linter/closure_linter/common/filetestcase.py | 109 | 3893 | #!/usr/bin/env python
# Copyright 2007 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test case that runs a checker on a file, matching errors against annotations.
Runs the given checker on the given file, accumulating all errors. The list
of errors is then matched against those annotated in the file. Based heavily
on devtools/javascript/gpylint/full_test.py.
"""
__author__ = ('[email protected] (Robert Walker)',
'[email protected] (Andy Perelson)')
import re
import unittest as googletest
from closure_linter.common import erroraccumulator
class AnnotatedFileTestCase(googletest.TestCase):
"""Test case to run a linter against a single file."""
  # Matches an all caps letters + underscores error identifier
_MESSAGE = {'msg': '[A-Z][A-Z_]+'}
# Matches a //, followed by an optional line number with a +/-, followed by a
# list of message IDs. Used to extract expected messages from testdata files.
# TODO(robbyw): Generalize to use different commenting patterns.
_EXPECTED_RE = re.compile(r'\s*//\s*(?:(?P<line>[+-]?[0-9]+):)?'
r'\s*(?P<msgs>%(msg)s(?:,\s*%(msg)s)*)' % _MESSAGE)
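  # Illustration (editor's addition) of an annotated testdata line:
  #   var x = 1  // +1: MISSING_SEMICOLON
  # which expects MISSING_SEMICOLON to be reported one line below.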
def __init__(self, filename, lint_callable, converter):
"""Create a single file lint test case.
Args:
filename: Filename to test.
lint_callable: Callable that lints a file. This is usually runner.Run().
converter: Function taking an error string and returning an error code.
"""
googletest.TestCase.__init__(self, 'runTest')
self._filename = filename
self._messages = []
self._lint_callable = lint_callable
self._converter = converter
def shortDescription(self):
"""Provides a description for the test."""
return 'Run linter on %s' % self._filename
def runTest(self):
"""Runs the test."""
try:
filename = self._filename
stream = open(filename)
except IOError as ex:
raise IOError('Could not find testdata resource for %s: %s' %
(self._filename, ex))
expected = self._GetExpectedMessages(stream)
got = self._ProcessFileAndGetMessages(filename)
self.assertEqual(expected, got)
def _GetExpectedMessages(self, stream):
"""Parse a file and get a sorted list of expected messages."""
messages = []
for i, line in enumerate(stream):
match = self._EXPECTED_RE.search(line)
if match:
line = match.group('line')
msg_ids = match.group('msgs')
if line is None:
line = i + 1
elif line.startswith('+') or line.startswith('-'):
line = i + 1 + int(line)
else:
line = int(line)
for msg_id in msg_ids.split(','):
# Ignore a spurious message from the license preamble.
if msg_id != 'WITHOUT':
messages.append((line, self._converter(msg_id.strip())))
stream.seek(0)
messages.sort()
return messages
def _ProcessFileAndGetMessages(self, filename):
"""Trap gjslint's output parse it to get messages added."""
error_accumulator = erroraccumulator.ErrorAccumulator()
self._lint_callable(filename, error_accumulator)
errors = error_accumulator.GetErrors()
# Convert to expected tuple format.
error_msgs = [(error.token.line_number, error.code) for error in errors]
error_msgs.sort()
return error_msgs
| bsd-3-clause | 7,674,920,894,543,389,000 | 35.046296 | 80 | 0.666324 | false |
morphis/home-assistant | homeassistant/components/notify/free_mobile.py | 12 | 1750 | """
Support for the Free Mobile SMS platform.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/notify.free_mobile/
"""
import logging
import voluptuous as vol
from homeassistant.components.notify import (
PLATFORM_SCHEMA, BaseNotificationService)
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
REQUIREMENTS = ['freesms==0.1.1']
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_ACCESS_TOKEN): cv.string,
})
def get_service(hass, config, discovery_info=None):
"""Get the Free Mobile SMS notification service."""
return FreeSMSNotificationService(config[CONF_USERNAME],
config[CONF_ACCESS_TOKEN])
class FreeSMSNotificationService(BaseNotificationService):
"""Implement a notification service for the Free Mobile SMS service."""
def __init__(self, username, access_token):
"""Initialize the service."""
from freesms import FreeClient
self.free_client = FreeClient(username, access_token)
def send_message(self, message="", **kwargs):
"""Send a message to the Free Mobile user cell."""
resp = self.free_client.send_sms(message)
if resp.status_code == 400:
_LOGGER.error("At least one parameter is missing")
elif resp.status_code == 402:
_LOGGER.error("Too much SMS send in a few time")
elif resp.status_code == 403:
_LOGGER.error("Wrong Username/Password")
elif resp.status_code == 500:
_LOGGER.error("Server error, try later")
| apache-2.0 | 3,225,798,003,885,568,500 | 33.313725 | 75 | 0.682286 | false |
rwth-ti/gr-ofdm | python/ofdm/qa_channel_equalizer_mimo.py | 1 | 1301 | #!/usr/bin/env python
#
# Copyright 2014 Institute for Theoretical Information Technology,
# RWTH Aachen University
# www.ti.rwth-aachen.de
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest
import ofdm_swig as ofdm
class qa_channel_equalizer_mimo (gr_unittest.TestCase):
def setUp (self):
self.tb = gr.top_block ()
def tearDown (self):
self.tb = None
def test_001_t (self):
# set up fg
self.tb.run ()
# check data
if __name__ == '__main__':
gr_unittest.run(qa_channel_equalizer_mimo, "qa_channel_equalizer_mimo.xml")
| gpl-3.0 | -1,449,024,540,686,177,000 | 30.731707 | 79 | 0.691776 | false |
sunu/jasper-client | client/modules/Notifications.py | 35 | 1743 | # -*- coding: utf-8-*-
import re
import facebook
WORDS = ["FACEBOOK", "NOTIFICATION"]
def handle(text, mic, profile):
"""
Responds to user-input, typically speech text, with a summary of
the user's Facebook notifications, including a count and details
related to each individual notification.
Arguments:
text -- user-input, typically transcribed speech
mic -- used to interact with the user (for both input and output)
profile -- contains information related to the user (e.g., phone
number)
"""
oauth_access_token = profile['keys']['FB_TOKEN']
graph = facebook.GraphAPI(oauth_access_token)
try:
results = graph.request("me/notifications")
except facebook.GraphAPIError:
mic.say("I have not been authorized to query your Facebook. If you " +
"would like to check your notifications in the future, " +
"please visit the Jasper dashboard.")
return
    except Exception:
        mic.say(
            "I apologize, there's a problem with that service at the moment.")
        return
if not len(results['data']):
mic.say("You have no Facebook notifications. ")
return
updates = []
for notification in results['data']:
updates.append(notification['title'])
count = len(results['data'])
mic.say("You have " + str(count) +
" Facebook notifications. " + " ".join(updates) + ". ")
return
def isValid(text):
"""
Returns True if the input is related to Facebook notifications.
Arguments:
text -- user-input, typically transcribed speech
"""
return bool(re.search(r'\bnotification|Facebook\b', text, re.IGNORECASE))
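# Example (editor's addition):
#   isValid("Do I have any Facebook notifications?") -> True
#   isValid("What's the weather like?")              -> False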
| mit | -6,853,585,447,248,681,000 | 29.051724 | 78 | 0.6179 | false |
Weihonghao/ECM | Vpy34/lib/python3.5/site-packages/theano/tensor/tests/test_opt_uncanonicalize.py | 1 | 6708 | from __future__ import absolute_import, print_function, division
import unittest
import numpy
import theano
from theano import function, config
from theano import scalar
from theano.gof import FunctionGraph
from theano.gof.opt import out2in
from theano.tensor.opt_uncanonicalize import (
local_alloc_dimshuffle,
local_reshape_dimshuffle,
local_dimshuffle_alloc,
local_dimshuffle_subtensor,
)
import theano.tensor as tensor
# from theano.tensor import matrix, max_and_argmax, MaxAndArgmax, neg
from theano.tensor.elemwise import CAReduce, Elemwise, DimShuffle
from theano.tests import unittest_tools as utt
class T_max_and_argmax(unittest.TestCase):
def test_optimization(self):
# If we use only the max output, we should replace this op with
# a faster one.
mode = theano.compile.mode.get_default_mode().including(
'canonicalize', 'fast_run')
for axis in [0, 1, -1]:
data = numpy.asarray(numpy.random.rand(2, 3), dtype=config.floatX)
n = tensor.matrix()
f = function([n], tensor.max_and_argmax(n, axis)[0], mode=mode)
topo = f.maker.fgraph.toposort()
assert len(topo) == 1
assert isinstance(topo[0].op, CAReduce)
f = function([n], tensor.max_and_argmax(n, axis), mode=mode)
topo = f.maker.fgraph.toposort()
assert len(topo) == 1
assert isinstance(topo[0].op, tensor.MaxAndArgmax)
class T_min_max(unittest.TestCase):
def setUp(self):
utt.seed_rng()
self.mode = theano.compile.mode.get_default_mode().including(
'canonicalize', 'fast_run')
def test_optimization_max(self):
data = numpy.asarray(numpy.random.rand(2, 3), dtype=config.floatX)
n = tensor.matrix()
for axis in [0, 1, -1]:
f = function([n], tensor.max(n, axis), mode=self.mode)
topo = f.maker.fgraph.toposort()
assert len(topo) == 1
assert isinstance(topo[0].op, CAReduce)
f(data)
f = function([n], tensor.max(-n, axis), mode=self.mode)
topo = f.maker.fgraph.toposort()
assert len(topo) == 2
assert isinstance(topo[0].op, Elemwise)
assert isinstance(topo[0].op.scalar_op, scalar.Neg)
assert isinstance(topo[1].op, CAReduce)
f(data)
f = function([n], -tensor.max(n, axis), mode=self.mode)
topo = f.maker.fgraph.toposort()
assert len(topo) == 2
assert isinstance(topo[0].op, CAReduce)
assert isinstance(topo[1].op, Elemwise)
assert isinstance(topo[1].op.scalar_op, scalar.Neg)
f(data)
f = function([n], -tensor.max(-n, axis), mode=self.mode)
topo = f.maker.fgraph.toposort()
assert len(topo) == 1
assert isinstance(topo[0].op, CAReduce) # min
f(data)
def test_optimization_min(self):
data = numpy.asarray(numpy.random.rand(2, 3), dtype=config.floatX)
n = tensor.matrix()
for axis in [0, 1, -1]:
f = function([n], tensor.min(n, axis), mode=self.mode)
topo = f.maker.fgraph.toposort()
assert len(topo) == 1
assert isinstance(topo[0].op, CAReduce)
f(data)
# test variant with neg to make sure we optimize correctly
f = function([n], tensor.min(-n, axis), mode=self.mode)
topo = f.maker.fgraph.toposort()
assert len(topo) == 2
assert isinstance(topo[0].op, CAReduce) # max
assert isinstance(topo[1].op, Elemwise)
assert isinstance(topo[1].op.scalar_op, scalar.Neg)
f(data)
f = function([n], -tensor.min(n, axis), mode=self.mode)
topo = f.maker.fgraph.toposort()
assert len(topo) == 2
assert isinstance(topo[0].op, Elemwise)
assert isinstance(topo[0].op.scalar_op, scalar.Neg)
assert isinstance(topo[1].op, CAReduce) # max
f(data)
f = function([n], -tensor.min(-n, axis), mode=self.mode)
topo = f.maker.fgraph.toposort()
assert len(topo) == 1
assert isinstance(topo[0].op, CAReduce) # max
f(data)
def test_local_alloc_dimshuffle():
alloc_dimshuffle = out2in(local_alloc_dimshuffle)
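    # out2in wraps the local optimizer so it can be applied to a whole
    # FunctionGraph, walking the graph from outputs to inputs.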
x = tensor.vector('x')
m = tensor.iscalar('m')
y = x.dimshuffle('x', 0)
out = tensor.alloc(y, m, 1, x.shape[0])
g = FunctionGraph([x, m], [out])
alloc_dimshuffle(g)
topo = g.toposort()
assert any([not isinstance(x, DimShuffle) for x in topo])
def test_local_reshape_dimshuffle():
reshape_dimshuffle = out2in(local_reshape_dimshuffle)
x = tensor.matrix('x')
y = x.dimshuffle('x', 0, 'x', 1)
out = tensor.reshape(y, (1, x.shape[0] * x.shape[1], 1))
g = FunctionGraph([x], [out])
reshape_dimshuffle(g)
topo = g.toposort()
assert any([not isinstance(x, DimShuffle) for x in topo])
def test_local_dimshuffle_alloc():
reshape_dimshuffle = out2in(local_dimshuffle_alloc)
x = tensor.vector('x')
out = tensor.alloc(x, 3, 2).dimshuffle('x', 'x', 0, 1)
g = FunctionGraph([x], [out])
reshape_dimshuffle(g)
l = theano.gof.PerformLinker()
l.accept(g)
f = l.make_function()
assert f([3, 4]).ndim == 4
topo = g.toposort()
assert any([not isinstance(x, DimShuffle) for x in topo])
def test_local_dimshuffle_subtensor():
dimshuffle_subtensor = out2in(local_dimshuffle_subtensor)
x = tensor.dtensor4('x')
x = tensor.patternbroadcast(x, (False, True, False, False))
i = tensor.iscalar('i')
out = x[:, :, 10:30, ::i].dimshuffle(0, 2, 3)
g = FunctionGraph([x, i], [out])
dimshuffle_subtensor(g)
topo = g.toposort()
assert any([not isinstance(x, DimShuffle) for x in topo])
    # Test that dimshuffle removes dimensions the subtensor doesn't "see".
x = tensor.tensor(broadcastable=(False, True, False), dtype='float64')
out = x[i].dimshuffle(1)
g = FunctionGraph([x, i], [out])
dimshuffle_subtensor(g)
topo = g.toposort()
assert any([not isinstance(x, DimShuffle) for x in topo])
    # Test that dimshuffle removes dimensions the subtensor doesn't "see"
    # even when they are interleaved with the dimensions it keeps.
x = tensor.tensor(broadcastable=(False, True, False, True),
dtype='float64')
out = x[i].dimshuffle(1)
f = theano.function([x, i], out)
topo = f.maker.fgraph.toposort()
assert any([not isinstance(x, DimShuffle) for x in topo])
assert f(numpy.random.rand(5, 1, 4, 1), 2).shape == (4,)
| agpl-3.0 | -3,565,223,495,913,316,000 | 31.095694 | 78 | 0.596154 | false |
Kast0rTr0y/ansible | lib/ansible/modules/messaging/rabbitmq_policy.py | 25 | 5071 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, John Dewey <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: rabbitmq_policy
short_description: Manage the state of policies in RabbitMQ.
description:
- Manage the state of a virtual host in RabbitMQ.
version_added: "1.5"
author: "John Dewey (@retr0h)"
options:
name:
description:
- The name of the policy to manage.
required: true
default: null
vhost:
description:
- The name of the vhost to apply to.
required: false
default: /
apply_to:
description:
- What the policy applies to. Requires RabbitMQ 3.2.0 or later.
required: false
default: all
choices: [all, exchanges, queues]
version_added: "2.1"
pattern:
description:
- A regex of queues to apply the policy to.
required: true
default: null
tags:
description:
- A dict or string describing the policy.
required: true
default: null
priority:
description:
- The priority of the policy.
required: false
default: 0
node:
description:
- Erlang node name of the rabbit we wish to configure.
required: false
default: rabbit
state:
description:
- The state of the policy.
default: present
choices: [present, absent]
'''
EXAMPLES = '''
- name: ensure the default vhost contains the HA policy via a dict
rabbitmq_policy:
name: HA
pattern: .*
args:
tags:
ha-mode: all
- name: ensure the default vhost contains the HA policy
rabbitmq_policy:
name: HA
pattern: .*
tags:
ha-mode: all
'''
class RabbitMqPolicy(object):
def __init__(self, module, name):
self._module = module
self._name = name
self._vhost = module.params['vhost']
self._pattern = module.params['pattern']
self._apply_to = module.params['apply_to']
self._tags = module.params['tags']
self._priority = module.params['priority']
self._node = module.params['node']
self._rabbitmqctl = module.get_bin_path('rabbitmqctl', True)
def _exec(self, args, run_in_check_mode=False):
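        # Run rabbitmqctl against the configured node (-n), with every
        # policy command scoped to this policy's vhost (-p); in check
        # mode only read-only calls (run_in_check_mode=True) execute.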
if not self._module.check_mode or (self._module.check_mode and run_in_check_mode):
cmd = [self._rabbitmqctl, '-q', '-n', self._node]
args.insert(1, '-p')
args.insert(2, self._vhost)
rc, out, err = self._module.run_command(cmd + args, check_rc=True)
return out.splitlines()
return list()
def list(self):
policies = self._exec(['list_policies'], True)
for policy in policies:
policy_name = policy.split('\t')[1]
if policy_name == self._name:
return True
return False
def set(self):
import json
args = ['set_policy']
args.append(self._name)
args.append(self._pattern)
args.append(json.dumps(self._tags))
args.append('--priority')
args.append(self._priority)
if (self._apply_to != 'all'):
args.append('--apply-to')
args.append(self._apply_to)
return self._exec(args)
def clear(self):
return self._exec(['clear_policy', self._name])
def main():
arg_spec = dict(
name=dict(required=True),
vhost=dict(default='/'),
pattern=dict(required=True),
apply_to=dict(default='all', choices=['all', 'exchanges', 'queues']),
tags=dict(type='dict', required=True),
priority=dict(default='0'),
node=dict(default='rabbit'),
state=dict(default='present', choices=['present', 'absent']),
)
module = AnsibleModule(
argument_spec=arg_spec,
supports_check_mode=True
)
name = module.params['name']
state = module.params['state']
rabbitmq_policy = RabbitMqPolicy(module, name)
changed = False
if rabbitmq_policy.list():
if state == 'absent':
rabbitmq_policy.clear()
changed = True
else:
changed = False
elif state == 'present':
rabbitmq_policy.set()
changed = True
module.exit_json(changed=changed, name=name, state=state)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 | -2,703,297,284,644,614,000 | 27.172222 | 90 | 0.610333 | false |
greedymouse/openwrt | tools/b43-tools/files/b43-fwsquash.py | 494 | 4767 | #!/usr/bin/env python
#
# b43 firmware file squasher
# Removes unnecessary firmware files
#
# Copyright (c) 2009 Michael Buesch <[email protected]>
#
# Licensed under the GNU/GPL version 2 or (at your option) any later version.
#
import sys
import os
def usage():
print("Usage: %s PHYTYPES COREREVS /path/to/extracted/firmware" % sys.argv[0])
print("")
print("PHYTYPES is a comma separated list of:")
print("A => A-PHY")
print("AG => Dual A-PHY G-PHY")
print("G => G-PHY")
print("LP => LP-PHY")
print("N => N-PHY")
print("HT => HT-PHY")
print("LCN => LCN-PHY")
print("LCN40 => LCN40-PHY")
print("AC => AC-PHY")
print("")
print("COREREVS is a comma separated list of core revision numbers.")
if len(sys.argv) != 4:
usage()
sys.exit(1)
phytypes = sys.argv[1]
corerevs = sys.argv[2]
fwpath = sys.argv[3]
phytypes = phytypes.split(',')
try:
    corerevs = [int(r) for r in corerevs.split(',')]
except ValueError:
print("ERROR: \"%s\" is not a valid COREREVS string\n" % corerevs)
usage()
sys.exit(1)
fwfiles = os.listdir(fwpath)
fwfiles = [f for f in fwfiles if f.endswith(".fw")]
if not fwfiles:
print("ERROR: No firmware files found in %s" % fwpath)
sys.exit(1)
required_fwfiles = []
def revs_match(revs_a, revs_b):
for rev in revs_a:
if rev in revs_b:
return True
return False
def phytypes_match(types_a, types_b):
    for phytype in types_a:
        phytype = phytype.strip().upper()
        if phytype in types_b:
            return True
    return False
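# The tables below map each firmware file to the (core revisions, PHY types)
# that require it. A file is kept only if it matches at least one requested
# core revision AND one requested PHY type; everything else is deleted.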
revmapping = {
"ucode2.fw" : ( (2,3,), ("G",), ),
"ucode4.fw" : ( (4,), ("G",), ),
"ucode5.fw" : ( (5,6,7,8,9,10,), ("G","A","AG",), ),
"ucode11.fw" : ( (11,12,), ("N",), ),
"ucode13.fw" : ( (13,), ("LP","G",), ),
"ucode14.fw" : ( (14,), ("LP",), ),
"ucode15.fw" : ( (15,), ("LP",), ),
"ucode16_mimo.fw" : ( (16,17,18,19,23,), ("N",), ),
# "ucode16_lp.fw" : ( (16,17,18,19,), ("LP",), ),
"ucode24_lcn.fw" : ( (24,), ("LCN",), ),
"ucode25_mimo.fw" : ( (25,28,), ("N",), ),
"ucode25_lcn.fw" : ( (25,28,), ("LCN",), ),
"ucode26_mimo.fw" : ( (26,), ("HT",), ),
"ucode29_mimo.fw" : ( (29,), ("HT",), ),
"ucode30_mimo.fw" : ( (30,), ("N",), ),
"ucode33_lcn40.fw" : ( (33,), ("LCN40",), ),
"ucode40.fw" : ( (40,), ("AC",), ),
"ucode42.fw" : ( (42,), ("AC",), ),
"pcm4.fw" : ( (1,2,3,4,), ("G",), ),
"pcm5.fw" : ( (5,6,7,8,9,10,), ("G","A","AG",), ),
}
initvalmapping = {
"a0g1initvals5.fw" : ( (5,6,7,8,9,10,), ("AG",), ),
"a0g0initvals5.fw" : ( (5,6,7,8,9,10,), ("A", "AG",), ),
"b0g0initvals2.fw" : ( (2,4,), ("G",), ),
"b0g0initvals5.fw" : ( (5,6,7,8,9,10,), ("G",), ),
"b0g0initvals13.fw" : ( (13,), ("G",), ),
"n0initvals11.fw" : ( (11,12,), ("N",), ),
"n0initvals16.fw" : ( (16,17,18,23,), ("N",), ),
"n0initvals24.fw" : ( (24,), ("N",), ),
"n0initvals25.fw" : ( (25,28,), ("N",), ),
"n16initvals30.fw" : ( (30,), ("N",), ),
"lp0initvals13.fw" : ( (13,), ("LP",), ),
"lp0initvals14.fw" : ( (14,), ("LP",), ),
"lp0initvals15.fw" : ( (15,), ("LP",), ),
# "lp0initvals16.fw" : ( (16,17,18,), ("LP",), ),
"lcn0initvals24.fw" : ( (24,), ("LCN",), ),
"ht0initvals26.fw" : ( (26,), ("HT",), ),
"ht0initvals29.fw" : ( (29,), ("HT",), ),
"lcn400initvals33.fw" : ( (33,), ("LCN40",), ),
"ac0initvals40.fw" : ( (40,), ("AC",), ),
"ac1initvals42.fw" : ( (42,), ("AC",), ),
"a0g1bsinitvals5.fw" : ( (5,6,7,8,9,10,), ("AG",), ),
"a0g0bsinitvals5.fw" : ( (5,6,7,8,9,10,), ("A", "AG"), ),
"b0g0bsinitvals5.fw" : ( (5,6,7,8,9,10,), ("G",), ),
"n0bsinitvals11.fw" : ( (11,12,), ("N",), ),
"n0bsinitvals16.fw" : ( (16,17,18,23,), ("N",), ),
"n0bsinitvals24.fw" : ( (24,), ("N",), ),
"n0bsinitvals25.fw" : ( (25,28,), ("N",), ),
"n16bsinitvals30.fw" : ( (30,), ("N",), ),
"lp0bsinitvals13.fw" : ( (13,), ("LP",), ),
"lp0bsinitvals14.fw" : ( (14,), ("LP",), ),
"lp0bsinitvals15.fw" : ( (15,), ("LP",), ),
# "lp0bsinitvals16.fw" : ( (16,17,18,), ("LP",), ),
"lcn0bsinitvals24.fw" : ( (24,), ("LCN",), ),
"ht0bsinitvals26.fw" : ( (26,), ("HT",), ),
"ht0bsinitvals29.fw" : ( (29,), ("HT",), ),
"lcn400bsinitvals33.fw" : ( (33,), ("LCN40",), ),
"ac0bsinitvals40.fw" : ( (40,), ("AC",), ),
"ac1bsinitvals42.fw" : ( (42,), ("AC",), ),
}
for f in fwfiles:
if f in revmapping:
if revs_match(corerevs, revmapping[f][0]) and\
phytypes_match(phytypes, revmapping[f][1]):
required_fwfiles += [f]
continue
if f in initvalmapping:
if revs_match(corerevs, initvalmapping[f][0]) and\
phytypes_match(phytypes, initvalmapping[f][1]):
required_fwfiles += [f]
continue
print("WARNING: Firmware file %s not found in the mapping lists" % f)
for f in fwfiles:
if f not in required_fwfiles:
print("Deleting %s" % f)
os.unlink(fwpath + '/' + f)
| gpl-2.0 | 4,091,595,116,069,625,000 | 30.993289 | 79 | 0.520663 | false |
philotas/enigma2 | lib/python/Components/Converter/ChannelNumbers.py | 42 | 1835 | from Components.NimManager import nimmanager
class ChannelNumbers:
def __init__(self):
pass
def getChannelNumber(self, frequency, nim):
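        # Map a centre frequency to the regional terrestrial channel number.
        # A "+"/"-" suffix marks a frequency offset from the channel centre.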
f = int(self.getMHz(frequency))
descr = self.getTunerDescription(nim)
if "Europe" in descr:
if "DVB-T" in descr:
if 174 < f < 230: # III
d = (f + 1) % 7
return str(int(f - 174)/7 + 5) + (d < 3 and "-" or d > 4 and "+" or "")
elif 470 <= f < 863: # IV,V
d = (f + 2) % 8
return str(int(f - 470) / 8 + 21) + (d < 3.5 and "-" or d > 4.5 and "+" or "")
elif "Australia" in descr:
d = (f + 1) % 7
ds = (d < 3 and "-" or d > 4 and "+" or "")
if 174 < f < 202: # CH6-CH9
return str(int(f - 174)/7 + 6) + ds
elif 202 <= f < 209: # CH9A
return "9A" + ds
elif 209 <= f < 230: # CH10-CH12
return str(int(f - 209)/7 + 10) + ds
elif 526 < f < 820: # CH28-CH69
d = (f - 1) % 7
return str(int(f - 526)/7 + 28) + (d < 3 and "-" or d > 4 and "+" or "")
return ""
def getMHz(self, frequency):
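        # Accept either an "<n> MHz" string or a frequency in Hz and
        # return the value in MHz, rounded to one decimal place.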
if str(frequency).endswith('MHz'):
return frequency.split()[0]
return (frequency+50000)/100000/10.
def getTunerDescription(self, nim):
description = ""
try:
description = nimmanager.getTerrestrialDescription(nim)
except:
print "[ChannelNumber] nimmanager.getTerrestrialDescription(nim) failed, nim:", nim
return description
def supportedChannels(self, nim):
descr = self.getTunerDescription(nim)
if "Europe" in descr and "DVB-T" in descr:
return True
return False
def channel2frequency(self, channel, nim):
descr = self.getTunerDescription(nim)
if "Europe" in descr and "DVB-T" in descr:
if 5 <= channel <= 12:
return (177500 + 7000*(channel- 5))*1000
elif 21 <= channel <= 69:
return (474000 + 8000*(channel-21))*1000
return 474000000
channelnumbers = ChannelNumbers()
| gpl-2.0 | 7,270,777,003,644,120,000 | 27.230769 | 86 | 0.600545 | false |
omniscale/gbi-server | app/gbi_server/views/context.py | 1 | 8042 | # This file is part of the GBI project.
# Copyright (C) 2013 Omniscale GmbH & Co. KG <http://omniscale.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from functools import wraps
from flask import Blueprint, request, Response, g, url_for, current_app
from flask.ext.babel import gettext as _
from sqlalchemy.sql.expression import desc
from geoalchemy2.functions import ST_AsGeoJSON, ST_Transform
from gbi_server.config import SystemConfig
from gbi_server.model import WMTS, WMS, WFS, User
from gbi_server.extensions import db
from gbi_server.lib.couchdb import CouchDBBox, init_user_boxes
context = Blueprint("context", __name__, template_folder="../templates")
def check_auth(username, password):
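    # Validate HTTP Basic credentials against the user database; only
    # active accounts pass, and the user is remembered on flask.g.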
user = User.by_email(username)
if user and user.check_password(password) and user.active:
g.user = user
return True
else:
return False
def authenticate():
"""Sends a 401 response that enables basic auth"""
return Response(
'Could not verify your access level for that URL.\n'
'You have to login with proper credentials', 401,
{'WWW-Authenticate': 'Basic realm="Login Required"'})
def requires_auth(f):
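    # Decorator enforcing HTTP Basic auth on the wrapped view.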
@wraps(f)
def decorated(*args, **kwargs):
auth = request.authorization
if not auth or not check_auth(auth.username, auth.password):
return Response("""
Could not verify your access level for that URL.
You have to login with proper credentials""", 401,
{'WWW-Authenticate': 'Basic realm="Login Required"'})
return f(*args, **kwargs)
return decorated
@context.route('/context')
@requires_auth
def get_context_document():
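    # Assemble the JSON context document for the client: all configured
    # WMTS/WMS/WFS sources plus the per-user CouchDB boxes and service URLs.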
init_user_boxes(g.user, current_app.config.get('COUCH_DB_URL'))
wmts_sources = db.session.query(WMTS, ST_AsGeoJSON(ST_Transform(WMTS.view_coverage, 3857))).order_by(desc(WMTS.is_background_layer)).all()
wms_sources = db.session.query(WMS, ST_AsGeoJSON(ST_Transform(WMS.view_coverage, 3857))).order_by(desc(WMS.is_background_layer)).all()
wfs_sources = db.session.query(WFS).all()
response = {
"version": "0.2",
"portal": {
"prefix": current_app.config['PORTAL_PREFIX'],
"title": current_app.config['PORTAL_TITLE'],
},
"wmts_sources": [],
"wms_sources": [],
"wfs_sources": [],
"couchdb_sources": [],
}
couchdb = CouchDBBox(current_app.config['COUCH_DB_URL'], '%s_%s' % (SystemConfig.AREA_BOX_NAME, g.user.id))
for source in wmts_sources:
wmts, view_coverage = source
geom = json.loads(view_coverage)
response['wmts_sources'].append({
"name": wmts.name,
"title": wmts.title,
"url": wmts.client_url(external=True),
"format": wmts.format,
"overlay": wmts.is_overlay,
"username": wmts.username,
"password": wmts.password,
"is_public": wmts.is_public,
"is_protected": wmts.is_protected,
"is_background_layer": wmts.is_background_layer,
"max_tiles": wmts.max_tiles,
"view_restriction": {
"zoom_level_start": wmts.view_level_start,
"zoom_level_end": wmts.view_level_end,
"geometry": geom
},
"download_restriction": {
"zoom_level_start": wmts.view_level_start,
"zoom_level_end": wmts.view_level_end,
}
})
for source in wms_sources:
wms, view_coverage = source
geom = json.loads(view_coverage)
response['wms_sources'].append({
"name": wms.name,
"title": wms.title,
"url": wms.url,
"layer": wms.layer,
"format": wms.format,
"overlay": wms.is_overlay,
"username": wms.username,
"password": wms.password,
"is_public": wms.is_public,
"is_protected": wms.is_protected,
"srs": wms.srs,
"wms_version": wms.version,
"view_restriction": {
"zoom_level_start": wms.view_level_start,
"zoom_level_end": wms.view_level_end,
"geometry": geom
},
"download_restriction": {
"zoom_level_start": wms.view_level_start,
"zoom_level_end": wms.view_level_end,
}
})
for wfs in wfs_sources:
response['wfs_sources'].append({
'id': wfs.id,
'name': wfs.name,
'layer': wfs.layer,
'host': wfs.host,
'url': wfs.url,
'srs': wfs.srs,
'geometry_field': wfs.geometry,
'feature_ns': wfs.ns_uri,
'typename': wfs.ns_prefix,
'search_property': wfs.search_property,
'username': wfs.username,
'password': wfs.password,
'is_protected': wfs.is_protected,
})
if current_app.config['FEATURE_AREA_BOXES']:
response['couchdb_sources'].append({
"name": _('area box'),
"url": current_app.config['COUCH_DB_URL'],
"dbname": '%s_%s' % (SystemConfig.AREA_BOX_NAME, g.user.id),
"username": 'user_%d' % g.user.id,
"password": g.user.authproxy_token,
"writable": True,
"dbname_user": SystemConfig.AREA_BOX_NAME_LOCAL,
})
if current_app.config['FEATURE_DOC_BOXES']:
if g.user.is_consultant:
response['couchdb_sources'].append({
"name": _('file box'),
"url": current_app.config['COUCH_DB_URL'],
"dbname": '%s_%s' % (SystemConfig.FILE_BOX_NAME, g.user.id),
"username": 'user_%d' % g.user.id,
"password": g.user.authproxy_token,
"writable": True,
"dbname_user": SystemConfig.FILE_BOX_NAME_LOCAL,
})
else:
response['couchdb_sources'].append({
"name": _('consultant box'),
"url": current_app.config['COUCH_DB_URL'],
"dbname": '%s_%s' % (SystemConfig.DOWNLOAD_BOX_NAME, g.user.id),
"username": 'user_%d' % g.user.id,
"password": g.user.authproxy_token,
"writable": False,
"dbname_user": SystemConfig.DOWNLOAD_BOX_NAME_LOCAL,
})
response['couchdb_sources'].append({
"name": _('uploadbox'),
"url": current_app.config['COUCH_DB_URL'],
"dbname": '%s_%s' % (SystemConfig.UPLOAD_BOX_NAME, g.user.id),
"username": 'user_%d' % g.user.id,
"password": g.user.authproxy_token,
"writable": True,
"dbname_user": SystemConfig.UPLOAD_BOX_NAME_LOCAL,
})
if current_app.config['PARCEL_SEARCH_DATABASE_URI']:
response['parcel_search_url'] = url_for('search.query', token=g.user.authproxy_token, _external=True)
response['logging'] = {
'url': url_for('logserv.log', user_token=g.user.authproxy_token, _external=True),
}
response['update_coverage'] = {
'url': url_for('authproxy.update_download_coverage',
user_token=g.user.authproxy_token,
_external=True
),
}
response['user'] = {
'email': g.user.email,
'type': g.user.type,
'type_name': g.user.type_name,
}
return json.dumps(response)
| apache-2.0 | 5,225,329,362,212,287,000 | 35.554545 | 142 | 0.564661 | false |
adrienpacifico/openfisca-france | setup.py | 1 | 1776 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
""" -- a versatile microsimulation free software"""
from setuptools import setup, find_packages
setup(
name = 'OpenFisca-France',
version = '0.5.4.dev0',
author = 'OpenFisca Team',
author_email = '[email protected]',
classifiers = [
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3",
"Operating System :: POSIX",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Information Analysis",
],
description = u'French tax and benefit system for OpenFisca',
keywords = 'benefit france microsimulation social tax',
license = 'http://www.fsf.org/licensing/licenses/agpl-3.0.html',
url = 'https://github.com/openfisca/openfisca-france',
data_files = [
('share/locale/fr/LC_MESSAGES', ['openfisca_france/i18n/fr/LC_MESSAGES/openfisca-france.mo']),
('share/openfisca/openfisca-france', ['CHANGELOG.md', 'LICENSE', 'README.md']),
],
extras_require = {
'inversion_revenus': [
'scipy >= 0.12',
],
'taxipp': [
'pandas >= 0.13',
],
'test': [
'nose',
],
},
include_package_data = True, # Will read MANIFEST.in
install_requires = [
'Babel >= 0.9.4',
'Biryani[datetimeconv] >= 0.10.4',
'numpy >= 1.6,< 1.10',
'OpenFisca-Core >= 0.5.0',
'PyYAML >= 3.10',
'requests >= 2.8',
],
message_extractors = {'openfisca_france': [
('**.py', 'python', None),
]},
packages = find_packages(exclude=['openfisca_france.tests*']),
test_suite = 'nose.collector',
)
| agpl-3.0 | -3,220,461,249,796,911,600 | 30.157895 | 102 | 0.552928 | false |
Azure/azure-sdk-for-python | sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2018_02_01/operations/_app_service_certificate_orders_operations.py | 1 | 76895 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class AppServiceCertificateOrdersOperations(object):
"""AppServiceCertificateOrdersOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.web.v2018_02_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.AppServiceCertificateOrderCollection"]
"""List all certificate orders in a subscription.
List all certificate orders in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AppServiceCertificateOrderCollection or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.web.v2018_02_01.models.AppServiceCertificateOrderCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServiceCertificateOrderCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('AppServiceCertificateOrderCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.CertificateRegistration/certificateOrders'} # type: ignore
def validate_purchase_information(
self,
app_service_certificate_order, # type: "_models.AppServiceCertificateOrder"
**kwargs # type: Any
):
# type: (...) -> None
"""Validate information for a certificate order.
Validate information for a certificate order.
:param app_service_certificate_order: Information for a certificate order.
:type app_service_certificate_order: ~azure.mgmt.web.v2018_02_01.models.AppServiceCertificateOrder
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self.validate_purchase_information.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(app_service_certificate_order, 'AppServiceCertificateOrder')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
validate_purchase_information.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.CertificateRegistration/validateCertificateRegistrationInformation'} # type: ignore
def list_by_resource_group(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.AppServiceCertificateOrderCollection"]
"""Get certificate orders in a resource group.
Get certificate orders in a resource group.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AppServiceCertificateOrderCollection or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.web.v2018_02_01.models.AppServiceCertificateOrderCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServiceCertificateOrderCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('AppServiceCertificateOrderCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders'} # type: ignore
def get(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.AppServiceCertificateOrder"
"""Get a certificate order.
Get a certificate order.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
        :param certificate_order_name: Name of the certificate order.
:type certificate_order_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AppServiceCertificateOrder, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2018_02_01.models.AppServiceCertificateOrder
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServiceCertificateOrder"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('AppServiceCertificateOrder', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
certificate_distinguished_name, # type: "_models.AppServiceCertificateOrder"
**kwargs # type: Any
):
# type: (...) -> "_models.AppServiceCertificateOrder"
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServiceCertificateOrder"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(certificate_distinguished_name, 'AppServiceCertificateOrder')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AppServiceCertificateOrder', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('AppServiceCertificateOrder', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
certificate_distinguished_name, # type: "_models.AppServiceCertificateOrder"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.AppServiceCertificateOrder"]
"""Create or update a certificate purchase order.
Create or update a certificate purchase order.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param certificate_order_name: Name of the certificate order.
:type certificate_order_name: str
:param certificate_distinguished_name: Distinguished name to use for the certificate order.
:type certificate_distinguished_name: ~azure.mgmt.web.v2018_02_01.models.AppServiceCertificateOrder
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either AppServiceCertificateOrder or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.web.v2018_02_01.models.AppServiceCertificateOrder]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServiceCertificateOrder"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
certificate_order_name=certificate_order_name,
certificate_distinguished_name=certificate_distinguished_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('AppServiceCertificateOrder', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}'} # type: ignore
def delete(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""Delete an existing certificate order.
Delete an existing certificate order.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param certificate_order_name: Name of the certificate order.
:type certificate_order_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
# Construct URL
url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}'} # type: ignore
def update(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
certificate_distinguished_name, # type: "_models.AppServiceCertificateOrderPatchResource"
**kwargs # type: Any
):
# type: (...) -> "_models.AppServiceCertificateOrder"
"""Create or update a certificate purchase order.
Create or update a certificate purchase order.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param certificate_order_name: Name of the certificate order.
:type certificate_order_name: str
:param certificate_distinguished_name: Distinguished name to use for the certificate order.
:type certificate_distinguished_name: ~azure.mgmt.web.v2018_02_01.models.AppServiceCertificateOrderPatchResource
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AppServiceCertificateOrder, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2018_02_01.models.AppServiceCertificateOrder
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServiceCertificateOrder"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(certificate_distinguished_name, 'AppServiceCertificateOrderPatchResource')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AppServiceCertificateOrder', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('AppServiceCertificateOrder', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}'} # type: ignore
def list_certificates(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.AppServiceCertificateCollection"]
"""List all certificates associated with a certificate order.
List all certificates associated with a certificate order.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param certificate_order_name: Name of the certificate order.
:type certificate_order_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AppServiceCertificateCollection or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.web.v2018_02_01.models.AppServiceCertificateCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServiceCertificateCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_certificates.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('AppServiceCertificateCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_certificates.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/certificates'} # type: ignore
def get_certificate(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.AppServiceCertificateResource"
"""Get the certificate associated with a certificate order.
Get the certificate associated with a certificate order.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param certificate_order_name: Name of the certificate order.
:type certificate_order_name: str
:param name: Name of the certificate.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AppServiceCertificateResource, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2018_02_01.models.AppServiceCertificateResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServiceCertificateResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
accept = "application/json"
# Construct URL
url = self.get_certificate.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('AppServiceCertificateResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_certificate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/certificates/{name}'} # type: ignore
def _create_or_update_certificate_initial(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
name, # type: str
key_vault_certificate, # type: "_models.AppServiceCertificateResource"
**kwargs # type: Any
):
# type: (...) -> "_models.AppServiceCertificateResource"
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServiceCertificateResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_certificate_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(key_vault_certificate, 'AppServiceCertificateResource')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AppServiceCertificateResource', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('AppServiceCertificateResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_certificate_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/certificates/{name}'} # type: ignore
def begin_create_or_update_certificate(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
name, # type: str
key_vault_certificate, # type: "_models.AppServiceCertificateResource"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.AppServiceCertificateResource"]
"""Creates or updates a certificate and associates with key vault secret.
Creates or updates a certificate and associates with key vault secret.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param certificate_order_name: Name of the certificate order.
:type certificate_order_name: str
:param name: Name of the certificate.
:type name: str
:param key_vault_certificate: Key vault certificate resource Id.
:type key_vault_certificate: ~azure.mgmt.web.v2018_02_01.models.AppServiceCertificateResource
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either AppServiceCertificateResource or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.web.v2018_02_01.models.AppServiceCertificateResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServiceCertificateResource"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_certificate_initial(
resource_group_name=resource_group_name,
certificate_order_name=certificate_order_name,
name=name,
key_vault_certificate=key_vault_certificate,
                cls=lambda x, y, z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('AppServiceCertificateResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update_certificate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/certificates/{name}'} # type: ignore
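    # Usage sketch (illustrative; the ``web_client`` wiring below is an assumption,
    # not part of this module -- any client exposing this operation group would do):
    #
    #     poller = web_client.app_service_certificate_orders.begin_create_or_update_certificate(
    #         resource_group_name="my-rg",
    #         certificate_order_name="my-order",
    #         name="my-cert",
    #         key_vault_certificate=key_vault_certificate,  # an AppServiceCertificateResource
    #     )
    #     certificate = poller.result()  # blocks until the long-running operation completes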
def delete_certificate(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""Delete the certificate associated with a certificate order.
Delete the certificate associated with a certificate order.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param certificate_order_name: Name of the certificate order.
:type certificate_order_name: str
:param name: Name of the certificate.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
# Construct URL
url = self.delete_certificate.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_certificate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/certificates/{name}'} # type: ignore
def update_certificate(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
name, # type: str
key_vault_certificate, # type: "_models.AppServiceCertificatePatchResource"
**kwargs # type: Any
):
# type: (...) -> "_models.AppServiceCertificateResource"
"""Creates or updates a certificate and associates with key vault secret.
Creates or updates a certificate and associates with key vault secret.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param certificate_order_name: Name of the certificate order.
:type certificate_order_name: str
:param name: Name of the certificate.
:type name: str
:param key_vault_certificate: Key vault certificate resource Id.
:type key_vault_certificate: ~azure.mgmt.web.v2018_02_01.models.AppServiceCertificatePatchResource
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AppServiceCertificateResource, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2018_02_01.models.AppServiceCertificateResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServiceCertificateResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_certificate.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(key_vault_certificate, 'AppServiceCertificatePatchResource')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AppServiceCertificateResource', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('AppServiceCertificateResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_certificate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/certificates/{name}'} # type: ignore
def reissue(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
reissue_certificate_order_request, # type: "_models.ReissueCertificateOrderRequest"
**kwargs # type: Any
):
# type: (...) -> None
"""Reissue an existing certificate order.
Reissue an existing certificate order.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param certificate_order_name: Name of the certificate order.
:type certificate_order_name: str
:param reissue_certificate_order_request: Parameters for the reissue.
:type reissue_certificate_order_request: ~azure.mgmt.web.v2018_02_01.models.ReissueCertificateOrderRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self.reissue.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(reissue_certificate_order_request, 'ReissueCertificateOrderRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
reissue.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/reissue'} # type: ignore
def renew(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
renew_certificate_order_request, # type: "_models.RenewCertificateOrderRequest"
**kwargs # type: Any
):
# type: (...) -> None
"""Renew an existing certificate order.
Renew an existing certificate order.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param certificate_order_name: Name of the certificate order.
:type certificate_order_name: str
:param renew_certificate_order_request: Renew parameters.
:type renew_certificate_order_request: ~azure.mgmt.web.v2018_02_01.models.RenewCertificateOrderRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self.renew.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(renew_certificate_order_request, 'RenewCertificateOrderRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
renew.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/renew'} # type: ignore
def resend_email(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""Resend certificate email.
Resend certificate email.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param certificate_order_name: Name of the certificate order.
:type certificate_order_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
# Construct URL
url = self.resend_email.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
resend_email.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/resendEmail'} # type: ignore
def resend_request_emails(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
name_identifier, # type: "_models.NameIdentifier"
**kwargs # type: Any
):
# type: (...) -> None
"""Verify domain ownership for this certificate order.
Verify domain ownership for this certificate order.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param certificate_order_name: Name of the certificate order.
:type certificate_order_name: str
:param name_identifier: Email address.
:type name_identifier: ~azure.mgmt.web.v2018_02_01.models.NameIdentifier
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self.resend_request_emails.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(name_identifier, 'NameIdentifier')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
resend_request_emails.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/resendRequestEmails'} # type: ignore
def retrieve_site_seal(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
site_seal_request, # type: "_models.SiteSealRequest"
**kwargs # type: Any
):
# type: (...) -> "_models.SiteSeal"
"""Verify domain ownership for this certificate order.
Verify domain ownership for this certificate order.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param certificate_order_name: Name of the certificate order.
:type certificate_order_name: str
:param site_seal_request: Site seal request.
:type site_seal_request: ~azure.mgmt.web.v2018_02_01.models.SiteSealRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SiteSeal, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2018_02_01.models.SiteSeal
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SiteSeal"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.retrieve_site_seal.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(site_seal_request, 'SiteSealRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('SiteSeal', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
retrieve_site_seal.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/retrieveSiteSeal'} # type: ignore
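    # Usage sketch (illustrative; the SiteSealRequest attributes shown are
    # assumptions based on the model referenced above):
    #
    #     seal = web_client.app_service_certificate_orders.retrieve_site_seal(
    #         resource_group_name="my-rg",
    #         certificate_order_name="my-order",
    #         site_seal_request=SiteSealRequest(light_theme=True, locale="en-US"),
    #     )
    #     # ``seal`` is a SiteSeal model carrying the embeddable seal markup.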
def verify_domain_ownership(
self,
resource_group_name, # type: str
certificate_order_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""Verify domain ownership for this certificate order.
Verify domain ownership for this certificate order.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param certificate_order_name: Name of the certificate order.
:type certificate_order_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
# Construct URL
url = self.verify_domain_ownership.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'certificateOrderName': self._serialize.url("certificate_order_name", certificate_order_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
verify_domain_ownership.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{certificateOrderName}/verifyDomainOwnership'} # type: ignore
def retrieve_certificate_actions(
self,
resource_group_name, # type: str
name, # type: str
**kwargs # type: Any
):
# type: (...) -> List["_models.CertificateOrderAction"]
"""Retrieve the list of certificate actions.
Retrieve the list of certificate actions.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the certificate order.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of CertificateOrderAction, or the result of cls(response)
:rtype: list[~azure.mgmt.web.v2018_02_01.models.CertificateOrderAction]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.CertificateOrderAction"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
accept = "application/json"
# Construct URL
url = self.retrieve_certificate_actions.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[CertificateOrderAction]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
retrieve_certificate_actions.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{name}/retrieveCertificateActions'} # type: ignore
def retrieve_certificate_email_history(
self,
resource_group_name, # type: str
name, # type: str
**kwargs # type: Any
):
# type: (...) -> List["_models.CertificateEmail"]
"""Retrieve email history.
Retrieve email history.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the certificate order.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of CertificateEmail, or the result of cls(response)
:rtype: list[~azure.mgmt.web.v2018_02_01.models.CertificateEmail]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.CertificateEmail"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-02-01"
accept = "application/json"
# Construct URL
url = self.retrieve_certificate_email_history.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[CertificateEmail]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
retrieve_certificate_email_history.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CertificateRegistration/certificateOrders/{name}/retrieveEmailHistory'} # type: ignore
| mit | 3,652,307,642,086,196,700 | 50.503684 | 251 | 0.651395 | false |
laurentb/weboob | modules/dlfp/pages/board.py | 2 | 2106 | # -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Romain Bignon
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
import re
from logging import warning
from weboob.browser.pages import HTMLPage, LoggedPage
class Message(object):
TIMESTAMP_REGEXP = re.compile(r'(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})')
def __init__(self, id, timestamp, login, message, is_me):
self.id = id
self.timestamp = timestamp
self.login = login
self.message = message
self.is_me = is_me
self.norloge = timestamp
m = self.TIMESTAMP_REGEXP.match(timestamp)
if m:
self.norloge = '%02d:%02d:%02d' % (int(m.group(4)),
int(m.group(5)),
int(m.group(6)))
else:
warning('Unable to parse timestamp "%s"' % timestamp)
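# Worked example (values are illustrative): a raw timestamp of '20230115123456'
# matches TIMESTAMP_REGEXP, so the norloge becomes '12:34:56'; a timestamp that
# does not match is kept verbatim as the norloge and a warning is logged.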
class BoardIndexPage(LoggedPage, HTMLPage):
def get_messages(self, last=None):
msgs = []
for post in self.doc.xpath('//post'):
m = Message(int(post.attrib['id']),
post.attrib['time'],
post.find('login').text,
post.find('message').text,
post.find('login').text.lower() == self.browser.username.lower())
if last is not None and last == m.id:
break
msgs.append(m)
return msgs
| lgpl-3.0 | -499,012,757,471,253,700 | 35.310345 | 89 | 0.591168 | false |
bala4901/odoo | addons/l10n_ch/__openerp__.py | 160 | 2936 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
# Financial contributors: Hasa SA, Open Net SA,
# Prisme Solutions Informatique SA, Quod SA
#
# Translation contributors: brain-tec AG, Agile Business Group
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Switzerland - Accounting',
'description': """
Swiss localization
==================
**Multilang swiss STERCHI account chart and taxes**
**Author:** Camptocamp SA
**Financial contributors:** Prisme Solutions Informatique SA, Quod SA
**Translation contributors:** brain-tec AG, Agile Business Group
**This release will introduce major changes to l10n_ch.**
Due to important refactoring needs and Switzerland's adoption of a new international payment standard during 2013-2014, we have reorganised the Swiss localization addons this way:
- **l10n_ch**: Multilang swiss STERCHI account chart and taxes (official addon)
- **l10n_ch_base_bank**: Technical module that introduces a new and simplified version of bank type management
- **l10n_ch_bank**: List of swiss banks
- **l10n_ch_zip**: List of swiss postal zip
- **l10n_ch_dta**: Support of dta payment protocol (will be deprecated end 2014)
- **l10n_ch_payment_slip**: Support of ESR/BVR payment slip report and reconciliation. Report refactored with easy element positioning.
- **l10n_ch_sepa**: Alpha implementation of PostFinance SEPA/PAIN support will be completed during 2013/2014
The modules will soon be available in the OpenERP Swiss localization project on Launchpad:
https://launchpad.net/openerp-swiss-localization
""",
'version': '7.0',
'author': 'Camptocamp',
'category': 'Localization/Account Charts',
'website': 'http://www.camptocamp.com',
'depends': ['account', 'l10n_multilang'],
'data': ['sterchi_chart/account.xml',
'sterchi_chart/vat2011.xml',
'sterchi_chart/fiscal_position.xml' ],
'demo': [],
'test': [],
'auto_install': False,
'installable': True,
'images': ['images/config_chart_l10n_ch.jpeg','images/l10n_ch_chart.jpeg']
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 2,963,038,136,650,359,000 | 44.169231 | 180 | 0.677112 | false |
openstack/barbican | barbican/tests/queue/test_client.py | 1 | 2387 | # Copyright (c) 2013-2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
from barbican import queue
from barbican.queue import client
from barbican.tests import utils
class WhenUsingAsyncTaskClient(utils.BaseTestCase):
"""Test using the asynchronous task client."""
def setUp(self):
super(WhenUsingAsyncTaskClient, self).setUp()
# Mock out the queue get_client() call:
self.mock_client = mock.MagicMock()
self.mock_client.cast.return_value = None
get_client_config = {
'return_value': self.mock_client
}
self.get_client_patcher = mock.patch(
'barbican.queue.get_client',
**get_client_config
)
self.get_client_patcher.start()
self.client = client.TaskClient()
def tearDown(self):
super(WhenUsingAsyncTaskClient, self).tearDown()
self.get_client_patcher.stop()
def test_should_process_type_order(self):
self.client.process_type_order(order_id=self.order_id,
project_id=self.external_project_id,
request_id=self.request_id)
self.mock_client.cast.assert_called_with(
{}, 'process_type_order', order_id=self.order_id,
project_id=self.external_project_id,
request_id=self.request_id)
class WhenCreatingDirectTaskClient(utils.BaseTestCase):
"""Test using the synchronous task client (i.e. standalone mode)."""
def setUp(self):
super(WhenCreatingDirectTaskClient, self).setUp()
queue.get_client = mock.MagicMock(return_value=None)
self.client = client.TaskClient()
def test_should_use_direct_task_client(self):
self.assertIsInstance(self.client._client,
client._DirectTaskInvokerClient)
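# Usage sketch (hypothetical identifiers): the client mirrors the RPC server's
# task signatures and fires them asynchronously via cast():
#
#     task_client = client.TaskClient()
#     task_client.process_type_order(order_id='o1',
#                                    project_id='p1',
#                                    request_id='r1')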
| apache-2.0 | 1,069,328,405,754,065,500 | 34.102941 | 75 | 0.659405 | false |
switchboardOp/ansible | lib/ansible/modules/network/cloudengine/ce_vxlan_arp.py | 46 | 24139 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.0'}
DOCUMENTATION = """
---
module: ce_vxlan_arp
version_added: "2.4"
short_description: Manages ARP attributes of VXLAN on HUAWEI CloudEngine devices.
description:
- Manages ARP attributes of VXLAN on HUAWEI CloudEngine devices.
author: QijunPan (@CloudEngine-Ansible)
options:
evn_bgp:
description:
- Enables EVN BGP.
required: false
choices: ['enable', 'disable']
default: null
evn_source_ip:
description:
- Specifies the source address of an EVN BGP peer.
The value is in dotted decimal notation.
required: false
default: null
evn_peer_ip:
description:
- Specifies the IP address of an EVN BGP peer.
The value is in dotted decimal notation.
required: false
default: null
evn_server:
description:
            - Configures the local device as the route reflector (RR) on the EVN network.
required: false
choices: ['enable', 'disable']
default: null
evn_reflect_client:
description:
- Configures the local device as the route reflector (RR) and its peer as the client.
required: false
choices: ['enable', 'disable']
default: null
vbdif_name:
description:
            - Full name of the VBDIF interface, e.g. Vbdif100.
required: false
default: null
arp_collect_host:
description:
- Enables EVN BGP or BGP EVPN to collect host information.
required: false
choices: ['enable', 'disable']
default: null
host_collect_protocol:
description:
- Enables EVN BGP or BGP EVPN to advertise host information.
required: false
choices: ['bgp','none']
default: null
bridge_domain_id:
description:
- Specifies a BD(bridge domain) ID.
The value is an integer ranging from 1 to 16777215.
required: false
default: null
arp_suppress:
description:
- Enables ARP broadcast suppression in a BD.
required: false
choices: ['enable', 'disable']
default: null
state:
description:
- Determines whether the config should be present or not
on the device.
required: false
default: present
choices: ['present', 'absent']
"""
EXAMPLES = '''
- name: vxlan arp module test
hosts: ce128
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: Configure EVN BGP on Layer 2 and Layer 3 VXLAN gateways to establish EVN BGP peer relationships.
ce_vxlan_arp:
evn_bgp: enable
evn_source_ip: 6.6.6.6
evn_peer_ip: 7.7.7.7
provider: "{{ cli }}"
- name: Configure a Layer 3 VXLAN gateway as a BGP RR.
ce_vxlan_arp:
evn_bgp: enable
evn_server: enable
provider: "{{ cli }}"
- name: Enable EVN BGP on a Layer 3 VXLAN gateway to collect host information.
ce_vxlan_arp:
vbdif_name: Vbdif100
arp_collect_host: enable
provider: "{{ cli }}"
- name: Enable Layer 2 and Layer 3 VXLAN gateways to use EVN BGP to advertise host information.
ce_vxlan_arp:
host_collect_protocol: bgp
provider: "{{ cli }}"
- name: Enable ARP broadcast suppression on a Layer 2 VXLAN gateway.
ce_vxlan_arp:
bridge_domain_id: 100
arp_suppress: enable
provider: "{{ cli }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: verbose mode
type: dict
sample: {"evn_bgp": "enable", "evn_source_ip": "6.6.6.6", "evn_peer_ip":"7.7.7.7", state: "present"}
existing:
description: k/v pairs of existing configuration
returned: verbose mode
type: dict
sample: {"evn_bgp": "disable", "evn_source_ip": null, "evn_peer_ip": []}
end_state:
description: k/v pairs of configuration after module execution
returned: verbose mode
type: dict
sample: {"evn_bgp": "enable", "evn_source_ip": "6.6.6.6", "evn_peer_ip": ["7.7.7.7"]}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["evn bgp",
"source-address 6.6.6.6",
"peer 7.7.7.7"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ce import get_config, load_config
from ansible.module_utils.ce import ce_argument_spec
def is_config_exist(cmp_cfg, test_cfg):
"""is configuration exist"""
if not cmp_cfg or not test_cfg:
return False
return bool(test_cfg in cmp_cfg)
def is_valid_v4addr(addr):
"""check is ipv4 addr is valid"""
if addr.count('.') == 3:
addr_list = addr.split('.')
if len(addr_list) != 4:
return False
for each_num in addr_list:
if not each_num.isdigit():
return False
if int(each_num) > 255:
return False
return True
return False
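# Illustrative checks for the validator above (sample addresses are assumptions):
#     is_valid_v4addr('10.0.0.1')   # -> True
#     is_valid_v4addr('300.1.1.1')  # -> False, octet greater than 255
#     is_valid_v4addr('10.0.0')     # -> False, needs exactly three dots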
def get_evn_peers(config):
"""get evn peer ip list"""
get = re.findall(r"peer ([0-9]+.[0-9]+.[0-9]+.[0-9]+)", config)
if not get:
return None
else:
return list(set(get))
def get_evn_source(config):
    """get evn source address"""
    get = re.findall(
        r"source-address ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", config)
if not get:
return None
else:
return get[0]
def get_evn_reflect_client(config):
"""get evn reflect client list"""
get = re.findall(
r"peer ([0-9]+.[0-9]+.[0-9]+.[0-9]+)\s*reflect-client", config)
if not get:
return None
else:
return list(get)
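# Illustrative behaviour of the parsing helpers above (the sample configuration
# string is an assumption for demonstration; peer order may vary because of set()):
#
#     config = ("evn bgp\n"
#               " source-address 6.6.6.6\n"
#               " peer 7.7.7.7\n"
#               " peer 8.8.8.8 reflect-client\n")
#     get_evn_source(config)          # -> '6.6.6.6'
#     get_evn_peers(config)           # -> ['7.7.7.7', '8.8.8.8']
#     get_evn_reflect_client(config)  # -> ['8.8.8.8']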
class VxlanArp(object):
"""
Manages arp attributes of VXLAN.
"""
def __init__(self, argument_spec):
self.spec = argument_spec
self.module = None
self.init_module()
# module input info
self.evn_bgp = self.module.params['evn_bgp']
self.evn_source_ip = self.module.params['evn_source_ip']
self.evn_peer_ip = self.module.params['evn_peer_ip']
self.evn_server = self.module.params['evn_server']
self.evn_reflect_client = self.module.params['evn_reflect_client']
self.vbdif_name = self.module.params['vbdif_name']
self.arp_collect_host = self.module.params['arp_collect_host']
self.host_collect_protocol = self.module.params[
'host_collect_protocol']
self.bridge_domain_id = self.module.params['bridge_domain_id']
self.arp_suppress = self.module.params['arp_suppress']
self.state = self.module.params['state']
# host info
self.host = self.module.params['host']
self.username = self.module.params['username']
self.port = self.module.params['port']
# state
self.config = "" # current config
self.changed = False
self.updates_cmd = list()
self.commands = list()
self.results = dict()
self.proposed = dict()
self.existing = dict()
self.end_state = dict()
def init_module(self):
"""init module"""
required_together = [("vbdif_name", "arp_collect_host"), ("bridge_domain_id", "arp_suppress")]
self.module = AnsibleModule(argument_spec=self.spec,
required_together=required_together,
supports_check_mode=True)
def cli_load_config(self, commands):
"""load config by cli"""
if not self.module.check_mode:
load_config(self.module, commands)
def get_current_config(self):
"""get current configuration"""
flags = list()
exp = "| ignore-case section include evn bgp|host collect protocol bgp"
if self.vbdif_name:
exp += "|^interface %s$" % self.vbdif_name
if self.bridge_domain_id:
exp += "|^bridge-domain %s$" % self.bridge_domain_id
flags.append(exp)
config = get_config(self.module, flags)
return config
def cli_add_command(self, command, undo=False):
"""add command to self.update_cmd and self.commands"""
if undo and command.lower() not in ["quit", "return"]:
cmd = "undo " + command
else:
cmd = command
self.commands.append(cmd) # set to device
if command.lower() not in ["quit", "return"]:
self.updates_cmd.append(cmd) # show updates result
def config_bridge_domain(self):
"""manage bridge domain configuration"""
if not self.bridge_domain_id:
return
# bridge-domain bd-id
# [undo] arp broadcast-suppress enable
cmd = "bridge-domain %s" % self.bridge_domain_id
if not is_config_exist(self.config, cmd):
self.module.fail_json(msg="Error: Bridge domain %s is not exist." % self.bridge_domain_id)
cmd = "arp broadcast-suppress enable"
exist = is_config_exist(self.config, cmd)
if self.arp_suppress == "enable" and not exist:
self.cli_add_command("bridge-domain %s" % self.bridge_domain_id)
self.cli_add_command(cmd)
self.cli_add_command("quit")
elif self.arp_suppress == "disable" and exist:
self.cli_add_command("bridge-domain %s" % self.bridge_domain_id)
self.cli_add_command(cmd, undo=True)
self.cli_add_command("quit")
def config_evn_bgp(self):
"""enables EVN BGP and configure evn bgp command"""
evn_bgp_view = False
evn_bgp_enable = False
cmd = "evn bgp"
exist = is_config_exist(self.config, cmd)
if self.evn_bgp == "enable" or exist:
evn_bgp_enable = True
# [undo] evn bgp
if self.evn_bgp:
if self.evn_bgp == "enable" and not exist:
self.cli_add_command(cmd)
evn_bgp_view = True
elif self.evn_bgp == "disable" and exist:
self.cli_add_command(cmd, undo=True)
return
# [undo] source-address ip-address
if evn_bgp_enable and self.evn_source_ip:
cmd = "source-address %s" % self.evn_source_ip
exist = is_config_exist(self.config, cmd)
if self.state == "present" and not exist:
if not evn_bgp_view:
self.cli_add_command("evn bgp")
evn_bgp_view = True
self.cli_add_command(cmd)
elif self.state == "absent" and exist:
if not evn_bgp_view:
self.cli_add_command("evn bgp")
evn_bgp_view = True
self.cli_add_command(cmd, undo=True)
# [undo] peer ip-address
# [undo] peer ipv4-address reflect-client
if evn_bgp_enable and self.evn_peer_ip:
cmd = "peer %s" % self.evn_peer_ip
exist = is_config_exist(self.config, cmd)
if self.state == "present":
if not exist:
if not evn_bgp_view:
self.cli_add_command("evn bgp")
evn_bgp_view = True
self.cli_add_command(cmd)
if self.evn_reflect_client == "enable":
self.cli_add_command(
"peer %s reflect-client" % self.evn_peer_ip)
else:
if self.evn_reflect_client:
cmd = "peer %s reflect-client" % self.evn_peer_ip
exist = is_config_exist(self.config, cmd)
if self.evn_reflect_client == "enable" and not exist:
if not evn_bgp_view:
self.cli_add_command("evn bgp")
evn_bgp_view = True
self.cli_add_command(cmd)
elif self.evn_reflect_client == "disable" and exist:
if not evn_bgp_view:
self.cli_add_command("evn bgp")
evn_bgp_view = True
self.cli_add_command(cmd, undo=True)
else:
if exist:
if not evn_bgp_view:
self.cli_add_command("evn bgp")
evn_bgp_view = True
self.cli_add_command(cmd, undo=True)
# [undo] server enable
if evn_bgp_enable and self.evn_server:
cmd = "server enable"
exist = is_config_exist(self.config, cmd)
if self.evn_server == "enable" and not exist:
if not evn_bgp_view:
self.cli_add_command("evn bgp")
evn_bgp_view = True
self.cli_add_command(cmd)
elif self.evn_server == "disable" and exist:
if not evn_bgp_view:
self.cli_add_command("evn bgp")
evn_bgp_view = True
self.cli_add_command(cmd, undo=True)
if evn_bgp_view:
self.cli_add_command("quit")
def config_vbdif(self):
"""configure command at the VBDIF interface view"""
# interface vbdif bd-id
# [undo] arp collect host enable
cmd = "interface %s" % self.vbdif_name.lower().capitalize()
exist = is_config_exist(self.config, cmd)
if not exist:
self.module.fail_json(
msg="Error: Interface %s does not exist." % self.vbdif_name)
cmd = "arp collect host enable"
exist = is_config_exist(self.config, cmd)
if self.arp_collect_host == "enable" and not exist:
self.cli_add_command("interface %s" %
self.vbdif_name.lower().capitalize())
self.cli_add_command(cmd)
self.cli_add_command("quit")
elif self.arp_collect_host == "disable" and exist:
self.cli_add_command("interface %s" %
self.vbdif_name.lower().capitalize())
self.cli_add_command(cmd, undo=True)
self.cli_add_command("quit")
    def config_host_collect_protocol(self):
"""Enable EVN BGP or BGP EVPN to advertise host information"""
# [undo] host collect protocol bgp
cmd = "host collect protocol bgp"
exist = is_config_exist(self.config, cmd)
if self.state == "present":
if self.host_collect_protocol == "bgp" and not exist:
self.cli_add_command(cmd)
elif self.host_collect_protocol == "none" and exist:
self.cli_add_command(cmd, undo=True)
else:
if self.host_collect_protocol == "bgp" and exist:
self.cli_add_command(cmd, undo=True)
def is_valid_vbdif(self, ifname):
"""check is interface vbdif is valid"""
if not ifname.upper().startswith('VBDIF'):
return False
bdid = self.vbdif_name.replace(" ", "").upper().replace("VBDIF", "")
if not bdid.isdigit():
return False
if int(bdid) < 1 or int(bdid) > 16777215:
return False
return True
def check_params(self):
"""Check all input params"""
# bridge domain id check
if self.bridge_domain_id:
if not self.bridge_domain_id.isdigit():
self.module.fail_json(
msg="Error: Bridge domain id is not digit.")
if int(self.bridge_domain_id) < 1 or int(self.bridge_domain_id) > 16777215:
self.module.fail_json(
msg="Error: Bridge domain id is not in the range from 1 to 16777215.")
# evn_source_ip check
if self.evn_source_ip:
if not is_valid_v4addr(self.evn_source_ip):
self.module.fail_json(msg="Error: evn_source_ip is invalid.")
# evn_peer_ip check
if self.evn_peer_ip:
if not is_valid_v4addr(self.evn_peer_ip):
self.module.fail_json(msg="Error: evn_peer_ip is invalid.")
# vbdif_name check
if self.vbdif_name:
self.vbdif_name = self.vbdif_name.replace(
" ", "").lower().capitalize()
if not self.is_valid_vbdif(self.vbdif_name):
self.module.fail_json(msg="Error: vbdif_name is invalid.")
# evn_reflect_client and evn_peer_ip must set at the same time
if self.evn_reflect_client and not self.evn_peer_ip:
self.module.fail_json(
msg="Error: evn_reflect_client and evn_peer_ip must set at the same time.")
# evn_server and evn_reflect_client can not set at the same time
if self.evn_server == "enable" and self.evn_reflect_client == "enable":
self.module.fail_json(
msg="Error: evn_server and evn_reflect_client can not set at the same time.")
def get_proposed(self):
"""get proposed info"""
if self.evn_bgp:
self.proposed["evn_bgp"] = self.evn_bgp
if self.evn_source_ip:
self.proposed["evn_source_ip"] = self.evn_source_ip
if self.evn_peer_ip:
self.proposed["evn_peer_ip"] = self.evn_peer_ip
if self.evn_server:
self.proposed["evn_server"] = self.evn_server
if self.evn_reflect_client:
self.proposed["evn_reflect_client"] = self.evn_reflect_client
if self.arp_collect_host:
self.proposed["arp_collect_host"] = self.arp_collect_host
if self.host_collect_protocol:
self.proposed["host_collect_protocol"] = self.host_collect_protocol
if self.arp_suppress:
self.proposed["arp_suppress"] = self.arp_suppress
if self.vbdif_name:
self.proposed["vbdif_name"] = self.evn_peer_ip
if self.bridge_domain_id:
self.proposed["bridge_domain_id"] = self.bridge_domain_id
self.proposed["state"] = self.state
def get_existing(self):
"""get existing info"""
evn_bgp_exist = is_config_exist(self.config, "evn bgp")
if evn_bgp_exist:
self.existing["evn_bgp"] = "enable"
else:
self.existing["evn_bgp"] = "disable"
if evn_bgp_exist:
if is_config_exist(self.config, "server enable"):
self.existing["evn_server"] = "enable"
else:
self.existing["evn_server"] = "disable"
self.existing["evn_source_ip"] = get_evn_srouce(self.config)
self.existing["evn_peer_ip"] = get_evn_peers(self.config)
self.existing["evn_reflect_client"] = get_evn_reflect_client(
self.config)
if is_config_exist(self.config, "arp collect host enable"):
self.existing["host_collect_protocol"] = "enable"
else:
self.existing["host_collect_protocol"] = "disable"
if is_config_exist(self.config, "host collect protocol bgp"):
self.existing["host_collect_protocol"] = "bgp"
else:
self.existing["host_collect_protocol"] = None
if is_config_exist(self.config, "arp broadcast-suppress enable"):
self.existing["arp_suppress"] = "enable"
else:
self.existing["arp_suppress"] = "disable"
def get_end_state(self):
"""get end state info"""
config = self.get_current_config()
evn_bgp_exist = is_config_exist(config, "evn bgp")
if evn_bgp_exist:
self.end_state["evn_bgp"] = "enable"
else:
self.end_state["evn_bgp"] = "disable"
if evn_bgp_exist:
if is_config_exist(config, "server enable"):
self.end_state["evn_server"] = "enable"
else:
self.end_state["evn_server"] = "disable"
self.end_state["evn_source_ip"] = get_evn_srouce(config)
self.end_state["evn_peer_ip"] = get_evn_peers(config)
self.end_state[
"evn_reflect_client"] = get_evn_reflect_client(config)
if is_config_exist(config, "arp collect host enable"):
self.end_state["host_collect_protocol"] = "enable"
else:
self.end_state["host_collect_protocol"] = "disable"
if is_config_exist(config, "host collect protocol bgp"):
self.end_state["host_collect_protocol"] = "bgp"
else:
self.end_state["host_collect_protocol"] = None
if is_config_exist(config, "arp broadcast-suppress enable"):
self.end_state["arp_suppress"] = "enable"
else:
self.end_state["arp_suppress"] = "disable"
def work(self):
"""worker"""
self.check_params()
self.config = self.get_current_config()
self.get_existing()
self.get_proposed()
# deal present or absent
if self.evn_bgp or self.evn_server or self.evn_peer_ip or self.evn_source_ip:
self.config_evn_bgp()
if self.vbdif_name and self.arp_collect_host:
self.config_vbdif()
if self.host_collect_protocol:
            self.config_host_collect_protocol()
if self.bridge_domain_id and self.arp_suppress:
self.config_bridge_domain()
if self.commands:
self.cli_load_config(self.commands)
self.changed = True
self.get_end_state()
self.results['changed'] = self.changed
self.results['proposed'] = self.proposed
self.results['existing'] = self.existing
self.results['end_state'] = self.end_state
if self.changed:
self.results['updates'] = self.updates_cmd
else:
self.results['updates'] = list()
self.module.exit_json(**self.results)
def main():
"""Module main"""
argument_spec = dict(
evn_bgp=dict(required=False, type='str',
choices=['enable', 'disable']),
evn_source_ip=dict(required=False, type='str'),
evn_peer_ip=dict(required=False, type='str'),
evn_server=dict(required=False, type='str',
choices=['enable', 'disable']),
evn_reflect_client=dict(
required=False, type='str', choices=['enable', 'disable']),
vbdif_name=dict(required=False, type='str'),
arp_collect_host=dict(required=False, type='str',
choices=['enable', 'disable']),
host_collect_protocol=dict(
required=False, type='str', choices=['bgp', 'none']),
bridge_domain_id=dict(required=False, type='str'),
arp_suppress=dict(required=False, type='str',
choices=['enable', 'disable']),
state=dict(required=False, default='present',
choices=['present', 'absent'])
)
argument_spec.update(ce_argument_spec)
module = VxlanArp(argument_spec)
module.work()
if __name__ == '__main__':
main()
| gpl-3.0 | -6,733,892,660,492,759,000 | 34.188047 | 106 | 0.563031 | false |
aptrishu/coala-bears | bears/general/LicenseCheckBear.py | 13 | 1675 | from coalib.bearlib.abstractions.Linter import linter
from dependency_management.requirements.DistributionRequirement import (
DistributionRequirement)
@linter(executable='licensecheck',
output_format='regex',
output_regex=r'.*: .*UNKNOWN$',
result_message='No license found.')
class LicenseCheckBear:
"""
    Checks the given file for a license by searching the start of the file
    for text belonging to various licenses.
    For Ubuntu/Debian users, the ``licensecheck_lines`` option must be used
    together with the ``licensecheck_tail`` option.
"""
LANGUAGES = {'All'}
REQUIREMENTS = {
DistributionRequirement(
apt_get='devscripts',
dnf='licensecheck',
portage=None,
zypper='devscripts',
),
}
AUTHORS = {'The coala developers'}
AUTHORS_EMAILS = {'[email protected]'}
LICENSE = 'AGPL-3.0'
CAN_DETECT = {'License'}
@staticmethod
def create_arguments(filename, file, config_file,
licensecheck_lines: int=60,
licensecheck_tail: int=5000):
"""
:param licensecheck_lines:
Specify how many lines of the file header should be parsed for
license information. Set to 0 to parse the whole file (and ignore
``licensecheck_tail``).
:param licensecheck_tail:
Specify how many bytes to parse at end of file. Set to 0 to disable
parsing from end of file.
"""
return ('--lines', str(licensecheck_lines), '--tail',
str(licensecheck_tail), filename)
| agpl-3.0 | -4,778,393,930,779,427,000 | 35.413043 | 79 | 0.617313 | false |
di0spyr0s/pants | tests/python/pants_test/tasks/test_jar_task.py | 6 | 11198 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import re
from collections import defaultdict
from contextlib import contextmanager
from textwrap import dedent
from six.moves import range
from twitter.common.collections import maybe_list
from pants.backend.jvm.targets.java_agent import JavaAgent
from pants.backend.jvm.targets.jvm_binary import JvmBinary
from pants.backend.jvm.tasks.jar_task import JarTask
from pants.base.build_file_aliases import BuildFileAliases
from pants.goal.products import MultipleRootedProducts
from pants.util.contextutil import open_zip, temporary_dir, temporary_file
from pants.util.dirutil import safe_mkdir, safe_mkdtemp, safe_rmtree
from pants_test.jvm.jar_task_test_base import JarTaskTestBase
class BaseJarTaskTest(JarTaskTestBase):
class TestJarTask(JarTask):
def execute(self):
pass
@classmethod
def task_type(cls):
return cls.TestJarTask
@property
def alias_groups(self):
return super(BaseJarTaskTest, self).alias_groups.merge(BuildFileAliases.create(
targets={
'java_agent': JavaAgent,
'jvm_binary': JvmBinary,
},
))
def setUp(self):
super(BaseJarTaskTest, self).setUp()
self.workdir = safe_mkdtemp()
self.jar_task = self.prepare_execute(self.context())
def tearDown(self):
super(BaseJarTaskTest, self).tearDown()
if self.workdir:
safe_rmtree(self.workdir)
@contextmanager
def jarfile(self):
with temporary_file() as fd:
fd.close()
yield fd.name
def prepare_jar_task(self, context):
return self.prepare_execute(context)
def assert_listing(self, jar, *expected_items):
self.assertEquals(set(['META-INF/', 'META-INF/MANIFEST.MF']) | set(expected_items),
set(jar.namelist()))
class JarTaskTest(BaseJarTaskTest):
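  # Kept deliberately small so that test_overwrite_jars exceeds the limit and
  # exercises jar-tool's @argfile calling convention.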
MAX_SUBPROC_ARGS = 50
def setUp(self):
super(JarTaskTest, self).setUp()
self.set_options(max_subprocess_args=self.MAX_SUBPROC_ARGS)
self.jar_task = self.prepare_jar_task(self.context())
def test_update_write(self):
with temporary_dir() as chroot:
_path = os.path.join(chroot, 'a/b/c')
safe_mkdir(_path)
data_file = os.path.join(_path, 'd.txt')
with open(data_file, 'w') as fd:
fd.write('e')
with self.jarfile() as existing_jarfile:
with self.jar_task.open_jar(existing_jarfile) as jar:
jar.write(data_file, 'f/g/h')
with open_zip(existing_jarfile) as jar:
self.assert_listing(jar, 'f/', 'f/g/', 'f/g/h')
self.assertEquals('e', jar.read('f/g/h'))
def test_update_writestr(self):
def assert_writestr(path, contents, *entries):
with self.jarfile() as existing_jarfile:
with self.jar_task.open_jar(existing_jarfile) as jar:
jar.writestr(path, contents)
with open_zip(existing_jarfile) as jar:
self.assert_listing(jar, *entries)
self.assertEquals(contents, jar.read(path))
assert_writestr('a.txt', b'b', 'a.txt')
assert_writestr('a/b/c.txt', b'd', 'a/', 'a/b/', 'a/b/c.txt')
def test_overwrite_write(self):
with temporary_dir() as chroot:
_path = os.path.join(chroot, 'a/b/c')
safe_mkdir(_path)
data_file = os.path.join(_path, 'd.txt')
with open(data_file, 'w') as fd:
fd.write('e')
with self.jarfile() as existing_jarfile:
with self.jar_task.open_jar(existing_jarfile, overwrite=True) as jar:
jar.write(data_file, 'f/g/h')
with open_zip(existing_jarfile) as jar:
self.assert_listing(jar, 'f/', 'f/g/', 'f/g/h')
self.assertEquals('e', jar.read('f/g/h'))
def test_overwrite_writestr(self):
with self.jarfile() as existing_jarfile:
with self.jar_task.open_jar(existing_jarfile, overwrite=True) as jar:
jar.writestr('README', b'42')
with open_zip(existing_jarfile) as jar:
self.assert_listing(jar, 'README')
self.assertEquals('42', jar.read('README'))
def test_custom_manifest(self):
contents = b'Manifest-Version: 1.0\r\nCreated-By: test\r\n\r\n'
with self.jarfile() as existing_jarfile:
with self.jar_task.open_jar(existing_jarfile, overwrite=True) as jar:
jar.writestr('README', b'42')
with open_zip(existing_jarfile) as jar:
self.assert_listing(jar, 'README')
self.assertEquals('42', jar.read('README'))
self.assertNotEqual(contents, jar.read('META-INF/MANIFEST.MF'))
with self.jar_task.open_jar(existing_jarfile, overwrite=False) as jar:
jar.writestr('META-INF/MANIFEST.MF', contents)
with open_zip(existing_jarfile) as jar:
self.assert_listing(jar, 'README')
self.assertEquals('42', jar.read('README'))
self.assertEquals(contents, jar.read('META-INF/MANIFEST.MF'))
def test_classpath(self):
def manifest_content(classpath):
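      # Per the JAR spec, Class-Path entries are separated by single spaces
      # and manifest lines end with CRLF, which is what jar-tool emits.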
return (b'Manifest-Version: 1.0\r\n' +
b'Class-Path: {}\r\n' +
b'Created-By: org.pantsbuild.tools.jar.JarBuilder\r\n\r\n').format(
' '.join(maybe_list(classpath)))
def assert_classpath(classpath):
with self.jarfile() as existing_jarfile:
        # Note: classpath() always overwrites; there is no update mode.
        # To verify this, first add a random classpath and check that it is
        # replaced by the classpath value supplied afterwards.
with self.jar_task.open_jar(existing_jarfile) as jar:
jar.classpath('something_should_be_overwritten.jar')
with self.jar_task.open_jar(existing_jarfile) as jar:
jar.classpath(classpath)
with open_zip(existing_jarfile) as jar:
self.assertEqual(manifest_content(classpath), jar.read('META-INF/MANIFEST.MF'))
assert_classpath('a.jar')
assert_classpath(['a.jar', 'b.jar'])
def test_update_jars(self):
with self.jarfile() as main_jar:
with self.jarfile() as included_jar:
with self.jar_task.open_jar(main_jar) as jar:
jar.writestr('a/b', b'c')
with self.jar_task.open_jar(included_jar) as jar:
jar.writestr('e/f', b'g')
with self.jar_task.open_jar(main_jar) as jar:
jar.writejar(included_jar)
with open_zip(main_jar) as jar:
self.assert_listing(jar, 'a/', 'a/b', 'e/', 'e/f')
def test_overwrite_jars(self):
with self.jarfile() as main_jar:
with self.jarfile() as included_jar:
with self.jar_task.open_jar(main_jar) as jar:
jar.writestr('a/b', b'c')
with self.jar_task.open_jar(included_jar) as jar:
jar.writestr('e/f', b'g')
        # Create lots of included jars (even though they're all the same) so
        # that the -jars argument to jar-tool exceeds the max_subprocess_args
        # limit and the call switches to the @argfile calling style.
with self.jar_task.open_jar(main_jar, overwrite=True) as jar:
for i in range(self.MAX_SUBPROC_ARGS + 1):
jar.writejar(included_jar)
with open_zip(main_jar) as jar:
self.assert_listing(jar, 'e/', 'e/f')
class JarBuilderTest(BaseJarTaskTest):
def setUp(self):
super(JarBuilderTest, self).setUp()
self.set_options(max_subprocess_args=100)
def _add_to_classes_by_target(self, context, tgt, filename):
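    # Registers `filename` as a compiled-class product of `tgt` so that the
    # JarBuilder under test picks it up when the target is added to a jar.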
class_products = context.products.get_data('classes_by_target',
lambda: defaultdict(MultipleRootedProducts))
java_agent_products = MultipleRootedProducts()
java_agent_products.add_rel_paths(os.path.join(self.build_root,
os.path.dirname(filename)),
[os.path.basename(filename)])
class_products[tgt] = java_agent_products
def test_agent_manifest(self):
self.add_to_build_file('src/java/pants/agents', dedent("""
java_agent(
name='fake_agent',
premain='bob',
agent_class='fred',
can_redefine=True,
can_retransform=True,
can_set_native_method_prefix=True
)""").strip())
java_agent = self.target('src/java/pants/agents:fake_agent')
context = self.context(target_roots=[java_agent])
jar_task = self.prepare_jar_task(context)
classfile = '.pants.d/javac/classes/FakeAgent.class'
self.create_file(classfile, '0xCAFEBABE')
self._add_to_classes_by_target(context, java_agent, classfile)
context.products.safe_create_data('resources_by_target',
lambda: defaultdict(MultipleRootedProducts))
with self.jarfile() as existing_jarfile:
with jar_task.open_jar(existing_jarfile) as jar:
with jar_task.create_jar_builder(jar) as jar_builder:
jar_builder.add_target(java_agent)
with open_zip(existing_jarfile) as jar:
self.assert_listing(jar, 'FakeAgent.class')
self.assertEqual('0xCAFEBABE', jar.read('FakeAgent.class'))
manifest = jar.read('META-INF/MANIFEST.MF').strip()
all_entries = dict(tuple(re.split(r'\s*:\s*', line, 1)) for line in manifest.splitlines())
expected_entries = {
'Agent-Class': 'fred',
'Premain-Class': 'bob',
'Can-Redefine-Classes': 'true',
'Can-Retransform-Classes': 'true',
'Can-Set-Native-Method-Prefix': 'true',
}
self.assertEquals(set(expected_entries.items()),
set(expected_entries.items()).intersection(set(all_entries.items())))
def test_manifest_items(self):
self.add_to_build_file('src/java/hello', dedent("""
jvm_binary(
name='hello',
main='hello.Hello',
manifest_entries = {
'Foo': 'foo-value',
'Implementation-Version': '1.2.3',
},
)""").strip())
binary_target = self.target('src/java/hello:hello')
context = self.context(target_roots=[binary_target])
classfile = '.pants.d/javac/classes/hello/Hello.class'
self.create_file(classfile, '0xDEADBEEF')
self._add_to_classes_by_target(context, binary_target, classfile)
context.products.safe_create_data('resources_by_target',
lambda: defaultdict(MultipleRootedProducts))
jar_task = self.prepare_jar_task(context)
with self.jarfile() as existing_jarfile:
with jar_task.open_jar(existing_jarfile) as jar:
with jar_task.create_jar_builder(jar) as jar_builder:
jar_builder.add_target(binary_target)
with open_zip(existing_jarfile) as jar:
manifest = jar.read('META-INF/MANIFEST.MF').strip()
all_entries = dict(tuple(re.split(r'\s*:\s*', line, 1)) for line in manifest.splitlines())
expected_entries = {
'Foo': 'foo-value',
'Implementation-Version': '1.2.3',
}
self.assertEquals(set(expected_entries.items()),
set(expected_entries.items()).intersection(set(all_entries.items())))
| apache-2.0 | -3,857,164,723,873,663,500 | 35.714754 | 98 | 0.633238 | false |
Intel-tensorflow/tensorflow | tensorflow/python/debug/lib/debug_utils_test.py | 14 | 13535 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for TensorFlow Debugger (tfdbg) Utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.debug.lib import debug_utils
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util
from tensorflow.python.ops import math_ops
# Import resource_variable_ops for the variables-to-tensor implicit conversion.
from tensorflow.python.ops import resource_variable_ops # pylint: disable=unused-import
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
@test_util.run_v1_only("Requires tf.Session")
class DebugUtilsTest(test_util.TensorFlowTestCase):
@classmethod
def setUpClass(cls):
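    # Build one small graph shared by all tests: p1 = matmul(a1, b) and
    # s = p1 + c, where a1 and b are variables and c is a constant.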
cls._sess = session.Session()
with cls._sess:
cls._a_init_val = np.array([[5.0, 3.0], [-1.0, 0.0]])
cls._b_init_val = np.array([[2.0], [-1.0]])
cls._c_val = np.array([[-4.0], [np.nan]])
cls._a_init = constant_op.constant(
cls._a_init_val, shape=[2, 2], name="a1_init")
cls._b_init = constant_op.constant(
cls._b_init_val, shape=[2, 1], name="b_init")
cls._a = variables.VariableV1(cls._a_init, name="a1")
cls._b = variables.VariableV1(cls._b_init, name="b")
cls._c = constant_op.constant(cls._c_val, shape=[2, 1], name="c")
# Matrix product of a and b.
cls._p = math_ops.matmul(cls._a, cls._b, name="p1")
# Sum of two vectors.
cls._s = math_ops.add(cls._p, cls._c, name="s")
cls._graph = cls._sess.graph
# These are all the expected nodes in the graph:
# - Two variables (a, b), each with four nodes (Variable, init, Assign,
# read).
# - One constant (c).
# - One add operation and one matmul operation.
# - One wildcard node name ("*") that covers nodes created internally
# by TensorFlow itself (e.g., Grappler).
cls._expected_num_nodes = 4 * 2 + 1 + 1 + 1 + 1
def setUp(self):
self._run_options = config_pb2.RunOptions()
def _verify_watches(self, watch_opts, expected_output_slot,
expected_debug_ops, expected_debug_urls):
"""Verify a list of debug tensor watches.
    This requires that all watches in the watch list have exactly the same
output_slot, debug_ops and debug_urls.
Args:
watch_opts: Repeated protobuf field of DebugTensorWatch.
expected_output_slot: Expected output slot index, as an integer.
expected_debug_ops: Expected debug ops, as a list of strings.
expected_debug_urls: Expected debug URLs, as a list of strings.
Returns:
List of node names from the list of debug tensor watches.
"""
node_names = []
for watch in watch_opts:
node_names.append(watch.node_name)
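      # The "*" entry is the wildcard watch covering nodes created
      # internally by TensorFlow (e.g., by Grappler); it uses slot -1.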
if watch.node_name == "*":
self.assertEqual(-1, watch.output_slot)
self.assertEqual(expected_debug_ops, watch.debug_ops)
self.assertEqual(expected_debug_urls, watch.debug_urls)
else:
self.assertEqual(expected_output_slot, watch.output_slot)
self.assertEqual(expected_debug_ops, watch.debug_ops)
self.assertEqual(expected_debug_urls, watch.debug_urls)
return node_names
def testAddDebugTensorWatches_defaultDebugOp(self):
debug_utils.add_debug_tensor_watch(
self._run_options, "foo/node_a", 1, debug_urls="file:///tmp/tfdbg_1")
debug_utils.add_debug_tensor_watch(
self._run_options, "foo/node_b", 0, debug_urls="file:///tmp/tfdbg_2")
debug_watch_opts = self._run_options.debug_options.debug_tensor_watch_opts
self.assertEqual(2, len(debug_watch_opts))
watch_0 = debug_watch_opts[0]
watch_1 = debug_watch_opts[1]
self.assertEqual("foo/node_a", watch_0.node_name)
self.assertEqual(1, watch_0.output_slot)
self.assertEqual("foo/node_b", watch_1.node_name)
self.assertEqual(0, watch_1.output_slot)
# Verify default debug op name.
self.assertEqual(["DebugIdentity"], watch_0.debug_ops)
self.assertEqual(["DebugIdentity"], watch_1.debug_ops)
# Verify debug URLs.
self.assertEqual(["file:///tmp/tfdbg_1"], watch_0.debug_urls)
self.assertEqual(["file:///tmp/tfdbg_2"], watch_1.debug_urls)
def testAddDebugTensorWatches_explicitDebugOp(self):
debug_utils.add_debug_tensor_watch(
self._run_options,
"foo/node_a",
0,
debug_ops="DebugNanCount",
debug_urls="file:///tmp/tfdbg_1")
debug_watch_opts = self._run_options.debug_options.debug_tensor_watch_opts
self.assertEqual(1, len(debug_watch_opts))
watch_0 = debug_watch_opts[0]
self.assertEqual("foo/node_a", watch_0.node_name)
self.assertEqual(0, watch_0.output_slot)
    # Verify the explicitly specified debug op.
self.assertEqual(["DebugNanCount"], watch_0.debug_ops)
# Verify debug URLs.
self.assertEqual(["file:///tmp/tfdbg_1"], watch_0.debug_urls)
def testAddDebugTensorWatches_multipleDebugOps(self):
debug_utils.add_debug_tensor_watch(
self._run_options,
"foo/node_a",
0,
debug_ops=["DebugNanCount", "DebugIdentity"],
debug_urls="file:///tmp/tfdbg_1")
debug_watch_opts = self._run_options.debug_options.debug_tensor_watch_opts
self.assertEqual(1, len(debug_watch_opts))
watch_0 = debug_watch_opts[0]
self.assertEqual("foo/node_a", watch_0.node_name)
self.assertEqual(0, watch_0.output_slot)
    # Verify the explicitly specified debug ops.
self.assertEqual(["DebugNanCount", "DebugIdentity"], watch_0.debug_ops)
# Verify debug URLs.
self.assertEqual(["file:///tmp/tfdbg_1"], watch_0.debug_urls)
def testAddDebugTensorWatches_multipleURLs(self):
debug_utils.add_debug_tensor_watch(
self._run_options,
"foo/node_a",
0,
debug_ops="DebugNanCount",
debug_urls=["file:///tmp/tfdbg_1", "file:///tmp/tfdbg_2"])
debug_watch_opts = self._run_options.debug_options.debug_tensor_watch_opts
self.assertEqual(1, len(debug_watch_opts))
watch_0 = debug_watch_opts[0]
self.assertEqual("foo/node_a", watch_0.node_name)
self.assertEqual(0, watch_0.output_slot)
    # Verify the explicitly specified debug op.
self.assertEqual(["DebugNanCount"], watch_0.debug_ops)
# Verify debug URLs.
self.assertEqual(["file:///tmp/tfdbg_1", "file:///tmp/tfdbg_2"],
watch_0.debug_urls)
def testWatchGraph_allNodes(self):
debug_utils.watch_graph(
self._run_options,
self._graph,
debug_ops=["DebugIdentity", "DebugNanCount"],
debug_urls="file:///tmp/tfdbg_1")
debug_watch_opts = self._run_options.debug_options.debug_tensor_watch_opts
self.assertEqual(self._expected_num_nodes, len(debug_watch_opts))
# Verify that each of the nodes in the graph with output tensors in the
# graph have debug tensor watch.
node_names = self._verify_watches(debug_watch_opts, 0,
["DebugIdentity", "DebugNanCount"],
["file:///tmp/tfdbg_1"])
# Verify the node names.
self.assertIn("a1_init", node_names)
self.assertIn("a1", node_names)
self.assertIn("a1/Assign", node_names)
self.assertIn("a1/read", node_names)
self.assertIn("b_init", node_names)
self.assertIn("b", node_names)
self.assertIn("b/Assign", node_names)
self.assertIn("b/read", node_names)
self.assertIn("c", node_names)
self.assertIn("p1", node_names)
self.assertIn("s", node_names)
# Assert that the wildcard node name has been created.
self.assertIn("*", node_names)
def testWatchGraph_nodeNameAllowlist(self):
debug_utils.watch_graph(
self._run_options,
self._graph,
debug_urls="file:///tmp/tfdbg_1",
node_name_regex_allowlist="(a1$|a1_init$|a1/.*|p1$)")
node_names = self._verify_watches(
self._run_options.debug_options.debug_tensor_watch_opts, 0,
["DebugIdentity"], ["file:///tmp/tfdbg_1"])
self.assertEqual(
sorted(["a1_init", "a1", "a1/Assign", "a1/read", "p1"]),
sorted(node_names))
def testWatchGraph_opTypeAllowlist(self):
debug_utils.watch_graph(
self._run_options,
self._graph,
debug_urls="file:///tmp/tfdbg_1",
op_type_regex_allowlist="(Variable|MatMul)")
node_names = self._verify_watches(
self._run_options.debug_options.debug_tensor_watch_opts, 0,
["DebugIdentity"], ["file:///tmp/tfdbg_1"])
self.assertEqual(sorted(["a1", "b", "p1"]), sorted(node_names))
def testWatchGraph_nodeNameAndOpTypeAllowlists(self):
debug_utils.watch_graph(
self._run_options,
self._graph,
debug_urls="file:///tmp/tfdbg_1",
node_name_regex_allowlist="([a-z]+1$)",
op_type_regex_allowlist="(MatMul)")
node_names = self._verify_watches(
self._run_options.debug_options.debug_tensor_watch_opts, 0,
["DebugIdentity"], ["file:///tmp/tfdbg_1"])
self.assertEqual(["p1"], node_names)
def testWatchGraph_tensorDTypeAllowlist(self):
debug_utils.watch_graph(
self._run_options,
self._graph,
debug_urls="file:///tmp/tfdbg_1",
tensor_dtype_regex_allowlist=".*_ref")
node_names = self._verify_watches(
self._run_options.debug_options.debug_tensor_watch_opts, 0,
["DebugIdentity"], ["file:///tmp/tfdbg_1"])
self.assertItemsEqual(["a1", "a1/Assign", "b", "b/Assign"], node_names)
def testWatchGraph_nodeNameAndTensorDTypeAllowlists(self):
debug_utils.watch_graph(
self._run_options,
self._graph,
debug_urls="file:///tmp/tfdbg_1",
node_name_regex_allowlist="^a.*",
tensor_dtype_regex_allowlist=".*_ref")
node_names = self._verify_watches(
self._run_options.debug_options.debug_tensor_watch_opts, 0,
["DebugIdentity"], ["file:///tmp/tfdbg_1"])
self.assertItemsEqual(["a1", "a1/Assign"], node_names)
def testWatchGraph_nodeNameDenylist(self):
debug_utils.watch_graph_with_denylists(
self._run_options,
self._graph,
debug_urls="file:///tmp/tfdbg_1",
node_name_regex_denylist="(a1$|a1_init$|a1/.*|p1$)")
node_names = self._verify_watches(
self._run_options.debug_options.debug_tensor_watch_opts, 0,
["DebugIdentity"], ["file:///tmp/tfdbg_1"])
self.assertEqual(
sorted(["b_init", "b", "b/Assign", "b/read", "c", "s"]),
sorted(node_names))
def testWatchGraph_opTypeDenylist(self):
debug_utils.watch_graph_with_denylists(
self._run_options,
self._graph,
debug_urls="file:///tmp/tfdbg_1",
op_type_regex_denylist="(Variable|Identity|Assign|Const)")
node_names = self._verify_watches(
self._run_options.debug_options.debug_tensor_watch_opts, 0,
["DebugIdentity"], ["file:///tmp/tfdbg_1"])
self.assertEqual(sorted(["p1", "s"]), sorted(node_names))
def testWatchGraph_nodeNameAndOpTypeDenylists(self):
debug_utils.watch_graph_with_denylists(
self._run_options,
self._graph,
debug_urls="file:///tmp/tfdbg_1",
node_name_regex_denylist="p1$",
op_type_regex_denylist="(Variable|Identity|Assign|Const)")
node_names = self._verify_watches(
self._run_options.debug_options.debug_tensor_watch_opts, 0,
["DebugIdentity"], ["file:///tmp/tfdbg_1"])
self.assertEqual(["s"], node_names)
def testWatchGraph_tensorDTypeDenylists(self):
debug_utils.watch_graph_with_denylists(
self._run_options,
self._graph,
debug_urls="file:///tmp/tfdbg_1",
tensor_dtype_regex_denylist=".*_ref")
node_names = self._verify_watches(
self._run_options.debug_options.debug_tensor_watch_opts, 0,
["DebugIdentity"], ["file:///tmp/tfdbg_1"])
self.assertNotIn("a1", node_names)
self.assertNotIn("a1/Assign", node_names)
self.assertNotIn("b", node_names)
self.assertNotIn("b/Assign", node_names)
self.assertIn("s", node_names)
def testWatchGraph_nodeNameAndTensorDTypeDenylists(self):
debug_utils.watch_graph_with_denylists(
self._run_options,
self._graph,
debug_urls="file:///tmp/tfdbg_1",
node_name_regex_denylist="^s$",
tensor_dtype_regex_denylist=".*_ref")
node_names = self._verify_watches(
self._run_options.debug_options.debug_tensor_watch_opts, 0,
["DebugIdentity"], ["file:///tmp/tfdbg_1"])
self.assertNotIn("a1", node_names)
self.assertNotIn("a1/Assign", node_names)
self.assertNotIn("b", node_names)
self.assertNotIn("b/Assign", node_names)
self.assertNotIn("s", node_names)
if __name__ == "__main__":
googletest.main()
| apache-2.0 | 3,162,440,409,753,238,500 | 35.779891 | 88 | 0.644108 | false |