repo_name
stringlengths
5
100
ref
stringlengths
12
67
path
stringlengths
4
244
copies
stringlengths
1
8
content
stringlengths
0
1.05M
chrisfranzen/django
refs/heads/master
django/contrib/auth/models.py
3
from __future__ import unicode_literals import re import warnings from django.core.exceptions import ImproperlyConfigured from django.core.mail import send_mail from django.core import validators from django.db import models from django.db.models.manager import EmptyManager from django.utils.crypto import get_random_string from django.utils.http import urlquote from django.utils import six from django.utils.translation import ugettext_lazy as _ from django.utils import timezone from django.contrib import auth # UNUSABLE_PASSWORD is still imported here for backwards compatibility from django.contrib.auth.hashers import ( check_password, make_password, is_password_usable, UNUSABLE_PASSWORD) from django.contrib.auth.signals import user_logged_in from django.contrib.contenttypes.models import ContentType from django.utils.encoding import python_2_unicode_compatible def update_last_login(sender, user, **kwargs): """ A signal receiver which updates the last_login date for the user logging in. """ user.last_login = timezone.now() user.save(update_fields=['last_login']) user_logged_in.connect(update_last_login) class SiteProfileNotAvailable(Exception): pass class PermissionManager(models.Manager): def get_by_natural_key(self, codename, app_label, model): return self.get( codename=codename, content_type=ContentType.objects.get_by_natural_key(app_label, model), ) @python_2_unicode_compatible class Permission(models.Model): """ The permissions system provides a way to assign permissions to specific users and groups of users. The permission system is used by the Django admin site, but may also be useful in your own code. The Django admin site uses permissions as follows: - The "add" permission limits the user's ability to view the "add" form and add an object. - The "change" permission limits a user's ability to view the change list, view the "change" form and change an object. - The "delete" permission limits the ability to delete an object. 
Permissions are set globally per type of object, not per specific object instance. It is possible to say "Mary may change news stories," but it's not currently possible to say "Mary may change news stories, but only the ones she created herself" or "Mary may only change news stories that have a certain status or publication date." Three basic permissions -- add, change and delete -- are automatically created for each Django model. """ name = models.CharField(_('name'), max_length=50) content_type = models.ForeignKey(ContentType) codename = models.CharField(_('codename'), max_length=100) objects = PermissionManager() class Meta: verbose_name = _('permission') verbose_name_plural = _('permissions') unique_together = (('content_type', 'codename'),) ordering = ('content_type__app_label', 'content_type__model', 'codename') def __str__(self): return "%s | %s | %s" % ( six.text_type(self.content_type.app_label), six.text_type(self.content_type), six.text_type(self.name)) def natural_key(self): return (self.codename,) + self.content_type.natural_key() natural_key.dependencies = ['contenttypes.contenttype'] class GroupManager(models.Manager): """ The manager for the auth's Group model. """ def get_by_natural_key(self, name): return self.get(name=name) @python_2_unicode_compatible class Group(models.Model): """ Groups are a generic way of categorizing users to apply permissions, or some other label, to those users. A user can belong to any number of groups. A user in a group automatically has all the permissions granted to that group. For example, if the group Site editors has the permission can_edit_home_page, any user in that group will have that permission. Beyond permissions, groups are a convenient way to categorize users to apply some label, or extended functionality, to them. 
For example, you could create a group 'Special users', and you could write code that would do special things to those users -- such as giving them access to a members-only portion of your site, or sending them members-only email messages. """ name = models.CharField(_('name'), max_length=80, unique=True) permissions = models.ManyToManyField(Permission, verbose_name=_('permissions'), blank=True) objects = GroupManager() class Meta: verbose_name = _('group') verbose_name_plural = _('groups') def __str__(self): return self.name def natural_key(self): return (self.name,) class BaseUserManager(models.Manager): @classmethod def normalize_email(cls, email): """ Normalize the address by lowercasing the domain part of the email address. """ email = email or '' try: email_name, domain_part = email.strip().rsplit('@', 1) except ValueError: pass else: email = '@'.join([email_name, domain_part.lower()]) return email def make_random_password(self, length=10, allowed_chars='abcdefghjkmnpqrstuvwxyz' 'ABCDEFGHJKLMNPQRSTUVWXYZ' '23456789'): """ Generates a random password with the given length and given allowed_chars. Note that the default value of allowed_chars does not have "I" or "O" or letters and digits that look similar -- just to avoid confusion. """ return get_random_string(length, allowed_chars) def get_by_natural_key(self, username): return self.get(**{self.model.USERNAME_FIELD: username}) class UserManager(BaseUserManager): def create_user(self, username, email=None, password=None, **extra_fields): """ Creates and saves a User with the given username, email and password. 
""" now = timezone.now() if not username: raise ValueError('The given username must be set') email = UserManager.normalize_email(email) user = self.model(username=username, email=email, is_staff=False, is_active=True, is_superuser=False, last_login=now, date_joined=now, **extra_fields) user.set_password(password) user.save(using=self._db) return user def create_superuser(self, username, email, password, **extra_fields): u = self.create_user(username, email, password, **extra_fields) u.is_staff = True u.is_active = True u.is_superuser = True u.save(using=self._db) return u # A few helper functions for common logic between User and AnonymousUser. def _user_get_all_permissions(user, obj): permissions = set() for backend in auth.get_backends(): if hasattr(backend, "get_all_permissions"): if obj is not None: permissions.update(backend.get_all_permissions(user, obj)) else: permissions.update(backend.get_all_permissions(user)) return permissions def _user_has_perm(user, perm, obj): for backend in auth.get_backends(): if hasattr(backend, "has_perm"): if obj is not None: if backend.has_perm(user, perm, obj): return True else: if backend.has_perm(user, perm): return True return False def _user_has_module_perms(user, app_label): for backend in auth.get_backends(): if hasattr(backend, "has_module_perms"): if backend.has_module_perms(user, app_label): return True return False @python_2_unicode_compatible class AbstractBaseUser(models.Model): password = models.CharField(_('password'), max_length=128) last_login = models.DateTimeField(_('last login'), default=timezone.now) is_active = True REQUIRED_FIELDS = [] class Meta: abstract = True def get_username(self): "Return the identifying username for this User" return getattr(self, self.USERNAME_FIELD) def __str__(self): return self.get_username() def natural_key(self): return (self.get_username(),) def is_anonymous(self): """ Always returns False. This is a way of comparing User objects to anonymous users. 
""" return False def is_authenticated(self): """ Always return True. This is a way to tell if the user has been authenticated in templates. """ return True def set_password(self, raw_password): self.password = make_password(raw_password) def check_password(self, raw_password): """ Returns a boolean of whether the raw_password was correct. Handles hashing formats behind the scenes. """ def setter(raw_password): self.set_password(raw_password) self.save(update_fields=["password"]) return check_password(raw_password, self.password, setter) def set_unusable_password(self): # Sets a value that will never be a valid hash self.password = make_password(None) def has_usable_password(self): return is_password_usable(self.password) def get_full_name(self): raise NotImplementedError() def get_short_name(self): raise NotImplementedError() class AbstractUser(AbstractBaseUser): """ An abstract base class implementing a fully featured User model with admin-compliant permissions. Username, password and email are required. Other fields are optional. """ username = models.CharField(_('username'), max_length=30, unique=True, help_text=_('Required. 30 characters or fewer. Letters, numbers and ' '@/./+/-/_ characters'), validators=[ validators.RegexValidator(re.compile('^[\w.@+-]+$'), _('Enter a valid username.'), 'invalid') ]) first_name = models.CharField(_('first name'), max_length=30, blank=True) last_name = models.CharField(_('last name'), max_length=30, blank=True) email = models.EmailField(_('email address'), blank=True) is_staff = models.BooleanField(_('staff status'), default=False, help_text=_('Designates whether the user can log into this admin ' 'site.')) is_active = models.BooleanField(_('active'), default=True, help_text=_('Designates whether this user should be treated as ' 'active. 
Unselect this instead of deleting accounts.')) is_superuser = models.BooleanField(_('superuser status'), default=False, help_text=_('Designates that this user has all permissions without ' 'explicitly assigning them.')) date_joined = models.DateTimeField(_('date joined'), default=timezone.now) groups = models.ManyToManyField(Group, verbose_name=_('groups'), blank=True, help_text=_('The groups this user belongs to. A user will ' 'get all permissions granted to each of ' 'his/her group.')) user_permissions = models.ManyToManyField(Permission, verbose_name=_('user permissions'), blank=True, help_text='Specific permissions for this user.') objects = UserManager() USERNAME_FIELD = 'username' REQUIRED_FIELDS = ['email'] class Meta: verbose_name = _('user') verbose_name_plural = _('users') abstract = True def get_absolute_url(self): return "/users/%s/" % urlquote(self.username) def get_full_name(self): """ Returns the first_name plus the last_name, with a space in between. """ full_name = '%s %s' % (self.first_name, self.last_name) return full_name.strip() def get_short_name(self): "Returns the short name for the user." return self.first_name def get_group_permissions(self, obj=None): """ Returns a list of permission strings that this user has through his/her groups. This method queries all available auth backends. If an object is passed in, only permissions matching this object are returned. """ permissions = set() for backend in auth.get_backends(): if hasattr(backend, "get_group_permissions"): if obj is not None: permissions.update(backend.get_group_permissions(self, obj)) else: permissions.update(backend.get_group_permissions(self)) return permissions def get_all_permissions(self, obj=None): return _user_get_all_permissions(self, obj) def has_perm(self, perm, obj=None): """ Returns True if the user has the specified permission. This method queries all available auth backends, but returns immediately if any backend returns True. 
Thus, a user who has permission from a single auth backend is assumed to have permission in general. If an object is provided, permissions for this specific object are checked. """ # Active superusers have all permissions. if self.is_active and self.is_superuser: return True # Otherwise we need to check the backends. return _user_has_perm(self, perm, obj) def has_perms(self, perm_list, obj=None): """ Returns True if the user has each of the specified permissions. If object is passed, it checks if the user has all required perms for this object. """ for perm in perm_list: if not self.has_perm(perm, obj): return False return True def has_module_perms(self, app_label): """ Returns True if the user has any permissions in the given app label. Uses pretty much the same logic as has_perm, above. """ # Active superusers have all permissions. if self.is_active and self.is_superuser: return True return _user_has_module_perms(self, app_label) def email_user(self, subject, message, from_email=None): """ Sends an email to this User. """ send_mail(subject, message, from_email, [self.email]) def get_profile(self): """ Returns site-specific profile for this user. Raises SiteProfileNotAvailable if this site does not allow profiles. 
""" warnings.warn("The use of AUTH_PROFILE_MODULE to define user profiles has been deprecated.", PendingDeprecationWarning) if not hasattr(self, '_profile_cache'): from django.conf import settings if not getattr(settings, 'AUTH_PROFILE_MODULE', False): raise SiteProfileNotAvailable( 'You need to set AUTH_PROFILE_MODULE in your project ' 'settings') try: app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.') except ValueError: raise SiteProfileNotAvailable( 'app_label and model_name should be separated by a dot in ' 'the AUTH_PROFILE_MODULE setting') try: model = models.get_model(app_label, model_name) if model is None: raise SiteProfileNotAvailable( 'Unable to load the profile model, check ' 'AUTH_PROFILE_MODULE in your project settings') self._profile_cache = model._default_manager.using( self._state.db).get(user__id__exact=self.id) self._profile_cache.user = self except (ImportError, ImproperlyConfigured): raise SiteProfileNotAvailable return self._profile_cache class User(AbstractUser): """ Users within the Django authentication system are represented by this model. Username, password and email are required. Other fields are optional. 
""" class Meta: swappable = 'AUTH_USER_MODEL' @python_2_unicode_compatible class AnonymousUser(object): id = None pk = None username = '' is_staff = False is_active = False is_superuser = False _groups = EmptyManager() _user_permissions = EmptyManager() def __init__(self): pass def __str__(self): return 'AnonymousUser' def __eq__(self, other): return isinstance(other, self.__class__) def __ne__(self, other): return not self.__eq__(other) def __hash__(self): return 1 # instances always return the same hash value def save(self): raise NotImplementedError def delete(self): raise NotImplementedError def set_password(self, raw_password): raise NotImplementedError def check_password(self, raw_password): raise NotImplementedError def _get_groups(self): return self._groups groups = property(_get_groups) def _get_user_permissions(self): return self._user_permissions user_permissions = property(_get_user_permissions) def get_group_permissions(self, obj=None): return set() def get_all_permissions(self, obj=None): return _user_get_all_permissions(self, obj=obj) def has_perm(self, perm, obj=None): return _user_has_perm(self, perm, obj=obj) def has_perms(self, perm_list, obj=None): for perm in perm_list: if not self.has_perm(perm, obj): return False return True def has_module_perms(self, module): return _user_has_module_perms(self, module) def is_anonymous(self): return True def is_authenticated(self): return False
vaygr/ansible
refs/heads/devel
lib/ansible/modules/network/bigswitch/bigmon_policy.py
27
#!/usr/bin/python # -*- coding: utf-8 -*- # Ansible module to manage Big Monitoring Fabric service chains # (c) 2016, Ted Elhourani <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: bigmon_policy author: "Ted (@tedelhourani)" short_description: Create and remove a bigmon out-of-band policy. description: - Create and remove a bigmon out-of-band policy. version_added: "2.3" options: name: description: - The name of the policy. required: true policy_description: description: - Description of policy. action: description: - Forward matching packets to delivery interfaces, Drop is for measure rate of matching packets, but do not forward to delivery interfaces, capture packets and write to a PCAP file, or enable NetFlow generation. default: forward choices: ['forward', 'drop', 'flow-gen'] priority: description: - A priority associated with this policy. The higher priority policy takes precedence over a lower priority. default: 100 duration: description: - Run policy for duration duration or until delivery_packet_count packets are delivered, whichever comes first. default: 0 start_time: description: - Date the policy becomes active default: ansible_date_time.iso8601 delivery_packet_count: description: - Run policy until delivery_packet_count packets are delivered. default: 0 state: description: - Whether the policy should be present or absent. default: present choices: ['present', 'absent'] controller: description: - The controller address. required: true validate_certs: description: - If C(false), SSL certificates will not be validated. This should only be used on personally controlled devices using self-signed certificates. 
required: false default: true choices: [true, false] access_token: description: - Bigmon access token. If this isn't set, the environment variable C(BIGSWITCH_ACCESS_TOKEN) is used. ''' EXAMPLES = ''' - name: policy to aggregate filter and deliver data center (DC) 1 traffic bigmon_policy: name: policy1 policy_description: DC 1 traffic policy action: drop controller: '{{ inventory_hostname }}' state: present validate_certs: false ''' RETURN = ''' # ''' import datetime import os import traceback from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.network.bigswitch.bigswitch import Rest from ansible.module_utils._text import to_native def policy(module): try: access_token = module.params['access_token'] or os.environ['BIGSWITCH_ACCESS_TOKEN'] except KeyError as e: module.fail_json(msg='Unable to load %s' % e.message, exception=traceback.format_exc()) name = module.params['name'] policy_description = module.params['policy_description'] action = module.params['action'] priority = module.params['priority'] duration = module.params['duration'] start_time = module.params['start_time'] delivery_packet_count = module.params['delivery_packet_count'] state = module.params['state'] controller = module.params['controller'] rest = Rest(module, {'content-type': 'application/json', 'Cookie': 'session_cookie=' + access_token}, 'https://' + controller + ':8443/api/v1/data/controller/applications/bigtap') if name is None: module.fail_json(msg='parameter `name` is missing') response = rest.get('policy?config=true', data={}) if response.status_code != 200: module.fail_json(msg="failed to obtain existing policy config: {}".format(response.json['description'])) config_present = False matching = [policy for policy in response.json if policy['name'] == name and policy['duration'] == duration and policy['delivery-packet-count'] == delivery_packet_count and policy['policy-description'] == policy_description and policy['action'] == action and policy['priority'] == 
priority] if matching: config_present = True if state in ('present') and config_present: module.exit_json(changed=False) if state in ('absent') and not config_present: module.exit_json(changed=False) if state in ('present'): data = {'name': name, 'action': action, 'policy-description': policy_description, 'priority': priority, 'duration': duration, 'start-time': start_time, 'delivery-packet-count': delivery_packet_count} response = rest.put('policy[name="%s"]' % name, data=data) if response.status_code == 204: module.exit_json(changed=True) else: module.fail_json(msg="error creating policy '{}': {}".format(name, response.json['description'])) if state in ('absent'): response = rest.delete('policy[name="%s"]' % name, data={}) if response.status_code == 204: module.exit_json(changed=True) else: module.fail_json(msg="error deleting policy '{}': {}".format(name, response.json['description'])) def main(): module = AnsibleModule( argument_spec=dict( name=dict(type='str', required=True), policy_description=dict(type='str', default=''), action=dict(choices=['forward', 'drop', 'capture', 'flow-gen'], default='forward'), priority=dict(type='int', default=100), duration=dict(type='int', default=0), start_time=dict(type='str', default=datetime.datetime.now().isoformat() + '+00:00'), delivery_packet_count=dict(type='int', default=0), controller=dict(type='str', required=True), state=dict(choices=['present', 'absent'], default='present'), validate_certs=dict(type='bool', default='True'), access_token=dict(type='str', no_log=True) ) ) try: policy(module) except Exception as e: module.fail_json(msg=to_native(e), exception=traceback.format_exc()) if __name__ == '__main__': main()
RebeccaWPerry/vispy
refs/heads/master
vispy/app/backends/tests/test_ipynb_util.py
18
# -*- coding: utf-8 -*- # Copyright (c) 2015, Vispy Development Team. # Distributed under the (new) BSD License. See LICENSE.txt for more info. import numpy as np from vispy.app.backends._ipynb_util import (_extract_buffers, _serialize_command, create_glir_message) from vispy.testing import run_tests_if_main, assert_equal def test_extract_buffers(): arr = np.random.rand(10, 2).astype(np.float32) arr2 = np.random.rand(20, 2).astype(np.int16) # No DATA command. commands = [('CREATE', 4, 'VertexBuffer')] commands_modified, buffers = _extract_buffers(commands) assert_equal(commands_modified, commands) assert_equal(buffers, []) # A single DATA command. commands = [('DATA', 4, 0, arr)] commands_modified, buffers = _extract_buffers(commands) assert_equal(commands_modified, [('DATA', 4, 0, {'buffer_index': 0})]) assert_equal(buffers, [arr]) # Several commands. commands = [('DATA', 0, 10, arr), ('UNIFORM', 4, 'u_scale', 'vec3', (1, 2, 3)), ('DATA', 2, 20, arr2) ] commands_modified_expected = [ ('DATA', 0, 10, {'buffer_index': 0}), ('UNIFORM', 4, 'u_scale', 'vec3', (1, 2, 3)), ('DATA', 2, 20, {'buffer_index': 1})] commands_modified, buffers = _extract_buffers(commands) assert_equal(commands_modified, commands_modified_expected) assert_equal(buffers, [arr, arr2]) def test_serialize_command(): command = ('CREATE', 4, 'VertexBuffer') command_serialized = _serialize_command(command) assert_equal(command_serialized, list(command)) command = ('UNIFORM', 4, 'u_scale', 'vec3', (1, 2, 3)) commands_serialized_expected = ['UNIFORM', 4, 'u_scale', 'vec3', [1, 2, 3]] command_serialized = _serialize_command(command) assert_equal(command_serialized, commands_serialized_expected) def test_create_glir_message_binary(): arr = np.zeros((3, 2)).astype(np.float32) arr2 = np.ones((4, 5)).astype(np.int16) commands = [('CREATE', 1, 'VertexBuffer'), ('UNIFORM', 2, 'u_scale', 'vec3', (1, 2, 3)), ('DATA', 3, 0, arr), ('UNIFORM', 4, 'u_pan', 'vec2', np.array([1, 2, 3])), ('DATA', 5, 20, arr2)] msg = 
create_glir_message(commands) assert_equal(msg['msg_type'], 'glir_commands') commands_serialized = msg['commands'] assert_equal(commands_serialized, [['CREATE', 1, 'VertexBuffer'], ['UNIFORM', 2, 'u_scale', 'vec3', [1, 2, 3]], ['DATA', 3, 0, {'buffer_index': 0}], ['UNIFORM', 4, 'u_pan', 'vec2', [1, 2, 3]], ['DATA', 5, 20, {'buffer_index': 1}]]) buffers_serialized = msg['buffers'] buf0 = buffers_serialized[0] assert_equal(buf0, b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00') # noqa buf1 = buffers_serialized[1] assert_equal(buf1, b'\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00') # noqa def test_create_glir_message_base64(): arr = np.zeros((3, 2)).astype(np.float32) arr2 = np.ones((4, 5)).astype(np.int16) commands = [('CREATE', 1, 'VertexBuffer'), ('UNIFORM', 2, 'u_scale', 'vec3', (1, 2, 3)), ('DATA', 3, 0, arr), ('UNIFORM', 4, 'u_pan', 'vec2', np.array([1, 2, 3])), ('DATA', 5, 20, arr2)] msg = create_glir_message(commands, array_serialization='base64') assert_equal(msg['msg_type'], 'glir_commands') commands_serialized = msg['commands'] assert_equal(commands_serialized, [['CREATE', 1, 'VertexBuffer'], ['UNIFORM', 2, 'u_scale', 'vec3', [1, 2, 3]], ['DATA', 3, 0, {'buffer_index': 0}], ['UNIFORM', 4, 'u_pan', 'vec2', [1, 2, 3]], ['DATA', 5, 20, {'buffer_index': 1}]]) buffers_serialized = msg['buffers'] buf0 = buffers_serialized[0] assert_equal(buf0['storage_type'], 'base64') assert_equal(buf0['buffer'], 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA') buf1 = buffers_serialized[1] assert_equal(buf0['storage_type'], 'base64') assert_equal(buf1['buffer'], 'AQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAA==') run_tests_if_main()
jspargo/AneMo
refs/heads/master
django/lib/python2.7/site-packages/django/template/loaders/cached.py
95
""" Wrapper class that takes a list of template loaders as an argument and attempts to load templates from them in order, caching the result. """ import hashlib from django.template.base import TemplateDoesNotExist from django.template.loader import BaseLoader, get_template_from_string, find_template_loader, make_origin from django.utils.encoding import force_bytes class Loader(BaseLoader): is_usable = True def __init__(self, loaders): self.template_cache = {} self.find_template_cache = {} self._loaders = loaders self._cached_loaders = [] @property def loaders(self): # Resolve loaders on demand to avoid circular imports if not self._cached_loaders: # Set self._cached_loaders atomically. Otherwise, another thread # could see an incomplete list. See #17303. cached_loaders = [] for loader in self._loaders: cached_loaders.append(find_template_loader(loader)) self._cached_loaders = cached_loaders return self._cached_loaders def cache_key(self, template_name, template_dirs): if template_dirs: # If template directories were specified, use a hash to differentiate return '-'.join([template_name, hashlib.sha1(force_bytes('|'.join(template_dirs))).hexdigest()]) else: return template_name def find_template(self, name, dirs=None): """ Helper method. 
Lookup the template :param name: in all the configured loaders """ key = self.cache_key(name, dirs) try: result = self.find_template_cache[key] except KeyError: result = None for loader in self.loaders: try: template, display_name = loader(name, dirs) except TemplateDoesNotExist: pass else: result = (template, make_origin(display_name, loader, name, dirs)) break self.find_template_cache[key] = result if result: return result else: self.template_cache[key] = TemplateDoesNotExist raise TemplateDoesNotExist(name) def load_template(self, template_name, template_dirs=None): key = self.cache_key(template_name, template_dirs) template_tuple = self.template_cache.get(key) # A cached previous failure: if template_tuple is TemplateDoesNotExist: raise TemplateDoesNotExist elif template_tuple is None: template, origin = self.find_template(template_name, template_dirs) if not hasattr(template, 'render'): try: template = get_template_from_string(template, origin, template_name) except TemplateDoesNotExist: # If compiling the template we found raises TemplateDoesNotExist, # back off to returning the source and display name for the template # we were asked to load. This allows for correct identification (later) # of the actual template that does not exist. self.template_cache[key] = (template, origin) self.template_cache[key] = (template, None) return self.template_cache[key] def reset(self): "Empty the template cache." self.template_cache.clear() self.find_template_cache.clear()
xmw/hsh-link
refs/heads/master
hsh-link.py
1
# vim: tabstop=4 # vim: set fileencoding=utf-8 # copyright Michael Weber (michael at xmw dot de) 2014 from config import STORAGE_DIR, LINK_DIR, FILE_SIZE_MAX, MIME_ALLOWED, BASE_PROTO, BASE_PATH, COOKIE_SECRET, THEMES OUTPUT = 'raw', 'html', 'long', 'short', 'qr', 'qr_png', 'qr_utf8', 'qr_ascii' import base64, hashlib, io, ipaddress, magic, mod_python.util, mod_python.apache, mod_python.Cookie, os, PIL.ImageOps, qrencode, re, time import fixup try: import pyclamd clamav = pyclamd.ClamdAgnostic() clamav.ping() except (ValueError, NameError): clamav = None hsh = lambda s: base64.urlsafe_b64encode(hashlib.sha1(s.encode()).digest()).decode().rstrip('=') def subdirfn(repo, fn): if fn.count('/') or len(fn) < 4: return ['', ''] return os.path.join(repo, fn[0:2], fn[2:4]), fn def is_storage(repo, fn): return os.path.exists(os.path.join(*subdirfn(repo, fn))) def read_storage(repo, fn): fn = os.path.join(*subdirfn(repo, fn)) if os.path.exists(fn): f = open(fn, 'r') content = f.read() f.close() return content return None def write_storage(repo, fn, data): d, fn = subdirfn(repo, fn) if not os.path.exists(d): os.makedirs(d, mode=0o770) f = open(os.path.join(d, fn), 'w') f.write(data) f.close() def get_link_history(link, ret=None): data = read_storage(LINK_DIR, hsh(link)) if data: ret = [] for line in data.rstrip('\n').split('\n'): rev_, hash_ = line.split('\t', 1) ret.append((int(rev_), hash_)) return ret def get_link(link, rev=None, ret=(None, None)): data = get_link_history(link) if data: if rev != None: for (rev_, data_) in data[::-1]: if rev_ == rev: return rev_, data_ else: return data[-1] return ret def append_link_history(link, data_hash): hist = get_link_history(link, []) if len(hist): num = hist[-1][0] + 1 else: num = 0 hist.append((num, data_hash)) write_storage(LINK_DIR, hsh(link), '\n'.join(map(lambda s: '%i\t%s' % (s[0], s[1]), hist)) + '\n') return num def find_storage(repo, partfn): d, partfn = subdirfn(repo, partfn) if not os.path.exists(d): return None cand = 
list(filter(lambda s: s.startswith(partfn), os.listdir(d))) if not cand: return None return sorted(cand, key=lambda fn: os.lstat(os.path.join(d, fn)).st_ctime)[0] def uniq_name(repo, fn): for i in range(4, len(fn)): if find_storage(repo, fn[:i]) == fn: return fn[:i] return fn def mptcp2ipaddress(s): if len(s) == 32: return ipaddress.ip_address(( '{06}{07}{04}{05}:{02}{03}{00}{01}:' + '{14}{15}{12}{13}:{10}{11}{08}{09}:' + '{22}{23}{20}{21}:{18}{19}{16}{17}:' + '{30}{31}{28}{29}:{26}{27}{24}{25}' ).format(*s)) elif len(s) == 8: return ipaddress.ip_address('.'.join(map(lambda s: str(int(s, 16)), ('{6}{7}.{4}{5}.{2}{3}.{0}{1}'.format(*s)).split('.')))) else: raise ValueError("Unsupported IP address length") def is_mptcp(req): ip = ipaddress.ip_address(req.subprocess_env['REMOTE_ADDR']) port = int(req.subprocess_env['REMOTE_PORT']) for line in open('/proc/net/mptcp', 'r').readlines()[1:]: mp_ip, mp_port = line.split()[5].split(':') if ip == mptcp2ipaddress(mp_ip) and port == int(mp_port, 16): return True return False def handler(req): debug = [] err = lambda s: debug.append(str(s) + '<br>') path = req.unparsed_uri[len(BASE_PATH):].split('?')[0] if path == 'robots.txt' or path.startswith('.artwork/'): # fall back to next apache handler return mod_python.apache.DECLINED if not req.method in ('GET', 'POST', 'PUT'): return mod_python.apache.HTTP_BAD_REQUEST req.add_common_vars() var = mod_python.util.FieldStorage(req) def get_last_value(name, ret=None): cand = var.getlist(name) if len(cand): ret = cand[-1].value if isinstance(ret, bytes): ret = ret.decode() return ret try: rev = int(get_last_value('rev')) except TypeError: rev=None except ValueError: return mod_python.apache.HTTP_BAD_REQUEST # understand path, link+revision link = get_last_value('link') or None # empty form input if link: rev, data_hash = get_link(link, rev, get_link(link)) else: if len(path) == 0: data_hash = hsh('') elif is_storage(STORAGE_DIR, path): data_hash = path elif get_link(path)[1]: link = path 
rev, data_hash = get_link(link, rev, get_link(link)) elif find_storage(STORAGE_DIR, path): data_hash = find_storage(STORAGE_DIR, path) else: data_hash = None # load data data = read_storage(STORAGE_DIR, data_hash or '') or '' # handle new data if req.method == 'PUT': content = req.read() else: content = get_last_value('content', None) if req.method == 'POST': if get_last_value('linefeed') == 'unix': content = content.replace('\r\n', '\n') if content != None: if get_last_value('append') in (None, '0'): data = '' data = data + content if len(data) > FILE_SIZE_MAX: return mod_python.apache.HTTP_REQUEST_ENTITY_TOO_LARGE data_hash = hsh(data) if not is_storage(STORAGE_DIR, data_hash): if clamav and clamav.scan_stream(data.encode()): return mod_python.apache.HTTP_FORBIDDEN write_storage(STORAGE_DIR, data_hash, data) #update link if link != None and data_hash != None: if data_hash != get_link(link, rev)[1]: rev = append_link_history(link, data_hash) if link == None and data_hash != None and not find_storage(STORAGE_DIR, path): link = path rev = append_link_history(link, data_hash) if data_hash == None: return mod_python.apache.HTTP_NOT_FOUND #guess output format agent = req.headers_in.get('User-Agent', '').lower() if list(filter(lambda a: agent.count(a), ('mozilla', 'opera', 'validator', 'w3m', 'lynx', 'links'))): agent, output = 'graphic', 'html' else: agent, output = 'text', 'raw' if content: output = 'long' output = get_last_value('output', output) if output == 'qr': output = agent == 'graphic' and 'qr_png' or 'qr_utf8' # update browser url? 
BASE_URL = BASE_PROTO + req.headers_in['Host'] + BASE_PATH if get_last_value('output', 'html') == 'html': if link == None: short_hash = uniq_name(STORAGE_DIR, data_hash) if path != data_hash and path != short_hash: mod_python.util.redirect(req, "%s%s" % (BASE_URL, short_hash), text="%s%s\n" % (BASE_URL, data_hash)) else: if path != link: mod_python.util.redirect(req, "%s%s" % (BASE_URL, link), text="%s%s\n" % (BASE_URL, data_hash)) # url shortener and mime magic if not req.headers_in.get('referer', '').startswith(BASE_URL): m = re.compile('^(?:http|https|ftp)://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+$') if m.match(data): mod_python.util.redirect(req, data.rstrip(), permanent=True, text=data) return mod_python.apache.OK m = magic.Magic(magic.MAGIC_MIME).from_buffer(data).decode() if m.startswith('image/') or m == 'application/pdf': output = 'raw' #output req.content_type = "text/plain; charset=utf-8" text = [] out = text.append if output == 'html': # handle theme theme = THEMES[1] cookie = mod_python.Cookie.get_cookie(req, 'theme', mod_python.Cookie.MarshalCookie, secret=COOKIE_SECRET) if type(cookie) is mod_python.Cookie.MarshalCookie: if cookie.value in THEMES: theme = cookie.value if get_last_value('theme') in THEMES: theme = get_last_value('theme') cookie = mod_python.Cookie.MarshalCookie( 'theme', theme, secret=COOKIE_SECRET) cookie.expires = time.time() + 86400 * 365 mod_python.Cookie.add_cookie(req, cookie) req.content_type = "text/html; charset=utf-8" out('<!DOCTYPE html>\n\n<html>\n<head>') out('<meta http-equiv="Content-Type" content="text/html; ' 'charset=utf-8">') out('<link rel="stylesheet" type="text/css" ' 'href="%s.artwork/hsh-link-basic.css">' % BASE_PATH) out('<link rel="stylesheet" type="text/css" ' 'href="%s.artwork/hsh-link-%s.css">' % (BASE_PATH, theme)) out('<script src="%s.artwork/hsh-link.js"></script>' % BASE_PATH) out('<title>%s</title>\n</head>' % BASE_URL) out('<body onLoad="body_loaded()">\n<div 
class="container">') out('<form action="%s" method="POST" enctype="multipart/form-data">' % BASE_PATH) out('<div class="control">' '<a href="%s" title="start from scratch">clear</a>' % BASE_PATH) short_hash = data_hash and uniq_name(STORAGE_DIR, data_hash) out('<a title="short hash of content: %s" href="%s%s">short</a>' % (short_hash, BASE_PATH, short_hash)) out('<a title="long hash of content: %s" href="%s%s">long</a>' % (data_hash, BASE_PATH, data_hash)) if link: out('link=<a title="latest revision of link: %s" ' 'href="%s%s">%s</a>' % (link, BASE_PATH, link, link)) out('<input type="hidden" name="link" value="%s">' % link) if rev != None and get_link(link, rev - 1, None): out('rev=<a title="previous revision of link: %s" ' 'href="?rev=%s">-</a>' % (rev - 1, rev - 1)) else: out('rev=&nbsp;') out('<a title="current revision of link: %s" ' 'href="?rev=%i">%i</a>' % ( rev or 0, rev or 0, rev or 0)) if rev != None and get_link(link, rev + 1, None): out('<a title="next revision of link: %s" ' 'href="?rev=%s">+</a>' % (rev + 1, rev + 1)) else: out('&nbsp;') else: out('link=<input type="text" placeholder="add a tag/symlink ..." ' 'name="link" class="link" value="">') out('linefeed=<select name="linefeed" id="linefeed" ' 'onchange="data_modified()">') lf = (data and data.count('\r\n')) and ('', ' selected') or (' selected', '') out('<option value="unix"%s>unix</option><option value="dos"%s>dos</option></select>' % lf) out(' output=<select name="output" id="output" onchange="output_selected()">') for output_ in OUTPUT: out('<option value="%s"%s>%s</option>' % (output_, output == output_ and ' selected' or '', output_)) out('</select>') out('<input type="submit" id="save" title="save changes" ' 'value="save"> </div>') out('<div class="text"><textarea placeholder="Start typing ..." 
' 'name="content" id="content" onclick="update_lineno()" ' 'onkeyup="update_lineno()" oninput="data_modified()">%s</textarea>' '</div>' % data) out('<div class="footer">(c) <a href="http://xmw.de/">xmw.de</a> 2014 ' '<a href="https://github.com/xmw/hsh-link">sources</a> ' '<a href="http://validator.w3.org/check?uri=referer">html5</a> ' '<a href="http://jigsaw.w3.org/css-validator/check/referer">css3</a> ' 'theme=<a href="?theme=xmw">xmw</a> ' '<a href="?theme=white">white</a>' ' line=<input type="text" name="lineno" ' 'id="lineno" value="1" size="4" readonly>') out('<a href="http://www.multipath-tcp.org/">mptcp</a>=%s' % (is_mptcp(req) and 'yes' or 'no')) out('mimetype=%s' % magic.Magic(magic.MAGIC_MIME).from_buffer(data.encode()).decode()) out('</div></form></div>\n</body>\n</html>\n') elif output in ('qr_png', 'qr_ascii', 'qr_utf8'): ver, s, img = qrencode.encode(BASE_URL + (link or data_hash or ''), level=qrencode.QR_ECLEVEL_L, hint=qrencode.QR_MODE_8, case_sensitive=True) img = PIL.ImageOps.expand(img, border=1, fill='white') if output == 'qr_png': img = img.resize((s * 8, s * 8), PIL.Image.NEAREST) req.content_type = "image/png; charset=utf-8" img.save(req, 'PNG') elif output == 'qr_ascii': sym = (' ', '@@') for y in range(img.size[1]): out(''.join(map(lambda x: sym[img.getpixel((x,y)) != 0], range(img.size[0])))) out('') elif output == 'qr_utf8': sym = (' ', '▄') , ('▀', '█') for y in range(img.size[1]//2): out(''.join(map(lambda x: sym[img.getpixel((x,y*2)) != 0][img.getpixel((x,y*2+1)) != 0], range(img.size[0])))) if img.size[1] % 2: out(''.join(map(lambda x: sym[img.getpixel((x,img.size[1]-1)) != 0][0], range(img.size[0])))) out('') elif output == 'raw': req.content_type = '' out(data) elif output == 'long': out("%s%s\n" % (BASE_URL, data_hash)) elif output == 'short': out("%s%s\n" % (BASE_URL, uniq_name(STORAGE_DIR, data_hash))) else: return mod_python.apache.HTTP_BAD_REQUEST text += debug req.write("\n".join(text)) return mod_python.apache.OK
SergiySW/RaiWalletBot
refs/heads/master
transaction_booster.py
1
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Nano Telegram bot # @NanoWalletBot https://t.me/NanoWalletBot # # Source code: # https://github.com/SergiySW/NanoWalletBot # # Released under the BSD 3-Clause License # # # Run by cron sometimes during distribution period # or manually in case of unsynchronization # import urllib3, certifi, json import time frontiers_url = 'https://raiwallet.info/api/frontiers.json' # Request to node from common_rpc import * # frontiers check to boost transactions def checker(): # list frontiers frontiers = rpc({"action":"frontiers","account":"xrb_1111111111111111111111111111111111111111111111111111hifc8npp","count":"1000000"}, 'frontiers') #http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',ca_certs=certifi.where()) #response = http.request('GET', frontiers_url, timeout=20.0) #remote_frontiers = json.loads(response.data)['frontiers'] # remote list frontiers remote_frontiers = rpc_remote({"action":"frontiers","account":"xrb_1111111111111111111111111111111111111111111111111111hifc8npp","count":"1000000"}, 'frontiers') for account in frontiers: frontier = frontiers[account] try: remote_frontier = remote_frontiers[account] # if frontiers not same if (not (frontier == remote_frontier)): rpc({"action":"republish","hash":remote_frontier}, 'success') print("Hash {0} republished".format(remote_frontier)) time.sleep(0.3) except KeyError, IndexError: # doesn't exist blocks = rpc({"action":"chain","block":frontier,"count":32}, 'blocks') hash = blocks[len(blocks) - 1] rpc({"action":"republish","hash":hash}, 'success') time.sleep(0.3) for account in remote_frontiers: remote_frontier = remote_frontiers[account] try: frontier = frontiers[account] # if frontiers not same if (not (frontier == remote_frontier)): rpc_remote({"action":"republish","hash":frontier}, 'success') print("Hash {0} republished remotely".format(frontier)) time.sleep(0.3) # insert blocks blocks = rpc_remote({"action":"successors","block":frontier,"count":128}, 'blocks') for 
block in blocks: if (not (frontier == block)): content = rpc_remote({"action":"block","hash":block}, 'contents') try: rpc({"action":"process","block":content}, '') except Exception as e: time.sleep(0.1) print("Block {0} processed".format(block)) except KeyError, IndexError: # doesn't exist blocks = rpc_remote({"action":"chain","block":remote_frontier,"count":32}, 'blocks') hash = blocks[len(blocks) - 1] rpc_remote({"action":"republish","hash":hash}, 'success') time.sleep(0.3) def starter(): time_start = int(time.time()) checker() time_end = int(time.time()) total_time = time_end - time_start print('Total time: {0}'.format(total_time)) checker()
fotinakis/sentry
refs/heads/master
src/sentry/api/endpoints/organization_member_details.py
5
from __future__ import absolute_import from django.db import transaction from django.db.models import Q from rest_framework import serializers from rest_framework.response import Response from sentry import roles from sentry.api.bases.organization import ( OrganizationEndpoint, OrganizationPermission ) from sentry.api.exceptions import ResourceDoesNotExist from sentry.models import ( AuditLogEntryEvent, AuthIdentity, AuthProvider, OrganizationMember ) ERR_NO_AUTH = 'You cannot remove this member with an unauthenticated API request.' ERR_INSUFFICIENT_ROLE = 'You cannot remove a member who has more access than you.' ERR_INSUFFICIENT_SCOPE = 'You are missing the member:delete scope.' ERR_ONLY_OWNER = 'You cannot remove the only remaining owner of the organization.' ERR_UNINVITABLE = 'You cannot send an invitation to a user who is already a full member.' class OrganizationMemberSerializer(serializers.Serializer): reinvite = serializers.BooleanField() class RelaxedMemberPermission(OrganizationPermission): scope_map = { 'GET': ['member:read', 'member:write', 'member:delete'], 'POST': ['member:write', 'member:delete'], 'PUT': ['member:write', 'member:delete'], # DELETE checks for role comparison as you can either remove a member # with a lower access role, or yourself, without having the req. 
scope 'DELETE': ['member:read', 'member:write', 'member:delete'], } class OrganizationMemberDetailsEndpoint(OrganizationEndpoint): permission_classes = [RelaxedMemberPermission] def _get_member(self, request, organization, member_id): if member_id == 'me': queryset = OrganizationMember.objects.filter( organization=organization, user__id=request.user.id, user__is_active=True, ) else: queryset = OrganizationMember.objects.filter( Q(user__is_active=True) | Q(user__isnull=True), organization=organization, id=member_id, ) return queryset.select_related('user').get() def _is_only_owner(self, member): if member.role != roles.get_top_dog().id: return False queryset = OrganizationMember.objects.filter( organization=member.organization_id, role=roles.get_top_dog().id, user__isnull=False, user__is_active=True, ).exclude(id=member.id) if queryset.exists(): return False return True def put(self, request, organization, member_id): try: om = self._get_member(request, organization, member_id) except OrganizationMember.DoesNotExist: raise ResourceDoesNotExist serializer = OrganizationMemberSerializer(data=request.DATA, partial=True) if not serializer.is_valid(): return Response(status=400) has_sso = AuthProvider.objects.filter( organization=organization, ).exists() result = serializer.object # XXX(dcramer): if/when this expands beyond reinvite we need to check # access level if result.get('reinvite'): if om.is_pending: om.send_invite_email() elif has_sso and not getattr(om.flags, 'sso:linked'): om.send_sso_link_email() else: # TODO(dcramer): proper error message return Response({'detail': ERR_UNINVITABLE}, status=400) return Response(status=204) def delete(self, request, organization, member_id): try: om = self._get_member(request, organization, member_id) except OrganizationMember.DoesNotExist: raise ResourceDoesNotExist if request.user.is_authenticated() and not request.is_superuser(): try: acting_member = OrganizationMember.objects.get( organization=organization, 
user=request.user, ) except OrganizationMember.DoesNotExist: return Response({'detail': ERR_INSUFFICIENT_ROLE}, status=400) else: if acting_member != om: if not request.access.has_scope('member:delete'): return Response({'detail': ERR_INSUFFICIENT_SCOPE}, status=400) elif not roles.can_manage(acting_member.role, om.role): return Response({'detail': ERR_INSUFFICIENT_ROLE}, status=400) # TODO(dcramer): do we even need this check? elif not request.access.has_scope('member:delete'): return Response({'detail': ERR_INSUFFICIENT_SCOPE}, status=400) if self._is_only_owner(om): return Response({'detail': ERR_ONLY_OWNER}, status=403) audit_data = om.get_audit_log_data() with transaction.atomic(): AuthIdentity.objects.filter( user=om.user, auth_provider__organization=organization, ).delete() om.delete() self.create_audit_entry( request=request, organization=organization, target_object=om.id, target_user=om.user, event=AuditLogEntryEvent.MEMBER_REMOVE, data=audit_data, ) return Response(status=204)
cybaek/twisted-network-programming-essentials-2nd-edition-python3
refs/heads/master
ch3/adderrback.py
1
from twisted.internet.defer import Deferred def myErrback(failure): print(failure) d = Deferred() d.addErrback(myErrback) d.errback(Exception("Triggering callback."))
j-fuentes/werkzeug
refs/heads/master
examples/simplewiki/database.py
45
# -*- coding: utf-8 -*- """ simplewiki.database ~~~~~~~~~~~~~~~~~~~ The database. :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details. :license: BSD. """ from datetime import datetime from sqlalchemy import Table, Column, Integer, String, DateTime, \ ForeignKey, MetaData, join from sqlalchemy.orm import relation, create_session, scoped_session, \ mapper from simplewiki.utils import application, local_manager, parse_creole # create a global metadata metadata = MetaData() def new_db_session(): """ This function creates a new session if there is no session yet for the current context. It looks up the application and if it finds one it creates a session bound to the active database engine in that application. If there is no application bound to the context it raises an exception. """ return create_session(application.database_engine, autoflush=True, autocommit=False) # and create a new global session factory. Calling this object gives # you the current active session session = scoped_session(new_db_session, local_manager.get_ident) # our database tables. page_table = Table('pages', metadata, Column('page_id', Integer, primary_key=True), Column('name', String(60), unique=True) ) revision_table = Table('revisions', metadata, Column('revision_id', Integer, primary_key=True), Column('page_id', Integer, ForeignKey('pages.page_id')), Column('timestamp', DateTime), Column('text', String), Column('change_note', String(200)) ) class Revision(object): """ Represents one revision of a page. This is useful for editing particular revision of pages or creating new revisions. It's also used for the diff system and the revision log. 
""" query = session.query_property() def __init__(self, page, text, change_note='', timestamp=None): if isinstance(page, (int, long)): self.page_id = page else: self.page = page self.text = text self.change_note = change_note self.timestamp = timestamp or datetime.utcnow() def render(self): """Render the page text into a genshi stream.""" return parse_creole(self.text) def __repr__(self): return '<%s %r:%r>' % ( self.__class__.__name__, self.page_id, self.revision_id ) class Page(object): """ Represents a simple page without any revisions. This is for example used in the page index where the page contents are not relevant. """ query = session.query_property() def __init__(self, name): self.name = name @property def title(self): return self.name.replace('_', ' ') def __repr__(self): return '<%s %r>' % (self.__class__.__name__, self.name) class RevisionedPage(Page, Revision): """ Represents a wiki page with a revision. Thanks to multiple inhertiance and the ability of SQLAlchemy to map to joins we can combine `Page` and `Revision` into one class here. """ query = session.query_property() def __init__(self): raise TypeError('cannot create WikiPage instances, use the Page and ' 'Revision classes for data manipulation.') def __repr__(self): return '<%s %r:%r>' % ( self.__class__.__name__, self.name, self.revision_id ) # setup mappers mapper(Revision, revision_table) mapper(Page, page_table, properties=dict( revisions=relation(Revision, backref='page', order_by=Revision.revision_id.desc()) )) mapper(RevisionedPage, join(page_table, revision_table), properties=dict( page_id=[page_table.c.page_id, revision_table.c.page_id], ))
cosmoharrigan/pylearn2
refs/heads/master
pylearn2/devtools/__init__.py
147
__authors__ = "Ian Goodfellow" __copyright__ = "Copyright 2010-2012, Universite de Montreal" __credits__ = ["Ian Goodfellow"] __license__ = "3-clause BSD" __maintainer__ = "LISA Lab" __email__ = "pylearn-dev@googlegroups"
PhiInnovations/mdp28-linux-bsp
refs/heads/master
scripts/pybootchartgui/pybootchartgui/__init__.py
12133432
kylef/lithium
refs/heads/master
lithium/blog/templatetags/__init__.py
12133432
Suninus/erpnext
refs/heads/develop
erpnext/accounts/report/item_wise_purchase_register/__init__.py
12133432
helenst/django
refs/heads/master
tests/one_to_one/__init__.py
12133432
gradel/mezzanine
refs/heads/master
mezzanine/galleries/migrations/__init__.py
12133432
pkolios/qlutter-todo
refs/heads/master
qlutter_todo/tests/__init__.py
12133432
embedded1/django-oscar-paypal
refs/heads/master
paypal/payflow/admin.py
14
from django.contrib import admin from paypal.payflow import models class TxnAdmin(admin.ModelAdmin): list_display = ['pnref', 'comment1', 'amount', 'get_trxtype_display', 'get_tender_display', 'result', 'respmsg', 'date_created'] readonly_fields = [ 'trxtype', 'tender', 'amount', 'pnref', 'ppref', 'cvv2match', 'comment1', 'avsaddr', 'avszip', 'result', 'respmsg', 'authcode', 'request', 'response', 'raw_request', 'raw_response', 'response_time', 'date_created', ] admin.site.register(models.PayflowTransaction, TxnAdmin)
simonflueckiger/tesserocr-windows_build
refs/heads/master
res/tesserocr/__init__.py
2
from ._tesserocr import *
igsr/igsr_analysis
refs/heads/master
Utils/RunProgram.py
1
''' Created on 03 May 2018 @author: ernesto ''' import subprocess import re import os class RunProgram(object): """ SuperClass used to run an external program within a Python script """ def __init__(self, program=None, path=None, args=None, arg_sep=None, parameters=None, cmd_line=None, downpipe=None, log_name=None, log_file=None): """ Constructor Parameters ---------- program : str, default Program to be run. path : str, optional Folder containing the 'program'. args : list, optional List of named tuples tuples formed by an argument (or option) and its respective value: i.e. arg=namedtuple('Argument', 'option value'). parameters : list, optional List of parameters ['c','d']. arg_sep : char, default=' ' char used as a separator between argument and value. i.e. if '=' then we will get 'a'=1 Default is a single whitespace (i.e. ' '). cmd_line : str, optional String with command line to run. downpipe: list, optional List of RunProgram objects that will be executed in a pipe after self.program has been executed. log_name: str, optional Name of the logger. log_file: filename, optional Path to the file that will be used by the logging library. 
""" self.cmd_line = cmd_line self.program = program if self.program is None and self.cmd_line is None: raise ValueError("Parameter 'cmd_line' or 'program' must be provided.") self.path = path self.args = args self.arg_sep = arg_sep if arg_sep is not None else ' ' self.parameters = parameters self.downpipe = downpipe self.log_name = log_name self.log_file = log_file # create the command line if is None if self.cmd_line is None: self.cmd_line = self.create_command_line() def create_command_line(self): """ :return: """ if self.path is not None: cmd_line = [os.path.join(self.path, self.program)] else: cmd_line = [self.program] # construct the command line if self.args is not None: cmd_line.append(' '.join([f"{option}{self.arg_sep}{parameter}" for option, parameter in self.args])) if self.parameters is not None: cmd_line.append(' '.join(["{0}".format(param) for param in self.parameters])) if self.downpipe is not None: cmd_line.append(' '.join(["| {0}".format(runO.cmd_line) for runO in self.downpipe])) return ' '.join(cmd_line) def run_popen(self, raise_exc=True): """ Run self.program using subprocess Popen method (see https://docs.python.org/2/library/subprocess.html#module-subprocess) Parameters ---------- raise_exc: bool, default=True If true, then raise an Exception when error is found. 
Returns ------- tuple A tuple containing the STDOUT and STDERR from this program """ log_f = open(self.log_file, 'w') if self.log_file is not None else None # execute cmd_line p = subprocess.Popen(self.cmd_line, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, bufsize=256, universal_newlines=True) # stderr patt = re.compile('#* ERROR|Error') is_exception = False stderr = "" for line in p.stderr: line = str(line.rstrip()) stderr += line + "\n" if log_f is not None: log_f.write(line + "\n") m = patt.match(line) if m: is_exception = True # stdout stdout = "" for line in p.stdout: line = str(line.rstrip()) stdout += line + "\n" if is_exception is True and raise_exc is True: raise Exception(stderr) if log_f is not None: log_f.close() return (stdout, stderr, is_exception) def run_checkoutput(self): """ Run self.program using subprocess check_output method (see https://docs.python.org/2/library/subprocess.html#module-subprocess) Returns ------- Stdout produced after running the program """ try: stdout = subprocess.check_output(self.cmd_line, shell=True) except subprocess.CalledProcessError as e: raise return stdout
halfak/Wiki-Class
refs/heads/master
articlequality/utilities/fetch_text.py
3
""" ``$ articlequality fetch_text -h`` :: Fetches text & metadata for labelings using a MediaWiki API. Usage: fetch_text --api-host=<url> [--labelings=<path>] [--output=<path>] [--verbose] Options: -h --help Show this documentation. --api-host=<url> The hostname of a MediaWiki e.g. "https://en.wikipedia.org" --labelings=<path> Path to a containting observations with extracted labels. [default: <stdin>] --output=<path> Path to a file to write new observations (with text) out to. [default: <stdout>] --verbose Prints dots and stuff to stderr """ import sys import mwapi from docopt import docopt from revscoring.utilities.util import dump_observation, read_observations from .extract_text import not_an_article def main(argv=None): args = docopt(__doc__, argv=argv) if args['--labelings'] == '<stdin>': labelings = read_observations(sys.stdin) else: labelings = read_observations(open(args['--labelings'])) if args['--output'] == '<stdout>': output = sys.stdout else: output = open(args['--output']) session = mwapi.Session(args['--api-host'], user_agent="ArticleQuality fetch_text utility.") verbose = args['--verbose'] run(labelings, output, session, verbose) def run(labelings, output, session, verbose): for labeling in fetch_text(session, labelings, verbose=verbose): if labeling['text'] is not None: dump_observation(labeling, output) def fetch_text(session, labelings, verbose=False): """ Fetches article text and metadata for labelings from a MediaWiki API. :Parameters: session : :class:`mwapi.Session` An API session to use for querying labelings : `iterable`(`dict`) A collection of labeling events to add text to verbose : `bool` Print dots and stuff :Returns: An `iterator` of labelings augmented with 'page_id', 'rev_id', 'text', 'page_title' and 'talk_page_title'. Note that labelings of articles that aren't found will not be included. 
""" for labeling in labelings: talk_info = get_talk_page_info(session, labeling['talk_page_id']) try: rev_doc = get_last_rev_before(session, talk_info['subjectid'], labeling['timestamp']) except (KeyError, TypeError): # No subject page found continue if rev_doc is None: if verbose: sys.stderr.write("?") sys.stderr.write( str(labeling['talk_page_id']) + " " + labeling['timestamp']) sys.stderr.flush() else: if verbose: sys.stderr.write(".") sys.stderr.flush() # Update the talk page title in case it was moved after the dump labeling['talk_page_title'] = talk_info['title'] labeling['page_id'] = rev_doc['page'].get("pageid") labeling['page_title'] = rev_doc['page'].get("title") labeling['rev_id'] = rev_doc.get("revid") text = rev_doc['slots']["main"].get("content") if not_an_article(text): labeling['text'] = None else: labeling['text'] = text yield labeling if verbose: sys.stderr.write("\n") sys.stderr.flush() def get_talk_page_info(session, talk_page_id): doc = session.get(action="query", prop="info", pageids=talk_page_id, inprop="subjectid", formatversion=2) try: return doc['query']['pages'][0] except (KeyError, IndexError): # No info found return None def get_last_rev_before(session, page_id, timestamp): doc = session.get(action="query", prop="revisions", pageids=page_id, rvstart=timestamp, rvlimit=1, rvdir="older", rvprop=["ids", "content"], rvslots=["main"], redirects=True, formatversion=2) try: page_doc = doc['query']['pages'][0] except (KeyError, IndexError): # No pages found return None try: rev_doc = page_doc['revisions'][0] rev_doc['page'] = {k: v for k, v in page_doc.items() if k != "revisions"} except (KeyError, IndexError): # No revisions matched return None return rev_doc
ThinkOpen-Solutions/odoo
refs/heads/stable
addons/account_test/account_test.py
342
# -*- encoding: utf-8 -*- ############################################################################## # # Copyright (c) 2005-2006 TINY SPRL. (http://tiny.be) All Rights Reserved. # # $Id: product_expiry.py 4304 2006-10-25 09:54:51Z ged $ # # WARNING: This program as such is intended to be used by professional # programmers who take the whole responsability of assessing all potential # consequences resulting from its eventual inadequacies and bugs # End users who are looking for a ready-to-use solution with commercial # garantees and support are strongly adviced to contract a Free Software # Service Company # # This program is Free Software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # ############################################################################## from openerp.osv import fields, osv CODE_EXEC_DEFAULT = '''\ res = [] cr.execute("select id, code from account_journal") for record in cr.dictfetchall(): res.append(record['code']) result = res ''' class accounting_assert_test(osv.osv): _name = "accounting.assert.test" _order = "sequence" _columns = { 'name': fields.char('Test Name', required=True, select=True, translate=True), 'desc': fields.text('Test Description', select=True, translate=True), 'code_exec': fields.text('Python code', required=True), 'active': fields.boolean('Active'), 'sequence': fields.integer('Sequence'), } _defaults = { 'code_exec': CODE_EXEC_DEFAULT, 'active': True, 'sequence': 10, }
40223102/w17b_test
refs/heads/master
static/Brython3.1.1-20150328-091302/Lib/browser/timer.py
610
from browser import window def wrap(func): # Transforms a function f into another function that prints a # traceback in case of exception def f(*args, **kw): try: return func(*args, **kw) except Exception as exc: sys.stderr.write(exc) return f clear_interval = window.clearInterval clear_timeout = window.clearTimeout def set_interval(func,interval): return window.setInterval(wrap(func),interval) def set_timeout(func,interval): return int(window.setTimeout(wrap(func),interval)) def request_animation_frame(func): return int(window.requestAnimationFrame(func)) def cancel_animation_frame(int_id): window.cancelAnimationFrame(int_id)
RyanBeatty/Steer-Clear-Backend
refs/heads/master
steerclear/utils/cas.py
3
""" cas.py By: Ryan Beatty Enable automatic login to W&M CAS server by emulating a browser to perform login """ import mechanize from urllib2 import HTTPError # url of the login route for the W&M CAS server WM_CAS_SERVER_URL = 'https://cas.wm.edu/cas/login?service=http%3A//localhost%3A8080/login/' """ validate_user ------------- Checks if a Users' username and password credentials are valid, meaning that they are a valid W&M student and can use the steerclear service """ def validate_user(username, password): # create new browser object browser = mechanize.Browser() # disable robots.txt and automatic redirects # disabling redirects means that a successful login # to the CAS server (which returns a 302) will raise an exception. # a failed login attempt will not raise any error browser.set_handle_robots(False) browser.set_handle_redirect(False) # open login page browser.open(WM_CAS_SERVER_URL) # select the login form browser.select_form(nr=0) # fill in the form with the users username and password browser['username'] = username browser['password'] = password try: # submit the form browser.submit() except HTTPError: # if exception is raised, that means a 302 status code # was returned, signifing a successful login return True # failed to login, so user submitted invalid credentials return False
arante/pyloc
refs/heads/master
py3/cs-circle/pendulum.py
2
#! /usr/bin/env python # Billy Wilson Arante # 12/21/2015 """ In physics, for a pendulum with length L and initial angle A, its horizontal displacement X(T) at time T is given by the formula: X(T) = L × cos(A × cos(T × √9.8/L)) - L × cos(A) """ import math L = float(input()) A = float(input()) T = 0 X = 0.0 while (T <= 9): X = L * math.cos(A * math.cos(T * math.sqrt(9.8 / L))) - L * math.cos(A) print(X) T = T + 1
programadorjc/django
refs/heads/master
django/utils/checksums.py
310
""" Common checksum routines. """ __all__ = ['luhn'] import warnings from django.utils import six from django.utils.deprecation import RemovedInDjango110Warning warnings.warn( "django.utils.checksums will be removed in Django 1.10. The " "luhn() function is now included in django-localflavor 1.1+.", RemovedInDjango110Warning ) LUHN_ODD_LOOKUP = (0, 2, 4, 6, 8, 1, 3, 5, 7, 9) # sum_of_digits(index * 2) def luhn(candidate): """ Checks a candidate number for validity according to the Luhn algorithm (used in validation of, for example, credit cards). Both numeric and string candidates are accepted. """ if not isinstance(candidate, six.string_types): candidate = str(candidate) try: evens = sum(int(c) for c in candidate[-1::-2]) odds = sum(LUHN_ODD_LOOKUP[int(c)] for c in candidate[-2::-2]) return ((evens + odds) % 10 == 0) except ValueError: # Raised if an int conversion fails return False
pierg75/pier-sosreport
refs/heads/master
sos/plugins/nis.py
1
# This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin class Nis(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin): """Network information service """ plugin_name = 'nis' profiles = ('identity', 'services') files = ('/var/yp', '/etc/ypserv.conf') packages = ('ypserv') def setup(self): self.add_copy_spec([ "/etc/yp*.conf", "/var/yp/*" ]) self.add_cmd_output("domainname") # vim: set et ts=4 sw=4 :
suiyuan2009/tensorflow
refs/heads/master
tensorflow/contrib/slim/python/slim/nets/resnet_is_training_test.py
50
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Specifying is_training in resnet_arg_scope is being deprecated.

Test that everything behaves as expected in the meantime.

Note: This test modifies the layers.batch_norm function.
Other tests that use layers.batch_norm may not work if added to this file.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensorflow.contrib import layers
from tensorflow.contrib.framework.python.ops import add_arg_scope
from tensorflow.contrib.framework.python.ops import arg_scope
from tensorflow.contrib.slim.python.slim.nets import resnet_utils
from tensorflow.contrib.slim.python.slim.nets import resnet_v1
from tensorflow.contrib.slim.python.slim.nets import resnet_v2
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test


def create_test_input(batch, height, width, channels):
  """Create test input tensor.

  If any dimension is None a float32 placeholder of that (partially
  unknown) shape is returned; otherwise a deterministic constant tensor
  whose value at [b, h, w, c] is h + w is built, so results are
  reproducible across runs.
  """
  if None in [batch, height, width, channels]:
    return array_ops.placeholder(dtypes.float32,
                                 (batch, height, width, channels))
  else:
    return math_ops.to_float(
        np.tile(
            np.reshape(
                np.reshape(np.arange(height), [height, 1]) +
                np.reshape(np.arange(width), [1, width]),
                [1, height, width, 1]), [batch, 1, 1, channels]))


class ResnetIsTrainingTest(test.TestCase):
  # Strategy: temporarily replace layers.batch_norm with a wrapper that
  # asserts on the is_training kwarg it receives, then build networks under
  # every combination of the deprecated resnet_arg_scope(is_training=...)
  # argument and the network-level is_training argument. The network-level
  # argument must always win; the scope default is used only when the
  # network-level one is not given.

  def _testDeprecatingIsTraining(self, network_fn):
    # Keep the real implementation so the wrappers still build valid graphs
    # and so it can be restored at the end.
    batch_norm_fn = layers.batch_norm

    @add_arg_scope
    def batch_norm_expect_is_training(*args, **kwargs):
      assert kwargs['is_training']
      return batch_norm_fn(*args, **kwargs)

    @add_arg_scope
    def batch_norm_expect_is_not_training(*args, **kwargs):
      assert not kwargs['is_training']
      return batch_norm_fn(*args, **kwargs)

    global_pool = True
    num_classes = 10
    inputs = create_test_input(2, 224, 224, 3)

    # Default argument for resnet_arg_scope
    layers.batch_norm = batch_norm_expect_is_training
    with arg_scope(resnet_utils.resnet_arg_scope()):
      network_fn(inputs, num_classes, global_pool=global_pool, scope='resnet1')

    layers.batch_norm = batch_norm_expect_is_training
    with arg_scope(resnet_utils.resnet_arg_scope()):
      network_fn(
          inputs,
          num_classes,
          is_training=True,
          global_pool=global_pool,
          scope='resnet2')

    layers.batch_norm = batch_norm_expect_is_not_training
    with arg_scope(resnet_utils.resnet_arg_scope()):
      network_fn(
          inputs,
          num_classes,
          is_training=False,
          global_pool=global_pool,
          scope='resnet3')

    # resnet_arg_scope with is_training set to True (deprecated)
    layers.batch_norm = batch_norm_expect_is_training
    with arg_scope(resnet_utils.resnet_arg_scope(is_training=True)):
      network_fn(inputs, num_classes, global_pool=global_pool, scope='resnet4')

    layers.batch_norm = batch_norm_expect_is_training
    with arg_scope(resnet_utils.resnet_arg_scope(is_training=True)):
      network_fn(
          inputs,
          num_classes,
          is_training=True,
          global_pool=global_pool,
          scope='resnet5')

    layers.batch_norm = batch_norm_expect_is_not_training
    with arg_scope(resnet_utils.resnet_arg_scope(is_training=True)):
      network_fn(
          inputs,
          num_classes,
          is_training=False,
          global_pool=global_pool,
          scope='resnet6')

    # resnet_arg_scope with is_training set to False (deprecated)
    layers.batch_norm = batch_norm_expect_is_not_training
    with arg_scope(resnet_utils.resnet_arg_scope(is_training=False)):
      network_fn(inputs, num_classes, global_pool=global_pool, scope='resnet7')

    layers.batch_norm = batch_norm_expect_is_training
    with arg_scope(resnet_utils.resnet_arg_scope(is_training=False)):
      network_fn(
          inputs,
          num_classes,
          is_training=True,
          global_pool=global_pool,
          scope='resnet8')

    layers.batch_norm = batch_norm_expect_is_not_training
    with arg_scope(resnet_utils.resnet_arg_scope(is_training=False)):
      network_fn(
          inputs,
          num_classes,
          is_training=False,
          global_pool=global_pool,
          scope='resnet9')

    # Restore the real batch_norm so later tests in the process are unaffected.
    layers.batch_norm = batch_norm_fn

  def testDeprecatingIsTrainingResnetV1(self):
    self._testDeprecatingIsTraining(resnet_v1.resnet_v1_50)

  def testDeprecatingIsTrainingResnetV2(self):
    self._testDeprecatingIsTraining(resnet_v2.resnet_v2_50)


if __name__ == '__main__':
  test.main()
elcodigok/mtff
refs/heads/master
lib/iplib/setup.py
1
#!/usr/bin/env python
"""Distutils installer for the iplib module (IPv4 notation conversions)."""

import sys
from distutils.core import setup

long_desc = """You can use this Python module to convert amongst many different notations and to manage couples of address/netmask in the CIDR notation. """

classifiers = """\
Development Status :: 4 - Beta
Development Status :: 5 - Production/Stable
Environment :: Console
Environment :: No Input/Output (Daemon)
Intended Audience :: Developers
Intended Audience :: End Users/Desktop
License :: OSI Approved :: GNU General Public License (GPL)
Natural Language :: English
Operating System :: OS Independent
Programming Language :: Python
Topic :: Software Development :: Libraries :: Python Modules
Topic :: System :: Networking
Topic :: Internet
Topic :: Utilities
"""

# Base metadata accepted by every supported Python version.
params = {
    'name': 'iplib',
    'version': '1.1',
    'description': 'convert amongst many different IPv4 notations',
    'long_description': long_desc,
    'author': 'Davide Alberani',
    'author_email': '[email protected]',
    'maintainer': 'Davide Alberani',
    'maintainer_email': '[email protected]',
    'url': 'http://erlug.linux.it/~da/soft/iplib/',
    'license': 'GPL',
    'py_modules': ['iplib'],
    'scripts': ['./bin/cidrinfo.py', './bin/ipconv.py', './bin/nmconv.py'],
}

# Keyword/platform metadata appeared in distutils with Python 2.1.
if sys.version_info >= (2, 1):
    params['keywords'] = ['ip', 'address', 'quad', 'dot', 'notation',
                          'binary', 'octal', 'hexadecimal', 'netmask',
                          'cidr', 'internet']
    params['platforms'] = 'any'

# download_url and classifiers appeared with Python 2.3.
if sys.version_info >= (2, 3):
    params['download_url'] = 'http://erlug.linux.it/~da/soft/iplib/'
    params['classifiers'] = [line for line in classifiers.split("\n") if line]

setup(**params)
Webcampak/v1.0
refs/heads/master
src/bin/wpakPhidget.py
1
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright 2010-2012 Infracom & Eurotechnia ([email protected])
# This file is part of the Webcampak project.
# Webcampak is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.

# Webcampak is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.

# You should have received a copy of the GNU General Public License along with Webcampak.
# If not, see http://www.gnu.org/licenses/

import os, sys, smtplib, datetime, tempfile, subprocess, datetime, shutil, time, ftplib
import getopt
import time
import smtplib
import zipfile
import socket
import urllib
import pwd
import locale
import gettext

from wpakErrorManagement import ErrorManagement

# The class contains functions used to manipulate the Phidget LCD/IO board
# (display text, toggle backlight, read analog sensors, power-cycle the
# camera) by shelling out to an external phidget helper binary via sudo.
class Phidget:
    def __init__(self, c, cfgcurrentsource, g, debug, cfgnow, cmdType, ErrorManagement):
        #gettext.install('webcampak')
        self.C = c                                # per-source configuration accessor
        self.Cfgcurrentsource = cfgcurrentsource  # identifier of the current capture source
        self.G = g                                # global configuration accessor
        self.Debug = debug                        # debug/console logger
        # Absolute path of the phidget helper binary, assembled from config.
        self.Cfgphidgetbin = self.G.getConfig('cfgbasedir') + self.G.getConfig('cfgbindir') + self.G.getConfig('cfgphidgetbin')
        # Unix timestamp of the capture start ("%s" is a platform-specific
        # strftime extension -- works on glibc).
        self.Cfgnowtimestamp = cfgnow.strftime("%s")
        self.ErrorManagement = ErrorManagement

    # Function: Display
    # Description: Receives text to be displayed on the phidget LCD, formats
    #   it and forwards it to SetDisplay. When FirstLine == 0 the first line
    #   is replaced by an elapsed-time counter plus a 12-slot progress bar
    #   scaled against the duration of the latest capture.
    # Return: Nothing
    def Display(self, FirstLine, SecondLine):
        # Format the text to send to the Phidget.
        if self.G.getConfig('cfgphidgetactivate') == "yes":
            if FirstLine == 0: # Caller asked for elapsed time + progress bar.
                cfgtmp = datetime.datetime.now()
                cfgtmptimestamp = cfgtmp.strftime("%s")
                # Seconds elapsed since capture start.
                CaptureInterTime = int(cfgtmptimestamp) - int(self.Cfgnowtimestamp)
                CaptureInterGlobalTime = int(self.ErrorManagement.LatestCaptureTime())
                if CaptureInterGlobalTime > 0:
                    # Progress in 12ths of the last known capture duration.
                    CaptureProgress = int(CaptureInterTime * 12 / CaptureInterGlobalTime)
                else:
                    # No previous capture duration known: assume 45 seconds.
                    CaptureProgress = int(CaptureInterTime * 12 / 45)
                StatusBar = "[" # e.g. [##########__]
                for j in range(1, 13):
                    if j <= round(CaptureProgress):
                        StatusBar = StatusBar + "#"
                    else:
                        StatusBar = StatusBar + "_"
                StatusBar = StatusBar + "]"
                if CaptureInterGlobalTime <= 0:
                    StatusBar = _("[_CALIBRAGE_]")
                FirstLine = str(CaptureInterTime) + "s " + StatusBar
            Phidget.SetDisplay(self, FirstLine, SecondLine)

    # Function: SetDisplay
    # Description: Sends text to be displayed on the phidget LCD via the
    #   helper binary (one sudo call per line).
    ## DispFirstLine: Text to display on the first line
    ## DispSecondLine: Text to display on the second line
    # Return: Nothing
    # NOTE(review): PhidgetError is assigned locally below but never read or
    # declared global, so the "ERREUR" detection currently has no effect --
    # confirm intent (the commented-out "#global PhidgetError" suggests an
    # abandoned mechanism).
    def SetDisplay(self, DispFirstLine, DispSecondLine):
        #global PhidgetError
        if os.path.isfile(self.Cfgphidgetbin): # and PhidgetError == False:
            if self.G.getConfig('cfgphidgetdisplaysource') == self.Cfgcurrentsource:
                if DispFirstLine != "":
                    cfgtmp = datetime.datetime.now()
                    cfgtmptimestamp = cfgtmp.strftime("%s")
                    CaptureInterTime = int(cfgtmptimestamp) - int(self.Cfgnowtimestamp)
                    try:
                        Command = "sudo " + self.Cfgphidgetbin + " LCD_1 '" + str(DispFirstLine) + "' "
                        import shlex, subprocess
                        args = shlex.split(Command)
                        p = subprocess.Popen(args,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
                        output, errors = p.communicate()
                        if "ERREUR" in output:
                            PhidgetError = True
                    except:
                        self.Debug.Display(_("Phidget: First Line: Missing device"))
                if DispSecondLine != "":
                    try:
                        Command = "sudo " + self.Cfgphidgetbin + " LCD_2 '" + str(DispSecondLine) + "' "
                        import shlex, subprocess
                        args = shlex.split(Command)
                        p = subprocess.Popen(args,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
                        output, errors = p.communicate()
                        if "ERREUR" in output:
                            PhidgetError = True
                    except:
                        self.Debug.Display(_("Phidget: Second Line: Missing device"))

    # Function: SetBackLight
    # Description: Activates or deactivates the LCD backlight.
    ## Set: value forwarded to the helper binary's LCD_backlight command
    # Return: Nothing
    def SetBackLight(self, Set):
        if self.G.getConfig('cfgphidgetactivate') == "yes":
            #global PhidgetError
            if os.path.isfile(self.Cfgphidgetbin): # and PhidgetError == False:
                if self.G.getConfig('cfgphidgetdisplaysource') == self.Cfgcurrentsource:
                    try:
                        Command = "sudo " + self.Cfgphidgetbin + ' LCD_backlight ' + str(Set)
                        import shlex, subprocess
                        args = shlex.split(Command)
                        p = subprocess.Popen(args,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
                        output, errors = p.communicate()
                        if "ERREUR" in output:
                            PhidgetError = True
                    except:
                        self.Debug.Display(_("Phidget: Backlight: Missing device"))

    # Function: GetSensor
    # Description: Reads an analog sensor through the helper binary.
    ## Sensor: analog input index passed to "getanalog"
    # Return: Sensor value as a stripped string.
    # NOTE(review): when phidget support is disabled or the binary is missing
    # this falls through and implicitly returns None -- callers must handle it.
    def GetSensor(self, Sensor):
        if self.G.getConfig('cfgphidgetactivate') == "yes":
            #global PhidgetError
            #if os.path.isfile(self.Cfgphidgetbin) and PhidgetError == False:
            #print self.Cfgphidgetbin
            if os.path.isfile(self.Cfgphidgetbin):
                #try:
                Command = "sudo " + self.Cfgphidgetbin + ' getanalog ' + str(Sensor)
                import shlex, subprocess
                args = shlex.split(Command)
                p = subprocess.Popen(args,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
                output, errors = p.communicate()
                if "ERREUR" in output:
                    PhidgetError = True
                #self.Debug.Display(output)
                self.Debug.Display(errors)
                SensorValue = output.strip()
                return SensorValue
                #except:
                #    self.Debug.Display(_("Phidget: Missing device"))

    # Function: CameraRestart
    # Description: Power-cycles the camera through a phidget-controlled relay:
    #   cuts the power socket (setout), waits 5 seconds, restores it (clrout).
    # Return: Nothing
    def CameraRestart(self):
        if self.G.getConfig('cfgphidgetactivate') == "yes":
            PhidgetPort = self.C.getConfig('cfgphidgetcameraport')
            self.Debug.Display(_("Phidget: Shutting down the camera (power socket cut)"))
            Phidget.Display(self, _("-- Camera Error --"), _("5 sec. Pwr cut"))
            Command = "sudo " + self.Cfgphidgetbin + ' setout ' + str(PhidgetPort)
            os.system(Command)
            self.Debug.Display(_("Phidget: 5 seconds break"))
            time.sleep(5)
            Phidget.Display(self, _("-- Camera Error --"), _("Restart Camera"))
            Command = "sudo " + self.Cfgphidgetbin + ' clrout ' + str(PhidgetPort)
            os.system(Command)
            self.Debug.Display(_("Phidget: Powering on the Camera (power socket uncut)"))
dushu1203/chromium.src
refs/heads/nw12
components/policy/resources/PRESUBMIT.py
51
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# If this presubmit check fails or misbehaves, please complain to
# [email protected], [email protected] or [email protected].

import itertools
import sys
import xml.dom.minidom


def _GetPolicyTemplates(template_path):
  """Returns a flat list of policy dicts parsed from policy_templates.json,
  with members of 'group' entries flattened in alongside top-level policies."""
  # Read list of policies in the template. eval() is used instead of a JSON
  # parser because policy_templates.json is not quite JSON, and uses some
  # python features such as #-comments and '''strings'''. policy_templates.json
  # is actually maintained as a python dictionary.
  with open(template_path) as f:
    template_data = eval(f.read(), {})
  # Top-level (non-group) policies.
  policies = ( policy
               for policy in template_data['policy_definitions']
               if policy['type'] != 'group' )
  # Lists of member policies, one list per group entry.
  groups = ( policy['policies']
             for policy in template_data['policy_definitions']
             if policy['type'] == 'group' )
  subpolicies = ( policy
                  for group in groups
                  for policy in group )
  return list(itertools.chain(policies, subpolicies))


def _CheckPolicyTemplatesSyntax(input_api, output_api):
  """Runs the policy template syntax checker, but only when
  policy_templates.json is among the affected files."""
  local_path = input_api.PresubmitLocalPath()
  filepath = input_api.os_path.join(local_path, 'policy_templates.json')
  if any(f.AbsoluteLocalPath() == filepath
         for f in input_api.AffectedFiles()):
    old_sys_path = sys.path
    try:
      tools_path = input_api.os_path.normpath(
          input_api.os_path.join(local_path, input_api.os_path.pardir, 'tools'))
      sys.path = [ tools_path ] + sys.path
      # Optimization: only load this when it's needed.
      import syntax_check_policy_template_json
      checker = syntax_check_policy_template_json.PolicyTemplateChecker()
      if checker.Run([], filepath) > 0:
        return [output_api.PresubmitError('Syntax error(s) in file:',
                                          [filepath])]
    finally:
      # Always restore sys.path, even when the checker raises.
      sys.path = old_sys_path
  return []


def _CheckPolicyTestCases(input_api, output_api, policies):
  """Cross-checks policy names against policy_test_cases.json and reports a
  PresubmitError for every policy missing from either side."""
  # Read list of policies in chrome/test/data/policy/policy_test_cases.json.
  root = input_api.change.RepositoryRoot()
  policy_test_cases_file = input_api.os_path.join(
      root, 'chrome', 'test', 'data', 'policy', 'policy_test_cases.json')
  test_names = input_api.json.load(open(policy_test_cases_file)).keys()
  # "Policy.suffix" keys count for "Policy"; keys starting with '--' are
  # comment-style entries and are skipped.
  tested_policies = frozenset(name.partition('.')[0]
                              for name in test_names
                              if name[:2] != '--')
  policy_names = frozenset(policy['name'] for policy in policies)

  # Finally check if any policies are missing.
  missing = policy_names - tested_policies
  extra = tested_policies - policy_names
  error_missing = ('Policy \'%s\' was added to policy_templates.json but not '
                   'to src/chrome/test/data/policy/policy_test_cases.json. '
                   'Please update both files.')
  error_extra = ('Policy \'%s\' is tested by '
                 'src/chrome/test/data/policy/policy_test_cases.json but is not'
                 ' defined in policy_templates.json. Please update both files.')
  results = []
  for policy in missing:
    results.append(output_api.PresubmitError(error_missing % policy))
  for policy in extra:
    results.append(output_api.PresubmitError(error_extra % policy))
  return results


def _CheckPolicyHistograms(input_api, output_api, policies):
  """Verifies that every policy id has an entry in the EnterprisePolicies
  enum of histograms.xml."""
  root = input_api.change.RepositoryRoot()
  histograms = input_api.os_path.join(
      root, 'tools', 'metrics', 'histograms', 'histograms.xml')
  with open(histograms) as f:
    tree = xml.dom.minidom.parseString(f.read())
  enums = (tree.getElementsByTagName('histogram-configuration')[0]
               .getElementsByTagName('enums')[0]
               .getElementsByTagName('enum'))
  policy_enum = [e for e in enums
                 if e.getAttribute('name') == 'EnterprisePolicies'][0]
  policy_ids = frozenset([int(e.getAttribute('value'))
                          for e in policy_enum.getElementsByTagName('int')])
  error_missing = ('Policy \'%s\' was added to policy_templates.json but not '
                   'to src/tools/metrics/histograms/histograms.xml. '
                   'Please update both files.')
  results = []
  for policy in policies:
    if policy['id'] not in policy_ids:
      results.append(output_api.PresubmitError(error_missing % policy['name']))
  return results


def _CommonChecks(input_api, output_api):
  """Shared implementation for the upload and commit presubmit hooks."""
  results = []
  results.extend(_CheckPolicyTemplatesSyntax(input_api, output_api))

  os_path = input_api.os_path
  local_path = input_api.PresubmitLocalPath()
  template_path = os_path.join(local_path, 'policy_templates.json')
  affected_files = input_api.AffectedFiles()
  # Only run the cross-file checks when the template itself changed.
  if any(f.AbsoluteLocalPath() == template_path for f in affected_files):
    try:
      policies = _GetPolicyTemplates(template_path)
    # NOTE(review): bare except intentionally catches any eval() failure of
    # the template; consider narrowing to Exception.
    except:
      results.append(output_api.PresubmitError('Invalid Python/JSON syntax.'))
      return results
    results.extend(_CheckPolicyTestCases(input_api, output_api, policies))
    results.extend(_CheckPolicyHistograms(input_api, output_api, policies))

  return results


def CheckChangeOnUpload(input_api, output_api):
  return _CommonChecks(input_api, output_api)


def CheckChangeOnCommit(input_api, output_api):
  return _CommonChecks(input_api, output_api)
aaronelliotross/django-tastypie
refs/heads/master
tests/core/forms.py
18
from django import forms
from core.models import Note


class NoteForm(forms.ModelForm):
    # Declared (non-model) field: rendered in addition to the fields
    # generated from the ``Note`` model.
    foobaz = forms.CharField()

    class Meta:
        model = Note


class VeryCustomNoteForm(NoteForm):
    class Meta:
        model = Note
        fields = ['title', 'content', 'created', 'is_active', 'foobaz']


# Notes:
#  * VeryCustomNoteForm will ONLY have the four listed fields.
#  * VeryCustomNoteForm does NOT inherit the ``foobaz`` field from its
#    parent class (unless manually specified).
#  * NOTE(review): ``fields`` names five entries while the note above says
#    "four" -- presumably because ``foobaz`` is not a field on the ``Note``
#    model and is ignored here; confirm against the test suite.
irwinlove/django
refs/heads/master
django/utils/jslex.py
251
"""JsLex: a lexer for Javascript""" # Originally from https://bitbucket.org/ned/jslex from __future__ import unicode_literals import re class Tok(object): """ A specification for a token class. """ num = 0 def __init__(self, name, regex, next=None): self.id = Tok.num Tok.num += 1 self.name = name self.regex = regex self.next = next def literals(choices, prefix="", suffix=""): """ Create a regex from a space-separated list of literal `choices`. If provided, `prefix` and `suffix` will be attached to each choice individually. """ return "|".join(prefix + re.escape(c) + suffix for c in choices.split()) class Lexer(object): """ A generic multi-state regex-based lexer. """ def __init__(self, states, first): self.regexes = {} self.toks = {} for state, rules in states.items(): parts = [] for tok in rules: groupid = "t%d" % tok.id self.toks[groupid] = tok parts.append("(?P<%s>%s)" % (groupid, tok.regex)) self.regexes[state] = re.compile("|".join(parts), re.MULTILINE | re.VERBOSE) self.state = first def lex(self, text): """ Lexically analyze `text`. Yields pairs (`name`, `tokentext`). """ end = len(text) state = self.state regexes = self.regexes toks = self.toks start = 0 while start < end: for match in regexes[state].finditer(text, start): name = match.lastgroup tok = toks[name] toktext = match.group(name) start += len(toktext) yield (tok.name, toktext) if tok.next: state = tok.next break self.state = state class JsLexer(Lexer): """ A Javascript lexer >>> lexer = JsLexer() >>> list(lexer.lex("a = 1")) [('id', 'a'), ('ws', ' '), ('punct', '='), ('ws', ' '), ('dnum', '1')] This doesn't properly handle non-ASCII characters in the Javascript source. """ # Because these tokens are matched as alternatives in a regex, longer # possibilities must appear in the list before shorter ones, for example, # '>>' before '>'. # # Note that we don't have to detect malformed Javascript, only properly # lex correct Javascript, so much of this is simplified. 
# Details of Javascript lexical structure are taken from # http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-262.pdf # A useful explanation of automatic semicolon insertion is at # http://inimino.org/~inimino/blog/javascript_semicolons both_before = [ Tok("comment", r"/\*(.|\n)*?\*/"), Tok("linecomment", r"//.*?$"), Tok("ws", r"\s+"), Tok("keyword", literals(""" break case catch class const continue debugger default delete do else enum export extends finally for function if import in instanceof new return super switch this throw try typeof var void while with """, suffix=r"\b"), next='reg'), Tok("reserved", literals("null true false", suffix=r"\b"), next='div'), Tok("id", r""" ([a-zA-Z_$ ]|\\u[0-9a-fA-Z]{4}) # first char ([a-zA-Z_$0-9]|\\u[0-9a-fA-F]{4})* # rest chars """, next='div'), Tok("hnum", r"0[xX][0-9a-fA-F]+", next='div'), Tok("onum", r"0[0-7]+"), Tok("dnum", r""" ( (0|[1-9][0-9]*) # DecimalIntegerLiteral \. # dot [0-9]* # DecimalDigits-opt ([eE][-+]?[0-9]+)? # ExponentPart-opt | \. # dot [0-9]+ # DecimalDigits ([eE][-+]?[0-9]+)? # ExponentPart-opt | (0|[1-9][0-9]*) # DecimalIntegerLiteral ([eE][-+]?[0-9]+)? # ExponentPart-opt ) """, next='div'), Tok("punct", literals(""" >>>= === !== >>> <<= >>= <= >= == != << >> && || += -= *= %= &= |= ^= """), next="reg"), Tok("punct", literals("++ -- ) ]"), next='div'), Tok("punct", literals("{ } ( [ . ; , < > + - * % & | ^ ! ~ ? : ="), next='reg'), Tok("string", r'"([^"\\]|(\\(.|\n)))*?"', next='div'), Tok("string", r"'([^'\\]|(\\(.|\n)))*?'", next='div'), ] both_after = [ Tok("other", r"."), ] states = { # slash will mean division 'div': both_before + [ Tok("punct", literals("/= /"), next='reg'), ] + both_after, # slash will mean regex 'reg': both_before + [ Tok("regex", r""" / # opening slash # First character is.. ( [^*\\/[] # anything but * \ / or [ | \\. # or an escape sequence | \[ # or a class, which has ( [^\]\\] # anything but \ or ] | \\. 
# or an escape sequence )* # many times \] ) # Following characters are same, except for excluding a star ( [^\\/[] # anything but \ / or [ | \\. # or an escape sequence | \[ # or a class, which has ( [^\]\\] # anything but \ or ] | \\. # or an escape sequence )* # many times \] )* # many times / # closing slash [a-zA-Z0-9]* # trailing flags """, next='div'), ] + both_after, } def __init__(self): super(JsLexer, self).__init__(self.states, 'reg') def prepare_js_for_gettext(js): """ Convert the Javascript source `js` into something resembling C for xgettext. What actually happens is that all the regex literals are replaced with "REGEX". """ def escape_quotes(m): """Used in a regex to properly escape double quotes.""" s = m.group(0) if s == '"': return r'\"' else: return s lexer = JsLexer() c = [] for name, tok in lexer.lex(js): if name == 'regex': # C doesn't grok regexes, and they aren't needed for gettext, # so just output a string instead. tok = '"REGEX"' elif name == 'string': # C doesn't have single-quoted strings, so make all strings # double-quoted. if tok.startswith("'"): guts = re.sub(r"\\.|.", escape_quotes, tok[1:-1]) tok = '"' + guts + '"' elif name == 'id': # C can't deal with Unicode escapes in identifiers. We don't # need them for gettext anyway, so replace them with something # innocuous tok = tok.replace("\\", "U") c.append(tok) return ''.join(c)
Danisan/odoo-1
refs/heads/8.0
addons/hw_escpos/controllers/main.py
26
# -*- coding: utf-8 -*-
# Python 2 module: background ESC/POS printer driver for the Odoo PosBox,
# plus the HTTP proxy endpoints that feed it print/cashbox tasks.
import commands
import logging
import simplejson
import os
import os.path
import io
import base64
import openerp
import time
import random
import math
import md5
import openerp.addons.hw_proxy.controllers.main as hw_proxy
import pickle
import re
import subprocess
import traceback

try:
    from .. escpos import *
    from .. escpos.exceptions import *
    from .. escpos.printer import Usb
except ImportError:
    # Keep the module importable without the escpos library; run() bails out.
    escpos = printer = None

from threading import Thread, Lock
from Queue import Queue, Empty

try:
    import usb.core
except ImportError:
    usb = None

from PIL import Image

from openerp import http
from openerp.http import request
from openerp.tools.translate import _

_logger = logging.getLogger(__name__)

# workaround https://bugs.launchpad.net/openobject-server/+bug/947231
# related to http://bugs.python.org/issue7980
from datetime import datetime
datetime.strptime('2012-01-01', '%Y-%m-%d')

class EscposDriver(Thread):
    """Daemon thread that owns the USB receipt printer: tasks are pushed on a
    queue and processed serially; status is tracked for the proxy UI."""

    def __init__(self):
        Thread.__init__(self)
        self.queue = Queue()            # pending (timestamp, task, data) tuples
        self.lock = Lock()              # guards lazy thread startup
        self.status = {'status':'connecting', 'messages':[]}

    def supported_devices(self):
        """Returns the list of supported printer ids, preferring the pickled
        user-extended list over the built-in one."""
        if not os.path.isfile('escpos_devices.pickle'):
            return supported_devices.device_list
        else:
            try:
                f = open('escpos_devices.pickle','r')
                # NOTE(review): the return makes the following f.close()
                # unreachable; the file object is only closed by GC. A
                # with-statement would fix this.
                return pickle.load(f)
                f.close()
            except Exception as e:
                self.set_status('error',str(e))
                return supported_devices.device_list

    def add_supported_device(self,device_string):
        """Parses an lsusb-style "vvvv:pppp ... ID name" string and appends
        the device to the pickled supported-device list."""
        r = re.compile('[0-9A-Fa-f]{4}:[0-9A-Fa-f]{4}');
        match = r.search(device_string)
        if match:
            match = match.group().split(':')
            vendor = int(match[0],16)
            product = int(match[1],16)
            name = device_string.split('ID')
            if len(name) >= 2:
                name = name[1]
            else:
                name = name[0]
            _logger.info('ESC/POS: adding support for device: '+match[0]+':'+match[1]+' '+name)

            device_list = supported_devices.device_list[:]
            if os.path.isfile('escpos_devices.pickle'):
                try:
                    f = open('escpos_devices.pickle','r')
                    device_list = pickle.load(f)
                    f.close()
                except Exception as e:
                    self.set_status('error',str(e))
            device_list.append({
                'vendor': vendor,
                'product': product,
                'name': name,
            })

            try:
                # NOTE(review): pickle file lives in the process CWD.
                f = open('escpos_devices.pickle','w+')
                f.seek(0)
                pickle.dump(device_list,f)
                f.close()
            except Exception as e:
                self.set_status('error',str(e))

    def connected_usb_devices(self):
        """Returns the subset of supported devices currently on the USB bus."""
        connected = []
        for device in self.supported_devices():
            if usb.core.find(idVendor=device['vendor'], idProduct=device['product']) != None:
                connected.append(device)
        return connected

    def lockedstart(self):
        # Start the worker thread at most once, on first use.
        with self.lock:
            if not self.isAlive():
                self.daemon = True
                self.start()

    def get_escpos_printer(self):
        """Opens the first connected supported printer, updating status."""
        printers = self.connected_usb_devices()
        if len(printers) > 0:
            self.set_status('connected','Connected to '+printers[0]['name'])
            return Usb(printers[0]['vendor'], printers[0]['product'])
        else:
            self.set_status('disconnected','Printer Not Found')
            return None

    def get_status(self):
        # Queue a no-op 'status' task so the worker refreshes the connection
        # state, then return the (possibly slightly stale) status dict.
        self.push_task('status')
        return self.status

    def open_cashbox(self,printer):
        """Fires both common cash drawer pins."""
        printer.cashdraw(2)
        printer.cashdraw(5)

    def set_status(self, status, message = None):
        """Records the driver status, de-duplicating consecutive messages."""
        _logger.info(status+' : '+ (message or 'no message'))
        if status == self.status['status']:
            if message != None and (len(self.status['messages']) == 0 or message != self.status['messages'][-1]):
                self.status['messages'].append(message)
        else:
            self.status['status'] = status
            if message:
                self.status['messages'] = [message]
            else:
                self.status['messages'] = []

        if status == 'error' and message:
            _logger.error('ESC/POS Error: '+message)
        elif status == 'disconnected' and message:
            _logger.warning('ESC/POS Device Disconnected: '+message)

    def run(self):
        """Worker loop: pops tasks, opens the printer per task, and requeues
        the task on failure (stale receipts/cashbox tasks are dropped)."""
        printer = None
        if not escpos:
            _logger.error('ESC/POS cannot initialize, please verify system dependencies.')
            return
        while True:
            try:
                error = True
                timestamp, task, data = self.queue.get(True)
                printer = self.get_escpos_printer()
                if printer == None:
                    if task != 'status':
                        # Keep the task for when a printer shows up.
                        self.queue.put((timestamp,task,data))
                    error = False
                    time.sleep(5)
                    continue
                elif task == 'receipt':
                    # Drop receipts older than one hour.
                    if timestamp >= time.time() - 1 * 60 * 60:
                        self.print_receipt_body(printer,data)
                        printer.cut()
                elif task == 'xml_receipt':
                    if timestamp >= time.time() - 1 * 60 * 60:
                        printer.receipt(data)
                elif task == 'cashbox':
                    # Cashbox pulses are only meaningful for ~12 seconds.
                    if timestamp >= time.time() - 12:
                        self.open_cashbox(printer)
                elif task == 'printstatus':
                    self.print_status(printer)
                elif task == 'status':
                    pass
                error = False
            except NoDeviceError as e:
                print "No device found %s" %str(e)
            except HandleDeviceError as e:
                print "Impossible to handle the device due to previous error %s" % str(e)
            except TicketNotPrinted as e:
                print "The ticket does not seems to have been fully printed %s" % str(e)
            except NoStatusError as e:
                print "Impossible to get the status of the printer %s" % str(e)
            except Exception as e:
                self.set_status('error', str(e))
                errmsg = str(e) + '\n' + '-'*60+'\n' + traceback.format_exc() + '-'*60 + '\n'
                _logger.error(errmsg);
            finally:
                if error:
                    # Requeue the task that failed so it can be retried.
                    self.queue.put((timestamp, task, data))
                if printer:
                    printer.close()

    def push_task(self,task, data = None):
        """Queues a task (lazily starting the worker thread)."""
        self.lockedstart()
        self.queue.put((time.time(),task,data))

    def print_status(self,eprint):
        """Prints the PosBox status ticket (detected LAN IPs + homepage URL)."""
        localips = ['0.0.0.0','127.0.0.1','127.0.1.1']
        ips = [ c.split(':')[1].split(' ')[0] for c in commands.getoutput("/sbin/ifconfig").split('\n') if 'inet addr' in c ]
        ips = [ ip for ip in ips if ip not in localips ]
        eprint.text('\n\n')
        eprint.set(align='center',type='b',height=2,width=2)
        eprint.text('PosBox Status\n')
        eprint.text('\n')
        eprint.set(align='center')

        if len(ips) == 0:
            eprint.text('ERROR: Could not connect to LAN\n\nPlease check that the PosBox is correc-\ntly connected with a network cable,\n that the LAN is setup with DHCP, and\nthat network addresses are available')
        elif len(ips) == 1:
            eprint.text('IP Address:\n'+ips[0]+'\n')
        else:
            eprint.text('IP Addresses:\n')
            for ip in ips:
                eprint.text(ip+'\n')

        if len(ips) >= 1:
            eprint.text('\nHomepage:\nhttp://'+ips[0]+':8069\n')

        eprint.text('\n\n')
        eprint.cut()

    def print_receipt_body(self,eprint,receipt):
        """Formats and prints a full receipt dict (header, order lines,
        taxes, totals, payments, footer) on the given printer."""

        def check(string):
            # True-ish only for non-empty, non-boolean-True strings.
            return string != True and bool(string) and string.strip()

        def price(amount):
            return ("{0:."+str(receipt['precision']['price'])+"f}").format(amount)

        def money(amount):
            return ("{0:."+str(receipt['precision']['money'])+"f}").format(amount)

        def quantity(amount):
            # Integral quantities print without a decimal part.
            if math.floor(amount) != amount:
                return ("{0:."+str(receipt['precision']['quantity'])+"f}").format(amount)
            else:
                return str(amount)

        def printline(left, right='', width=40, ratio=0.5, indent=0):
            # Left-pad/truncate `left` and right-align `right` in one
            # `width`-column line split at `ratio`.
            lwidth = int(width * ratio)
            rwidth = width - lwidth
            lwidth = lwidth - indent

            left = left[:lwidth]
            if len(left) != lwidth:
                left = left + ' ' * (lwidth - len(left))

            right = right[-rwidth:]
            if len(right) != rwidth:
                right = ' ' * (rwidth - len(right)) + right

            return ' ' * indent + left + right + '\n'

        def print_taxes():
            taxes = receipt['tax_details']
            for tax in taxes:
                eprint.text(printline(tax['tax']['name'],price(tax['amount']), width=40,ratio=0.6))

        # Receipt Header
        if receipt['company']['logo']:
            eprint.set(align='center')
            eprint.print_base64_image(receipt['company']['logo'])
            eprint.text('\n')
        else:
            eprint.set(align='center',type='b',height=2,width=2)
            eprint.text(receipt['company']['name'] + '\n')

        eprint.set(align='center',type='b')
        if check(receipt['company']['contact_address']):
            eprint.text(receipt['company']['contact_address'] + '\n')
        if check(receipt['company']['phone']):
            eprint.text('Tel:' + receipt['company']['phone'] + '\n')
        if check(receipt['company']['vat']):
            eprint.text('VAT:' + receipt['company']['vat'] + '\n')
        if check(receipt['company']['email']):
            eprint.text(receipt['company']['email'] + '\n')
        if check(receipt['company']['website']):
            eprint.text(receipt['company']['website'] + '\n')
        if check(receipt['header']):
            eprint.text(receipt['header']+'\n')
        if check(receipt['cashier']):
            eprint.text('-'*32+'\n')
            eprint.text('Served by '+receipt['cashier']+'\n')

        # Orderlines
        eprint.text('\n\n')
        eprint.set(align='center')
        for line in receipt['orderlines']:
            pricestr = price(line['price_display'])
            # Compact one-line form for the common "1 unit, no discount" case.
            if line['discount'] == 0 and line['unit_name'] == 'Unit(s)' and line['quantity'] == 1:
                eprint.text(printline(line['product_name'],pricestr,ratio=0.6))
            else:
                eprint.text(printline(line['product_name'],ratio=0.6))
                if line['discount'] != 0:
                    eprint.text(printline('Discount: '+str(line['discount'])+'%', ratio=0.6, indent=2))
                if line['unit_name'] == 'Unit(s)':
                    eprint.text( printline( quantity(line['quantity']) + ' x ' + price(line['price']), pricestr, ratio=0.6, indent=2))
                else:
                    eprint.text( printline( quantity(line['quantity']) + line['unit_name'] + ' x ' + price(line['price']), pricestr, ratio=0.6, indent=2))

        # Subtotal if the taxes are not included
        taxincluded = True
        if money(receipt['subtotal']) != money(receipt['total_with_tax']):
            eprint.text(printline('','-------'));
            eprint.text(printline(_('Subtotal'),money(receipt['subtotal']),width=40, ratio=0.6))
            print_taxes()
            #eprint.text(printline(_('Taxes'),money(receipt['total_tax']),width=40, ratio=0.6))
            taxincluded = False

        # Total
        eprint.text(printline('','-------'));
        eprint.set(align='center',height=2)
        eprint.text(printline(_(' TOTAL'),money(receipt['total_with_tax']),width=40, ratio=0.6))
        eprint.text('\n\n');

        # Paymentlines
        eprint.set(align='center')
        for line in receipt['paymentlines']:
            eprint.text(printline(line['journal'], money(line['amount']), ratio=0.6))

        eprint.text('\n');
        eprint.set(align='center',height=2)
        eprint.text(printline(_(' CHANGE'),money(receipt['change']),width=40, ratio=0.6))
        eprint.set(align='center')
        eprint.text('\n');

        # Extra Payment info
        if receipt['total_discount'] != 0:
            eprint.text(printline(_('Discounts'),money(receipt['total_discount']),width=40, ratio=0.6))
        if taxincluded:
            print_taxes()
            #eprint.text(printline(_('Taxes'),money(receipt['total_tax']),width=40, ratio=0.6))

        # Footer
        if check(receipt['footer']):
            eprint.text('\n'+receipt['footer']+'\n\n')
        eprint.text(receipt['name']+'\n')
        eprint.text(
                str(receipt['date']['date']).zfill(2)
                +'/'+ str(receipt['date']['month']+1).zfill(2)
                +'/'+ str(receipt['date']['year']).zfill(4)
                +' '+ str(receipt['date']['hour']).zfill(2)
                +':'+ str(receipt['date']['minute']).zfill(2) )

# Module-level singleton: one driver thread per process, registered with the
# hardware proxy under the 'escpos' key.
driver = EscposDriver()

driver.push_task('printstatus')

hw_proxy.drivers['escpos'] = driver

class EscposProxy(hw_proxy.Proxy):
    """HTTP endpoints exposed by the PosBox that forward requests to the
    background EscposDriver."""

    @http.route('/hw_proxy/open_cashbox', type='json', auth='none', cors='*')
    def open_cashbox(self):
        _logger.info('ESC/POS: OPEN CASHBOX')
        driver.push_task('cashbox')

    @http.route('/hw_proxy/print_receipt', type='json', auth='none', cors='*')
    def print_receipt(self, receipt):
        _logger.info('ESC/POS: PRINT RECEIPT')
        driver.push_task('receipt',receipt)

    @http.route('/hw_proxy/print_xml_receipt', type='json', auth='none', cors='*')
    def print_xml_receipt(self, receipt):
        _logger.info('ESC/POS: PRINT XML RECEIPT')
        driver.push_task('xml_receipt',receipt)

    @http.route('/hw_proxy/escpos/add_supported_device', type='http', auth='none', cors='*')
    def add_supported_device(self, device_string):
        _logger.info('ESC/POS: ADDED NEW DEVICE:'+device_string)
        driver.add_supported_device(device_string)
        return "The device:\n"+device_string+"\n has been added to the list of supported devices.<br/><a href='/hw_proxy/status'>Ok</a>"

    @http.route('/hw_proxy/escpos/reset_supported_devices', type='http', auth='none', cors='*')
    def reset_supported_devices(self):
        try:
            os.remove('escpos_devices.pickle')
        except Exception as e:
            pass
        return 'The list of supported devices has been reset to factory defaults.<br/><a href="/hw_proxy/status">Ok</a>'
mtrbean/scipy
refs/heads/master
scipy/io/harwell_boeing/_fortran_format_parser.py
127
""" Preliminary module to handle fortran formats for IO. Does not use this outside scipy.sparse io for now, until the API is deemed reasonable. The *Format classes handle conversion between fortran and python format, and FortranFormatParser can create *Format instances from raw fortran format strings (e.g. '(3I4)', '(10I3)', etc...) """ from __future__ import division, print_function, absolute_import import re import warnings import numpy as np __all__ = ["BadFortranFormat", "FortranFormatParser", "IntFormat", "ExpFormat"] TOKENS = { "LPAR": r"\(", "RPAR": r"\)", "INT_ID": r"I", "EXP_ID": r"E", "INT": r"\d+", "DOT": r"\.", } class BadFortranFormat(SyntaxError): pass def number_digits(n): return int(np.floor(np.log10(np.abs(n))) + 1) class IntFormat(object): @classmethod def from_number(cls, n, min=None): """Given an integer, returns a "reasonable" IntFormat instance to represent any number between 0 and n if n > 0, -n and n if n < 0 Parameters ---------- n : int max number one wants to be able to represent min : int minimum number of characters to use for the format Returns ------- res : IntFormat IntFormat instance with reasonable (see Notes) computed width Notes ----- Reasonable should be understood as the minimal string length necessary without losing precision. For example, IntFormat.from_number(1) will return an IntFormat instance of width 2, so that any 0 and 1 may be represented as 1-character strings without loss of information. 
""" width = number_digits(n) + 1 if n < 0: width += 1 repeat = 80 // width return cls(width, min, repeat=repeat) def __init__(self, width, min=None, repeat=None): self.width = width self.repeat = repeat self.min = min def __repr__(self): r = "IntFormat(" if self.repeat: r += "%d" % self.repeat r += "I%d" % self.width if self.min: r += ".%d" % self.min return r + ")" @property def fortran_format(self): r = "(" if self.repeat: r += "%d" % self.repeat r += "I%d" % self.width if self.min: r += ".%d" % self.min return r + ")" @property def python_format(self): return "%" + str(self.width) + "d" class ExpFormat(object): @classmethod def from_number(cls, n, min=None): """Given a float number, returns a "reasonable" ExpFormat instance to represent any number between -n and n. Parameters ---------- n : float max number one wants to be able to represent min : int minimum number of characters to use for the format Returns ------- res : ExpFormat ExpFormat instance with reasonable (see Notes) computed width Notes ----- Reasonable should be understood as the minimal string length necessary to avoid losing precision. """ # len of one number in exp format: sign + 1|0 + "." + # number of digit for fractional part + 'E' + sign of exponent + # len of exponent finfo = np.finfo(n.dtype) # Number of digits for fractional part n_prec = finfo.precision + 1 # Number of digits for exponential part n_exp = number_digits(np.max(np.abs([finfo.maxexp, finfo.minexp]))) width = 1 + 1 + n_prec + 1 + n_exp + 1 if n < 0: width += 1 repeat = int(np.floor(80 / width)) return cls(width, n_prec, min, repeat=repeat) def __init__(self, width, significand, min=None, repeat=None): """\ Parameters ---------- width : int number of characters taken by the string (includes space). 
""" self.width = width self.significand = significand self.repeat = repeat self.min = min def __repr__(self): r = "ExpFormat(" if self.repeat: r += "%d" % self.repeat r += "E%d.%d" % (self.width, self.significand) if self.min: r += "E%d" % self.min return r + ")" @property def fortran_format(self): r = "(" if self.repeat: r += "%d" % self.repeat r += "E%d.%d" % (self.width, self.significand) if self.min: r += "E%d" % self.min return r + ")" @property def python_format(self): return "%" + str(self.width-1) + "." + str(self.significand) + "E" class Token(object): def __init__(self, type, value, pos): self.type = type self.value = value self.pos = pos def __str__(self): return """Token('%s', "%s")""" % (self.type, self.value) def __repr__(self): return self.__str__() class Tokenizer(object): def __init__(self): self.tokens = list(TOKENS.keys()) self.res = [re.compile(TOKENS[i]) for i in self.tokens] def input(self, s): self.data = s self.curpos = 0 self.len = len(s) def next_token(self): curpos = self.curpos tokens = self.tokens while curpos < self.len: for i, r in enumerate(self.res): m = r.match(self.data, curpos) if m is None: continue else: self.curpos = m.end() return Token(self.tokens[i], m.group(), self.curpos) else: raise SyntaxError("Unknown character at position %d (%s)" % (self.curpos, self.data[curpos])) # Grammar for fortran format: # format : LPAR format_string RPAR # format_string : repeated | simple # repeated : repeat simple # simple : int_fmt | exp_fmt # int_fmt : INT_ID width # exp_fmt : simple_exp_fmt # simple_exp_fmt : EXP_ID width DOT significand # extended_exp_fmt : EXP_ID width DOT significand EXP_ID ndigits # repeat : INT # width : INT # significand : INT # ndigits : INT # Naive fortran formatter - parser is hand-made class FortranFormatParser(object): """Parser for fortran format strings. The parse method returns a *Format instance. Notes ----- Only ExpFormat (exponential format for floating values) and IntFormat (integer format) for now. 
""" def __init__(self): self.tokenizer = Tokenizer() def parse(self, s): self.tokenizer.input(s) tokens = [] try: while True: t = self.tokenizer.next_token() if t is None: break else: tokens.append(t) return self._parse_format(tokens) except SyntaxError as e: raise BadFortranFormat(str(e)) def _get_min(self, tokens): next = tokens.pop(0) if not next.type == "DOT": raise SyntaxError() next = tokens.pop(0) return next.value def _expect(self, token, tp): if not token.type == tp: raise SyntaxError() def _parse_format(self, tokens): if not tokens[0].type == "LPAR": raise SyntaxError("Expected left parenthesis at position " "%d (got '%s')" % (0, tokens[0].value)) elif not tokens[-1].type == "RPAR": raise SyntaxError("Expected right parenthesis at position " "%d (got '%s')" % (len(tokens), tokens[-1].value)) tokens = tokens[1:-1] types = [t.type for t in tokens] if types[0] == "INT": repeat = int(tokens.pop(0).value) else: repeat = None next = tokens.pop(0) if next.type == "INT_ID": next = self._next(tokens, "INT") width = int(next.value) if tokens: min = int(self._get_min(tokens)) else: min = None return IntFormat(width, min, repeat) elif next.type == "EXP_ID": next = self._next(tokens, "INT") width = int(next.value) next = self._next(tokens, "DOT") next = self._next(tokens, "INT") significand = int(next.value) if tokens: next = self._next(tokens, "EXP_ID") next = self._next(tokens, "INT") min = int(next.value) else: min = None return ExpFormat(width, significand, min, repeat) else: raise SyntaxError("Invalid formater type %s" % next.value) def _next(self, tokens, tp): if not len(tokens) > 0: raise SyntaxError() next = tokens.pop(0) self._expect(next, tp) return next
wylliam-silva/leiteilustrador
refs/heads/master
node_modules/node-gyp/gyp/tools/graphviz.py
2679
#!/usr/bin/env python # Copyright (c) 2011 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Using the JSON dumped by the dump-dependency-json generator, generate input suitable for graphviz to render a dependency graph of targets.""" import collections import json import sys def ParseTarget(target): target, _, suffix = target.partition('#') filename, _, target = target.partition(':') return filename, target, suffix def LoadEdges(filename, targets): """Load the edges map from the dump file, and filter it to only show targets in |targets| and their depedendents.""" file = open('dump.json') edges = json.load(file) file.close() # Copy out only the edges we're interested in from the full edge list. target_edges = {} to_visit = targets[:] while to_visit: src = to_visit.pop() if src in target_edges: continue target_edges[src] = edges[src] to_visit.extend(edges[src]) return target_edges def WriteGraph(edges): """Print a graphviz graph to stdout. |edges| is a map of target to a list of other targets it depends on.""" # Bucket targets by file. files = collections.defaultdict(list) for src, dst in edges.items(): build_file, target_name, toolset = ParseTarget(src) files[build_file].append(src) print 'digraph D {' print ' fontsize=8' # Used by subgraphs. print ' node [fontsize=8]' # Output nodes by file. We must first write out each node within # its file grouping before writing out any edges that may refer # to those nodes. for filename, targets in files.items(): if len(targets) == 1: # If there's only one node for this file, simplify # the display by making it a box without an internal node. target = targets[0] build_file, target_name, toolset = ParseTarget(target) print ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename, target_name) else: # Group multiple nodes together in a subgraph. 
print ' subgraph "cluster_%s" {' % filename print ' label = "%s"' % filename for target in targets: build_file, target_name, toolset = ParseTarget(target) print ' "%s" [label="%s"]' % (target, target_name) print ' }' # Now that we've placed all the nodes within subgraphs, output all # the edges between nodes. for src, dsts in edges.items(): for dst in dsts: print ' "%s" -> "%s"' % (src, dst) print '}' def main(): if len(sys.argv) < 2: print >>sys.stderr, __doc__ print >>sys.stderr print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0]) return 1 edges = LoadEdges('dump.json', sys.argv[1:]) WriteGraph(edges) return 0 if __name__ == '__main__': sys.exit(main())
0branch/punkmoney
refs/heads/master
tracker/utils/parser.py
1
# -*- coding: utf-8 -*- """ PunkMoney 0.2 :: parser.py Main class for interpreting #PunkMoney statements. """ from mysql import Connection from harvester import Harvester from config import HASHTAG, ALT_HASHTAG, SETTINGS from objects import Event import re from datetime import datetime from dateutil.relativedelta import * import time ''' Parser class ''' class Parser(Harvester): def __init__(self): self.setupLogging() self.connectDB() self.TW = self.connectTwitter() ''' Parse new tweets ''' def parseNew(self): # Process new tweets for tweet in self.getTweets(): try: # If tweet contains 'RT' (e.g. is a retweet), skip retweet = False for w in tweet['content'].split(): if w == 'RT': self.setParsed(tweet['tweet_id']) raise Exception("Tweet is a retweet") # Save tweet author to user database self.saveUser(tweet['author']) # Determine tweet type promise = re.search('promise', tweet['content'], re.IGNORECASE) transfer = re.search('transfer @(\w+)(.*)', tweet['content'], re.IGNORECASE) thanks = re.search('@(\w+) thanks (for)?(.*)', tweet['content'], re.IGNORECASE) offer = re.search('(i )?(offer[s]?) (.*)', tweet['content'], re.IGNORECASE) need = re.search('(i )?(need[s]?) 
(.*)', tweet['content'], re.IGNORECASE) close = re.match('@(\w+ )?close (.*)', tweet['content'], re.IGNORECASE) request = re.search('@(\w+ )(i )?request(.*)', tweet['content'], re.IGNORECASE) # strip urls from text r = re.search("(.*)(?P<url>https?://[^\s]+)(.*)", tweet['content'], re.IGNORECASE) if r: tweet['content'] = r.group(1) + ' ' + r.group(3) # Parse tweet according to type if promise: self.parsePromise(tweet) elif transfer: tweet['to_user'] = transfer.group(1).lower() self.parseTransfer(tweet) elif thanks: tweet['recipient'] = thanks.group(1) if thanks.group(2): if thanks.group(2).lower() == 'for': tweet['message'] = thanks.group(3) self.parseThanks(tweet) elif offer: self.parseOffer(tweet) elif need: self.parseNeed(tweet) elif close: self.parseClose(tweet) elif request: tweet['recipient'] = request.group(1) tweet['message'] = request.group(3) self.parseRequest(tweet) else: self.setParsed(tweet['tweet_id'], '-') raise Exception("Tweet was not recognised") except Exception, e: self.logWarning("Parsing tweet %s failed: %s" % (tweet['tweet_id'], e)) self.setParsed(tweet['tweet_id'], '-') continue ''' Parser functions ''' # parsePromise # parses and saves a new promise def parsePromise(self, tweet): try: # Tweet flag default true tweet_errors = True # Strip out hashtag h = re.search('(.*)(%s|%s)(.*)' % (HASHTAG, ALT_HASHTAG), tweet['content'], re.IGNORECASE) if h: statement = h.group(1) + h.group(3) else: raise Exception("Hashtag not found") # Get recipient r = re.search('(.*)@(\w+)(.*)', statement) if r: tweet['recipient'] = r.group(2) statement = r.group(1).strip() + r.group(3) self.saveUser(tweet['recipient'], intro=True) else: # (Don't tweet this as an error) tweet_errors = False; raise Exception("Recipient not found") # Check not to self if tweet['recipient'] == tweet['author']: raise Exception("Issuer and recipient are the same") # Check Transferability (optional) t = re.match('(.*)( NT )(.*)', statement, re.IGNORECASE) if t: tweet['transferable'] = 
False statement = t.group(1) + t.group(3) else: tweet['transferable'] = True # Check expiry (optional) ''' 'Expires in' syntax ''' e = re.match('(.*) Expires in (\d+) (\w+)(.*)', statement, re.IGNORECASE) if e: num = e.group(2) unit = e.group(3) tweet['expiry'] = self.getExpiry(tweet['created'], num, unit) statement = e.group(1) + e.group(4) else: tweet['expiry'] = None # Get condition c = re.match('(.*)( if )(.*)', statement, re.IGNORECASE) if c: tweet['condition'] = c.group(3) else: tweet['condition'] = None # Get promise p = re.match('(.*)(promise)(.*)', statement, re.IGNORECASE) if p: if p.group(1).strip().lower() == 'i': promise = p.group(3) else: promise = p.group(1).strip() + p.group(3) else: raise Exception("Promise not found") # Clean up promise ''' Remove trailing white space, full stop and word 'you' (if found) ''' promise = promise.strip() while promise[-1] == '.': promise = promise[:-1] if promise[0:4] == 'you ': promise = promise[4:] tweet['promise'] = promise # Processing promise self.setParsed(tweet['tweet_id']) self.createNote(tweet) E = Event(tweet['tweet_id'], tweet['tweet_id'], 0, tweet['created'], tweet['author'], tweet['recipient']) E.save() self.sendTweet('@%s promised @%s %s http://www.punkmoney.org/note/%s' % (tweet['author'], tweet['recipient'], promise, tweet['tweet_id'])) self.logInfo('[P] @%s promised @%s %s.' % (tweet['author'], tweet['recipient'], tweet['tweet_id'])) except Exception, e: self.logWarning("Processing promise %s failed: %s" % (tweet['tweet_id'], e)) if tweet_errors is not False: self.sendTweet('@%s Sorry, your promise [%s] didn\'t parse. 
Try again: http://www.punkmoney.org/print/' % (tweet['author'], tweet['tweet_id'])) self.setParsed(tweet['tweet_id'], '-') # parseTransfer # parse and save a transfer def parseTransfer(self, tweet): try: self.logInfo("Parsing tweet %s [transfer]" % tweet['tweet_id']) # Get issuer and recipient from_user = tweet['author'] to_user = tweet['to_user'] # Create user self.saveUser(to_user) # If issuer and recipient are the same, skip if from_user == to_user: raise Exception("Issuer and recipient are the same") # Find original tweet this is a reply to original_id = self.findOriginal(tweet['reply_to_id'], tweet['tweet_id']) # Check note exists if original_id is None: raise Exception("Original note could not be found") # Get original note note = self.getNote(original_id) # Check transferer is current bearer if from_user != note['bearer']: raise Exception("User %s is not the current note bearer" % from_user) # Check note is open (i.e. not expired or redeemed) if note['status'] != 0: if note['status'] == 1: raise Exception("Note has already been redeemed") if note['status'] == 2: raise Exception("Note has expired") # Check note is transferable if note['transferable'] != 1: raise Exception("Note is non-transferable") # Check recipient is trusted [Disabled] ''' if self.checkTrusted(note['issuer'], to_user) is False: raise Exception("Transferee not trusted by issuer") ''' self.saveUser(to_user, intro=True) # Process transfer self.setParsed(tweet['tweet_id']) self.updateNote(note['id'], 'bearer', to_user) # Create event E = Event(note['id'], tweet['tweet_id'], 3, tweet['created'], from_user, to_user) E.save() # Log transfer self.logInfo('[Tr] @%s transferred %s to @%s' % (tweet['author'], note['id'], to_user)) self.setParsed(tweet['tweet_id']) except Exception, e: self.setParsed(tweet['tweet_id'], '-') self.logWarning("Processing transfer %s failed: %s" % (tweet['tweet_id'], e)) # parseThanks # parse and save thanks def parseThanks(self, tweet): try: self.logInfo("Parsing tweet 
%s [thanks]" % tweet['tweet_id']) from_user = tweet['author'] # If tweet has no reply to id if tweet['reply_to_id'] is None: h = re.search('(.*)(%s|%s)(.*)' % (HASHTAG, ALT_HASHTAG), tweet['message'], re.IGNORECASE) if h: tweet['message'] = h.group(1).strip() + h.group(3).strip() self.createThanks(tweet) tweet['message'] = 'for ' + tweet['message'] self.saveUser(tweet['recipient'], intro=True) # Log thanks message = '[Thanks] @%s thanked @%s %s' % (tweet['author'], tweet['recipient'], tweet['message']) self.logInfo(message) self.sendTweet('@%s thanked @%s %s http://www.punkmoney.org/note/%s' % (tweet['author'], tweet['recipient'], tweet['message'], tweet['tweet_id'])) self.setParsed(tweet['tweet_id']) # If tweet has a reply_to_id, parse as redemption else: original_id = self.findOriginal(tweet['reply_to_id'], tweet['tweet_id']) note = self.getNote(original_id) to_user = note['issuer'] # Check original exists if note is False: raise Exception("Original note not found") # Check tweet author is current bearer if note['bearer'] != from_user: raise Exception("User is not the current note bearer") # Check note is open (i.e. 
not expired or redeemed) if note['status'] != 0: if note['status'] == 1: raise Exception("Note has already been redeemed") if note['status'] == 2: raise Exception("Note has expired") message = note['promise'] # Process thanks self.updateNote(note['id'], 'status', 1) E = Event(note['id'], tweet['tweet_id'], 1, tweet['created'], from_user, to_user) E.save() # Log thanks self.logInfo('[T] @%s thanked @%s for %s' % (to_user, from_user, message)) # Tweet event self.sendTweet('@%s thanked @%s for %s http://www.punkmoney.org/note/%s' % (from_user, to_user, note['promise'], note['id'])) self.setParsed(tweet['tweet_id']) except Exception, e: self.logWarning("Processing thanks %s failed: %s" % (tweet['tweet_id'], e)) self.setParsed(tweet['tweet_id'], '-') # parseOffer # parse and save offers def parseOffer(self, tweet): try: # Strip out hashtag h = re.search('(.*)(%s|%s)(.*)' % (HASHTAG, ALT_HASHTAG), tweet['content'], re.IGNORECASE) if h: statement = h.group(1) + h.group(3) else: raise Exception("Hashtag not found") # Check expiry (optional) ''' 'Expires in' syntax ''' e = re.search('(.*) Expires in (\d+) (\w+)(.*)', statement, re.IGNORECASE) if e: num = e.group(2) unit = e.group(3) tweet['expiry'] = self.getExpiry(tweet['created'], num, unit) statement = e.group(1) + e.group(4) else: tweet['expiry'] = None # Get thing offered/needed p = re.match('(.*)(offer[s]?)(.*)', statement, re.IGNORECASE) if p: if p.group(1).strip().lower() == 'i': item = p.group(3) else: item = p.group(1).strip() + p.group(3) else: raise Exception("Item not found") # Get condition c = re.match('(.*)( if )(.*)', item, re.IGNORECASE) if c: tweet['condition'] = c.group(3) else: tweet['condition'] = None # Clean up promise ''' Remove trailing white space, full stop and word 'you' (if found) ''' item = item.strip() while item[-1] == '.': item = item[:-1] tweet['item'] = item self.createOffer(4, tweet) # Create event E = Event(tweet['tweet_id'], '0', 4, tweet['created'], tweet['author'], '') E.save() # Log 
event self.logInfo('[O] @%s offers %s.' % (tweet['author'], tweet['tweet_id'])) # Tweet event self.sendTweet('[O] @%s offers %s http://www.punkmoney.org/note/%s' % (tweet['author'], item, tweet['tweet_id'])) self.setParsed(tweet['tweet_id']) except Exception, e: self.logWarning("Processing %s failed: %s" % (tweet['tweet_id'], e)) self.sendTweet('@%s Sorry, your offer [%s] didn\'t parse. Try again: http://www.punkmoney.org/print/' % (tweet['author'], tweet['tweet_id'])) self.setParsed(tweet['tweet_id'], '-') # parseNeed # parse and save offer def parseNeed(self, tweet): try: # Strip out hashtag h = re.search('(.*)(%s|%s)(.*)' % (HASHTAG, ALT_HASHTAG), tweet['content'], re.IGNORECASE) if h: statement = h.group(1) + h.group(3) else: raise Exception("Hashtag not found") # Check expiry (optional) ''' 'Expires in' syntax ''' e = re.search('(.*) Expires in (\d+) (\w+)(.*)', statement, re.IGNORECASE) if e: num = e.group(2) unit = e.group(3) tweet['expiry'] = self.getExpiry(tweet['created'], num, unit) statement = e.group(1) + e.group(4) else: tweet['expiry'] = None # Get thing offered/needed p = re.match('(.*)(need[s]?)(.*)', statement, re.IGNORECASE) if p: if p.group(1).strip().lower() == 'i': item = p.group(3) else: item = p.group(1).strip() + p.group(3) else: raise Exception("Item not found") # Get condition c = re.match('(.*)( if )(.*)', item, re.IGNORECASE) if c: tweet['condition'] = c.group(3) else: tweet['condition'] = None # Clean up promise ''' Remove trailing white space, full stop and word 'you' (if found) ''' item = item.strip() while item[-1] == '.': item = item[:-1] tweet['item'] = item self.createOffer(5, tweet) # Create event E = Event(tweet['tweet_id'], '0', 5, tweet['created'], tweet['author'], '') E.save() # Log event self.logInfo('[N] @%s needs %s.' 
% (tweet['author'], tweet['tweet_id'])) # Tweet self.sendTweet('[N] @%s needs %s http://www.punkmoney.org/note/%s' % (tweet['author'], item, tweet['tweet_id'])) self.setParsed(tweet['tweet_id']) except Exception, e: self.logWarning("Processing %s failed: %s" % (tweet['tweet_id'], e)) self.sendTweet('@%s Sorry, your need [%s] didn\'t parse. Try again: http://www.punkmoney.org/print/' % (tweet['author'], tweet['tweet_id'])) self.setParsed(tweet['tweet_id'], '-') # parseClose # parse and process a close statement def parseClose(self, tweet): try: # Check if this is a close instruction c = re.match('(.*)(close)(.*)', tweet['content']) # check tweet has a reply_to_id if c: if tweet.get('reply_to_id', False) is not False: original_id = self.findOriginal(tweet['reply_to_id'], tweet['tweet_id']) note = self.getNote(original_id) if tweet['author'].lower() != note['issuer']: raise Exception("Close attempt by non-issuer") if note['status'] != 0: raise Exception("Note already closed") self.updateNote(note['id'], 'status', 1) if note['type'] == 4: code = 6 elif note['type'] == 5: code = 7 elif note['type'] == 10: code = 11 # Create event E = Event(note['id'], tweet['tweet_id'], code, tweet['created'], tweet['author'], '') E.save() # Log event self.logInfo("[X] '%s' closed note %s" % (tweet['author'], note['id'])) self.setParsed(tweet['tweet_id']) elif tweet.get('reply_to_id', False) is False: self.setParsed(tweet['tweet_id'], '-') raise Exception("Close failed: original not found") except Exception, e: self.logWarning("Processing %s failed: %s" % (tweet['tweet_id'], e)) self.setParsed(tweet['tweet_id'], '-') # parseRequest # parse and process a request statement def parseRequest(self, tweet): try: h = re.search('(.*)(%s|%s)(.*)' % (HASHTAG, ALT_HASHTAG), tweet['message'], re.IGNORECASE) if h: tweet['message'] = h.group(1).strip() + h.group(3).strip() # Check not to self if tweet['recipient'].lower() == tweet['author'].lower(): raise Exception("Issuer and recipient are the 
same") # Check expiry (optional) ''' 'Expires in' syntax ''' e = re.search('(.*) Expires in (\d+) (\w+)(.*)', tweet['message'], re.IGNORECASE) if e: num = e.group(2) unit = e.group(3) tweet['expiry'] = self.getExpiry(tweet['created'], num, unit) tweet['message'] = e.group(1) + e.group(4) else: tweet['expiry'] = None # Clean up tweet['message'] = tweet['message'].strip() while tweet['message'][-1] == '.': tweet['message'] = tweet['message'][:-1] # Send intro tweet self.saveUser(tweet['recipient'].lower(), intro=True) # Create a request self.createRequest(tweet) # Log self.logInfo('[R] @%s requested %s from @%s' % (tweet['author'], tweet['message'], tweet['recipient'])) self.sendTweet('[R] @%s requested %s from @%s http://www.punkmoney.org/note/%s' % (tweet['author'], tweet['message'], tweet['recipient'], tweet['tweet_id'])) self.setParsed(tweet['tweet_id']) except Exception, e: self.logWarning("Parsing request %s failed: %s" % (tweet['tweet_id'], e)) self.setParsed(tweet['tweet_id'], '-') ''' Helper functions ''' # getTweets # Get unparsed tweets from database def getTweets(self): try: tweets = [] for tweet in self.getRows("SELECT timestamp, tweet_id, author, content, reply_to_id FROM tracker_tweets WHERE parsed is Null ORDER BY timestamp ASC"): tweet = { 'created' : tweet[0], 'tweet_id' : tweet[1], 'author' : tweet[2], 'content' : tweet[3], 'reply_to_id' : tweet[4] } tweets.append(tweet) except Exception, e: raise Exception("Getting tweets from database failed: %s" % e) return tweets else: return tweets # setParsed # Set parsed flag to 1 if successful, or '-' if not def setParsed(self, tweet_id, val=1): try: query = "UPDATE tracker_tweets SET parsed = %s WHERE tweet_id = %s" self.queryDB(query, (val, tweet_id)) except Exception, e: raise Exception("Setting parsed flag for tweet %s failed: %s" % (tweet_id, e)) else: return True # getExpiry # Takes created date time, a number and unit (day, week, month, year,) & returns expiry datetime def getExpiry(self, created, 
num, unit): try: num = int(num) if unit == 'minutes' or unit == 'minute': expiry = created + relativedelta(minutes=+num) if unit == 'hours' or unit == 'hour': expiry = created + relativedelta(hours=+num) if unit == 'days' or unit == 'day': expiry = created + relativedelta(days=+num) if unit == 'weeks' or unit == 'week': expiry = created + relativedelta(days=+7*num) if unit == 'months' or unit == 'month': expiry = created + relativedelta(months=+num) if unit == 'years' or unit == 'year': expiry = created + relativedelta(years=+num) except Exception, e: raise Exception("Calculating expiry date failed: %s" % e) else: return expiry # findOriginal # Given a reply_to_id, finds the original def findOriginal(self, reply_to_id, tweet_id): tweet = {} def getLast(reply_to_id): query = "SELECT created, id, issuer, promise, type FROM tracker_notes WHERE id = %s" % reply_to_id tweet = self.getRows(query)[0] return tweet try: note_types = [0, 4, 5, 10] print getLast(reply_to_id)[4] while int(getLast(reply_to_id)[4]) not in note_types: reply_to_id = getLast(reply_to_id)[4] else: tweet = getLast(reply_to_id) except Exception, e: raise Exception("Original note for id %s not found" % tweet_id) else: return tweet[1] # createNote # Create a new note from a parsed tweet def createNote(self, tweet): try: query = "SELECT id FROM tracker_notes WHERE id = '%s'" % tweet['tweet_id'] if self.getSingleValue(query) is None: query = "INSERT INTO tracker_notes(id, issuer, bearer, promise, created, expiry, status, transferable, type, conditional) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" params = (tweet['tweet_id'], tweet['author'].lower(), tweet['recipient'].lower(), tweet['promise'], tweet['created'], tweet['expiry'], 0, tweet['transferable'], 0, tweet['condition']) self.queryDB(query, params) else: self.logWarning('Note %s already exists' % tweet['tweet_id']) return False except Exception, e: raise Exception("Creating note from tweet %s failed: %s" % (tweet['tweet_id'], e)) else: return 
True # createOffer # Create an offer or need from parsed tweet def createOffer(self, code, tweet): try: query = "SELECT id FROM tracker_notes WHERE id = '%s'" % tweet['tweet_id'] if self.getSingleValue(query) is None: query = "INSERT INTO tracker_notes(id, issuer, bearer, promise, created, expiry, status, transferable, type, conditional) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" params = (tweet['tweet_id'], tweet['author'].lower(), '', tweet['item'].lower(), tweet['created'], tweet['expiry'], 0, 0, code, tweet['condition']) self.queryDB(query, params) else: self.logWarning('Note %s already exists' % tweet['tweet_id']) return False except Exception, e: raise Exception("Creating note from tweet %s failed: %s" % (tweet['tweet_id'], e)) else: return True # createThanks # Create a thanks note def createThanks(self, tweet): try: query = "SELECT id FROM tracker_notes WHERE id = '%s'" % tweet['tweet_id'] if self.getSingleValue(query) is None: query = "INSERT INTO tracker_notes(id, issuer, bearer, promise, created, expiry, status, transferable, type) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)" params = (tweet['tweet_id'], tweet['author'].lower(), tweet['recipient'].lower(), tweet['message'], tweet['created'], None, 0, 0, 1) self.queryDB(query, params) # Create an event E = Event(tweet['tweet_id'],1,1,tweet['created'],tweet['author'], tweet['recipient']) E.save() else: self.logWarning('Note %s already exists' % tweet['tweet_id']) return False except Exception, e: raise Exception("Creating thanks note from tweet %s failed: %s" % (tweet['tweet_id'], e)) else: return True # createRequest # Create a request note def createRequest(self, tweet): try: query = "SELECT id FROM tracker_notes WHERE id = '%s'" % tweet['tweet_id'] if self.getSingleValue(query) is None: query = "INSERT INTO tracker_notes(id, issuer, bearer, promise, created, expiry, status, transferable, type) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)" params = (tweet['tweet_id'], tweet['author'].lower(), 
tweet['recipient'].lower(), tweet['message'], tweet['created'], tweet['expiry'], 0, 0, 10) self.queryDB(query, params) # Create an event E = Event(tweet['tweet_id'],10,10,tweet['created'], tweet['author'], tweet['recipient']) E.save() else: self.logWarning('Note %s already exists' % tweet['tweet_id']) return False except Exception, e: raise Exception("Creating thanks note from tweet %s failed: %s" % (tweet['tweet_id'], e)) else: return True # getNote # Return a note given its id def getNote(self, note_id): try: query = "SELECT id, issuer, bearer, promise, created, expiry, status, transferable, type FROM tracker_notes WHERE id = %s" % note_id note = self.getRows(query)[0] note = {'id' : note[0], 'issuer' : note[1], 'bearer' : note[2], 'promise' : note[3], 'created' : note[4], 'expiry' : note[5], 'status' : note[6], 'transferable' : note[7], 'type' : note[8]} except Exception, e: raise Exception("Original note %s not found" % (note_id, e)) return False else: return note # updateNote # Update a note def updateNote(self, note_id, field, value): try: query = "UPDATE tracker_notes SET %s = %s where id = %s" % (field, '%s', '%s') params = (value, note_id) self.queryDB(query, params) except Exception, e: raise Exception("Updating note %s failed: %s" % (note_id, e)) else: return True # saveUser # Check user exists def saveUser(self, username, intro = False): username = username.lower() try: query = "SELECT id FROM tracker_users WHERE username = '%s'" % username.lower() r = self.getSingleValue(query) except Exception, e: self.logError("Checking user exists failed: %s" % e) return False try: if r: return True else: self.logInfo("Saving new user %s" % username) query = "INSERT INTO tracker_users (username) VALUES (%s)" params = (username.lower()) self.queryDB(query, params) # Send intro tweet if intro == True: try: message = '@' + username + ' Hi there. Someone just sent you #PunkMoney. 
Here\'s how to get started: http://is.gd/bezeyu' self.sendTweet(message) except: pass # Follow user try: if self.TW.exists_friendship('punk_money', username) is False: self.TW.create_friendship(username) except: pass except Exception, e: raise Exception("Creating new user failed: %s" % e) else: return True # updateExpired # Update status of any expired notes def updateExpired(self): try: self.logInfo("Checking for expirations") query = "SELECT id, bearer, issuer, type FROM tracker_notes WHERE expiry < now() AND status = 0" for note in self.getRows(query): self.logInfo('Note %s expired' % note[0]) self.updateNote(note[0], 'status', 2) # promise if note[3] == 0: code = 2 # offer elif note[3] == 4: code = 8 # need elif note[3] == 5: code = 9 # request elif note[3] == 10: code = 12 # Create event E = Event(note[0], 0, code, datetime.now(), note[2], note[1]) E.save() except Exception, e: raise Exception("Cleaning database failed: %s" % e) # sendTweet # Tweet a message from the main account def sendTweet(self, message): if SETTINGS.get('tweet', False) is True: try: self.TW.update_status(message) except: self.logError("Tweeting message failed (%s)" % message) # checkTrusted # Check if there is a trust path between two users def checkTrusted(self,from_user, to_user): try: query = "SELECT COUNT(*) FROM tracker_events WHERE type = 1 AND from_user = '%s' AND to_user = '%s'" % (from_user, to_user) u = self.getSingleValue(query) print u if u > 0: return True else: return False except Exception, e: raise Exception('Checking TrustList for user %s failed: %s' % (username,e))
TechEmpower/FrameworkBenchmarks
refs/heads/master
frameworks/Python/pyramid/gunicorn_conf.py
24
# Gunicorn configuration for the pyramid benchmark.
#
# Gunicorn imports this module and reads the module-level names below as
# settings: workers, bind, keepalive, errorlog, pidfile, worker_class and
# the post_fork server hook.
import multiprocessing
import os
import sys

# True when running under PyPy (PyPy exposes sys.pypy_version_info).
_is_pypy = hasattr(sys, 'pypy_version_info')
# True when running inside a Travis CI build.
_is_travis = os.environ.get('TRAVIS') == 'true'

workers = multiprocessing.cpu_count() * 3
if _is_travis:
    # Travis build machines are small; cap the worker count.
    workers = 2

bind = "0.0.0.0:8080"
keepalive = 120
errorlog = '-'
pidfile = '/tmp/gunicorn.pid'

if _is_pypy:
    # meinheld is a CPython C extension; use the tornado worker on PyPy.
    worker_class = "tornado"
else:
    worker_class = "meinheld.gmeinheld.MeinheldWorker"

    def post_fork(server, worker):
        # Disable access log (the hook, and therefore the meinheld import,
        # only exists when the meinheld worker class is selected above).
        import meinheld.server
        meinheld.server.set_access_logger(None)
maheshakya/scikit-learn
refs/heads/master
sklearn/utils/tests/test_testing.py
33
"""Smoke tests for the assertion helpers in ``sklearn.utils.testing``.

Each test exercises one helper both on a passing case and on a failing
case that must raise ``AssertionError``.
"""
import warnings
import unittest
import sys

from nose.tools import assert_raises

from sklearn.utils.testing import (
    _assert_less,
    _assert_greater,
    assert_less_equal,
    assert_greater_equal,
    assert_warns,
    assert_no_warnings,
    assert_equal,
    set_random_state,
    assert_raise_message)

from sklearn.tree import DecisionTreeClassifier
from sklearn.lda import LDA

# nose's assert_less may be missing (hence the ImportError guard), in which
# case the comparison test is simply not defined.
try:
    from nose.tools import assert_less

    def test_assert_less():
        # Check that the nose implementation of assert_less gives the
        # same thing as the scikit's
        assert_less(0, 1)
        _assert_less(0, 1)
        assert_raises(AssertionError, assert_less, 1, 0)
        assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
    pass

# Same guard for nose's assert_greater.
try:
    from nose.tools import assert_greater

    def test_assert_greater():
        # Check that the nose implementation of assert_less gives the
        # same thing as the scikit's
        assert_greater(1, 0)
        _assert_greater(1, 0)
        assert_raises(AssertionError, assert_greater, 0, 1)
        assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
    pass


def test_assert_less_equal():
    """assert_less_equal passes on <=, raises on >."""
    assert_less_equal(0, 1)
    assert_less_equal(1, 1)
    assert_raises(AssertionError, assert_less_equal, 1, 0)


def test_assert_greater_equal():
    """assert_greater_equal passes on >=, raises on <."""
    assert_greater_equal(1, 0)
    assert_greater_equal(1, 1)
    assert_raises(AssertionError, assert_greater_equal, 0, 1)


def test_set_random_state():
    """set_random_state sets random_state when present, no-ops otherwise."""
    lda = LDA()
    tree = DecisionTreeClassifier()
    # LDA doesn't have random state: smoke test
    set_random_state(lda, 3)
    set_random_state(tree, 3)
    assert_equal(tree.random_state, 3)


def test_assert_raise_message():
    """assert_raise_message checks both exception type and message."""
    def _raise_ValueError(message):
        raise ValueError(message)

    # Matching type and message: passes through.
    assert_raise_message(ValueError, "test",
                         _raise_ValueError, "test")

    # Right type, wrong message: AssertionError.
    assert_raises(AssertionError,
                  assert_raise_message, ValueError, "something else",
                  _raise_ValueError, "test")

    # Wrong expected type: the original ValueError propagates.
    assert_raises(ValueError,
                  assert_raise_message, TypeError, "something else",
                  _raise_ValueError, "test")


# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
    def test_warn(self):
        def f():
            warnings.warn("yo")
            return 3

        # Test that assert_warns is not impacted by externally set
        # filters and is reset internally.
        # This is because `clean_warning_registry()` is called internally by
        # assert_warns and clears all previous filters.
        warnings.simplefilter("ignore", UserWarning)
        assert_equal(assert_warns(UserWarning, f), 3)

        # Test that the warning registry is empty after assert_warns
        assert_equal(sys.modules['warnings'].filters, [])

        assert_raises(AssertionError, assert_no_warnings, f)
        assert_equal(assert_no_warnings(lambda x: x, 1), 1)

    def test_warn_wrong_warning(self):
        def f():
            warnings.warn("yo", DeprecationWarning)

        failed = False
        # Save the global warning filters so they can be restored even if
        # assert_warns mutates them.
        filters = sys.modules['warnings'].filters[:]
        try:
            try:
                # Should raise an AssertionError
                assert_warns(UserWarning, f)
                failed = True
            except AssertionError:
                pass
        finally:
            sys.modules['warnings'].filters = filters

        if failed:
            raise AssertionError("wrong warning caught by assert_warn")
JamesTFarrington/hendrix
refs/heads/WSGI
hendrix/contrib/cache/resource.py
5
"""Caching reverse-proxy resources for hendrix.

CacheClient/CacheClientFactory mirror twisted's proxy client classes while
keeping a local copy of the response so CacheProxyResource can serve it
from the cache on later requests.
"""
import cStringIO
import gzip  # FIX: used by CacheClient.compressBuffer but was never imported
import urlparse

from . import decompressBuffer, compressBuffer
from .backends.memory_cache import MemoryCacheBackend
from hendrix.utils import responseInColor
from twisted.internet import reactor
from twisted.web import proxy, client
from twisted.web.server import NOT_DONE_YET
from urllib import quote as urlquote


class CacheClient(proxy.ProxyClient):
    """
    SHOW ME THE CACHE BABY!

    A ProxyClient that buffers the upstream response locally so the
    resource can cache it once the response has been fully received.
    """

    def __init__(self, command, rest, version, headers, data, father, resource):
        proxy.ProxyClient.__init__(
            self, command, rest, version, headers, data, father
        )
        self.resource = resource
        # Local copy of the response body, filled in handleResponsePart.
        self.buffer = cStringIO.StringIO()
        self._response = None

    def handleHeader(self, key, value):
        "extends handleHeader to save headers to a local response object"
        key_lower = key.lower()
        if key_lower == 'location':
            value = self.modLocationPort(value)
        self._response.headers[key_lower] = value
        if key_lower != 'cache-control':
            # This causes us to not pass on the 'cache-control' parameter
            # to the browser
            # TODO: we should have a means of giving the user the option to
            # configure how they want to manage browser-side cache control
            proxy.ProxyClient.handleHeader(self, key, value)

    def handleStatus(self, version, code, message):
        "extends handleStatus to instantiate a local response object"
        proxy.ProxyClient.handleStatus(self, version, code, message)
        # client.Response is currently just a container for needed data
        self._response = client.Response(version, code, message, {}, None)

    def modLocationPort(self, location):
        """
        Ensures that the location port is the given port value.
        Used in `handleHeader`.
        """
        components = urlparse.urlparse(location)
        reverse_proxy_port = self.father.getHost().port
        reverse_proxy_host = self.father.getHost().host
        # returns an ordered dict of urlparse.ParseResult components
        _components = components._asdict()
        _components['netloc'] = '%s:%d' % (
            reverse_proxy_host, reverse_proxy_port
        )
        return urlparse.urlunparse(_components.values())

    def handleResponseEnd(self):
        """
        Extends handleResponseEnd to not care about the user closing/refreshing
        their browser before the response is finished. Also calls cacheContent
        in a thread that we don't care when it finishes.
        """
        try:
            if not self._finished:
                reactor.callInThread(
                    self.resource.cacheContent,
                    self.father,
                    self._response,
                    self.buffer
                )
            proxy.ProxyClient.handleResponseEnd(self)
        except RuntimeError:
            # because we don't care if the user hits
            # refresh before the request is done
            pass

    def handleResponsePart(self, buffer):
        """
        Sends the content to the browser and keeps a local copy of it.
        buffer is just a str of the content to be shown, father is the initial
        request.
        """
        self.father.write(buffer)
        self.buffer.write(buffer)

    def compressBuffer(self, buffer):
        """
        Gzip-compress `buffer` and return the compressed bytes.

        Note that this code compresses into a buffer held in memory, rather
        than a disk file. This is done through the use of cStringIO.StringIO().
        """
        # http://jython.xhaus.com/http-compression-in-python-and-jython/
        zbuf = cStringIO.StringIO()
        zfile = gzip.GzipFile(mode='wb', fileobj=zbuf, compresslevel=9)
        zfile.write(buffer)
        zfile.close()
        return zbuf.getvalue()


class CacheClientFactory(proxy.ProxyClientFactory):
    """Factory that builds CacheClient protocols bound to a resource."""

    protocol = CacheClient

    def __init__(self, command, rest, version, headers, data, father, resource):
        self.command = command
        self.rest = rest
        self.version = version
        self.headers = headers
        self.data = data
        self.father = father
        self.resource = resource

    def buildProtocol(self, addr):
        return self.protocol(
            self.command, self.rest, self.version,
            self.headers, self.data, self.father,
            self.resource
        )


class CacheProxyResource(proxy.ReverseProxyResource, MemoryCacheBackend):
    """
    This is a state persistent subclass of the built-in ReverseProxyResource.
    """

    def __init__(self, host, to_port, path, reactor=reactor):
        """
        The 'to_port' arg points to the port of the server that we are sending
        a request to
        """
        proxy.ReverseProxyResource.__init__(
            self, host, to_port, path, reactor=reactor
        )
        self.proxyClientFactoryClass = CacheClientFactory

    def getChild(self, path, request):
        """
        This is necessary because the parent class would call
        proxy.ReverseProxyResource instead of CacheProxyResource
        """
        return CacheProxyResource(
            self.host, self.port, self.path + '/' + urlquote(path, safe=""),
            self.reactor
        )

    def getChildWithDefault(self, path, request):
        """
        Retrieve a static or dynamically generated child resource from me.

        Serves from the cache when a cached entry for the request exists.
        """
        cached_resource = self.getCachedResource(request)
        if cached_resource:
            # Log the cache hit without blocking the reactor.
            reactor.callInThread(
                responseInColor,
                request,
                '200 OK',
                cached_resource,
                'Cached',
                'underscore'
            )
            return cached_resource
        # original logic
        if path in self.children:
            return self.children[path]
        return self.getChild(path, request)

    def render(self, request):
        """
        Render a request by forwarding it to the proxied server.
        """
        # set up and evaluate a connection to the target server
        if self.port == 80:
            host = self.host
        else:
            host = "%s:%d" % (self.host, self.port)
        request.requestHeaders.addRawHeader('host', host)
        request.content.seek(0, 0)
        qs = urlparse.urlparse(request.uri)[4]

        if qs:
            rest = self.path + '?' + qs
        else:
            rest = self.path

        global_self = self.getGlobalSelf()

        clientFactory = self.proxyClientFactoryClass(
            request.method, rest, request.clientproto,
            request.getAllHeaders(), request.content.read(), request,
            global_self  # this is new
        )
        self.reactor.connectTCP(self.host, self.port, clientFactory)

        return NOT_DONE_YET

    def decompressContent(self):
        """Decompress the cached content in place."""
        self.content = decompressBuffer(self.content)

    def getGlobalSelf(self):
        """
        This searches the reactor for the original instance of
        CacheProxyResource. This is necessary because with each call of
        getChild a new instance of CacheProxyResource is created.
        """
        transports = self.reactor.getReaders()
        for transport in transports:
            try:
                resource = transport.factory.resource
                if isinstance(resource, self.__class__) and resource.port == self.port:
                    return resource
            except AttributeError:
                pass
        return
zaffra/Donate
refs/heads/master
django/template/loaders/cached.py
79
""" Wrapper class that takes a list of template loaders as an argument and attempts to load templates from them in order, caching the result. """ from django.core.exceptions import ImproperlyConfigured from django.template import TemplateDoesNotExist from django.template.loader import BaseLoader, get_template_from_string, find_template_loader, make_origin from django.utils.hashcompat import sha_constructor from django.utils.importlib import import_module class Loader(BaseLoader): is_usable = True def __init__(self, loaders): self.template_cache = {} self._loaders = loaders self._cached_loaders = [] @property def loaders(self): # Resolve loaders on demand to avoid circular imports if not self._cached_loaders: for loader in self._loaders: self._cached_loaders.append(find_template_loader(loader)) return self._cached_loaders def find_template(self, name, dirs=None): for loader in self.loaders: try: template, display_name = loader(name, dirs) return (template, make_origin(display_name, loader, name, dirs)) except TemplateDoesNotExist: pass raise TemplateDoesNotExist(name) def load_template(self, template_name, template_dirs=None): key = template_name if template_dirs: # If template directories were specified, use a hash to differentiate key = '-'.join([template_name, sha_constructor('|'.join(template_dirs)).hexdigest()]) if key not in self.template_cache: template, origin = self.find_template(template_name, template_dirs) if not hasattr(template, 'render'): try: template = get_template_from_string(template, origin, template_name) except TemplateDoesNotExist: # If compiling the template we found raises TemplateDoesNotExist, # back off to returning the source and display name for the template # we were asked to load. This allows for correct identification (later) # of the actual template that does not exist. return template, origin self.template_cache[key] = template return self.template_cache[key], None def reset(self): "Empty the template cache." 
self.template_cache.clear()
erikge/watch_gyp
refs/heads/master
test/win/gyptest-link-subsystem.py
239
#!/usr/bin/env python

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Make sure subsystem setting is extracted properly.
"""

import TestGyp

import sys

if sys.platform == 'win32':
  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])

  CHDIR = 'linker-flags'
  test.run_gyp('subsystem.gyp', chdir=CHDIR)

  # The *_fail targets are expected to fail to build (status 1); all others
  # must build cleanly.
  failing = ('test_console_fail', 'test_windows_fail')
  for target in ('test_console_ok', 'test_console_fail',
                 'test_windows_ok', 'test_windows_fail',
                 'test_console_xp', 'test_windows_xp'):
    if target in failing:
      test.build('subsystem.gyp', target, chdir=CHDIR, status=1)
    else:
      test.build('subsystem.gyp', target, chdir=CHDIR)

  # Make sure we are targeting XP: the *_xp binaries must carry subsystem
  # version 5.01 in their PE headers.
  def DumpHeaders(exe):
    return test.run_dumpbin('/headers', test.built_file_path(exe, chdir=CHDIR))

  for exe in ('test_console_xp.exe', 'test_windows_xp.exe'):
    if '5.01 subsystem version' not in DumpHeaders(exe):
      test.fail_test()

  # TODO(scottmg): There are other subsystems (WinCE, etc.) that we don't use.

  test.pass_test()
enitihas/SAC-Website
refs/heads/master
venv/bin/venv/lib/python2.7/site-packages/flask/testsuite/test_apps/flask_broken/__init__.py
629
import flask.ext.broken.b import missing_module
mhugent/Quantum-GIS
refs/heads/master
python/plugins/processing/algs/grass7/ext/HtmlReportPostProcessor.py
12
# -*- coding: utf-8 -*-

"""
***************************************************************************
    HtmlReportPostProcessor.py
    ---------------------
    Date                 : December 2012
    Copyright            : (C) 2012 by Victor Olaya
    Email                : volayaf at gmail dot com
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""

__author__ = 'Victor Olaya'
__date__ = 'December 2012'
__copyright__ = '(C) 2012, Victor Olaya'

# This will get replaced with a git SHA1 when you do a git archive

__revision__ = '$Format:%H$'


def postProcessResults(alg):
    """Write the algorithm's console output into its 'html' output file.

    Lines appearing after the one that mentions the GRASS command name
    (but does not start with the 'GRASS' banner prefix) are copied into
    the report; the trailing 'exit' line is skipped.
    """
    htmlFile = alg.getOutputFromName('html').value
    grass7Name = alg.grass7Name
    found = False
    # Use a context manager so the file handle is closed even if a write
    # fails (the original open()/close() pair leaked the handle on error).
    with open(htmlFile, 'w') as f:
        f.write('<h2>' + grass7Name + '</h2>\n')
        for line in alg.consoleOutput:
            if found and not line.strip().endswith('exit'):
                f.write(line + '<br>\n')
            if grass7Name in line and not line.startswith('GRASS'):
                found = True
tongpo/Holle-World
refs/heads/master
py/python3-cookbook/basic/mydesign/d06_proxy.py
1
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Topic: the Proxy (delegation) pattern.

A Proxy stands in front of a RealSubject: callers talk to the proxy, which
can run extra steps before and after delegating the real work. A classic
analogy is a stand-in player acting on someone's behalf.
"""


class RealSubject:
    """The object that performs the actual work."""

    def request(self):
        print('核心业务逻辑')


class Proxy(RealSubject):
    """Delegates request() to a RealSubject, bracketed by before/end hooks."""

    def __init__(self):
        self.real_subject = RealSubject()

    def request(self):
        # Pre-hook, delegated call, post-hook — executed in order.
        for step in (self.before, self.real_subject.request, self.end):
            step()

    def before(self):
        print('before')

    def end(self):
        print('end')


if __name__ == '__main__':
    p = Proxy()
    p.request()
kartikshah1/Test
refs/heads/master
venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/cp949prober.py
2800
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .mbcharsetprober import MultiByteCharSetProber from .codingstatemachine import CodingStateMachine from .chardistribution import EUCKRDistributionAnalysis from .mbcssm import CP949SMModel class CP949Prober(MultiByteCharSetProber): def __init__(self): MultiByteCharSetProber.__init__(self) self._mCodingSM = CodingStateMachine(CP949SMModel) # NOTE: CP949 is a superset of EUC-KR, so the distribution should be # not different. self._mDistributionAnalyzer = EUCKRDistributionAnalysis() self.reset() def get_charset_name(self): return "CP949"
miho030/FoxVc
refs/heads/master
TestCode/Fox2Av/FoxEngine/FoxCore/FoxConst.py
2
# _*_coding:utf-8 _*_ """ Made by Nicht = tayaka = Lee joon sung South Korea. Seoul. Gangnam. gaepodong. contact admin = [email protected] OR [email protected](youtube) OR [email protected](gmail) This is Opensource Computer Anti-Virus program. anyone can modificate this script. n you can edit this program on own your system environment. This AV is compiled by Pycharm-community, made with Python 2.7.12, licensing on GNU Gnu Public License Ver.3. just fun! :D """ # 디버깅 여부 설정/확인 FVCDEBUG = False """ * 악성코드 치료를 지시하는 상수들.. * 이는 커널이 사용자에 명령 하에, 악성코드를 치료할 시에 보내는 scan함수를 보조함. * 정확히는 scan 콜백 함수에서 리턴값을 사용하기 위해 제작. 1. 사용자(의 명령) -> 커널 -> 커널이 우선순위를 조사하고 플러그인엔진을 로딩 -> 2. 플러그인 엔진이 진단 -> 진단 결과를 리턴 -> 3. 리턴된 결과를 바탕으로 tui, gui 등의 환경으로 사용자에게 알리고 명령 하달 후 처리. """ Fvc_ACTION_IGNORE = 0 Fvc_ACTION_CURE = 1 Fvc_ACTION_DELETE = 2 Fvc_ACTION_QUIT = 3
ucsb-seclab/ictf-framework
refs/heads/master
gamebot/gamebot.py
1
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The gamebot is an internal component of the database backend
and is responsible for progressing the game (each "round" of the
game is known as a tick).

Each tick the gamebot decides which scripts to run against which team
and randomizes the order. This information is persisted directly into the
database, then the gamebot sleeps until the next tick.
"""
from __future__ import print_function

__authors__ = "Adam Doupé, Kevin Borgolte, Aravind Machiry"
__version__ = "0.1.1"

from datetime import datetime, timedelta
from dbapi import DBApi
from scripts_facade import ScriptsFacade

import time
import random
import sys
import logging
import coloredlogs


def _get_ticks_configuration(db_api):
    """Return (tick_seconds, num_benign, num_exploit, num_get_flags).

    The configured benign/exploit counts and tick length are jittered
    randomly: counts vary by +/-1 around the configured value (at least 1),
    and the tick length by +/-30 seconds, so each tick is unpredictable.
    """
    tick_time_in_sec, configured_benign, configured_exploit, num_get_flags = db_api.get_tick_config()
    at_least_one = max(1, configured_benign - 1)
    one_more_than_available = configured_benign + 1
    num_benign = random.randint(at_least_one, one_more_than_available)

    # if number of exploits is zero. We do not schedule them.
    if configured_exploit > 0:
        at_least_one = max(1, configured_exploit - 1)
        one_more_than_available = configured_exploit + 1
        num_exploit = random.randint(at_least_one, one_more_than_available)
    else:
        num_exploit = 0

    # Jitter the tick length itself by up to 30 seconds either way.
    tick_time_in_sec = random.uniform(tick_time_in_sec - 30, tick_time_in_sec + 30)
    return tick_time_in_sec, num_benign, num_exploit, num_get_flags


MAIN_LOG_LEVEL = logging.DEBUG
LOG_FMT = '%(levelname)s - %(asctime)s (%(name)s): %(msg)s'


def main():  # pylint:disable=missing-docstring,too-many-locals
    log = logging.getLogger('gamebot_main')
    log.setLevel(MAIN_LOG_LEVEL)
    log_formatter = coloredlogs.ColoredFormatter(LOG_FMT)
    log_handler = logging.StreamHandler()
    log_handler.setFormatter(log_formatter)
    log.addHandler(log_handler)
    log.info("Starting GameBot")
    db_api = DBApi()
    # Check DB connection.
    if db_api.check_connection():
        log.info("Connection to DB Verified.")
    else:
        # NOTE(review): the failure is logged as fatal but execution
        # continues — confirm whether the process should exit here instead.
        log.fatal("Looks like DB is Down. Unable to verify db connection.")

    scripts_interface = ScriptsFacade(db_api)
    log.info("Initialization Complete")

    log.info("Check to see if there's a game running")
    # Outer loop: wait for a game to be active; inner loop: advance ticks.
    while True:
        current_game_id = db_api.get_game_state()
        if current_game_id is None:
            log.info("Game is paused or has not started yet, sleeping, and will retry in a few")
            time.sleep(10)
            continue
        current_tick, seconds_left = db_api.get_current_tick_info()
        if current_tick != 0:
            # Resuming mid-game: wait out the remainder of the current tick.
            log.info("We must be picking up from the last run. Sleep for {} seconds until the next tick.".
                     format(seconds_left))
            time.sleep(seconds_left)
        log.warning("Starting Main Loop")
        tick_id = current_tick
        while True:
            log.info("Starting Iteration")
            current_game_id = db_api.get_game_state()
            if current_game_id is None:
                log.info("Game is paused, breaking out of main loop")
                break
            # Create a new tick and Decide what scripts to run against each team
            sleep_time, num_benign, num_exploit, num_get_flags = _get_ticks_configuration(db_api)

            # Update script to be run, we should update these first as scriptbot
            # waits for tick to pick up the scripts.
            # First, update scripts and then update tick so that
            # when the tick is updated,
            # scriptbot can get all the scripts in a single shot.
            if scripts_interface.update_scripts_to_run(tick_id + 1, num_benign, num_exploit, num_get_flags):
                log.info("Successfully updated scripts to run for:" + str(tick_id + 1))
            else:
                log.error("Failed to update scripts to run for:" + str(tick_id + 1))

            # Change the tick
            current = datetime.now()
            time_to_change = current + timedelta(seconds=sleep_time)
            tick_id = db_api.update_tick_info(time_to_change.isoformat(), current.isoformat())
            log.info("Current Tick {}".format(tick_id))
            log.info("Tick Configuration, Sleep Time:" + str(sleep_time) + ", num_benign:" + str(num_benign) +
                     ", num_exploit:" + str(num_exploit) + ", num_get_flags:" + str(num_get_flags))

            # compute scores for the previous tick (the last completed tick)
            if tick_id > 1:
                # update the state of services for previous tick
                old_time = datetime.now()
                log.info("Updating state of services across all teams.")
                scripts_interface.update_state_of_services(tick_id - 1)
                log.info("Updated state of services across all teams in:" + str(datetime.now() - old_time))
            else:
                log.info("Ignoring Scoring, as this is first tick.")

            # Sleep for the amount of time until the next tick
            time_diff_to_sleep = time_to_change - datetime.now()
            if time_diff_to_sleep.total_seconds() < 0:
                # Scoring took longer than the tick itself; skip sleeping.
                log.warning("Not sleeping at all. I was {} seconds too slow".format(time_diff_to_sleep))
            else:
                seconds_to_sleep = (time_diff_to_sleep.seconds +
                                    (time_diff_to_sleep.microseconds / 1E6))
                log.info("Sleeping for:" + str(seconds_to_sleep))
                time.sleep(seconds_to_sleep)
            log.info("Awake")


if __name__ == "__main__":
    sys.exit(main())
laszlocsomor/tensorflow
refs/heads/master
tensorflow/python/__init__.py
7
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Import core names of TensorFlow. Programs that want to build TensorFlow Ops and Graphs without having to import the constructors and utilities individually can import this file: from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf """ import ctypes import importlib import sys import traceback # TODO(drpng): write up instructions for editing this file in a doc and point to # the doc instead. # If you want to edit this file to expose modules in public tensorflow API, you # need to follow these steps: # 1. Consult with tensorflow team and get approval for adding a new API to the # public interface. # 2. Document the module in the gen_docs_combined.py. # 3. Import the module in the main tensorflow namespace by adding an import # statement in this file. # 4. Sanitize the entry point by making sure that your module does not expose # transitively imported modules used for implementation, such as os, sys. 
# go/tf-wildcard-import # pylint: disable=wildcard-import,g-bad-import-order,g-import-not-at-top import numpy as np from tensorflow.python import pywrap_tensorflow # Protocol buffers from tensorflow.core.framework.graph_pb2 import * from tensorflow.core.framework.node_def_pb2 import * from tensorflow.core.framework.summary_pb2 import * from tensorflow.core.framework.attr_value_pb2 import * from tensorflow.core.protobuf.meta_graph_pb2 import TensorInfo from tensorflow.core.protobuf.meta_graph_pb2 import MetaGraphDef from tensorflow.core.protobuf.config_pb2 import * from tensorflow.core.protobuf.tensorflow_server_pb2 import * from tensorflow.core.util.event_pb2 import * # Framework from tensorflow.python.framework.framework_lib import * from tensorflow.python.framework.versions import * from tensorflow.python.framework import errors from tensorflow.python.framework import graph_util # Session from tensorflow.python.client.client_lib import * # Ops from tensorflow.python.ops.standard_ops import * # Namespaces from tensorflow.python.ops import initializers_ns as initializers # pylint: enable=wildcard-import # Bring in subpackages. 
from tensorflow.python import data from tensorflow.python import keras from tensorflow.python.estimator import estimator_lib as estimator from tensorflow.python.feature_column import feature_column_lib as feature_column from tensorflow.python.layers import layers from tensorflow.python.ops import bitwise_ops as bitwise from tensorflow.python.ops import image_ops as image from tensorflow.python.ops import metrics from tensorflow.python.ops import nn from tensorflow.python.ops import sets from tensorflow.python.ops import spectral_ops as spectral from tensorflow.python.ops.distributions import distributions from tensorflow.python.ops.linalg import linalg from tensorflow.python.ops.losses import losses from tensorflow.python.profiler import profiler from tensorflow.python.saved_model import saved_model from tensorflow.python.summary import summary from tensorflow.python.user_ops import user_ops from tensorflow.python.util import compat # Import the names from python/training.py as train.Name. from tensorflow.python.training import training as train # Sub-package for performing i/o directly instead of via ops in a graph. from tensorflow.python.lib.io import python_io # Make some application and test modules available. from tensorflow.python.platform import app from tensorflow.python.platform import flags from tensorflow.python.platform import gfile from tensorflow.python.platform import tf_logging as logging from tensorflow.python.platform import resource_loader from tensorflow.python.platform import sysconfig from tensorflow.python.platform import test from tensorflow.python.util.all_util import remove_undocumented from tensorflow.python.util.all_util import make_all # Import modules whose docstrings contribute, for use by remove_undocumented # below. 
from tensorflow.python.client import client_lib from tensorflow.python.framework import constant_op from tensorflow.python.framework import framework_lib from tensorflow.python.framework import subscribe from tensorflow.python.ops import array_ops from tensorflow.python.ops import check_ops from tensorflow.python.ops import confusion_matrix as confusion_matrix_m from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import functional_ops from tensorflow.python.ops import histogram_ops from tensorflow.python.ops import io_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import script_ops from tensorflow.python.ops import session_ops from tensorflow.python.ops import sparse_ops from tensorflow.python.ops import state_ops from tensorflow.python.ops import string_ops from tensorflow.python.ops import tensor_array_ops # Symbols whitelisted for export without documentation. # TODO(cwhipkey): review these and move to contrib, expose through # documentation, or remove. _allowed_symbols = [ 'AttrValue', 'ConfigProto', 'ClusterDef', 'DeviceSpec', 'Event', 'GPUOptions', 'GRAPH_DEF_VERSION', 'GRAPH_DEF_VERSION_MIN_CONSUMER', 'GRAPH_DEF_VERSION_MIN_PRODUCER', 'GraphDef', 'GraphOptions', 'HistogramProto', 'LogMessage', 'MetaGraphDef', 'NameAttrList', 'NodeDef', 'OptimizerOptions', 'RunOptions', 'RunMetadata', 'SessionLog', 'Summary', 'SummaryMetadata', 'TensorInfo', # Used for tf.saved_model functionality. ] # The following symbols are kept for compatibility. It is our plan # to remove them in the future. _allowed_symbols.extend([ 'arg_max', 'arg_min', 'mul', # use tf.multiply instead. 'neg', # use tf.negative instead. 'sub', # use tf.subtract instead. 'create_partitioned_variables', 'deserialize_many_sparse', 'lin_space', 'list_diff', # Use tf.listdiff instead. 'listdiff', # Use tf.listdiff instead. 'parse_single_sequence_example', 'serialize_many_sparse', 'serialize_sparse', 'sparse_matmul', ## use tf.matmul instead. 
]) # This is needed temporarily because we import it explicitly. _allowed_symbols.extend([ 'pywrap_tensorflow', ]) # Dtypes exported by framework/dtypes.py. # TODO(cwhipkey): expose these through documentation. _allowed_symbols.extend([ 'QUANTIZED_DTYPES', 'bfloat16', 'bool', 'complex64', 'complex128', 'double', 'half', 'float16', 'float32', 'float64', 'int16', 'int32', 'int64', 'int8', 'qint16', 'qint32', 'qint8', 'quint16', 'quint8', 'string', 'uint64', 'uint32', 'uint16', 'uint8', 'resource', 'variant', ]) # Export modules and constants. _allowed_symbols.extend([ 'app', 'bitwise', 'compat', 'data', 'distributions', 'errors', 'estimator', 'feature_column', 'flags', 'gfile', 'graph_util', 'image', 'initializers', 'keras', 'layers', 'linalg', 'logging', 'losses', 'metrics', 'newaxis', 'nn', 'profiler', 'python_io', 'resource_loader', 'saved_model', 'sets', 'spectral', 'summary', 'sysconfig', 'test', 'train', 'user_ops', ]) # Variables framework.versions: _allowed_symbols.extend([ 'VERSION', 'GIT_VERSION', 'COMPILER_VERSION', 'CXX11_ABI_FLAG', ]) # Remove all extra symbols that don't have a docstring or are not explicitly # referenced in the whitelist. remove_undocumented(__name__, _allowed_symbols, [ framework_lib, array_ops, check_ops, client_lib, compat, constant_op, control_flow_ops, confusion_matrix_m, data, distributions, functional_ops, histogram_ops, io_ops, keras, layers, losses, math_ops, metrics, nn, profiler, resource_loader, sets, script_ops, session_ops, sparse_ops, state_ops, string_ops, summary, tensor_array_ops, train ]) # Special dunders that we choose to export: _exported_dunders = set([ '__version__', '__git_version__', '__compiler_version__', '__cxx11_abi_flag__', ]) # Expose symbols minus dunders, unless they are whitelisted above. # This is necessary to export our dunders. __all__ = [s for s in dir() if s in _exported_dunders or not s.startswith('_')]
sunjeammy/tornado
refs/heads/master
tornado/util.py
102
"""Miscellaneous utility functions and classes.

This module is used internally by Tornado.  It is not necessarily expected
that the functions and classes defined here will be useful to other
applications, but they are documented here in case they are.

The one public-facing part of this module is the `Configurable` class
and its `~Configurable.configure` method, which becomes a part of the
interface of its subclasses, including `.AsyncHTTPClient`, `.IOLoop`,
and `.Resolver`.
"""

from __future__ import absolute_import, division, print_function, with_statement

import array
import inspect
import os
import sys
import zlib


try:
    xrange  # py2
except NameError:
    xrange = range  # py3


class ObjectDict(dict):
    """Makes a dictionary behave like an object, with attribute-style access.
    """
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            # Attribute protocol expects AttributeError, not KeyError.
            raise AttributeError(name)

    def __setattr__(self, name, value):
        self[name] = value


class GzipDecompressor(object):
    """Streaming gzip decompressor.

    The interface is like that of `zlib.decompressobj` (without some of
    the optional arguments), but it understands gzip headers and
    checksums.
    """
    def __init__(self):
        # Magic parameter makes zlib module understand gzip header
        # http://stackoverflow.com/questions/1838699/how-can-i-decompress-a-gzip-stream-with-zlib
        # This works on cpython and pypy, but not jython.
        self.decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS)

    def decompress(self, value, max_length=None):
        """Decompress a chunk, returning newly-available data.

        Some data may be buffered for later processing; `flush` must
        be called when there is no more input data to ensure that
        all data was processed.

        If ``max_length`` is given, some input data may be left over
        in ``unconsumed_tail``; you must retrieve this value and pass
        it back to a future call to `decompress` if it is not empty.
        """
        if max_length is None:
            # BUG FIX: zlib's decompressobj.decompress requires an int
            # for max_length; passing the default None through raised
            # TypeError.  Omit the argument to mean "no limit".
            return self.decompressobj.decompress(value)
        return self.decompressobj.decompress(value, max_length)

    @property
    def unconsumed_tail(self):
        """Returns the unconsumed portion left over
        """
        return self.decompressobj.unconsumed_tail

    def flush(self):
        """Return any remaining buffered data not yet returned by decompress.

        Also checks for errors such as truncated input.
        No other methods may be called on this object after `flush`.
        """
        return self.decompressobj.flush()


def import_object(name):
    """Imports an object by name.

    import_object('x') is equivalent to 'import x'.
    import_object('x.y.z') is equivalent to 'from x.y import z'.

    >>> import tornado.escape
    >>> import_object('tornado.escape') is tornado.escape
    True
    >>> import_object('tornado.escape.utf8') is tornado.escape.utf8
    True
    >>> import_object('tornado') is tornado
    True
    >>> import_object('tornado.missing_module')
    Traceback (most recent call last):
        ...
    ImportError: No module named missing_module
    """
    if name.count('.') == 0:
        # Plain module name: equivalent to "import name".
        return __import__(name, None, None)

    parts = name.split('.')
    obj = __import__('.'.join(parts[:-1]), None, None, [parts[-1]], 0)
    try:
        return getattr(obj, parts[-1])
    except AttributeError:
        raise ImportError("No module named %s" % parts[-1])


# Fake unicode literal support:  Python 3.2 doesn't have the u'' marker for
# literal strings, and alternative solutions like "from __future__ import
# unicode_literals" have other problems (see PEP 414).  u() can be applied
# to ascii strings that include \u escapes (but they must not contain
# literal non-ascii characters).
if type('') is not type(b''):
    def u(s):
        return s
    unicode_type = str
    basestring_type = str
else:
    def u(s):
        return s.decode('unicode_escape')
    unicode_type = unicode
    basestring_type = basestring

# Deprecated alias that was used before we dropped py25 support.
# Left here in case anyone outside Tornado is using it.
bytes_type = bytes

# The py2 branch below uses the three-argument "raise" and "exec ... in"
# statements, which are syntax errors on py3; both variants are therefore
# defined via exec() of a version-appropriate source string.
if sys.version_info > (3,):
    exec("""
def raise_exc_info(exc_info):
    raise exc_info[1].with_traceback(exc_info[2])

def exec_in(code, glob, loc=None):
    if isinstance(code, str):
        code = compile(code, '<string>', 'exec', dont_inherit=True)
    exec(code, glob, loc)
""")
else:
    exec("""
def raise_exc_info(exc_info):
    raise exc_info[0], exc_info[1], exc_info[2]

def exec_in(code, glob, loc=None):
    if isinstance(code, basestring):
        # exec(string) inherits the caller's future imports; compile
        # the string first to prevent that.
        code = compile(code, '<string>', 'exec', dont_inherit=True)
    exec code in glob, loc
""")


def errno_from_exception(e):
    """Provides the errno from an Exception object.

    There are cases that the errno attribute was not set so we pull
    the errno out of the args but if someone instantiates an Exception
    without any args you will get a tuple error. So this function
    abstracts all that behavior to give you a safe way to get the
    errno.
    """

    if hasattr(e, 'errno'):
        return e.errno
    elif e.args:
        # Fall back to args[0]; OSError subclasses conventionally put
        # the errno there.
        return e.args[0]
    else:
        return None


class Configurable(object):
    """Base class for configurable interfaces.

    A configurable interface is an (abstract) class whose constructor
    acts as a factory function for one of its implementation
    subclasses.  The implementation subclass as well as optional
    keyword arguments to its initializer can be set globally at runtime
    with `configure`.

    By using the constructor as the factory method, the interface
    looks like a normal class, `isinstance` works as usual, etc.  This
    pattern is most useful when the choice of implementation is likely
    to be a global decision (e.g. when `~select.epoll` is available,
    always use it instead of `~select.select`), or when a
    previously-monolithic class has been split into specialized
    subclasses.

    Configurable subclasses must define the class methods
    `configurable_base` and `configurable_default`, and use the instance
    method `initialize` instead of ``__init__``.
    """
    # Both attributes are name-mangled (_Configurable__impl_class), so each
    # configurable hierarchy's state lives on its base class regardless of
    # which subclass reads or writes it.
    __impl_class = None
    __impl_kwargs = None

    def __new__(cls, **kwargs):
        base = cls.configurable_base()
        args = {}
        if cls is base:
            # Instantiating the base class: dispatch to the configured
            # (or default) implementation, merging configured kwargs first
            # so explicit call-site kwargs win.
            impl = cls.configured_class()
            if base.__impl_kwargs:
                args.update(base.__impl_kwargs)
        else:
            # Instantiating a concrete subclass directly: no dispatch.
            impl = cls
        args.update(kwargs)
        instance = super(Configurable, cls).__new__(impl)
        # initialize vs __init__ chosen for compatibility with AsyncHTTPClient
        # singleton magic.  If we get rid of that we can switch to __init__
        # here too.
        instance.initialize(**args)
        return instance

    @classmethod
    def configurable_base(cls):
        """Returns the base class of a configurable hierarchy.

        This will normally return the class in which it is defined.
        (which is *not* necessarily the same as the cls classmethod parameter).
        """
        raise NotImplementedError()

    @classmethod
    def configurable_default(cls):
        """Returns the implementation class to be used if none is configured."""
        raise NotImplementedError()

    def initialize(self):
        """Initialize a `Configurable` subclass instance.

        Configurable classes should use `initialize` instead of ``__init__``.
        """

    @classmethod
    def configure(cls, impl, **kwargs):
        """Sets the class to use when the base class is instantiated.

        Keyword arguments will be saved and added to the arguments passed
        to the constructor.  This can be used to set global defaults for
        some parameters.
        """
        base = cls.configurable_base()
        if isinstance(impl, (unicode_type, bytes)):
            # Accept a dotted name and resolve it to the actual class.
            impl = import_object(impl)
        if impl is not None and not issubclass(impl, cls):
            raise ValueError("Invalid subclass of %s" % cls)
        base.__impl_class = impl
        base.__impl_kwargs = kwargs

    @classmethod
    def configured_class(cls):
        """Returns the currently configured class."""
        base = cls.configurable_base()
        if cls.__impl_class is None:
            # Nothing configured yet: lazily fall back to the default and
            # cache it on the base class.
            base.__impl_class = cls.configurable_default()
        return base.__impl_class

    @classmethod
    def _save_configuration(cls):
        # Snapshot (impl, kwargs) so tests can restore it afterwards.
        base = cls.configurable_base()
        return (base.__impl_class, base.__impl_kwargs)

    @classmethod
    def _restore_configuration(cls, saved):
        # Inverse of _save_configuration.
        base = cls.configurable_base()
        base.__impl_class = saved[0]
        base.__impl_kwargs = saved[1]


class ArgReplacer(object):
    """Replaces one value in an ``args, kwargs`` pair.

    Inspects the function signature to find an argument by name
    whether it is passed by position or keyword.  For use in decorators
    and similar wrappers.
    """
    def __init__(self, func, name):
        self.name = name
        try:
            # NOTE(review): inspect.getargspec is deprecated on modern
            # Python 3 (removed in 3.11) — confirm target interpreter.
            self.arg_pos = inspect.getargspec(func).args.index(self.name)
        except ValueError:
            # Not a positional parameter
            self.arg_pos = None

    def get_old_value(self, args, kwargs, default=None):
        """Returns the old value of the named argument without replacing it.

        Returns ``default`` if the argument is not present.
        """
        if self.arg_pos is not None and len(args) > self.arg_pos:
            return args[self.arg_pos]
        else:
            return kwargs.get(self.name, default)

    def replace(self, new_value, args, kwargs):
        """Replace the named argument in ``args, kwargs`` with ``new_value``.

        Returns ``(old_value, args, kwargs)``.  The returned ``args`` and
        ``kwargs`` objects may not be the same as the input objects, or
        the input objects may be mutated.

        If the named argument was not found, ``new_value`` will be added
        to ``kwargs`` and None will be returned as ``old_value``.
        """
        if self.arg_pos is not None and len(args) > self.arg_pos:
            # The arg to replace is passed positionally
            old_value = args[self.arg_pos]
            args = list(args)  # *args is normally a tuple
            args[self.arg_pos] = new_value
        else:
            # The arg to replace is either omitted or passed by keyword.
            old_value = kwargs.get(self.name)
            kwargs[self.name] = new_value
        return old_value, args, kwargs


def timedelta_to_seconds(td):
    """Equivalent to td.total_seconds() (introduced in python 2.7)."""
    return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)


def _websocket_mask_python(mask, data):
    """Websocket masking function.

    `mask` is a `bytes` object of length 4; `data` is a `bytes` object of any length.
    Returns a `bytes` object of the same length as `data` with the mask applied
    as specified in section 5.3 of RFC 6455.

    This pure-python implementation may be replaced by an optimized version when available.
    """
    # XOR each byte of data with the mask byte at the same index mod 4.
    mask = array.array("B", mask)
    unmasked = array.array("B", data)
    for i in xrange(len(data)):
        unmasked[i] = unmasked[i] ^ mask[i % 4]
    if hasattr(unmasked, 'tobytes'):
        # tostring was deprecated in py32.  It hasn't been removed,
        # but since we turn on deprecation warnings in our tests
        # we need to use the right one.
        return unmasked.tobytes()
    else:
        return unmasked.tostring()

if (os.environ.get('TORNADO_NO_EXTENSION') or
        os.environ.get('TORNADO_EXTENSION') == '0'):
    # These environment variables exist to make it easier to do performance
    # comparisons; they are not guaranteed to remain supported in the future.
    _websocket_mask = _websocket_mask_python
else:
    try:
        # Prefer the C extension when it is built.
        from tornado.speedups import websocket_mask as _websocket_mask
    except ImportError:
        if os.environ.get('TORNADO_EXTENSION') == '1':
            # User explicitly required the extension: surface the failure.
            raise
        _websocket_mask = _websocket_mask_python


def doctests():
    # Test-suite hook: collect this module's doctests.
    import doctest
    return doctest.DocTestSuite()
jimmy-ren/RPN2T
refs/heads/master
external/_caffe/python/caffe/test/test_solver.py
33
import unittest
import tempfile
import os
import numpy as np
import six

import caffe
from test_net import simple_net_file


class TestSolver(unittest.TestCase):
    """Tests for the pycaffe SGDSolver wrapper: solving to max_iter,
    net lifetime after solver destruction, and snapshotting."""

    def setUp(self):
        # Generate a small net (13 outputs) and write a throwaway solver
        # prototxt that references it; delete=False so caffe can reopen
        # the file by name after we close it.
        self.num_output = 13
        net_f = simple_net_file(self.num_output)
        f = tempfile.NamedTemporaryFile(mode='w+', delete=False)
        f.write("""net: '""" + net_f + """'
test_iter: 10 test_interval: 10 base_lr: 0.01 momentum: 0.9
weight_decay: 0.0005 lr_policy: 'inv' gamma: 0.0001 power: 0.75
display: 100 max_iter: 100 snapshot_after_train: false
snapshot_prefix: "model"
""")
        f.close()
        self.solver = caffe.SGDSolver(f.name)
        # also make sure get_solver runs
        caffe.get_solver(f.name)
        caffe.set_mode_cpu()
        # fill in valid labels
        self.solver.net.blobs['label'].data[...] = \
            np.random.randint(self.num_output,
                size=self.solver.net.blobs['label'].data.shape)
        self.solver.test_nets[0].blobs['label'].data[...] = \
            np.random.randint(self.num_output,
                size=self.solver.test_nets[0].blobs['label'].data.shape)
        # Prototxt and net definition are loaded; remove the temp files.
        os.remove(f.name)
        os.remove(net_f)

    def test_solve(self):
        # solve() should run exactly max_iter (=100) iterations.
        self.assertEqual(self.solver.iter, 0)
        self.solver.solve()
        self.assertEqual(self.solver.iter, 100)

    def test_net_memory(self):
        """Check that nets survive after the solver is destroyed."""
        nets = [self.solver.net] + list(self.solver.test_nets)
        self.assertEqual(len(nets), 2)
        del self.solver

        # Touch every param and blob; would crash if the underlying
        # memory had been freed with the solver.
        total = 0
        for net in nets:
            for ps in six.itervalues(net.params):
                for p in ps:
                    total += p.data.sum() + p.diff.sum()
            for bl in six.itervalues(net.blobs):
                total += bl.data.sum() + bl.diff.sum()

    def test_snapshot(self):
        # snapshot_prefix "model" at iter 0 produces these two files.
        self.solver.snapshot()
        # Check that these files exist and then remove them
        files = ['model_iter_0.caffemodel', 'model_iter_0.solverstate']
        for fn in files:
            assert os.path.isfile(fn)
            os.remove(fn)
Weil0ng/gem5
refs/heads/master
src/sim/probe/Probe.py
62
# -*- mode:python -*- # Copyright (c) 2013 ARM Limited # All rights reserved. # # The license below extends only to copyright in the software and shall # not be construed as granting a license to any other intellectual # property including but not limited to intellectual property relating # to a hardware implementation of the functionality of the software # licensed hereunder. You may use the software subject to the license # terms below provided that you ensure that this notice is replicated # unmodified and in its entirety in all distributions of the software, # modified or unmodified, in source code or in binary form. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Matt Horsnell

from m5.SimObject import SimObject
from m5.params import *
from m5.proxy import *


class ProbeListenerObject(SimObject):
    """Python-side configuration for the probe listener SimObject.

    Declares the C++ type and header, and a ``manager`` parameter that
    defaults to the nearest ancestor providing a ProbeManager
    (``Parent.any`` proxy resolution).
    """
    type = 'ProbeListenerObject'
    # C++ implementation lives here.
    cxx_header = 'sim/probe/probe.hh'
    # Resolved from the enclosing SimObject hierarchy at instantiation.
    manager = Param.SimObject(Parent.any, "ProbeManager")
ppmt/Crust
refs/heads/master
flask/lib/python2.7/site-packages/flask/app.py
427
# -*- coding: utf-8 -*- """ flask.app ~~~~~~~~~ This module implements the central WSGI application object. :copyright: (c) 2011 by Armin Ronacher. :license: BSD, see LICENSE for more details. """ import os import sys from threading import Lock from datetime import timedelta from itertools import chain from functools import update_wrapper from werkzeug.datastructures import ImmutableDict from werkzeug.routing import Map, Rule, RequestRedirect, BuildError from werkzeug.exceptions import HTTPException, InternalServerError, \ MethodNotAllowed, BadRequest from .helpers import _PackageBoundObject, url_for, get_flashed_messages, \ locked_cached_property, _endpoint_from_view_func, find_package from . import json from .wrappers import Request, Response from .config import ConfigAttribute, Config from .ctx import RequestContext, AppContext, _AppCtxGlobals from .globals import _request_ctx_stack, request, session, g from .sessions import SecureCookieSessionInterface from .module import blueprint_is_module from .templating import DispatchingJinjaLoader, Environment, \ _default_template_ctx_processor from .signals import request_started, request_finished, got_request_exception, \ request_tearing_down, appcontext_tearing_down from ._compat import reraise, string_types, text_type, integer_types # a lock used for logger initialization _logger_lock = Lock() def _make_timedelta(value): if not isinstance(value, timedelta): return timedelta(seconds=value) return value def setupmethod(f): """Wraps a method so that it performs a check in debug mode if the first request was already handled. """ def wrapper_func(self, *args, **kwargs): if self.debug and self._got_first_request: raise AssertionError('A setup function was called after the ' 'first request was handled. 
This usually indicates a bug ' 'in the application where a module was not imported ' 'and decorators or other functionality was called too late.\n' 'To fix this make sure to import all your view modules, ' 'database models and everything related at a central place ' 'before the application starts serving requests.') return f(self, *args, **kwargs) return update_wrapper(wrapper_func, f) class Flask(_PackageBoundObject): """The flask object implements a WSGI application and acts as the central object. It is passed the name of the module or package of the application. Once it is created it will act as a central registry for the view functions, the URL rules, template configuration and much more. The name of the package is used to resolve resources from inside the package or the folder the module is contained in depending on if the package parameter resolves to an actual python package (a folder with an `__init__.py` file inside) or a standard module (just a `.py` file). For more information about resource loading, see :func:`open_resource`. Usually you create a :class:`Flask` instance in your main module or in the `__init__.py` file of your package like this:: from flask import Flask app = Flask(__name__) .. admonition:: About the First Parameter The idea of the first parameter is to give Flask an idea what belongs to your application. This name is used to find resources on the file system, can be used by extensions to improve debugging information and a lot more. So it's important what you provide there. If you are using a single module, `__name__` is always the correct value. If you however are using a package, it's usually recommended to hardcode the name of your package there. For example if your application is defined in `yourapplication/app.py` you should create it with one of the two versions below:: app = Flask('yourapplication') app = Flask(__name__.split('.')[0]) Why is that? 
The application will work even with `__name__`, thanks to how resources are looked up. However it will make debugging more painful. Certain extensions can make assumptions based on the import name of your application. For example the Flask-SQLAlchemy extension will look for the code in your application that triggered an SQL query in debug mode. If the import name is not properly set up, that debugging information is lost. (For example it would only pick up SQL queries in `yourapplication.app` and not `yourapplication.views.frontend`) .. versionadded:: 0.7 The `static_url_path`, `static_folder`, and `template_folder` parameters were added. .. versionadded:: 0.8 The `instance_path` and `instance_relative_config` parameters were added. :param import_name: the name of the application package :param static_url_path: can be used to specify a different path for the static files on the web. Defaults to the name of the `static_folder` folder. :param static_folder: the folder with static files that should be served at `static_url_path`. Defaults to the ``'static'`` folder in the root path of the application. :param template_folder: the folder that contains the templates that should be used by the application. Defaults to ``'templates'`` folder in the root path of the application. :param instance_path: An alternative instance path for the application. By default the folder ``'instance'`` next to the package or module is assumed to be the instance path. :param instance_relative_config: if set to `True` relative filenames for loading the config are assumed to be relative to the instance path instead of the application root. """ #: The class that is used for request objects. See :class:`~flask.Request` #: for more information. request_class = Request #: The class that is used for response objects. See #: :class:`~flask.Response` for more information. response_class = Response #: The class that is used for the :data:`~flask.g` instance. 
#: #: Example use cases for a custom class: #: #: 1. Store arbitrary attributes on flask.g. #: 2. Add a property for lazy per-request database connectors. #: 3. Return None instead of AttributeError on expected attributes. #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. #: #: In Flask 0.9 this property was called `request_globals_class` but it #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the #: flask.g object is not application context scoped. #: #: .. versionadded:: 0.10 app_ctx_globals_class = _AppCtxGlobals # Backwards compatibility support def _get_request_globals_class(self): return self.app_ctx_globals_class def _set_request_globals_class(self, value): from warnings import warn warn(DeprecationWarning('request_globals_class attribute is now ' 'called app_ctx_globals_class')) self.app_ctx_globals_class = value request_globals_class = property(_get_request_globals_class, _set_request_globals_class) del _get_request_globals_class, _set_request_globals_class #: The debug flag. Set this to `True` to enable debugging of the #: application. In debug mode the debugger will kick in when an unhandled #: exception occurs and the integrated server will automatically reload #: the application if changes in the code are detected. #: #: This attribute can also be configured from the config with the `DEBUG` #: configuration key. Defaults to `False`. debug = ConfigAttribute('DEBUG') #: The testing flag. Set this to `True` to enable the test mode of #: Flask extensions (and in the future probably also Flask itself). #: For example this might activate unittest helpers that have an #: additional runtime cost which should not be enabled by default. #: #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the #: default it's implicitly enabled. #: #: This attribute can also be configured from the config with the #: `TESTING` configuration key. Defaults to `False`. 
testing = ConfigAttribute('TESTING') #: If a secret key is set, cryptographic components can use this to #: sign cookies and other things. Set this to a complex random value #: when you want to use the secure cookie for instance. #: #: This attribute can also be configured from the config with the #: `SECRET_KEY` configuration key. Defaults to `None`. secret_key = ConfigAttribute('SECRET_KEY') #: The secure cookie uses this for the name of the session cookie. #: #: This attribute can also be configured from the config with the #: `SESSION_COOKIE_NAME` configuration key. Defaults to ``'session'`` session_cookie_name = ConfigAttribute('SESSION_COOKIE_NAME') #: A :class:`~datetime.timedelta` which is used to set the expiration #: date of a permanent session. The default is 31 days which makes a #: permanent session survive for roughly one month. #: #: This attribute can also be configured from the config with the #: `PERMANENT_SESSION_LIFETIME` configuration key. Defaults to #: ``timedelta(days=31)`` permanent_session_lifetime = ConfigAttribute('PERMANENT_SESSION_LIFETIME', get_converter=_make_timedelta) #: Enable this if you want to use the X-Sendfile feature. Keep in #: mind that the server has to support this. This only affects files #: sent with the :func:`send_file` method. #: #: .. versionadded:: 0.2 #: #: This attribute can also be configured from the config with the #: `USE_X_SENDFILE` configuration key. Defaults to `False`. use_x_sendfile = ConfigAttribute('USE_X_SENDFILE') #: The name of the logger to use. By default the logger name is the #: package name passed to the constructor. #: #: .. versionadded:: 0.4 logger_name = ConfigAttribute('LOGGER_NAME') #: Enable the deprecated module support? This is active by default #: in 0.7 but will be changed to False in 0.8. With Flask 1.0 modules #: will be removed in favor of Blueprints enable_modules = True #: The logging format used for the debug logger. 
This is only used when #: the application is in debug mode, otherwise the attached logging #: handler does the formatting. #: #: .. versionadded:: 0.3 debug_log_format = ( '-' * 80 + '\n' + '%(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:\n' + '%(message)s\n' + '-' * 80 ) #: The JSON encoder class to use. Defaults to :class:`~flask.json.JSONEncoder`. #: #: .. versionadded:: 0.10 json_encoder = json.JSONEncoder #: The JSON decoder class to use. Defaults to :class:`~flask.json.JSONDecoder`. #: #: .. versionadded:: 0.10 json_decoder = json.JSONDecoder #: Options that are passed directly to the Jinja2 environment. jinja_options = ImmutableDict( extensions=['jinja2.ext.autoescape', 'jinja2.ext.with_'] ) #: Default configuration parameters. default_config = ImmutableDict({ 'DEBUG': False, 'TESTING': False, 'PROPAGATE_EXCEPTIONS': None, 'PRESERVE_CONTEXT_ON_EXCEPTION': None, 'SECRET_KEY': None, 'PERMANENT_SESSION_LIFETIME': timedelta(days=31), 'USE_X_SENDFILE': False, 'LOGGER_NAME': None, 'SERVER_NAME': None, 'APPLICATION_ROOT': None, 'SESSION_COOKIE_NAME': 'session', 'SESSION_COOKIE_DOMAIN': None, 'SESSION_COOKIE_PATH': None, 'SESSION_COOKIE_HTTPONLY': True, 'SESSION_COOKIE_SECURE': False, 'MAX_CONTENT_LENGTH': None, 'SEND_FILE_MAX_AGE_DEFAULT': 12 * 60 * 60, # 12 hours 'TRAP_BAD_REQUEST_ERRORS': False, 'TRAP_HTTP_EXCEPTIONS': False, 'PREFERRED_URL_SCHEME': 'http', 'JSON_AS_ASCII': True, 'JSON_SORT_KEYS': True, 'JSONIFY_PRETTYPRINT_REGULAR': True, }) #: The rule object to use for URL rules created. This is used by #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`. #: #: .. versionadded:: 0.7 url_rule_class = Rule #: the test client that is used with when `test_client` is used. #: #: .. versionadded:: 0.7 test_client_class = None #: the session interface to use. By default an instance of #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here. #: #: .. 
    #: versionadded:: 0.8
    session_interface = SecureCookieSessionInterface()

    def __init__(self, import_name, static_path=None, static_url_path=None,
                 static_folder='static', template_folder='templates',
                 instance_path=None, instance_relative_config=False):
        _PackageBoundObject.__init__(self, import_name,
                                     template_folder=template_folder)
        if static_path is not None:
            # `static_path` was renamed to `static_url_path`; honor the old
            # keyword but warn about the deprecation.
            from warnings import warn
            warn(DeprecationWarning('static_path is now called '
                                    'static_url_path'), stacklevel=2)
            static_url_path = static_path
        if static_url_path is not None:
            self.static_url_path = static_url_path
        if static_folder is not None:
            self.static_folder = static_folder
        if instance_path is None:
            instance_path = self.auto_find_instance_path()
        elif not os.path.isabs(instance_path):
            raise ValueError('If an instance path is provided it must be '
                             'absolute. A relative path was given instead.')

        #: Holds the path to the instance folder.
        #:
        #: .. versionadded:: 0.8
        self.instance_path = instance_path

        #: The configuration dictionary as :class:`Config`.  This behaves
        #: exactly like a regular dictionary but supports additional methods
        #: to load a config from files.
        self.config = self.make_config(instance_relative_config)

        # Prepare the deferred setup of the logger.  The logger itself is
        # created lazily by the :attr:`logger` property.
        self._logger = None
        self.logger_name = self.import_name

        #: A dictionary of all view functions registered.  The keys will
        #: be function names which are also used to generate URLs and
        #: the values are the function objects themselves.
        #: To register a view function, use the :meth:`route` decorator.
        self.view_functions = {}

        # support for the now deprecated `error_handlers` attribute.  The
        # :attr:`error_handler_spec` shall be used now.
        self._error_handlers = {}

        #: A dictionary of all registered error handlers.  The key is `None`
        #: for error handlers active on the application, otherwise the key is
        #: the name of the blueprint.  Each key points to another dictionary
        #: where the key is the status code of the http exception.  The
        #: special key `None` points to a list of tuples where the first item
        #: is the class for the instance check and the second the error
        #: handler function.
        #:
        #: To register an error handler, use the :meth:`errorhandler`
        #: decorator.
        self.error_handler_spec = {None: self._error_handlers}

        #: A list of functions that are called when :meth:`url_for` raises a
        #: :exc:`~werkzeug.routing.BuildError`.  Each function registered here
        #: is called with `error`, `endpoint` and `values`.  If a function
        #: returns `None` or raises a `BuildError` the next function is
        #: tried.
        #:
        #: .. versionadded:: 0.9
        self.url_build_error_handlers = []

        #: A dictionary with lists of functions that should be called at the
        #: beginning of the request.  The key of the dictionary is the name of
        #: the blueprint this function is active for, `None` for all requests.
        #: This can for example be used to open database connections or
        #: getting hold of the currently logged in user.  To register a
        #: function here, use the :meth:`before_request` decorator.
        self.before_request_funcs = {}

        #: A list of functions that should be called at the beginning of the
        #: first request to this instance.  To register a function here, use
        #: the :meth:`before_first_request` decorator.
        #:
        #: .. versionadded:: 0.8
        self.before_first_request_funcs = []

        #: A dictionary with lists of functions that should be called after
        #: each request.  The key of the dictionary is the name of the
        #: blueprint this function is active for, `None` for all requests.
        #: To register a function here, use the :meth:`after_request`
        #: decorator.
        self.after_request_funcs = {}

        #: A dictionary with lists of functions that are called after
        #: each request, even if an exception has occurred.  The key of the
        #: dictionary is the name of the blueprint this function is active
        #: for, `None` for all requests.  These functions are not allowed to
        #: modify the request, and their return values are ignored.  If an
        #: exception occurred while processing the request, it gets passed to
        #: each teardown_request function.  To register a function here, use
        #: the :meth:`teardown_request` decorator.
        #:
        #: .. versionadded:: 0.7
        self.teardown_request_funcs = {}

        #: A list of functions that are called when the application context
        #: is destroyed.  Since the application context is also torn down
        #: if the request ends this is the place to store code that
        #: disconnects from databases.
        #:
        #: .. versionadded:: 0.9
        self.teardown_appcontext_funcs = []

        #: A dictionary with lists of functions that can be used as URL
        #: value processor functions.  Whenever a URL is built these functions
        #: are called to modify the dictionary of values in place.  The key
        #: `None` here is used for application wide callbacks, otherwise the
        #: key is the name of the blueprint.  Each of these functions has the
        #: chance to modify the dictionary
        #:
        #: .. versionadded:: 0.7
        self.url_value_preprocessors = {}

        #: A dictionary with lists of functions that can be used as URL value
        #: preprocessors.  The key `None` here is used for application wide
        #: callbacks, otherwise the key is the name of the blueprint.
        #: Each of these functions has the chance to modify the dictionary
        #: of URL values before they are used as the keyword arguments of the
        #: view function.  For each function registered this one should also
        #: provide a :meth:`url_defaults` function that adds the parameters
        #: automatically again that were removed that way.
        #:
        #: .. versionadded:: 0.7
        self.url_default_functions = {}

        #: A dictionary with lists of functions that are called without
        #: argument to populate the template context.  The key of the
        #: dictionary is the name of the blueprint this function is active
        #: for, `None` for all requests.  Each returns a dictionary that the
        #: template context is updated with.  To register a function here,
        #: use the :meth:`context_processor` decorator.
        self.template_context_processors = {
            None: [_default_template_ctx_processor]
        }

        #: all the attached blueprints in a dictionary by name.  Blueprints
        #: can be attached multiple times so this dictionary does not tell
        #: you how often they got attached.
        #:
        #: .. versionadded:: 0.7
        self.blueprints = {}

        #: a place where extensions can store application specific state.  For
        #: example this is where an extension could store database engines and
        #: similar things.  For backwards compatibility extensions should
        #: register themselves like this::
        #:
        #:      if not hasattr(app, 'extensions'):
        #:          app.extensions = {}
        #:      app.extensions['extensionname'] = SomeObject()
        #:
        #: The key must match the name of the `flaskext` module.  For example
        #: in case of a "Flask-Foo" extension in `flaskext.foo`, the key would
        #: be ``'foo'``.
        #:
        #: .. versionadded:: 0.7
        self.extensions = {}

        #: The :class:`~werkzeug.routing.Map` for this instance.  You can use
        #: this to change the routing converters after the class was created
        #: but before any routes are connected.  Example::
        #:
        #:    from werkzeug.routing import BaseConverter
        #:
        #:    class ListConverter(BaseConverter):
        #:        def to_python(self, value):
        #:            return value.split(',')
        #:        def to_url(self, values):
        #:            return ','.join(BaseConverter.to_url(value)
        #:                            for value in values)
        #:
        #:    app = Flask(__name__)
        #:    app.url_map.converters['list'] = ListConverter
        self.url_map = Map()

        # tracks internally if the application already handled at least one
        # request.
        self._got_first_request = False
        self._before_request_lock = Lock()

        # register the static folder for the application.  Do that even
        # if the folder does not exist.  First of all it might be created
        # while the server is running (usually happens during development)
        # but also because google appengine stores static files somewhere
        # else when mapped with the .yml file.
        if self.has_static_folder:
            self.add_url_rule(self.static_url_path + '/<path:filename>',
                              endpoint='static',
                              view_func=self.send_static_file)

    def _get_error_handlers(self):
        # Deprecated accessor kept for backwards compatibility; use
        # :attr:`error_handler_spec` instead.
        from warnings import warn
        warn(DeprecationWarning('error_handlers is deprecated, use the '
                                'new error_handler_spec attribute instead.'),
             stacklevel=1)
        return self._error_handlers

    def _set_error_handlers(self, value):
        # Keep the deprecated dict and the `None` (application-wide) slot of
        # :attr:`error_handler_spec` pointing at the same object.
        self._error_handlers = value
        self.error_handler_spec[None] = value
    error_handlers = property(_get_error_handlers, _set_error_handlers)
    del _get_error_handlers, _set_error_handlers

    @locked_cached_property
    def name(self):
        """The name of the application.  This is usually the import name
        with the difference that it's guessed from the run file if the
        import name is main.  This name is used as a display name when
        Flask needs the name of the application.  It can be set and
        overridden to change the value.

        .. versionadded:: 0.8
        """
        if self.import_name == '__main__':
            fn = getattr(sys.modules['__main__'], '__file__', None)
            if fn is None:
                return '__main__'
            return os.path.splitext(os.path.basename(fn))[0]
        return self.import_name

    @property
    def propagate_exceptions(self):
        """Returns the value of the `PROPAGATE_EXCEPTIONS` configuration
        value in case it's set, otherwise a sensible default is returned.

        .. versionadded:: 0.7
        """
        rv = self.config['PROPAGATE_EXCEPTIONS']
        if rv is not None:
            return rv
        # default: propagate while testing or debugging
        return self.testing or self.debug

    @property
    def preserve_context_on_exception(self):
        """Returns the value of the `PRESERVE_CONTEXT_ON_EXCEPTION`
        configuration value in case it's set, otherwise a sensible default
        is returned.

        .. versionadded:: 0.7
        """
        rv = self.config['PRESERVE_CONTEXT_ON_EXCEPTION']
        if rv is not None:
            return rv
        return self.debug

    @property
    def logger(self):
        """A :class:`logging.Logger` object for this application.  The
        default configuration is to log to stderr if the application is
        in debug mode.  This logger can be used to (surprise) log messages.
        Here some examples::

            app.logger.debug('A value for debugging')
            app.logger.warning('A warning occurred (%d apples)', 42)
            app.logger.error('An error occurred')

        .. versionadded:: 0.3
        """
        # double-checked locking: cheap check first, re-check under the lock
        # before creating the logger so concurrent first accesses are safe.
        if self._logger and self._logger.name == self.logger_name:
            return self._logger
        with _logger_lock:
            if self._logger and self._logger.name == self.logger_name:
                return self._logger
            from flask.logging import create_logger
            self._logger = rv = create_logger(self)
            return rv

    @locked_cached_property
    def jinja_env(self):
        """The Jinja2 environment used to load templates."""
        return self.create_jinja_environment()

    @property
    def got_first_request(self):
        """This attribute is set to `True` if the application started
        handling the first request.

        .. versionadded:: 0.8
        """
        return self._got_first_request

    def make_config(self, instance_relative=False):
        """Used to create the config attribute by the Flask constructor.
        The `instance_relative` parameter is passed in from the constructor
        of Flask (there named `instance_relative_config`) and indicates if
        the config should be relative to the instance path or the root path
        of the application.

        .. versionadded:: 0.8
        """
        root_path = self.root_path
        if instance_relative:
            root_path = self.instance_path
        return Config(root_path, self.default_config)

    def auto_find_instance_path(self):
        """Tries to locate the instance path if it was not provided to the
        constructor of the application class.  It will basically calculate
        the path to a folder named ``instance`` next to your main file or
        the package.

        .. versionadded:: 0.8
        """
        prefix, package_path = find_package(self.import_name)
        if prefix is None:
            return os.path.join(package_path, 'instance')
        return os.path.join(prefix, 'var', self.name + '-instance')

    def open_instance_resource(self, resource, mode='rb'):
        """Opens a resource from the application's instance folder
        (:attr:`instance_path`).  Otherwise works like
        :meth:`open_resource`.  Instance resources can also be opened for
        writing.

        :param resource: the name of the resource.
                         To access resources within subfolders use forward
                         slashes as separator.
        :param mode: resource file opening mode, default is 'rb'.
        """
        return open(os.path.join(self.instance_path, resource), mode)

    def create_jinja_environment(self):
        """Creates the Jinja2 environment based on :attr:`jinja_options`
        and :meth:`select_jinja_autoescape`.  Since 0.7 this also adds
        the Jinja2 globals and filters after initialization.  Override
        this function to customize the behavior.

        .. versionadded:: 0.5
        """
        options = dict(self.jinja_options)
        if 'autoescape' not in options:
            options['autoescape'] = self.select_jinja_autoescape
        rv = Environment(self, **options)
        rv.globals.update(
            url_for=url_for,
            get_flashed_messages=get_flashed_messages,
            config=self.config,
            # request, session and g are normally added with the
            # context processor for efficiency reasons but for imported
            # templates we also want the proxies in there.
            request=request,
            session=session,
            g=g
        )
        rv.filters['tojson'] = json.tojson_filter
        return rv

    def create_global_jinja_loader(self):
        """Creates the loader for the Jinja2 environment.  Can be used to
        override just the loader and keeping the rest unchanged.  It's
        discouraged to override this function.  Instead one should override
        the :meth:`jinja_loader` function instead.

        The global loader dispatches between the loaders of the application
        and the individual blueprints.

        .. versionadded:: 0.7
        """
        return DispatchingJinjaLoader(self)

    def init_jinja_globals(self):
        """Deprecated.  Used to initialize the Jinja2 globals.

        .. versionadded:: 0.5
        .. versionchanged:: 0.7
           This method is deprecated with 0.7.  Override
           :meth:`create_jinja_environment` instead.
        """

    def select_jinja_autoescape(self, filename):
        """Returns `True` if autoescaping should be active for the given
        template name.

        .. versionadded:: 0.5
        """
        if filename is None:
            return False
        return filename.endswith(('.html', '.htm', '.xml', '.xhtml'))

    def update_template_context(self, context):
        """Update the template context with some commonly used variables.
        This injects request, session, config and g into the template
        context as well as everything template context processors want
        to inject.  Note that the as of Flask 0.6, the original values
        in the context will not be overridden if a context processor
        decides to return a value with the same key.

        :param context: the context as a dictionary that is updated in place
                        to add extra variables.
        """
        funcs = self.template_context_processors[None]
        reqctx = _request_ctx_stack.top
        if reqctx is not None:
            # also run the blueprint-local context processors, if any
            bp = reqctx.request.blueprint
            if bp is not None and bp in self.template_context_processors:
                funcs = chain(funcs, self.template_context_processors[bp])
        orig_ctx = context.copy()
        for func in funcs:
            context.update(func())
        # make sure the original values win.  This makes it possible to
        # easier add new variables in context processors without breaking
        # existing views.
        context.update(orig_ctx)

    def run(self, host=None, port=None, debug=None, **options):
        """Runs the application on a local development server.  If the
        :attr:`debug` flag is set the server will automatically reload
        for code changes and show a debugger in case an exception happened.

        If you want to run the application in debug mode, but disable the
        code execution on the interactive debugger, you can pass
        ``use_evalex=False`` as parameter.  This will keep the debugger's
        traceback screen active, but disable code execution.

        .. admonition:: Keep in Mind

           Flask will suppress any server error with a generic error page
           unless it is in debug mode.  As such to enable just the
           interactive debugger without the code reloading, you have to
           invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``.
           Setting ``use_debugger`` to `True` without being in debug mode
           won't catch any exceptions because there won't be any to
           catch.

        .. versionchanged:: 0.10
           The default port is now picked from the ``SERVER_NAME`` variable.

        :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to
                     have the server available externally as well. Defaults to
                     ``'127.0.0.1'``.
        :param port: the port of the webserver. Defaults to ``5000`` or the
                     port defined in the ``SERVER_NAME`` config variable if
                     present.
        :param debug: if given, enable or disable debug mode.
                      See :attr:`debug`.
        :param options: the options to be forwarded to the underlying
                        Werkzeug server.  See
                        :func:`werkzeug.serving.run_simple` for more
                        information.
        """
        from werkzeug.serving import run_simple
        if host is None:
            host = '127.0.0.1'
        if port is None:
            # fall back to the port in SERVER_NAME, if configured
            server_name = self.config['SERVER_NAME']
            if server_name and ':' in server_name:
                port = int(server_name.rsplit(':', 1)[1])
            else:
                port = 5000
        if debug is not None:
            self.debug = bool(debug)
        options.setdefault('use_reloader', self.debug)
        options.setdefault('use_debugger', self.debug)
        try:
            run_simple(host, port, self, **options)
        finally:
            # reset the first request information if the development server
            # resetted normally.  This makes it possible to restart the server
            # without reloader and that stuff from an interactive shell.
            self._got_first_request = False

    def test_client(self, use_cookies=True):
        """Creates a test client for this application.  For information
        about unit testing head over to :ref:`testing`.

        Note that if you are testing for assertions or exceptions in your
        application code, you must set ``app.testing = True`` in order for the
        exceptions to propagate to the test client.  Otherwise, the exception
        will be handled by the application (not visible to the test client)
        and the only indication of an AssertionError or other exception will
        be a 500 status code response to the test client.  See the
        :attr:`testing` attribute.  For example::

            app.testing = True
            client = app.test_client()

        The test client can be used in a `with` block to defer the closing
        down of the context until the end of the `with` block.  This is
        useful if you want to access the context locals for testing::

            with app.test_client() as c:
                rv = c.get('/?vodka=42')
                assert request.args['vodka'] == '42'

        See :class:`~flask.testing.FlaskClient` for more information.

        .. versionchanged:: 0.4
           added support for `with` block usage for the client.

        .. versionadded:: 0.7
           The `use_cookies` parameter was added as well as the ability
           to override the client to be used by setting the
           :attr:`test_client_class` attribute.
        """
        cls = self.test_client_class
        if cls is None:
            from flask.testing import FlaskClient as cls
        return cls(self, self.response_class, use_cookies=use_cookies)

    def open_session(self, request):
        """Creates or opens a new session.  Default implementation stores all
        session data in a signed cookie.  This requires that the
        :attr:`secret_key` is set.  Instead of overriding this method
        we recommend replacing the :class:`session_interface`.

        :param request: an instance of :attr:`request_class`.
        """
        return self.session_interface.open_session(self, request)

    def save_session(self, session, response):
        """Saves the session if it needs updates.  For the default
        implementation, check :meth:`open_session`.  Instead of overriding
        this method we recommend replacing the :class:`session_interface`.

        :param session: the session to be saved (a
                        :class:`~werkzeug.contrib.securecookie.SecureCookie`
                        object)
        :param response: an instance of :attr:`response_class`
        """
        return self.session_interface.save_session(self, session, response)

    def make_null_session(self):
        """Creates a new instance of a missing session.  Instead of overriding
        this method we recommend replacing the :class:`session_interface`.

        .. versionadded:: 0.7
        """
        return self.session_interface.make_null_session(self)

    def register_module(self, module, **options):
        """Registers a module with this application.
        The keyword argument of this function are the same as the ones for
        the constructor of the :class:`Module` class and will override the
        values of the module if provided.

        .. versionchanged:: 0.7
           The module system was deprecated in favor for the blueprint
           system.
        """
        assert blueprint_is_module(module), 'register_module requires ' \
            'actual module objects. Please upgrade to blueprints though.'
        if not self.enable_modules:
            raise RuntimeError('Module support was disabled but code '
                'attempted to register a module named %r' % module)
        else:
            from warnings import warn
            warn(DeprecationWarning('Modules are deprecated. Upgrade to '
                'using blueprints. Have a look into the documentation for '
                'more information. If this module was registered by a '
                'Flask-Extension upgrade the extension or contact the author '
                'of that extension instead. (Registered %r)' % module),
                stacklevel=2)
        # a module is registered exactly like a blueprint
        self.register_blueprint(module, **options)

    @setupmethod
    def register_blueprint(self, blueprint, **options):
        """Registers a blueprint on the application.

        .. versionadded:: 0.7
        """
        first_registration = False
        if blueprint.name in self.blueprints:
            # registering the same blueprint object twice is fine; two
            # different blueprints sharing a name is a programming error.
            assert self.blueprints[blueprint.name] is blueprint, \
                'A blueprint\'s name collision occurred between %r and ' \
                '%r. Both share the same name "%s". Blueprints that ' \
                'are created on the fly need unique names.' % \
                (blueprint, self.blueprints[blueprint.name], blueprint.name)
        else:
            self.blueprints[blueprint.name] = blueprint
            first_registration = True
        blueprint.register(self, options, first_registration)

    @setupmethod
    def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
        """Connects a URL rule.  Works exactly like the :meth:`route`
        decorator.  If a view_func is provided it will be registered with the
        endpoint.

        Basically this example::

            @app.route('/')
            def index():
                pass

        Is equivalent to the following::

            def index():
                pass
            app.add_url_rule('/', 'index', index)

        If the view_func is not provided you will need to connect the
        endpoint to a view function like so::

            app.view_functions['index'] = index

        Internally :meth:`route` invokes :meth:`add_url_rule` so if you want
        to customize the behavior via subclassing you only need to change
        this method.

        For more information refer to :ref:`url-route-registrations`.

        .. versionchanged:: 0.2
           `view_func` parameter added.

        .. versionchanged:: 0.6
           `OPTIONS` is added automatically as method.

        :param rule: the URL rule as string
        :param endpoint: the endpoint for the registered URL rule.  Flask
                         itself assumes the name of the view function as
                         endpoint
        :param view_func: the function to call when serving a request to the
                          provided endpoint
        :param options: the options to be forwarded to the underlying
                        :class:`~werkzeug.routing.Rule` object.  A change
                        to Werkzeug is handling of method options.  methods
                        is a list of methods this rule should be limited
                        to (`GET`, `POST` etc.).  By default a rule
                        just listens for `GET` (and implicitly `HEAD`).
                        Starting with Flask 0.6, `OPTIONS` is implicitly
                        added and handled by the standard request handling.
        """
        if endpoint is None:
            endpoint = _endpoint_from_view_func(view_func)
        options['endpoint'] = endpoint
        methods = options.pop('methods', None)

        # if the methods are not given and the view_func object knows its
        # methods we can use that instead.  If neither exists, we go with
        # a tuple of only `GET` as default.
        if methods is None:
            methods = getattr(view_func, 'methods', None) or ('GET',)
        methods = set(methods)

        # Methods that should always be added
        required_methods = set(getattr(view_func, 'required_methods', ()))

        # starting with Flask 0.8 the view_func object can disable and
        # force-enable the automatic options handling.
        provide_automatic_options = getattr(view_func,
            'provide_automatic_options', None)

        if provide_automatic_options is None:
            if 'OPTIONS' not in methods:
                provide_automatic_options = True
                required_methods.add('OPTIONS')
            else:
                provide_automatic_options = False

        # Add the required methods now.
        methods |= required_methods

        # due to a werkzeug bug we need to make sure that the defaults are
        # None if they are an empty dictionary.  This should not be necessary
        # with Werkzeug 0.7
        options['defaults'] = options.get('defaults') or None

        rule = self.url_rule_class(rule, methods=methods, **options)
        rule.provide_automatic_options = provide_automatic_options

        self.url_map.add(rule)
        if view_func is not None:
            old_func = self.view_functions.get(endpoint)
            if old_func is not None and old_func != view_func:
                raise AssertionError('View function mapping is overwriting an '
                                     'existing endpoint function: %s' % endpoint)
            self.view_functions[endpoint] = view_func

    def route(self, rule, **options):
        """A decorator that is used to register a view function for a
        given URL rule.  This does the same thing as :meth:`add_url_rule`
        but is intended for decorator usage::

            @app.route('/')
            def index():
                return 'Hello World'

        For more information refer to :ref:`url-route-registrations`.

        :param rule: the URL rule as string
        :param endpoint: the endpoint for the registered URL rule.  Flask
                         itself assumes the name of the view function as
                         endpoint
        :param options: the options to be forwarded to the underlying
                        :class:`~werkzeug.routing.Rule` object.  A change
                        to Werkzeug is handling of method options.  methods
                        is a list of methods this rule should be limited
                        to (`GET`, `POST` etc.).  By default a rule
                        just listens for `GET` (and implicitly `HEAD`).
                        Starting with Flask 0.6, `OPTIONS` is implicitly
                        added and handled by the standard request handling.
""" def decorator(f): endpoint = options.pop('endpoint', None) self.add_url_rule(rule, endpoint, f, **options) return f return decorator @setupmethod def endpoint(self, endpoint): """A decorator to register a function as an endpoint. Example:: @app.endpoint('example.endpoint') def example(): return "example" :param endpoint: the name of the endpoint """ def decorator(f): self.view_functions[endpoint] = f return f return decorator @setupmethod def errorhandler(self, code_or_exception): """A decorator that is used to register a function give a given error code. Example:: @app.errorhandler(404) def page_not_found(error): return 'This page does not exist', 404 You can also register handlers for arbitrary exceptions:: @app.errorhandler(DatabaseError) def special_exception_handler(error): return 'Database connection failed', 500 You can also register a function as error handler without using the :meth:`errorhandler` decorator. The following example is equivalent to the one above:: def page_not_found(error): return 'This page does not exist', 404 app.error_handler_spec[None][404] = page_not_found Setting error handlers via assignments to :attr:`error_handler_spec` however is discouraged as it requires fiddling with nested dictionaries and the special case for arbitrary exception types. The first `None` refers to the active blueprint. If the error handler should be application wide `None` shall be used. .. versionadded:: 0.7 One can now additionally also register custom exception types that do not necessarily have to be a subclass of the :class:`~werkzeug.exceptions.HTTPException` class. :param code: the code as integer for the handler """ def decorator(f): self._register_error_handler(None, code_or_exception, f) return f return decorator def register_error_handler(self, code_or_exception, f): """Alternative error attach function to the :meth:`errorhandler` decorator that is more straightforward to use for non decorator usage. .. 
versionadded:: 0.7 """ self._register_error_handler(None, code_or_exception, f) @setupmethod def _register_error_handler(self, key, code_or_exception, f): if isinstance(code_or_exception, HTTPException): code_or_exception = code_or_exception.code if isinstance(code_or_exception, integer_types): assert code_or_exception != 500 or key is None, \ 'It is currently not possible to register a 500 internal ' \ 'server error on a per-blueprint level.' self.error_handler_spec.setdefault(key, {})[code_or_exception] = f else: self.error_handler_spec.setdefault(key, {}).setdefault(None, []) \ .append((code_or_exception, f)) @setupmethod def template_filter(self, name=None): """A decorator that is used to register custom template filter. You can specify a name for the filter, otherwise the function name will be used. Example:: @app.template_filter() def reverse(s): return s[::-1] :param name: the optional name of the filter, otherwise the function name will be used. """ def decorator(f): self.add_template_filter(f, name=name) return f return decorator @setupmethod def add_template_filter(self, f, name=None): """Register a custom template filter. Works exactly like the :meth:`template_filter` decorator. :param name: the optional name of the filter, otherwise the function name will be used. """ self.jinja_env.filters[name or f.__name__] = f @setupmethod def template_test(self, name=None): """A decorator that is used to register custom template test. You can specify a name for the test, otherwise the function name will be used. Example:: @app.template_test() def is_prime(n): if n == 2: return True for i in range(2, int(math.ceil(math.sqrt(n))) + 1): if n % i == 0: return False return True .. versionadded:: 0.10 :param name: the optional name of the test, otherwise the function name will be used. """ def decorator(f): self.add_template_test(f, name=name) return f return decorator @setupmethod def add_template_test(self, f, name=None): """Register a custom template test. 
        Works exactly like the :meth:`template_test` decorator.

        .. versionadded:: 0.10

        :param name: the optional name of the test, otherwise the
                     function name will be used.
        """
        self.jinja_env.tests[name or f.__name__] = f

    @setupmethod
    def template_global(self, name=None):
        """A decorator that is used to register a custom template global
        function.  You can specify a name for the global function, otherwise
        the function name will be used.  Example::

            @app.template_global()
            def double(n):
                return 2 * n

        .. versionadded:: 0.10

        :param name: the optional name of the global function, otherwise the
                     function name will be used.
        """
        def decorator(f):
            self.add_template_global(f, name=name)
            return f
        return decorator

    @setupmethod
    def add_template_global(self, f, name=None):
        """Register a custom template global function.  Works exactly like
        the :meth:`template_global` decorator.

        .. versionadded:: 0.10

        :param name: the optional name of the global function, otherwise the
                     function name will be used.
        """
        self.jinja_env.globals[name or f.__name__] = f

    @setupmethod
    def before_request(self, f):
        """Registers a function to run before each request."""
        self.before_request_funcs.setdefault(None, []).append(f)
        return f

    @setupmethod
    def before_first_request(self, f):
        """Registers a function to be run before the first request to this
        instance of the application.

        .. versionadded:: 0.8
        """
        self.before_first_request_funcs.append(f)

    @setupmethod
    def after_request(self, f):
        """Register a function to be run after each request.  Your function
        must take one parameter, a :attr:`response_class` object and return
        a new response object or the same (see :meth:`process_response`).

        As of Flask 0.7 this function might not be executed at the end of the
        request in case an unhandled exception occurred.
        """
        self.after_request_funcs.setdefault(None, []).append(f)
        return f

    @setupmethod
    def teardown_request(self, f):
        """Register a function to be run at the end of each request,
        regardless of whether there was an exception or not.  These functions
        are executed when the request context is popped, even if not an
        actual request was performed.

        Example::

            ctx = app.test_request_context()
            ctx.push()
            ...
            ctx.pop()

        When ``ctx.pop()`` is executed in the above example, the teardown
        functions are called just before the request context moves from the
        stack of active contexts.  This becomes relevant if you are using
        such constructs in tests.

        Generally teardown functions must take every necessary step to avoid
        that they will fail.  If they do execute code that might fail they
        will have to surround the execution of these code by try/except
        statements and log occurring errors.

        When a teardown function was called because of a exception it will
        be passed an error object.

        .. admonition:: Debug Note

           In debug mode Flask will not tear down a request on an exception
           immediately.  Instead if will keep it alive so that the
           interactive debugger can still access it.  This behavior can be
           controlled by the ``PRESERVE_CONTEXT_ON_EXCEPTION`` configuration
           variable.
        """
        self.teardown_request_funcs.setdefault(None, []).append(f)
        return f

    @setupmethod
    def teardown_appcontext(self, f):
        """Registers a function to be called when the application context
        ends.  These functions are typically also called when the request
        context is popped.

        Example::

            ctx = app.app_context()
            ctx.push()
            ...
            ctx.pop()

        When ``ctx.pop()`` is executed in the above example, the teardown
        functions are called just before the app context moves from the
        stack of active contexts.  This becomes relevant if you are using
        such constructs in tests.

        Since a request context typically also manages an application
        context it would also be called when you pop a request context.

        When a teardown function was called because of an exception it will
        be passed an error object.

        ..
        .. versionadded:: 0.9
        """
        self.teardown_appcontext_funcs.append(f)
        return f

    @setupmethod
    def context_processor(self, f):
        """Registers a template context processor function."""
        self.template_context_processors[None].append(f)
        return f

    @setupmethod
    def url_value_preprocessor(self, f):
        """Registers a function as URL value preprocessor for all view
        functions of the application.  It's called before the view functions
        are called and can modify the url values provided.
        """
        self.url_value_preprocessors.setdefault(None, []).append(f)
        return f

    @setupmethod
    def url_defaults(self, f):
        """Callback function for URL defaults for all view functions of the
        application.  It's called with the endpoint and values and should
        update the values passed in place.
        """
        self.url_default_functions.setdefault(None, []).append(f)
        return f

    def handle_http_exception(self, e):
        """Handles an HTTP exception.  By default this will invoke the
        registered error handlers and fall back to returning the
        exception as response.

        .. versionadded:: 0.3
        """
        handlers = self.error_handler_spec.get(request.blueprint)
        # Proxy exceptions don't have error codes.  We want to always return
        # those unchanged as errors
        if e.code is None:
            return e
        if handlers and e.code in handlers:
            handler = handlers[e.code]
        else:
            # fall back from the blueprint-local handler to the
            # application-wide one
            handler = self.error_handler_spec[None].get(e.code)
        if handler is None:
            return e
        return handler(e)

    def trap_http_exception(self, e):
        """Checks if an HTTP exception should be trapped or not.  By default
        this will return `False` for all exceptions except for a bad request
        key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to `True`.  It
        also returns `True` if ``TRAP_HTTP_EXCEPTIONS`` is set to `True`.

        This is called for all HTTP exceptions raised by a view function.
        If it returns `True` for any exception the error handler for this
        exception is not called and it shows up as regular exception in the
        traceback.  This is helpful for debugging implicitly raised HTTP
        exceptions.

        .. versionadded:: 0.8
        """
        if self.config['TRAP_HTTP_EXCEPTIONS']:
            return True
        if self.config['TRAP_BAD_REQUEST_ERRORS']:
            return isinstance(e, BadRequest)
        return False

    def handle_user_exception(self, e):
        """This method is called whenever an exception occurs that should be
        handled.  A special case are
        :class:`~werkzeug.exception.HTTPException`\s which are forwarded by
        this function to the :meth:`handle_http_exception` method.  This
        function will either return a response value or reraise the
        exception with the same traceback.

        .. versionadded:: 0.7
        """
        exc_type, exc_value, tb = sys.exc_info()
        assert exc_value is e

        # ensure not to trash sys.exc_info() at that point in case someone
        # wants the traceback preserved in handle_http_exception.  Of course
        # we cannot prevent users from trashing it themselves in a custom
        # trap_http_exception method so that's their fault then.

        if isinstance(e, HTTPException) and not self.trap_http_exception(e):
            return self.handle_http_exception(e)

        blueprint_handlers = ()
        handlers = self.error_handler_spec.get(request.blueprint)
        if handlers is not None:
            blueprint_handlers = handlers.get(None, ())
        app_handlers = self.error_handler_spec[None].get(None, ())
        # blueprint-local handlers are consulted before application-wide ones
        for typecheck, handler in chain(blueprint_handlers, app_handlers):
            if isinstance(e, typecheck):
                return handler(e)

        reraise(exc_type, exc_value, tb)

    def handle_exception(self, e):
        """Default exception handling that kicks in when an exception
        occurs that is not caught.  In debug mode the exception will
        be re-raised immediately, otherwise it is logged and the handler
        for a 500 internal server error is used.  If no such handler
        exists, a default 500 internal server error message is displayed.

        .. versionadded:: 0.3
        """
        exc_type, exc_value, tb = sys.exc_info()

        got_request_exception.send(self, exception=e)
        handler = self.error_handler_spec[None].get(500)

        if self.propagate_exceptions:
            # if we want to repropagate the exception, we can attempt to
            # raise it with the whole traceback in case we can do that
            # (the function was actually called from the except part)
            # otherwise, we just raise the error again
            if exc_value is e:
                reraise(exc_type, exc_value, tb)
            else:
                raise e

        self.log_exception((exc_type, exc_value, tb))
        if handler is None:
            return InternalServerError()
        return handler(e)

    def log_exception(self, exc_info):
        """Logs an exception.  This is called by :meth:`handle_exception`
        if debugging is disabled and right before the handler is called.
        The default implementation logs the exception as error on the
        :attr:`logger`.

        .. versionadded:: 0.8
        """
        self.logger.error('Exception on %s [%s]' % (
            request.path,
            request.method
        ), exc_info=exc_info)

    def raise_routing_exception(self, request):
        """Exceptions that are recording during routing are reraised with
        this method.  During debug we are not reraising redirect requests
        for non ``GET``, ``HEAD``, or ``OPTIONS`` requests and we're raising
        a different error instead to help debug situations.

        :internal:
        """
        if not self.debug \
           or not isinstance(request.routing_exception, RequestRedirect) \
           or request.method in ('GET', 'HEAD', 'OPTIONS'):
            raise request.routing_exception

        from .debughelpers import FormDataRoutingRedirect
        raise FormDataRoutingRedirect(request)

    def dispatch_request(self):
        """Does the request dispatching.  Matches the URL and returns the
        return value of the view or error handler.  This does not have to
        be a response object.  In order to convert the return value to a
        proper response object, call :func:`make_response`.

        .. versionchanged:: 0.7
           This no longer does the exception handling, this code was
           moved to the new :meth:`full_dispatch_request`.
""" req = _request_ctx_stack.top.request if req.routing_exception is not None: self.raise_routing_exception(req) rule = req.url_rule # if we provide automatic options for this URL and the # request came with the OPTIONS method, reply automatically if getattr(rule, 'provide_automatic_options', False) \ and req.method == 'OPTIONS': return self.make_default_options_response() # otherwise dispatch to the handler for that endpoint return self.view_functions[rule.endpoint](**req.view_args) def full_dispatch_request(self): """Dispatches the request and on top of that performs request pre and postprocessing as well as HTTP exception catching and error handling. .. versionadded:: 0.7 """ self.try_trigger_before_first_request_functions() try: request_started.send(self) rv = self.preprocess_request() if rv is None: rv = self.dispatch_request() except Exception as e: rv = self.handle_user_exception(e) response = self.make_response(rv) response = self.process_response(response) request_finished.send(self, response=response) return response def try_trigger_before_first_request_functions(self): """Called before each request and will ensure that it triggers the :attr:`before_first_request_funcs` and only exactly once per application instance (which means process usually). :internal: """ if self._got_first_request: return with self._before_request_lock: if self._got_first_request: return self._got_first_request = True for func in self.before_first_request_funcs: func() def make_default_options_response(self): """This method is called to create the default `OPTIONS` response. This can be changed through subclassing to change the default behavior of `OPTIONS` responses. .. 
versionadded:: 0.7 """ adapter = _request_ctx_stack.top.url_adapter if hasattr(adapter, 'allowed_methods'): methods = adapter.allowed_methods() else: # fallback for Werkzeug < 0.7 methods = [] try: adapter.match(method='--') except MethodNotAllowed as e: methods = e.valid_methods except HTTPException as e: pass rv = self.response_class() rv.allow.update(methods) return rv def should_ignore_error(self, error): """This is called to figure out if an error should be ignored or not as far as the teardown system is concerned. If this function returns `True` then the teardown handlers will not be passed the error. .. versionadded:: 0.10 """ return False def make_response(self, rv): """Converts the return value from a view function to a real response object that is an instance of :attr:`response_class`. The following types are allowed for `rv`: .. tabularcolumns:: |p{3.5cm}|p{9.5cm}| ======================= =========================================== :attr:`response_class` the object is returned unchanged :class:`str` a response object is created with the string as body :class:`unicode` a response object is created with the string encoded to utf-8 as body a WSGI function the function is called as WSGI application and buffered as response object :class:`tuple` A tuple in the form ``(response, status, headers)`` where `response` is any of the types defined here, `status` is a string or an integer and `headers` is a list of a dictionary with header values. ======================= =========================================== :param rv: the return value from the view function .. versionchanged:: 0.9 Previously a tuple was interpreted as the arguments for the response object. 
""" status = headers = None if isinstance(rv, tuple): rv, status, headers = rv + (None,) * (3 - len(rv)) if rv is None: raise ValueError('View function did not return a response') if not isinstance(rv, self.response_class): # When we create a response object directly, we let the constructor # set the headers and status. We do this because there can be # some extra logic involved when creating these objects with # specific values (like default content type selection). if isinstance(rv, (text_type, bytes, bytearray)): rv = self.response_class(rv, headers=headers, status=status) headers = status = None else: rv = self.response_class.force_type(rv, request.environ) if status is not None: if isinstance(status, string_types): rv.status = status else: rv.status_code = status if headers: rv.headers.extend(headers) return rv def create_url_adapter(self, request): """Creates a URL adapter for the given request. The URL adapter is created at a point where the request context is not yet set up so the request is passed explicitly. .. versionadded:: 0.6 .. versionchanged:: 0.9 This can now also be called without a request object when the URL adapter is created for the application context. """ if request is not None: return self.url_map.bind_to_environ(request.environ, server_name=self.config['SERVER_NAME']) # We need at the very least the server name to be set for this # to work. if self.config['SERVER_NAME'] is not None: return self.url_map.bind( self.config['SERVER_NAME'], script_name=self.config['APPLICATION_ROOT'] or '/', url_scheme=self.config['PREFERRED_URL_SCHEME']) def inject_url_defaults(self, endpoint, values): """Injects the URL defaults for the given endpoint directly into the values dictionary passed. This is used internally and automatically called on URL building. .. versionadded:: 0.7 """ funcs = self.url_default_functions.get(None, ()) if '.' 
in endpoint: bp = endpoint.rsplit('.', 1)[0] funcs = chain(funcs, self.url_default_functions.get(bp, ())) for func in funcs: func(endpoint, values) def handle_url_build_error(self, error, endpoint, values): """Handle :class:`~werkzeug.routing.BuildError` on :meth:`url_for`. """ exc_type, exc_value, tb = sys.exc_info() for handler in self.url_build_error_handlers: try: rv = handler(error, endpoint, values) if rv is not None: return rv except BuildError as error: pass # At this point we want to reraise the exception. If the error is # still the same one we can reraise it with the original traceback, # otherwise we raise it from here. if error is exc_value: reraise(exc_type, exc_value, tb) raise error def preprocess_request(self): """Called before the actual request dispatching and will call every as :meth:`before_request` decorated function. If any of these function returns a value it's handled as if it was the return value from the view and further request handling is stopped. This also triggers the :meth:`url_value_processor` functions before the actual :meth:`before_request` functions are called. """ bp = _request_ctx_stack.top.request.blueprint funcs = self.url_value_preprocessors.get(None, ()) if bp is not None and bp in self.url_value_preprocessors: funcs = chain(funcs, self.url_value_preprocessors[bp]) for func in funcs: func(request.endpoint, request.view_args) funcs = self.before_request_funcs.get(None, ()) if bp is not None and bp in self.before_request_funcs: funcs = chain(funcs, self.before_request_funcs[bp]) for func in funcs: rv = func() if rv is not None: return rv def process_response(self, response): """Can be overridden in order to modify the response object before it's sent to the WSGI server. By default this will call all the :meth:`after_request` decorated functions. .. versionchanged:: 0.5 As of Flask 0.5 the functions registered for after request execution are called in reverse order of registration. 
:param response: a :attr:`response_class` object. :return: a new response object or the same, has to be an instance of :attr:`response_class`. """ ctx = _request_ctx_stack.top bp = ctx.request.blueprint funcs = ctx._after_request_functions if bp is not None and bp in self.after_request_funcs: funcs = chain(funcs, reversed(self.after_request_funcs[bp])) if None in self.after_request_funcs: funcs = chain(funcs, reversed(self.after_request_funcs[None])) for handler in funcs: response = handler(response) if not self.session_interface.is_null_session(ctx.session): self.save_session(ctx.session, response) return response def do_teardown_request(self, exc=None): """Called after the actual request dispatching and will call every as :meth:`teardown_request` decorated function. This is not actually called by the :class:`Flask` object itself but is always triggered when the request context is popped. That way we have a tighter control over certain resources under testing environments. .. versionchanged:: 0.9 Added the `exc` argument. Previously this was always using the current exception information. """ if exc is None: exc = sys.exc_info()[1] funcs = reversed(self.teardown_request_funcs.get(None, ())) bp = _request_ctx_stack.top.request.blueprint if bp is not None and bp in self.teardown_request_funcs: funcs = chain(funcs, reversed(self.teardown_request_funcs[bp])) for func in funcs: rv = func(exc) request_tearing_down.send(self, exc=exc) def do_teardown_appcontext(self, exc=None): """Called when an application context is popped. This works pretty much the same as :meth:`do_teardown_request` but for the application context. .. versionadded:: 0.9 """ if exc is None: exc = sys.exc_info()[1] for func in reversed(self.teardown_appcontext_funcs): func(exc) appcontext_tearing_down.send(self, exc=exc) def app_context(self): """Binds the application only. For as long as the application is bound to the current context the :data:`flask.current_app` points to that application. 
An application context is automatically created when a request context is pushed if necessary. Example usage:: with app.app_context(): ... .. versionadded:: 0.9 """ return AppContext(self) def request_context(self, environ): """Creates a :class:`~flask.ctx.RequestContext` from the given environment and binds it to the current context. This must be used in combination with the `with` statement because the request is only bound to the current context for the duration of the `with` block. Example usage:: with app.request_context(environ): do_something_with(request) The object returned can also be used without the `with` statement which is useful for working in the shell. The example above is doing exactly the same as this code:: ctx = app.request_context(environ) ctx.push() try: do_something_with(request) finally: ctx.pop() .. versionchanged:: 0.3 Added support for non-with statement usage and `with` statement is now passed the ctx object. :param environ: a WSGI environment """ return RequestContext(self, environ) def test_request_context(self, *args, **kwargs): """Creates a WSGI environment from the given values (see :func:`werkzeug.test.EnvironBuilder` for more information, this function accepts the same arguments). """ from flask.testing import make_test_environ_builder builder = make_test_environ_builder(self, *args, **kwargs) try: return self.request_context(builder.get_environ()) finally: builder.close() def wsgi_app(self, environ, start_response): """The actual WSGI application. This is not implemented in `__call__` so that middlewares can be applied without losing a reference to the class. So instead of doing this:: app = MyMiddleware(app) It's a better idea to do this instead:: app.wsgi_app = MyMiddleware(app.wsgi_app) Then you still have the original application object around and can continue to call methods on it. .. 
versionchanged:: 0.7 The behavior of the before and after request callbacks was changed under error conditions and a new callback was added that will always execute at the end of the request, independent on if an error occurred or not. See :ref:`callbacks-and-errors`. :param environ: a WSGI environment :param start_response: a callable accepting a status code, a list of headers and an optional exception context to start the response """ ctx = self.request_context(environ) ctx.push() error = None try: try: response = self.full_dispatch_request() except Exception as e: error = e response = self.make_response(self.handle_exception(e)) return response(environ, start_response) finally: if self.should_ignore_error(error): error = None ctx.auto_pop(error) @property def modules(self): from warnings import warn warn(DeprecationWarning('Flask.modules is deprecated, use ' 'Flask.blueprints instead'), stacklevel=2) return self.blueprints def __call__(self, environ, start_response): """Shortcut for :attr:`wsgi_app`.""" return self.wsgi_app(environ, start_response) def __repr__(self): return '<%s %r>' % ( self.__class__.__name__, self.name, )
hoangt/tpzsimul.gem5
refs/heads/master
src/arch/x86/isa/insts/simd128/floating_point/logical/andp.py
91
# Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder.  You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black

# Microcode definitions for the SSE/SSE2 packed logical instructions
# ANDPS/ANDPD and ANDNPS/ANDNPD.  Each 128-bit XMM operation is split
# into two 64-bit micro-ops acting on the low (xmml) and high (xmmh)
# register halves.  The _XMM_M forms compute the effective address with
# `lea` and load the two 64-bit halves of the memory operand (offsets 0
# and 8, dataSize=8) into the ufp1/ufp2 temporaries with `ldfp`; the
# _XMM_P forms do the same for a RIP-relative operand (rdip + riprel).
# NOTE(review): `mand` is a bitwise AND micro-op; `mandn` presumably
# implements the ANDN semantics (~src1 & src2) to match the x86
# ANDNPS/ANDNPD instructions -- confirm against the mand/mandn microop
# definitions in the ISA description.
microcode = '''
def macroop ANDPS_XMM_XMM {
    mand xmml, xmml, xmmlm
    mand xmmh, xmmh, xmmhm
};

def macroop ANDPS_XMM_M {
    lea t1, seg, sib, disp, dataSize=asz
    ldfp ufp1, seg, [1, t0, t1], dataSize=8
    ldfp ufp2, seg, [1, t0, t1], 8, dataSize=8
    mand xmml, xmml, ufp1
    mand xmmh, xmmh, ufp2
};

def macroop ANDPS_XMM_P {
    rdip t7
    lea t1, seg, riprel, disp, dataSize=asz
    ldfp ufp1, seg, [1, t0, t1], dataSize=8
    ldfp ufp2, seg, [1, t0, t1], 8, dataSize=8
    mand xmml, xmml, ufp1
    mand xmmh, xmmh, ufp2
};

def macroop ANDPD_XMM_XMM {
    mand xmml, xmml, xmmlm
    mand xmmh, xmmh, xmmhm
};

def macroop ANDPD_XMM_M {
    lea t1, seg, sib, disp, dataSize=asz
    ldfp ufp1, seg, [1, t0, t1], dataSize=8
    ldfp ufp2, seg, [1, t0, t1], 8, dataSize=8
    mand xmml, xmml, ufp1
    mand xmmh, xmmh, ufp2
};

def macroop ANDPD_XMM_P {
    rdip t7
    lea t1, seg, riprel, disp, dataSize=asz
    ldfp ufp1, seg, [1, t0, t1], dataSize=8
    ldfp ufp2, seg, [1, t0, t1], 8, dataSize=8
    mand xmml, xmml, ufp1
    mand xmmh, xmmh, ufp2
};

def macroop ANDNPS_XMM_XMM {
    mandn xmml, xmml, xmmlm
    mandn xmmh, xmmh, xmmhm
};

def macroop ANDNPS_XMM_M {
    lea t1, seg, sib, disp, dataSize=asz
    ldfp ufp1, seg, [1, t0, t1], dataSize=8
    ldfp ufp2, seg, [1, t0, t1], 8, dataSize=8
    mandn xmml, xmml, ufp1
    mandn xmmh, xmmh, ufp2
};

def macroop ANDNPS_XMM_P {
    rdip t7
    lea t1, seg, riprel, disp, dataSize=asz
    ldfp ufp1, seg, [1, t0, t1], dataSize=8
    ldfp ufp2, seg, [1, t0, t1], 8, dataSize=8
    mandn xmml, xmml, ufp1
    mandn xmmh, xmmh, ufp2
};

def macroop ANDNPD_XMM_XMM {
    mandn xmml, xmml, xmmlm
    mandn xmmh, xmmh, xmmhm
};

def macroop ANDNPD_XMM_M {
    lea t1, seg, sib, disp, dataSize=asz
    ldfp ufp1, seg, [1, t0, t1], dataSize=8
    ldfp ufp2, seg, [1, t0, t1], 8, dataSize=8
    mandn xmml, xmml, ufp1
    mandn xmmh, xmmh, ufp2
};

def macroop ANDNPD_XMM_P {
    rdip t7
    lea t1, seg, riprel, disp, dataSize=asz
    ldfp ufp1, seg, [1, t0, t1], dataSize=8
    ldfp ufp2, seg, [1, t0, t1], 8, dataSize=8
    mandn xmml, xmml, ufp1
    mandn xmmh, xmmh, ufp2
};
'''
opendaylight/netvirt
refs/heads/master
resources/tools/odltools/odltools/mdsal/tests/test_cmd.py
1
import logging
import os
import shutil
import unittest

from odltools import logg
from odltools.mdsal import cmd
from odltools.mdsal.tests import Args


@unittest.skip("skipping")
class TestCmd(unittest.TestCase):
    """Integration test for mdsal.cmd model dumping.

    Requires a live ODL instance at localhost:8181, hence skipped by
    default.
    """

    def setUp(self):
        # Route library logging to INFO for both console and file output.
        logg.Logger(logging.INFO, logging.INFO)
        self.args = Args(path="/tmp/testmodels", pretty_print=True)

    def test_get_all_dumps(self):
        # Ensure odl is running at localhost:8181

        # Remove an existing directory (or a stale symlink) so the dump
        # starts from a clean slate.
        if os.path.exists(self.args.path):
            if os.path.islink(self.args.path):
                os.unlink(self.args.path)
            else:
                shutil.rmtree(self.args.path)

        cmd.get_all_dumps(self.args)

        # Assert each model has been saved to a file.
        # FIX: dict.itervalues() was Python-2-only and raises
        # AttributeError on Python 3; values() works on both.
        for res in cmd.DSMAP.values():
            store = res[cmd.DSM_DSTYPE]
            model_path = res[cmd.DSM_PATH]
            path_split = cmd.split_model_path(model_path)
            filename = cmd.make_filename(self.args.path, store,
                                         path_split.name,
                                         path_split.container)
            self.assertTrue(os.path.isfile(filename))


if __name__ == '__main__':
    unittest.main()
philsch/ansible
refs/heads/devel
lib/ansible/modules/cloud/ovirt/ovirt_clusters_facts.py
45
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: ovirt_clusters_facts
short_description: Retrieve facts about one or more oVirt/RHV clusters
author: "Ondra Machacek (@machacekondra)"
version_added: "2.3"
description:
    - "Retrieve facts about one or more oVirt/RHV clusters."
notes:
    - "This module creates a new top-level C(ovirt_clusters) fact, which
       contains a list of clusters."
options:
    pattern:
      description:
        - "Search term which is accepted by oVirt/RHV search backend."
        - "For example to search cluster X from datacenter Y use following pattern:
           name=X and datacenter=Y"
extends_documentation_fragment: ovirt_facts
'''

EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:

# Gather facts about all clusters which names start with C<production>:
- ovirt_clusters_facts:
    pattern: name=production*
- debug:
    var: ovirt_clusters
'''

RETURN = '''
ovirt_clusters:
    description: "List of dictionaries describing the clusters. Cluster attribues are
                  mapped to dictionary keys, all clusters attributes can be found at following
                  url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/cluster."
    returned: On success.
    type: list
'''

import traceback

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
    check_sdk,
    create_connection,
    get_dict_of_struct,
    ovirt_facts_full_argument_spec,
)


def main():
    # Module entry point: query the engine for clusters matching
    # ``pattern`` and publish them as the ``ovirt_clusters`` fact.
    argument_spec = ovirt_facts_full_argument_spec(
        pattern=dict(default='', required=False),
    )
    module = AnsibleModule(argument_spec)
    check_sdk(module)  # fail fast if ovirt-engine-sdk-python is missing

    try:
        # ``auth`` is popped so it never leaks into the returned facts.
        auth = module.params.pop('auth')
        connection = create_connection(auth)
        clusters_service = connection.system_service().clusters_service()
        clusters = clusters_service.list(search=module.params['pattern'])
        module.exit_json(
            changed=False,
            ansible_facts=dict(
                ovirt_clusters=[
                    # Serialize each SDK struct into a plain dict,
                    # optionally following nested links.
                    get_dict_of_struct(
                        struct=c,
                        connection=connection,
                        fetch_nested=module.params.get('fetch_nested'),
                        attributes=module.params.get('nested_attributes'),
                    ) for c in clusters
                ],
            ),
        )
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        # Only log out if we created the session ourselves (i.e. the
        # caller did not pass a pre-existing token).
        connection.close(logout=auth.get('token') is None)


if __name__ == '__main__':
    main()
msebire/intellij-community
refs/heads/master
python/lib/Lib/nturl2path.py
155
"""Convert a NT pathname to a file URL and vice versa."""

import string

try:
    # Python 3
    from urllib.parse import quote, unquote
except ImportError:
    # Python 2 (original code used ``import urllib`` inside the
    # functions; hoisting and aliasing makes the module run unchanged on
    # both major versions)
    from urllib import quote, unquote


def url2pathname(url):
    """OS-specific conversion from a relative URL of the 'file' scheme
    to a file system path; not recommended for general use.

    e.g.
        ///C|/foo/bar/spam.foo
    becomes
        C:\\foo\\bar\\spam.foo

    Raises IOError for a URL with a malformed drive specifier.
    """
    # Windows itself uses ":" even in URLs.
    url = url.replace(':', '|')
    if '|' not in url:
        # No drive specifier, just convert slashes
        if url[:4] == '////':
            # path is something like ////host/path/on/remote/host
            # convert this to \\host\path\on\remote\host
            # (notice halving of slashes at the start of the path)
            url = url[2:]
        components = url.split('/')
        # make sure not to convert quoted slashes :-)
        return unquote('\\'.join(components))
    comp = url.split('|')
    if len(comp) != 2 or comp[0][-1] not in string.ascii_letters:
        # FIX: ``raise IOError, error`` was Python-2-only syntax; the
        # call form is valid on Python 2 and 3 and raises identically.
        raise IOError('Bad URL: ' + url)
    drive = comp[0][-1].upper()
    components = comp[1].split('/')
    path = drive + ':'
    for comp in components:
        if comp:
            path = path + '\\' + unquote(comp)
    return path


def pathname2url(p):
    """OS-specific conversion from a file system path to a relative URL
    of the 'file' scheme; not recommended for general use.

    e.g.
        C:\\foo\\bar\\spam.foo
    becomes
        ///C|/foo/bar/spam.foo

    Raises IOError for a path with a malformed drive specifier.
    """
    if ':' not in p:
        # No drive specifier, just convert slashes and quote the name
        if p[:2] == '\\\\':
            # path is something like \\host\path\on\remote\host
            # convert this to ////host/path/on/remote/host
            # (notice doubling of slashes at the start of the path)
            p = '\\\\' + p
        components = p.split('\\')
        return quote('/'.join(components))
    comp = p.split(':')
    if len(comp) != 2 or len(comp[0]) > 1:
        raise IOError('Bad path: ' + p)
    drive = quote(comp[0].upper())
    components = comp[1].split('\\')
    path = '///' + drive + '|'
    for comp in components:
        if comp:
            path = path + '/' + quote(comp)
    return path
timmahoney/ansible-modules-core
refs/heads/devel
system/ping.py
59
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2012, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

DOCUMENTATION = '''
---
module: ping
version_added: historical
short_description: Try to connect to host and return C(pong) on success.
description:
   - A trivial test module, this module always returns C(pong) on successful
     contact. It does not make sense in playbooks, but it is useful from
     C(/usr/bin/ansible)
options: {}
author: Michael DeHaan
'''

EXAMPLES = '''
# Test 'webservers' status
ansible webservers -m ping
'''


def main():
    # Entry point: echo ``data`` back as the ``ping`` result
    # (default: "pong"); the special value "crash" raises on purpose.
    module = AnsibleModule(
        argument_spec=dict(
            data=dict(required=False, default=None),
        ),
        supports_check_mode=True
    )
    result = dict(ping='pong')
    if module.params['data']:
        if module.params['data'] == 'crash':
            # Deliberate failure hook used to test error handling.
            # FIX: was ``import exceptions`` +
            # ``raise exceptions.Exception("boom")``; the ``exceptions``
            # module is Python-2-only and redundant, since Exception is
            # a builtin on both Python 2 and 3.
            raise Exception("boom")
        result['ping'] = module.params['data']
    module.exit_json(**result)

# import module snippets (the star import injects AnsibleModule)
from ansible.module_utils.basic import *
main()
SaikWolf/gnuradio
refs/heads/master
gr-fec/python/fec/qa_ber_bf.py
33
#!/usr/bin/env python
#
# Copyright 2014 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING.  If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#

from gnuradio import gr, gr_unittest, blocks
import fec_swig as fec
import numpy
import copy


class test_ber_bf(gr_unittest.TestCase):
    """QA tests for the fec.ber_bf bit-error-rate measurement block."""

    def setUp(self):
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    def test_000(self):
        # Cause a single bit error out of 8*N bits
        # using streaming mode
        mode = False
        N = 10000
        data0 = numpy.random.randint(0, 256, N).tolist()
        data1 = copy.deepcopy(data0)
        data1[0] ^= 0x01

        src0 = blocks.vector_source_b(data0)
        src1 = blocks.vector_source_b(data1)
        op = fec.ber_bf(mode)
        dst = blocks.vector_sink_f()

        self.tb.connect(src0, (op, 0))
        self.tb.connect(src1, (op, 1))
        self.tb.connect(op, dst)
        self.tb.run()

        data = dst.data()
        expected_result = self.log_ber(1., N)  # [numpy.log10(1.0 / (8.0 * N)), ]
        self.assertFloatTuplesAlmostEqual(expected_result, data, 5)

    def test_001(self):
        # Cause a single bit error out of 8*N bits
        # using test mode
        mode = True
        N = 1000
        data0 = numpy.random.randint(0, 256, N).tolist()
        data1 = copy.deepcopy(data0)
        data1[0] ^= 0x01

        src0 = blocks.vector_source_b(data0)
        src1 = blocks.vector_source_b(data1)
        op = fec.ber_bf(mode, 1)
        dst = blocks.vector_sink_f()

        self.tb.connect(src0, (op, 0))
        self.tb.connect(src1, (op, 1))
        self.tb.connect(op, dst)
        self.tb.run()

        data = dst.data()
        expected_result = self.log_ber(1., N)
        self.assertFloatTuplesAlmostEqual(expected_result, data, 5)

    def test_002(self):
        # Cause 8 bit errors out of 8*N bits
        # using test mode
        mode = True
        N = 1000
        data0 = numpy.random.randint(0, 256, N).tolist()
        data1 = copy.deepcopy(data0)
        data1[0] ^= 0xFF

        src0 = blocks.vector_source_b(data0)
        src1 = blocks.vector_source_b(data1)
        op = fec.ber_bf(mode, 1, -2.0)
        dst = blocks.vector_sink_f()

        self.tb.connect(src0, (op, 0))
        self.tb.connect(src1, (op, 1))
        self.tb.connect(op, dst)
        self.tb.run()

        data = dst.data()
        expected_result = self.log_ber(8., N)
        self.assertFloatTuplesAlmostEqual(expected_result, data, 5)

    def test_003(self):
        # Cause a 8 bit errors out of 8*N bits
        # using test mode
        # Exit if BER < -2.0
        mode = True
        N = 1000
        data0 = numpy.random.randint(0, 256, N).tolist()
        data1 = copy.deepcopy(data0)
        data1[0] ^= 0xFF

        src0 = blocks.vector_source_b(data0)
        src1 = blocks.vector_source_b(data1)
        op = fec.ber_bf(mode, 10, -2.0)
        dst = blocks.vector_sink_f()

        self.tb.connect(src0, (op, 0))
        self.tb.connect(src1, (op, 1))
        self.tb.connect(op, dst)
        self.tb.run()

        data = dst.data()
        expected_result = [-2.0, ]
        # FIX: ``print data`` was Python-2-only statement syntax; the
        # function form works on both Python 2 and 3.
        print(data)
        print(expected_result)
        self.assertFloatTuplesAlmostEqual(expected_result, data, 5)

    def test_004(self):
        # Cause 16 consecutive bit errors out of 8*N bits
        # make sure bytes are only read once.
        # using streaming mode
        mode = False
        N = 10000
        data0 = numpy.random.randint(0, 256, N).tolist()
        data1 = copy.deepcopy(data0)
        data1[0] ^= 0xFF
        data1[1] ^= 0xFF

        src0 = blocks.vector_source_b(data0)
        src1 = blocks.vector_source_b(data1)
        op = fec.ber_bf(mode)
        dst = blocks.vector_sink_f()

        self.tb.connect(src0, (op, 0))
        self.tb.connect(src1, (op, 1))
        self.tb.connect(op, dst)
        self.tb.run()

        data = dst.data()
        expected_result = self.log_ber(16, N)
        self.assertFloatTuplesAlmostEqual(expected_result, data, 5)

    def log_ber(self, n_errors, N):
        # Expected BER on a log10 scale; the trailing comma deliberately
        # makes this a 1-tuple, matching the sink's tuple output.
        return numpy.log10(1. * n_errors / (8.0 * N)),


if __name__ == '__main__':
    gr_unittest.run(test_ber_bf, "test_ber_bf.xml")
domeger/SplunkTAforPuppetEnterprise
refs/heads/master
bin/splunktaforpuppetenterprise/mako/runtime.py
22
# mako/runtime.py # Copyright (C) 2006-2016 the Mako authors and contributors <see AUTHORS file> # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """provides runtime services for templates, including Context, Namespace, and various helper functions.""" from mako import exceptions, util, compat from mako.compat import compat_builtins import sys class Context(object): """Provides runtime namespace, output buffer, and various callstacks for templates. See :ref:`runtime_toplevel` for detail on the usage of :class:`.Context`. """ def __init__(self, buffer, **data): self._buffer_stack = [buffer] self._data = data self._kwargs = data.copy() self._with_template = None self._outputting_as_unicode = None self.namespaces = {} # "capture" function which proxies to the # generic "capture" function self._data['capture'] = compat.partial(capture, self) # "caller" stack used by def calls with content self.caller_stack = self._data['caller'] = CallerStack() def _set_with_template(self, t): self._with_template = t illegal_names = t.reserved_names.intersection(self._data) if illegal_names: raise exceptions.NameConflictError( "Reserved words passed to render(): %s" % ", ".join(illegal_names)) @property def lookup(self): """Return the :class:`.TemplateLookup` associated with this :class:`.Context`. """ return self._with_template.lookup @property def kwargs(self): """Return the dictionary of top level keyword arguments associated with this :class:`.Context`. This dictionary only includes the top-level arguments passed to :meth:`.Template.render`. It does not include names produced within the template execution such as local variable names or special names such as ``self``, ``next``, etc. 
The purpose of this dictionary is primarily for the case that a :class:`.Template` accepts arguments via its ``<%page>`` tag, which are normally expected to be passed via :meth:`.Template.render`, except the template is being called in an inheritance context, using the ``body()`` method. :attr:`.Context.kwargs` can then be used to propagate these arguments to the inheriting template:: ${next.body(**context.kwargs)} """ return self._kwargs.copy() def push_caller(self, caller): """Push a ``caller`` callable onto the callstack for this :class:`.Context`.""" self.caller_stack.append(caller) def pop_caller(self): """Pop a ``caller`` callable onto the callstack for this :class:`.Context`.""" del self.caller_stack[-1] def keys(self): """Return a list of all names established in this :class:`.Context`.""" return list(self._data.keys()) def __getitem__(self, key): if key in self._data: return self._data[key] else: return compat_builtins.__dict__[key] def _push_writer(self): """push a capturing buffer onto this Context and return the new writer function.""" buf = util.FastEncodingBuffer() self._buffer_stack.append(buf) return buf.write def _pop_buffer_and_writer(self): """pop the most recent capturing buffer from this Context and return the current writer after the pop. 
""" buf = self._buffer_stack.pop() return buf, self._buffer_stack[-1].write def _push_buffer(self): """push a capturing buffer onto this Context.""" self._push_writer() def _pop_buffer(self): """pop the most recent capturing buffer from this Context.""" return self._buffer_stack.pop() def get(self, key, default=None): """Return a value from this :class:`.Context`.""" return self._data.get(key, compat_builtins.__dict__.get(key, default)) def write(self, string): """Write a string to this :class:`.Context` object's underlying output buffer.""" self._buffer_stack[-1].write(string) def writer(self): """Return the current writer function.""" return self._buffer_stack[-1].write def _copy(self): c = Context.__new__(Context) c._buffer_stack = self._buffer_stack c._data = self._data.copy() c._kwargs = self._kwargs c._with_template = self._with_template c._outputting_as_unicode = self._outputting_as_unicode c.namespaces = self.namespaces c.caller_stack = self.caller_stack return c def _locals(self, d): """Create a new :class:`.Context` with a copy of this :class:`.Context`'s current state, updated with the given dictionary. The :attr:`.Context.kwargs` collection remains unaffected. """ if not d: return self c = self._copy() c._data.update(d) return c def _clean_inheritance_tokens(self): """create a new copy of this :class:`.Context`. 
with tokens related to inheritance state removed.""" c = self._copy() x = c._data x.pop('self', None) x.pop('parent', None) x.pop('next', None) return c class CallerStack(list): def __init__(self): self.nextcaller = None def __nonzero__(self): return self.__bool__() def __bool__(self): return len(self) and self._get_caller() and True or False def _get_caller(self): # this method can be removed once # codegen MAGIC_NUMBER moves past 7 return self[-1] def __getattr__(self, key): return getattr(self._get_caller(), key) def _push_frame(self): frame = self.nextcaller or None self.append(frame) self.nextcaller = None return frame def _pop_frame(self): self.nextcaller = self.pop() class Undefined(object): """Represents an undefined value in a template. All template modules have a constant value ``UNDEFINED`` present which is an instance of this object. """ def __str__(self): raise NameError("Undefined") def __nonzero__(self): return self.__bool__() def __bool__(self): return False UNDEFINED = Undefined() STOP_RENDERING = "" class LoopStack(object): """a stack for LoopContexts that implements the context manager protocol to automatically pop off the top of the stack on context exit """ def __init__(self): self.stack = [] def _enter(self, iterable): self._push(iterable) return self._top def _exit(self): self._pop() return self._top @property def _top(self): if self.stack: return self.stack[-1] else: return self def _pop(self): return self.stack.pop() def _push(self, iterable): new = LoopContext(iterable) if self.stack: new.parent = self.stack[-1] return self.stack.append(new) def __getattr__(self, key): raise exceptions.RuntimeException("No loop context is established") def __iter__(self): return iter(self._top) class LoopContext(object): """A magic loop variable. Automatically accessible in any ``% for`` block. See the section :ref:`loop_context` for usage notes. :attr:`parent` -> :class:`.LoopContext` or ``None`` The parent loop, if one exists. 
:attr:`index` -> `int` The 0-based iteration count. :attr:`reverse_index` -> `int` The number of iterations remaining. :attr:`first` -> `bool` ``True`` on the first iteration, ``False`` otherwise. :attr:`last` -> `bool` ``True`` on the last iteration, ``False`` otherwise. :attr:`even` -> `bool` ``True`` when ``index`` is even. :attr:`odd` -> `bool` ``True`` when ``index`` is odd. """ def __init__(self, iterable): self._iterable = iterable self.index = 0 self.parent = None def __iter__(self): for i in self._iterable: yield i self.index += 1 @util.memoized_instancemethod def __len__(self): return len(self._iterable) @property def reverse_index(self): return len(self) - self.index - 1 @property def first(self): return self.index == 0 @property def last(self): return self.index == len(self) - 1 @property def even(self): return not self.odd @property def odd(self): return bool(self.index % 2) def cycle(self, *values): """Cycle through values as the loop progresses. """ if not values: raise ValueError("You must provide values to cycle through") return values[self.index % len(values)] class _NSAttr(object): def __init__(self, parent): self.__parent = parent def __getattr__(self, key): ns = self.__parent while ns: if hasattr(ns.module, key): return getattr(ns.module, key) else: ns = ns.inherits raise AttributeError(key) class Namespace(object): """Provides access to collections of rendering methods, which can be local, from other templates, or from imported modules. To access a particular rendering method referenced by a :class:`.Namespace`, use plain attribute access: .. sourcecode:: mako ${some_namespace.foo(x, y, z)} :class:`.Namespace` also contains several built-in attributes described here. 
""" def __init__(self, name, context, callables=None, inherits=None, populate_self=True, calling_uri=None): self.name = name self.context = context self.inherits = inherits if callables is not None: self.callables = dict([(c.__name__, c) for c in callables]) callables = () module = None """The Python module referenced by this :class:`.Namespace`. If the namespace references a :class:`.Template`, then this module is the equivalent of ``template.module``, i.e. the generated module for the template. """ template = None """The :class:`.Template` object referenced by this :class:`.Namespace`, if any. """ context = None """The :class:`.Context` object for this :class:`.Namespace`. Namespaces are often created with copies of contexts that contain slightly different data, particularly in inheritance scenarios. Using the :class:`.Context` off of a :class:`.Namespace` one can traverse an entire chain of templates that inherit from one-another. """ filename = None """The path of the filesystem file used for this :class:`.Namespace`'s module or template. If this is a pure module-based :class:`.Namespace`, this evaluates to ``module.__file__``. If a template-based namespace, it evaluates to the original template file location. """ uri = None """The URI for this :class:`.Namespace`'s template. I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`. This is the equivalent of :attr:`.Template.uri`. """ _templateuri = None @util.memoized_property def attr(self): """Access module level attributes by name. This accessor allows templates to supply "scalar" attributes which are particularly handy in inheritance relationships. .. seealso:: :ref:`inheritance_attr` :ref:`namespace_attr_for_includes` """ return _NSAttr(self) def get_namespace(self, uri): """Return a :class:`.Namespace` corresponding to the given ``uri``. If the given ``uri`` is a relative URI (i.e. 
it does not contain a leading slash ``/``), the ``uri`` is adjusted to be relative to the ``uri`` of the namespace itself. This method is therefore mostly useful off of the built-in ``local`` namespace, described in :ref:`namespace_local`. In most cases, a template wouldn't need this function, and should instead use the ``<%namespace>`` tag to load namespaces. However, since all ``<%namespace>`` tags are evaluated before the body of a template ever runs, this method can be used to locate namespaces using expressions that were generated within the body code of the template, or to conditionally use a particular namespace. """ key = (self, uri) if key in self.context.namespaces: return self.context.namespaces[key] else: ns = TemplateNamespace(uri, self.context._copy(), templateuri=uri, calling_uri=self._templateuri) self.context.namespaces[key] = ns return ns def get_template(self, uri): """Return a :class:`.Template` from the given ``uri``. The ``uri`` resolution is relative to the ``uri`` of this :class:`.Namespace` object's :class:`.Template`. """ return _lookup_template(self.context, uri, self._templateuri) def get_cached(self, key, **kwargs): """Return a value from the :class:`.Cache` referenced by this :class:`.Namespace` object's :class:`.Template`. The advantage to this method versus direct access to the :class:`.Cache` is that the configuration parameters declared in ``<%page>`` take effect here, thereby calling up the same configured backend as that configured by ``<%page>``. """ return self.cache.get(key, **kwargs) @property def cache(self): """Return the :class:`.Cache` object referenced by this :class:`.Namespace` object's :class:`.Template`. 
""" return self.template.cache def include_file(self, uri, **kwargs): """Include a file at the given ``uri``.""" _include_file(self.context, uri, self._templateuri, **kwargs) def _populate(self, d, l): for ident in l: if ident == '*': for (k, v) in self._get_star(): d[k] = v else: d[ident] = getattr(self, ident) def _get_star(self): if self.callables: for key in self.callables: yield (key, self.callables[key]) def __getattr__(self, key): if key in self.callables: val = self.callables[key] elif self.inherits: val = getattr(self.inherits, key) else: raise AttributeError( "Namespace '%s' has no member '%s'" % (self.name, key)) setattr(self, key, val) return val class TemplateNamespace(Namespace): """A :class:`.Namespace` specific to a :class:`.Template` instance.""" def __init__(self, name, context, template=None, templateuri=None, callables=None, inherits=None, populate_self=True, calling_uri=None): self.name = name self.context = context self.inherits = inherits if callables is not None: self.callables = dict([(c.__name__, c) for c in callables]) if templateuri is not None: self.template = _lookup_template(context, templateuri, calling_uri) self._templateuri = self.template.module._template_uri elif template is not None: self.template = template self._templateuri = template.module._template_uri else: raise TypeError("'template' argument is required.") if populate_self: lclcallable, lclcontext = \ _populate_self_namespace(context, self.template, self_ns=self) @property def module(self): """The Python module referenced by this :class:`.Namespace`. If the namespace references a :class:`.Template`, then this module is the equivalent of ``template.module``, i.e. the generated module for the template. """ return self.template.module @property def filename(self): """The path of the filesystem file used for this :class:`.Namespace`'s module or template. """ return self.template.filename @property def uri(self): """The URI for this :class:`.Namespace`'s template. I.e. 
whatever was sent to :meth:`.TemplateLookup.get_template()`. This is the equivalent of :attr:`.Template.uri`. """ return self.template.uri def _get_star(self): if self.callables: for key in self.callables: yield (key, self.callables[key]) def get(key): callable_ = self.template._get_def_callable(key) return compat.partial(callable_, self.context) for k in self.template.module._exports: yield (k, get(k)) def __getattr__(self, key): if key in self.callables: val = self.callables[key] elif self.template.has_def(key): callable_ = self.template._get_def_callable(key) val = compat.partial(callable_, self.context) elif self.inherits: val = getattr(self.inherits, key) else: raise AttributeError( "Namespace '%s' has no member '%s'" % (self.name, key)) setattr(self, key, val) return val class ModuleNamespace(Namespace): """A :class:`.Namespace` specific to a Python module instance.""" def __init__(self, name, context, module, callables=None, inherits=None, populate_self=True, calling_uri=None): self.name = name self.context = context self.inherits = inherits if callables is not None: self.callables = dict([(c.__name__, c) for c in callables]) mod = __import__(module) for token in module.split('.')[1:]: mod = getattr(mod, token) self.module = mod @property def filename(self): """The path of the filesystem file used for this :class:`.Namespace`'s module or template. 
"""
        return self.module.__file__

    def _get_star(self):
        # Yield (name, callable) pairs for a "*" import of this namespace:
        # explicitly-registered callables first, then every public
        # (non-underscore) callable found on the wrapped module, each
        # partially bound to this namespace's context.
        if self.callables:
            for key in self.callables:
                yield (key, self.callables[key])
        for key in dir(self.module):
            if key[0] != '_':
                callable_ = getattr(self.module, key)
                if compat.callable(callable_):
                    yield key, compat.partial(callable_, self.context)

    def __getattr__(self, key):
        # Resolution order: explicit callables, then attributes of the
        # wrapped module (bound to the context), then the inherited
        # namespace, else AttributeError.
        if key in self.callables:
            val = self.callables[key]
        elif hasattr(self.module, key):
            callable_ = getattr(self.module, key)
            val = compat.partial(callable_, self.context)
        elif self.inherits:
            val = getattr(self.inherits, key)
        else:
            raise AttributeError(
                "Namespace '%s' has no member '%s'" %
                (self.name, key))
        # cache the resolved value on the instance so __getattr__ only
        # fires once per attribute name
        setattr(self, key, val)
        return val


def supports_caller(func):
    """Apply a caller_stack compatibility
    decorator to a plain Python function.

    See the example in :ref:`namespaces_python_modules`.

    """

    def wrap_stackframe(context, *args, **kwargs):
        # push/pop a caller frame around the call so the plain function
        # can participate in the context's caller_stack protocol
        context.caller_stack._push_frame()
        try:
            return func(context, *args, **kwargs)
        finally:
            # popped even if func raises
            context.caller_stack._pop_frame()
    return wrap_stackframe


def capture(context, callable_, *args, **kwargs):
    """Execute the given template def, capturing the output into
    a buffer.

    See the example in :ref:`namespaces_python_modules`.

    """
    if not compat.callable(callable_):
        raise exceptions.RuntimeException(
            "capture() function expects a callable as "
            "its argument (i.e.
capture(func, *args, **kwargs))" ) context._push_buffer() try: callable_(*args, **kwargs) finally: buf = context._pop_buffer() return buf.getvalue() def _decorate_toplevel(fn): def decorate_render(render_fn): def go(context, *args, **kw): def y(*args, **kw): return render_fn(context, *args, **kw) try: y.__name__ = render_fn.__name__[7:] except TypeError: # < Python 2.4 pass return fn(y)(context, *args, **kw) return go return decorate_render def _decorate_inline(context, fn): def decorate_render(render_fn): dec = fn(render_fn) def go(*args, **kw): return dec(context, *args, **kw) return go return decorate_render def _include_file(context, uri, calling_uri, **kwargs): """locate the template from the given uri and include it in the current output.""" template = _lookup_template(context, uri, calling_uri) (callable_, ctx) = _populate_self_namespace( context._clean_inheritance_tokens(), template) callable_(ctx, **_kwargs_for_include(callable_, context._data, **kwargs)) def _inherit_from(context, uri, calling_uri): """called by the _inherit method in template modules to set up the inheritance chain at the start of a template's execution.""" if uri is None: return None template = _lookup_template(context, uri, calling_uri) self_ns = context['self'] ih = self_ns while ih.inherits is not None: ih = ih.inherits lclcontext = context._locals({'next': ih}) ih.inherits = TemplateNamespace("self:%s" % template.uri, lclcontext, template=template, populate_self=False) context._data['parent'] = lclcontext._data['local'] = ih.inherits callable_ = getattr(template.module, '_mako_inherit', None) if callable_ is not None: ret = callable_(template, lclcontext) if ret: return ret gen_ns = getattr(template.module, '_mako_generate_namespaces', None) if gen_ns is not None: gen_ns(context) return (template.callable_, lclcontext) def _lookup_template(context, uri, relativeto): lookup = context._with_template.lookup if lookup is None: raise exceptions.TemplateLookupException( "Template '%s' has 
no TemplateLookup associated" % context._with_template.uri) uri = lookup.adjust_uri(uri, relativeto) try: return lookup.get_template(uri) except exceptions.TopLevelLookupException: raise exceptions.TemplateLookupException(str(compat.exception_as())) def _populate_self_namespace(context, template, self_ns=None): if self_ns is None: self_ns = TemplateNamespace('self:%s' % template.uri, context, template=template, populate_self=False) context._data['self'] = context._data['local'] = self_ns if hasattr(template.module, '_mako_inherit'): ret = template.module._mako_inherit(template, context) if ret: return ret return (template.callable_, context) def _render(template, callable_, args, data, as_unicode=False): """create a Context and return the string output of the given template and template callable.""" if as_unicode: buf = util.FastEncodingBuffer(as_unicode=True) elif template.bytestring_passthrough: buf = compat.StringIO() else: buf = util.FastEncodingBuffer( as_unicode=as_unicode, encoding=template.output_encoding, errors=template.encoding_errors) context = Context(buf, **data) context._outputting_as_unicode = as_unicode context._set_with_template(template) _render_context(template, callable_, context, *args, **_kwargs_for_callable(callable_, data)) return context._pop_buffer().getvalue() def _kwargs_for_callable(callable_, data): argspec = compat.inspect_func_args(callable_) # for normal pages, **pageargs is usually present if argspec[2]: return data # for rendering defs from the top level, figure out the args namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None] kwargs = {} for arg in namedargs: if arg != 'context' and arg in data and arg not in kwargs: kwargs[arg] = data[arg] return kwargs def _kwargs_for_include(callable_, data, **kwargs): argspec = compat.inspect_func_args(callable_) namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None] for arg in namedargs: if arg != 'context' and arg in data and arg not in kwargs: kwargs[arg] = 
data[arg]
    return kwargs


def _render_context(tmpl, callable_, context, *args, **kwargs):
    """Render ``callable_`` in ``context``, establishing the correct
    'self' namespace for ``tmpl`` beforehand."""
    import mako.template as template
    # create polymorphic 'self' namespace for this
    # template with possibly updated context
    if not isinstance(tmpl, template.DefTemplate):
        # if main render method, call from the base of the inheritance stack
        (inherit, lclcontext) = _populate_self_namespace(context, tmpl)
        _exec_template(inherit, lclcontext, args=args, kwargs=kwargs)
    else:
        # otherwise, call the actual rendering method specified
        (inherit, lclcontext) = _populate_self_namespace(
            context, tmpl.parent)
        _exec_template(callable_, context, args=args, kwargs=kwargs)


def _exec_template(callable_, context, args=None, kwargs=None):
    """execute a rendering callable given the callable, a
    Context, and optional explicit arguments

    the contextual Template will be located if it exists, and
    the error handling options specified on that Template will
    be interpreted here.
    """
    template = context._with_template
    if template is not None and \
            (template.format_exceptions or template.error_handler):
        try:
            callable_(context, *args, **kwargs)
        except Exception:
            _render_error(template, context, compat.exception_as())
        except:
            # bare except: also catches non-Exception BaseExceptions.
            # NOTE(review): sys.exc_info()[0] is the exception *class*,
            # not the instance -- presumably deliberate upstream; confirm
            # before changing.
            e = sys.exc_info()[0]
            _render_error(template, context, e)
    else:
        # no error-handling options configured; let exceptions propagate
        callable_(context, *args, **kwargs)


def _render_error(template, context, error):
    """Route ``error`` to the template's error_handler if one is set,
    otherwise render the standard HTML error template into ``context``."""
    if template.error_handler:
        result = template.error_handler(context, error)
        if not result:
            # handler declined the error: re-raise the active exception
            compat.reraise(*sys.exc_info())
    else:
        error_template = exceptions.html_error_template()
        # replace the whole buffer stack so error output does not mix
        # with any partially-rendered template output
        if context._outputting_as_unicode:
            context._buffer_stack[:] = [
                util.FastEncodingBuffer(as_unicode=True)]
        else:
            context._buffer_stack[:] = [util.FastEncodingBuffer(
                error_template.output_encoding,
                error_template.encoding_errors)]

        context._set_with_template(error_template)
        error_template.render_context(context, error=error)
oliverhr/odoo
refs/heads/8.0-pos-pademobile-payment
addons/mail/tests/test_mail_features.py
172
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Business Applications # Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from ..mail_mail import mail_mail from ..mail_thread import mail_thread from .common import TestMail from openerp.tools import mute_logger, email_split, html2plaintext from openerp.tools.mail import html_sanitize class test_mail(TestMail): def test_000_alias_setup(self): """ Test basic mail.alias setup works, before trying to use them for routing """ cr, uid = self.cr, self.uid self.user_valentin_id = self.res_users.create(cr, uid, {'name': 'Valentin Cognito', 'email': '[email protected]', 'login': 'valentin.cognito', 'alias_name': 'valentin.cognito'}) self.user_valentin = self.res_users.browse(cr, uid, self.user_valentin_id) self.assertEquals(self.user_valentin.alias_name, self.user_valentin.login, "Login should be used as alias") self.user_pagan_id = self.res_users.create(cr, uid, {'name': 'Pagan Le Marchant', 'email': '[email protected]', 'login': '[email protected]', 'alias_name': '[email protected]'}) self.user_pagan = self.res_users.browse(cr, uid, self.user_pagan_id) self.assertEquals(self.user_pagan.alias_name, 'plmarchant', "If 
login is an email, the alias should keep only the local part")

        self.user_barty_id = self.res_users.create(cr, uid, {'name': 'Bartholomew Ironside', 'email': '[email protected]', 'login': 'b4r+_#_R3wl$$', 'alias_name': 'b4r+_#_R3wl$$'})
        self.user_barty = self.res_users.browse(cr, uid, self.user_barty_id)
        self.assertEquals(self.user_barty.alias_name, 'b4r+_-_r3wl-', 'Disallowed chars should be replaced by hyphens')

    def test_00_followers_function_field(self):
        """ Tests designed for the many2many function field 'follower_ids'.
            We will test to perform writes using the many2many commands 0, 3, 4,
            5 and 6. """
        cr, uid, user_admin, partner_bert_id, group_pigs = self.cr, self.uid, self.user_admin, self.partner_bert_id, self.group_pigs

        # Data: create 'disturbing' values in mail.followers: same res_id, other res_model; same res_model, other res_id
        group_dummy_id = self.mail_group.create(cr, uid, {'name': 'Dummy group'}, {'mail_create_nolog': True})
        self.mail_followers.create(cr, uid, {'res_model': 'mail.thread', 'res_id': self.group_pigs_id, 'partner_id': partner_bert_id})
        self.mail_followers.create(cr, uid, {'res_model': 'mail.group', 'res_id': group_dummy_id, 'partner_id': partner_bert_id})

        # Pigs just created: should be only Admin as follower
        follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
        self.assertEqual(follower_ids, set([user_admin.partner_id.id]), 'Admin should be the only Pigs fan')

        # Subscribe Bert through a '4' command (link existing record)
        group_pigs.write({'message_follower_ids': [(4, partner_bert_id)]})
        group_pigs.refresh()
        follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
        self.assertEqual(follower_ids, set([partner_bert_id, user_admin.partner_id.id]), 'Bert and Admin should be the only Pigs fans')

        # Unsubscribe Bert through a '3' command (unlink, keep record)
        group_pigs.write({'message_follower_ids': [(3, partner_bert_id)]})
        group_pigs.refresh()
        follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
        self.assertEqual(follower_ids, set([user_admin.partner_id.id]), 'Admin should be the only Pigs fan')

        # Set followers through a '6' command (replace the whole set)
        group_pigs.write({'message_follower_ids': [(6, 0, [partner_bert_id])]})
        group_pigs.refresh()
        follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
        self.assertEqual(follower_ids, set([partner_bert_id]), 'Bert should be the only Pigs fan')

        # Add a follower created on the fly through a '0' command
        # ((0, 0, vals) creates the res.partner and links it in one step)
        group_pigs.write({'message_follower_ids': [(0, 0, {'name': 'Patrick Fiori'})]})
        partner_patrick_id = self.res_partner.search(cr, uid, [('name', '=', 'Patrick Fiori')])[0]
        group_pigs.refresh()
        follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
        self.assertEqual(follower_ids, set([partner_bert_id, partner_patrick_id]), 'Bert and Patrick should be the only Pigs fans')

        # Finally, unlink through a '5' command (clear all links)
        group_pigs.write({'message_follower_ids': [(5, 0)]})
        group_pigs.refresh()
        follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
        self.assertFalse(follower_ids, 'Pigs group should not have fans anymore')

        # Test dummy data has not been altered: the commands above must only
        # touch rows matching this group's (res_model, res_id)
        fol_obj_ids = self.mail_followers.search(cr, uid, [('res_model', '=', 'mail.thread'), ('res_id', '=', self.group_pigs_id)])
        follower_ids = set([follower.partner_id.id for follower in self.mail_followers.browse(cr, uid, fol_obj_ids)])
        self.assertEqual(follower_ids, set([partner_bert_id]), 'Bert should be the follower of dummy mail.thread data')
        fol_obj_ids = self.mail_followers.search(cr, uid, [('res_model', '=', 'mail.group'), ('res_id', '=', group_dummy_id)])
        follower_ids = set([follower.partner_id.id for follower in self.mail_followers.browse(cr, uid, fol_obj_ids)])
        self.assertEqual(follower_ids, set([partner_bert_id, user_admin.partner_id.id]), 'Bert and Admin should be the followers of dummy mail.group data')

    def test_05_message_followers_and_subtypes(self):
        """ Tests designed for the subscriber API as well as
message subtypes """ cr, uid, user_admin, user_raoul, group_pigs = self.cr, self.uid, self.user_admin, self.user_raoul, self.group_pigs # Data: message subtypes self.mail_message_subtype.create(cr, uid, {'name': 'mt_mg_def', 'default': True, 'res_model': 'mail.group'}) self.mail_message_subtype.create(cr, uid, {'name': 'mt_other_def', 'default': True, 'res_model': 'crm.lead'}) self.mail_message_subtype.create(cr, uid, {'name': 'mt_all_def', 'default': True, 'res_model': False}) mt_mg_nodef = self.mail_message_subtype.create(cr, uid, {'name': 'mt_mg_nodef', 'default': False, 'res_model': 'mail.group'}) mt_all_nodef = self.mail_message_subtype.create(cr, uid, {'name': 'mt_all_nodef', 'default': False, 'res_model': False}) default_group_subtypes = self.mail_message_subtype.search(cr, uid, [('default', '=', True), '|', ('res_model', '=', 'mail.group'), ('res_model', '=', False)]) # ---------------------------------------- # CASE1: test subscriptions with subtypes # ---------------------------------------- # Do: subscribe Raoul, should have default subtypes group_pigs.message_subscribe_users([user_raoul.id]) group_pigs.refresh() # Test: 2 followers (Admin and Raoul) follower_ids = [follower.id for follower in group_pigs.message_follower_ids] self.assertEqual(set(follower_ids), set([user_raoul.partner_id.id, user_admin.partner_id.id]), 'message_subscribe: Admin and Raoul should be the only 2 Pigs fans') # Raoul follows default subtypes fol_ids = self.mail_followers.search(cr, uid, [ ('res_model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id), ('partner_id', '=', user_raoul.partner_id.id) ]) fol_obj = self.mail_followers.browse(cr, uid, fol_ids)[0] fol_subtype_ids = set([subtype.id for subtype in fol_obj.subtype_ids]) self.assertEqual(set(fol_subtype_ids), set(default_group_subtypes), 'message_subscribe: Raoul subscription subtypes are incorrect, should be all default ones') # Do: subscribe Raoul with specified new subtypes 
group_pigs.message_subscribe_users([user_raoul.id], subtype_ids=[mt_mg_nodef]) # Test: 2 followers (Admin and Raoul) follower_ids = [follower.id for follower in group_pigs.message_follower_ids] self.assertEqual(set(follower_ids), set([user_raoul.partner_id.id, user_admin.partner_id.id]), 'message_subscribe: Admin and Raoul should be the only 2 Pigs fans') # Test: 2 lines in mail.followers (no duplicate for Raoul) fol_ids = self.mail_followers.search(cr, uid, [ ('res_model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id), ]) self.assertEqual(len(fol_ids), 2, 'message_subscribe: subscribing an already-existing follower should not create new entries in mail.followers') # Test: Raoul follows only specified subtypes fol_ids = self.mail_followers.search(cr, uid, [ ('res_model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id), ('partner_id', '=', user_raoul.partner_id.id) ]) fol_obj = self.mail_followers.browse(cr, uid, fol_ids)[0] fol_subtype_ids = set([subtype.id for subtype in fol_obj.subtype_ids]) self.assertEqual(set(fol_subtype_ids), set([mt_mg_nodef]), 'message_subscribe: Raoul subscription subtypes are incorrect, should be only specified') # Do: Subscribe Raoul without specified subtypes: should not erase existing subscription subtypes group_pigs.message_subscribe_users([user_raoul.id, user_raoul.id]) group_pigs.message_subscribe_users([user_raoul.id]) group_pigs.refresh() # Test: 2 followers (Admin and Raoul) follower_ids = [follower.id for follower in group_pigs.message_follower_ids] self.assertEqual(set(follower_ids), set([user_raoul.partner_id.id, user_admin.partner_id.id]), 'message_subscribe: Admin and Raoul should be the only 2 Pigs fans') # Test: Raoul follows default subtypes fol_ids = self.mail_followers.search(cr, uid, [ ('res_model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id), ('partner_id', '=', user_raoul.partner_id.id) ]) fol_obj = self.mail_followers.browse(cr, uid, fol_ids)[0] fol_subtype_ids = set([subtype.id for 
subtype in fol_obj.subtype_ids]) self.assertEqual(set(fol_subtype_ids), set([mt_mg_nodef]), 'message_subscribe: Raoul subscription subtypes are incorrect, should be only specified') # Do: Unsubscribe Raoul twice through message_unsubscribe_users group_pigs.message_unsubscribe_users([user_raoul.id, user_raoul.id]) group_pigs.refresh() # Test: 1 follower (Admin) follower_ids = [follower.id for follower in group_pigs.message_follower_ids] self.assertEqual(follower_ids, [user_admin.partner_id.id], 'Admin must be the only Pigs fan') # Test: 1 lines in mail.followers (no duplicate for Raoul) fol_ids = self.mail_followers.search(cr, uid, [ ('res_model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id) ]) self.assertEqual(len(fol_ids), 1, 'message_subscribe: group should have only 1 entry in mail.follower for 1 follower') # Do: subscribe Admin with subtype_ids group_pigs.message_subscribe_users([uid], [mt_mg_nodef, mt_all_nodef]) fol_ids = self.mail_followers.search(cr, uid, [('res_model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id), ('partner_id', '=', user_admin.partner_id.id)]) fol_obj = self.mail_followers.browse(cr, uid, fol_ids)[0] fol_subtype_ids = set([subtype.id for subtype in fol_obj.subtype_ids]) self.assertEqual(set(fol_subtype_ids), set([mt_mg_nodef, mt_all_nodef]), 'subscription subtypes are incorrect') # ---------------------------------------- # CASE2: test mail_thread fields # ---------------------------------------- subtype_data = group_pigs._get_subscription_data(None, None)[group_pigs.id]['message_subtype_data'] self.assertEqual(set(subtype_data.keys()), set(['Discussions', 'mt_mg_def', 'mt_all_def', 'mt_mg_nodef', 'mt_all_nodef']), 'mail.group available subtypes incorrect') self.assertFalse(subtype_data['Discussions']['followed'], 'Admin should not follow Discussions in pigs') self.assertTrue(subtype_data['mt_mg_nodef']['followed'], 'Admin should follow mt_mg_nodef in pigs') self.assertTrue(subtype_data['mt_all_nodef']['followed'], 
'Admin should follow mt_all_nodef in pigs') def test_11_notification_url(self): """ Tests designed to test the URL added in notification emails. """ cr, uid, group_pigs = self.cr, self.uid, self.group_pigs # Test URL formatting base_url = self.registry('ir.config_parameter').get_param(cr, uid, 'web.base.url') # Partner data partner_raoul = self.res_partner.browse(cr, uid, self.partner_raoul_id) partner_bert_id = self.res_partner.create(cr, uid, {'name': 'bert'}) partner_bert = self.res_partner.browse(cr, uid, partner_bert_id) # Mail data mail_mail_id = self.mail_mail.create(cr, uid, {'state': 'exception'}) mail = self.mail_mail.browse(cr, uid, mail_mail_id) # Test: link for nobody -> None url = mail_mail._get_partner_access_link(self.mail_mail, cr, uid, mail) self.assertEqual(url, None, 'notification email: mails not send to a specific partner should not have any URL') # Test: link for partner -> None url = mail_mail._get_partner_access_link(self.mail_mail, cr, uid, mail, partner=partner_bert) self.assertEqual(url, None, 'notification email: mails send to a not-user partner should not have any URL') # Test: link for user -> signin url = mail_mail._get_partner_access_link(self.mail_mail, cr, uid, mail, partner=partner_raoul) self.assertIn(base_url, url, 'notification email: link should contain web.base.url') self.assertIn('db=%s' % cr.dbname, url, 'notification email: link should contain database name') self.assertIn('action=mail.action_mail_redirect', url, 'notification email: link should contain the redirect action') self.assertIn('login=%s' % partner_raoul.user_ids[0].login, url, 'notification email: link should contain the user login') # Test: link for user -> with model and res_id mail_mail_id = self.mail_mail.create(cr, uid, {'model': 'mail.group', 'res_id': group_pigs.id}) mail = self.mail_mail.browse(cr, uid, mail_mail_id) url = mail_mail._get_partner_access_link(self.mail_mail, cr, uid, mail, partner=partner_raoul) self.assertIn(base_url, url, 'notification 
email: link should contain web.base.url') self.assertIn('db=%s' % cr.dbname, url, 'notification email: link should contain database name') self.assertIn('action=mail.action_mail_redirect', url, 'notification email: link should contain the redirect action') self.assertIn('login=%s' % partner_raoul.user_ids[0].login, url, 'notification email: link should contain the user login') self.assertIn('model=mail.group', url, 'notification email: link should contain the model when having not notification email on a record') self.assertIn('res_id=%s' % group_pigs.id, url, 'notification email: link should contain the res_id when having not notification email on a record') # Test: link for user -> with model and res_id mail_mail_id = self.mail_mail.create(cr, uid, {'notification': True, 'model': 'mail.group', 'res_id': group_pigs.id}) mail = self.mail_mail.browse(cr, uid, mail_mail_id) url = mail_mail._get_partner_access_link(self.mail_mail, cr, uid, mail, partner=partner_raoul) self.assertIn(base_url, url, 'notification email: link should contain web.base.url') self.assertIn('db=%s' % cr.dbname, url, 'notification email: link should contain database name') self.assertIn('action=mail.action_mail_redirect', url, 'notification email: link should contain the redirect action') self.assertIn('login=%s' % partner_raoul.user_ids[0].login, url, 'notification email: link should contain the user login') self.assertIn('message_id=%s' % mail.mail_message_id.id, url, 'notification email: link based on message should contain the mail_message id') self.assertNotIn('model=mail.group', url, 'notification email: link based on message should not contain model') self.assertNotIn('res_id=%s' % group_pigs.id, url, 'notification email: link based on message should not contain res_id') @mute_logger('openerp.addons.mail.mail_thread', 'openerp.models') def test_12_inbox_redirection(self): """ Tests designed to test the inbox redirection of emails notification URLs. 
""" cr, uid, user_admin, group_pigs = self.cr, self.uid, self.user_admin, self.group_pigs model, act_id = self.ir_model_data.get_object_reference(cr, uid, 'mail', 'action_mail_inbox_feeds') # Data: post a message on pigs msg_id = self.group_pigs.message_post(body='My body', partner_ids=[self.partner_bert_id], type='comment', subtype='mail.mt_comment') # No specific parameters -> should redirect to Inbox action = mail_thread.message_redirect_action(self.mail_thread, cr, self.user_raoul_id, {'params': {}}) self.assertEqual( action.get('type'), 'ir.actions.client', 'URL redirection: action without parameters should redirect to client action Inbox' ) self.assertEqual( action.get('id'), act_id, 'URL redirection: action without parameters should redirect to client action Inbox' ) # Raoul has read access to Pigs -> should redirect to form view of Pigs action = mail_thread.message_redirect_action(self.mail_thread, cr, self.user_raoul_id, {'params': {'message_id': msg_id}}) self.assertEqual( action.get('type'), 'ir.actions.act_window', 'URL redirection: action with message_id for read-accredited user should redirect to Pigs' ) self.assertEqual( action.get('res_id'), group_pigs.id, 'URL redirection: action with message_id for read-accredited user should redirect to Pigs' ) action = mail_thread.message_redirect_action(self.mail_thread, cr, self.user_raoul_id, {'params': {'model': 'mail.group', 'res_id': group_pigs.id}}) self.assertEqual( action.get('type'), 'ir.actions.act_window', 'URL redirection: action with message_id for read-accredited user should redirect to Pigs' ) self.assertEqual( action.get('res_id'), group_pigs.id, 'URL redirection: action with message_id for read-accredited user should redirect to Pigs' ) # Bert has no read access to Pigs -> should redirect to Inbox action = mail_thread.message_redirect_action(self.mail_thread, cr, self.user_bert_id, {'params': {'message_id': msg_id}}) self.assertEqual( action.get('type'), 'ir.actions.client', 'URL redirection: 
action without parameters should redirect to client action Inbox' ) self.assertEqual( action.get('id'), act_id, 'URL redirection: action without parameters should redirect to client action Inbox' ) action = mail_thread.message_redirect_action(self.mail_thread, cr, self.user_bert_id, {'params': {'model': 'mail.group', 'res_id': group_pigs.id}}) self.assertEqual( action.get('type'), 'ir.actions.client', 'URL redirection: action without parameters should redirect to client action Inbox' ) self.assertEqual( action.get('id'), act_id, 'URL redirection: action without parameters should redirect to client action Inbox' ) def test_20_message_post(self): """ Tests designed for message_post. """ cr, uid, user_raoul, group_pigs = self.cr, self.uid, self.user_raoul, self.group_pigs # -------------------------------------------------- # Data creation # -------------------------------------------------- # 0 - Update existing users-partners self.res_users.write(cr, uid, [uid], {'email': 'a@a', 'notify_email': 'always'}) self.res_users.write(cr, uid, [self.user_raoul_id], {'email': 'r@r'}) # 1 - Bert Tartopoils, with email, should receive emails for comments and emails p_b_id = self.res_partner.create(cr, uid, {'name': 'Bert Tartopoils', 'email': 'b@b'}) # 2 - Carine Poilvache, with email, should receive emails for emails p_c_id = self.res_partner.create(cr, uid, {'name': 'Carine Poilvache', 'email': 'c@c', 'notify_email': 'none'}) # 3 - Dédé Grosbedon, without email, to test email verification; should receive emails for every message p_d_id = self.res_partner.create(cr, uid, {'name': 'Dédé Grosbedon', 'email': 'd@d', 'notify_email': 'always'}) # 4 - Attachments attach1_id = self.ir_attachment.create(cr, user_raoul.id, { 'name': 'Attach1', 'datas_fname': 'Attach1', 'datas': 'bWlncmF0aW9uIHRlc3Q=', 'res_model': 'mail.compose.message', 'res_id': 0}) attach2_id = self.ir_attachment.create(cr, user_raoul.id, { 'name': 'Attach2', 'datas_fname': 'Attach2', 'datas': 
'bWlncmF0aW9uIHRlc3Q=', 'res_model': 'mail.compose.message', 'res_id': 0}) attach3_id = self.ir_attachment.create(cr, user_raoul.id, { 'name': 'Attach3', 'datas_fname': 'Attach3', 'datas': 'bWlncmF0aW9uIHRlc3Q=', 'res_model': 'mail.compose.message', 'res_id': 0}) # 5 - Mail data _subject = 'Pigs' _mail_subject = 'Re: %s' % (group_pigs.name) _body1 = '<p>Pigs rules</p>' _body2 = '<html>Pigs rocks</html>' _attachments = [ ('List1', 'My first attachment'), ('List2', 'My second attachment') ] # -------------------------------------------------- # CASE1: post comment + partners + attachments # -------------------------------------------------- # Data: set alias_domain to see emails with alias self.registry('ir.config_parameter').set_param(self.cr, self.uid, 'mail.catchall.domain', 'schlouby.fr') # Data: change Pigs name to test reply_to self.mail_group.write(cr, uid, [self.group_pigs_id], {'name': '"Pigs" !ù $%-'}) # Do: subscribe Raoul new_follower_ids = [self.partner_raoul_id] group_pigs.message_subscribe(new_follower_ids) # Test: group followers = Raoul + uid group_fids = [follower.id for follower in group_pigs.message_follower_ids] test_fids = new_follower_ids + [self.partner_admin_id] self.assertEqual(set(test_fids), set(group_fids), 'message_subscribe: incorrect followers after subscribe') # Do: Raoul message_post on Pigs self._init_mock_build_email() msg1_id = self.mail_group.message_post(cr, user_raoul.id, self.group_pigs_id, body=_body1, subject=_subject, partner_ids=[p_b_id, p_c_id], attachment_ids=[attach1_id, attach2_id], attachments=_attachments, type='comment', subtype='mt_comment') msg = self.mail_message.browse(cr, uid, msg1_id) msg_message_id = msg.message_id msg_pids = [partner.id for partner in msg.notified_partner_ids] msg_aids = [attach.id for attach in msg.attachment_ids] sent_emails = self._build_email_kwargs_list # Test: mail_message: subject and body not modified self.assertEqual(_subject, msg.subject, 'message_post: mail.message subject 
incorrect') self.assertEqual(_body1, msg.body, 'message_post: mail.message body incorrect') # Test: mail_message: notified_partner_ids = group followers + partner_ids - author test_pids = set([self.partner_admin_id, p_b_id, p_c_id]) self.assertEqual(test_pids, set(msg_pids), 'message_post: mail.message notified partners incorrect') # Test: mail_message: attachments (4, attachment_ids + attachments) test_aids = set([attach1_id, attach2_id]) msg_attach_names = set([attach.name for attach in msg.attachment_ids]) test_attach_names = set(['Attach1', 'Attach2', 'List1', 'List2']) self.assertEqual(len(msg_aids), 4, 'message_post: mail.message wrong number of attachments') self.assertEqual(msg_attach_names, test_attach_names, 'message_post: mail.message attachments incorrectly added') self.assertTrue(test_aids.issubset(set(msg_aids)), 'message_post: mail.message attachments duplicated') for attach in msg.attachment_ids: self.assertEqual(attach.res_model, 'mail.group', 'message_post: mail.message attachments were not linked to the document') self.assertEqual(attach.res_id, group_pigs.id, 'message_post: mail.message attachments were not linked to the document') if 'List' in attach.name: self.assertIn((attach.name, attach.datas.decode('base64')), _attachments, 'message_post: mail.message attachment name / data incorrect') dl_attach = self.mail_message.download_attachment(cr, user_raoul.id, id_message=msg.id, attachment_id=attach.id) self.assertIn((dl_attach['filename'], dl_attach['base64'].decode('base64')), _attachments, 'message_post: mail.message download_attachment is incorrect') # Test: followers: same as before (author was already subscribed) group_pigs.refresh() group_fids = [follower.id for follower in group_pigs.message_follower_ids] test_fids = new_follower_ids + [self.partner_admin_id] self.assertEqual(set(test_fids), set(group_fids), 'message_post: wrong followers after posting') # Test: mail_mail: notifications have been deleted 
self.assertFalse(self.mail_mail.search(cr, uid, [('mail_message_id', '=', msg1_id)]), 'message_post: mail.mail notifications should have been auto-deleted!') # Test: notifications emails: to a and b, c is email only, r is author test_emailto = ['Administrator <a@a>', 'Bert Tartopoils <b@b>'] # test_emailto = ['"Followers of -Pigs-" <a@a>', '"Followers of -Pigs-" <b@b>'] self.assertEqual(len(sent_emails), 2, 'message_post: notification emails wrong number of send emails') self.assertEqual(set([m['email_to'][0] for m in sent_emails]), set(test_emailto), 'message_post: notification emails wrong recipients (email_to)') for sent_email in sent_emails: self.assertEqual(sent_email['email_from'], 'Raoul Grosbedon <[email protected]>', 'message_post: notification email wrong email_from: should use alias of sender') self.assertEqual(len(sent_email['email_to']), 1, 'message_post: notification email sent to more than one email address instead of a precise partner') self.assertIn(sent_email['email_to'][0], test_emailto, 'message_post: notification email email_to incorrect') self.assertEqual(sent_email['reply_to'], u'"YourCompany \\"Pigs\\" !ù $%-" <[email protected]>', 'message_post: notification email reply_to incorrect') self.assertEqual(_subject, sent_email['subject'], 'message_post: notification email subject incorrect') self.assertIn(_body1, sent_email['body'], 'message_post: notification email body incorrect') self.assertIn('Pigs rules', sent_email['body_alternative'], 'message_post: notification email body alternative should contain the body') self.assertNotIn('<p>', sent_email['body_alternative'], 'message_post: notification email body alternative still contains html') self.assertFalse(sent_email['references'], 'message_post: references should be False when sending a message that is not a reply') # Test: notification linked to this message = group followers = notified_partner_ids notif_ids = self.mail_notification.search(cr, uid, [('message_id', '=', msg1_id)]) 
notif_pids = set([notif.partner_id.id for notif in self.mail_notification.browse(cr, uid, notif_ids)]) self.assertEqual(notif_pids, test_pids, 'message_post: mail.message created mail.notification incorrect') # Data: Pigs name back to normal self.mail_group.write(cr, uid, [self.group_pigs_id], {'name': 'Pigs'}) # -------------------------------------------------- # CASE2: reply + parent_id + parent notification # -------------------------------------------------- # Data: remove alias_domain to see emails with alias param_ids = self.registry('ir.config_parameter').search(cr, uid, [('key', '=', 'mail.catchall.domain')]) self.registry('ir.config_parameter').unlink(cr, uid, param_ids) # Do: Raoul message_post on Pigs self._init_mock_build_email() msg2_id = self.mail_group.message_post(cr, user_raoul.id, self.group_pigs_id, body=_body2, type='email', subtype='mt_comment', partner_ids=[p_d_id], parent_id=msg1_id, attachment_ids=[attach3_id], context={'mail_post_autofollow': True}) msg = self.mail_message.browse(cr, uid, msg2_id) msg_pids = [partner.id for partner in msg.notified_partner_ids] msg_aids = [attach.id for attach in msg.attachment_ids] sent_emails = self._build_email_kwargs_list # Test: mail_message: subject is False, body, parent_id is msg_id self.assertEqual(msg.subject, False, 'message_post: mail.message subject incorrect') self.assertEqual(msg.body, html_sanitize(_body2), 'message_post: mail.message body incorrect') self.assertEqual(msg.parent_id.id, msg1_id, 'message_post: mail.message parent_id incorrect') # Test: mail_message: notified_partner_ids = group followers test_pids = [self.partner_admin_id, p_d_id] self.assertEqual(set(test_pids), set(msg_pids), 'message_post: mail.message partners incorrect') # Test: mail_message: notifications linked to this message = group followers = notified_partner_ids notif_ids = self.mail_notification.search(cr, uid, [('message_id', '=', msg2_id)]) notif_pids = [notif.partner_id.id for notif in 
self.mail_notification.browse(cr, uid, notif_ids)] self.assertEqual(set(test_pids), set(notif_pids), 'message_post: mail.message notification partners incorrect') # Test: mail_mail: notifications deleted self.assertFalse(self.mail_mail.search(cr, uid, [('mail_message_id', '=', msg2_id)]), 'mail.mail notifications should have been auto-deleted!') # Test: emails send by server (to a, b, c, d) test_emailto = [u'Administrator <a@a>', u'Bert Tartopoils <b@b>', u'Carine Poilvache <c@c>', u'D\xe9d\xe9 Grosbedon <d@d>'] # test_emailto = [u'"Followers of Pigs" <a@a>', u'"Followers of Pigs" <b@b>', u'"Followers of Pigs" <c@c>', u'"Followers of Pigs" <d@d>'] # self.assertEqual(len(sent_emails), 3, 'sent_email number of sent emails incorrect') for sent_email in sent_emails: self.assertEqual(sent_email['email_from'], 'Raoul Grosbedon <r@r>', 'message_post: notification email wrong email_from: should use email of sender when no alias domain set') self.assertEqual(len(sent_email['email_to']), 1, 'message_post: notification email sent to more than one email address instead of a precise partner') self.assertIn(sent_email['email_to'][0], test_emailto, 'message_post: notification email email_to incorrect') self.assertEqual(email_split(sent_email['reply_to']), ['r@r'], # was '"Followers of Pigs" <r@r>', but makes no sense 'message_post: notification email reply_to incorrect: should have raoul email') self.assertEqual(_mail_subject, sent_email['subject'], 'message_post: notification email subject incorrect') self.assertIn(html_sanitize(_body2), sent_email['body'], 'message_post: notification email does not contain the body') self.assertIn('Pigs rocks', sent_email['body_alternative'], 'message_post: notification email body alternative should contain the body') self.assertNotIn('<p>', sent_email['body_alternative'], 'message_post: notification email body alternative still contains html') self.assertIn(msg_message_id, sent_email['references'], 'message_post: notification email references 
lacks parent message message_id') # Test: attachments + download for attach in msg.attachment_ids: self.assertEqual(attach.res_model, 'mail.group', 'message_post: mail.message attachment res_model incorrect') self.assertEqual(attach.res_id, self.group_pigs_id, 'message_post: mail.message attachment res_id incorrect') # Test: Dédé has been notified -> should also have been notified of the parent message msg = self.mail_message.browse(cr, uid, msg1_id) msg_pids = set([partner.id for partner in msg.notified_partner_ids]) test_pids = set([self.partner_admin_id, p_b_id, p_c_id, p_d_id]) self.assertEqual(test_pids, msg_pids, 'message_post: mail.message parent notification not created') # Do: reply to last message msg3_id = self.mail_group.message_post(cr, user_raoul.id, self.group_pigs_id, body='Test', parent_id=msg2_id) msg = self.mail_message.browse(cr, uid, msg3_id) # Test: check that its parent will be the first message self.assertEqual(msg.parent_id.id, msg1_id, 'message_post did not flatten the thread structure') def test_25_message_compose_wizard(self): """ Tests designed for the mail.compose.message wizard. 
""" cr, uid, user_raoul, group_pigs = self.cr, self.uid, self.user_raoul, self.group_pigs mail_compose = self.registry('mail.compose.message') # -------------------------------------------------- # Data creation # -------------------------------------------------- # 0 - Update existing users-partners self.res_users.write(cr, uid, [uid], {'email': 'a@a'}) self.res_users.write(cr, uid, [self.user_raoul_id], {'email': 'r@r'}) # 1 - Bert Tartopoils, with email, should receive emails for comments and emails p_b_id = self.res_partner.create(cr, uid, {'name': 'Bert Tartopoils', 'email': 'b@b'}) # 2 - Carine Poilvache, with email, should receive emails for emails p_c_id = self.res_partner.create(cr, uid, {'name': 'Carine Poilvache', 'email': 'c@c', 'notify_email': 'always'}) # 3 - Dédé Grosbedon, without email, to test email verification; should receive emails for every message p_d_id = self.res_partner.create(cr, uid, {'name': 'Dédé Grosbedon', 'email': 'd@d', 'notify_email': 'always'}) # 4 - Create a Bird mail.group, that will be used to test mass mailing group_bird_id = self.mail_group.create(cr, uid, { 'name': 'Bird', 'description': 'Bird resistance', }, context={'mail_create_nolog': True}) group_bird = self.mail_group.browse(cr, uid, group_bird_id) # 5 - Mail data _subject = 'Pigs' _body = 'Pigs <b>rule</b>' _reply_subject = 'Re: %s' % _subject _attachments = [ {'name': 'First', 'datas_fname': 'first.txt', 'datas': 'My first attachment'.encode('base64')}, {'name': 'Second', 'datas_fname': 'second.txt', 'datas': 'My second attachment'.encode('base64')} ] _attachments_test = [('first.txt', 'My first attachment'), ('second.txt', 'My second attachment')] # 6 - Subscribe Bert to Pigs group_pigs.message_subscribe([p_b_id]) # -------------------------------------------------- # CASE1: wizard + partners + context keys # -------------------------------------------------- # Do: Raoul wizard-composes on Pigs with auto-follow for partners, not for author compose_id = 
mail_compose.create(cr, user_raoul.id, { 'subject': _subject, 'body': _body, 'partner_ids': [(4, p_c_id), (4, p_d_id)], }, context={ 'default_composition_mode': 'comment', 'default_model': 'mail.group', 'default_res_id': self.group_pigs_id, }) compose = mail_compose.browse(cr, uid, compose_id) # Test: mail.compose.message: composition_mode, model, res_id self.assertEqual(compose.composition_mode, 'comment', 'compose wizard: mail.compose.message incorrect composition_mode') self.assertEqual(compose.model, 'mail.group', 'compose wizard: mail.compose.message incorrect model') self.assertEqual(compose.res_id, self.group_pigs_id, 'compose wizard: mail.compose.message incorrect res_id') # Do: Post the comment mail_compose.send_mail(cr, user_raoul.id, [compose_id], {'mail_post_autofollow': True, 'mail_create_nosubscribe': True}) group_pigs.refresh() message = group_pigs.message_ids[0] # Test: mail.group: followers (c and d added by auto follow key; raoul not added by nosubscribe key) pigs_pids = [p.id for p in group_pigs.message_follower_ids] test_pids = [self.partner_admin_id, p_b_id, p_c_id, p_d_id] self.assertEqual(set(pigs_pids), set(test_pids), 'compose wizard: mail_post_autofollow and mail_create_nosubscribe context keys not correctly taken into account') # Test: mail.message: subject, body inside p self.assertEqual(message.subject, _subject, 'compose wizard: mail.message incorrect subject') self.assertEqual(message.body, '<p>%s</p>' % _body, 'compose wizard: mail.message incorrect body') # Test: mail.message: notified_partner_ids = admin + bert (followers) + c + d (recipients) msg_pids = [partner.id for partner in message.notified_partner_ids] test_pids = [self.partner_admin_id, p_b_id, p_c_id, p_d_id] self.assertEqual(set(msg_pids), set(test_pids), 'compose wizard: mail.message notified_partner_ids incorrect') # -------------------------------------------------- # CASE2: reply + attachments # -------------------------------------------------- # Do: Reply with 
attachments compose_id = mail_compose.create(cr, user_raoul.id, { 'attachment_ids': [(0, 0, _attachments[0]), (0, 0, _attachments[1])] }, context={ 'default_composition_mode': 'comment', 'default_res_id': self.group_pigs_id, 'default_parent_id': message.id }) compose = mail_compose.browse(cr, uid, compose_id) # Test: mail.compose.message: model, res_id, parent_id self.assertEqual(compose.model, 'mail.group', 'compose wizard: mail.compose.message incorrect model') self.assertEqual(compose.res_id, self.group_pigs_id, 'compose wizard: mail.compose.message incorrect res_id') self.assertEqual(compose.parent_id.id, message.id, 'compose wizard: mail.compose.message incorrect parent_id') # Test: mail.compose.message: subject as Re:.., body, parent_id self.assertEqual(compose.subject, _reply_subject, 'compose wizard: mail.compose.message incorrect subject') self.assertFalse(compose.body, 'compose wizard: mail.compose.message body should not contain parent message body') self.assertEqual(compose.parent_id and compose.parent_id.id, message.id, 'compose wizard: mail.compose.message parent_id incorrect') # Test: mail.compose.message: attachments for attach in compose.attachment_ids: self.assertIn((attach.datas_fname, attach.datas.decode('base64')), _attachments_test, 'compose wizard: mail.message attachment name / data incorrect') # -------------------------------------------------- # CASE3: mass_mail on Pigs and Bird # -------------------------------------------------- # Do: Compose in mass_mail_mode on pigs and bird compose_id = mail_compose.create( cr, user_raoul.id, { 'subject': _subject, 'body': '${object.description}', 'partner_ids': [(4, p_c_id), (4, p_d_id)], }, context={ 'default_composition_mode': 'mass_mail', 'default_model': 'mail.group', 'default_res_id': False, 'active_ids': [self.group_pigs_id, group_bird_id], }) compose = mail_compose.browse(cr, uid, compose_id) # Do: Post the comment, get created message for each group mail_compose.send_mail(cr, user_raoul.id, 
[compose_id], context={ 'default_res_id': -1, 'active_ids': [self.group_pigs_id, group_bird_id] }) # check mail_mail mail_mail_ids = self.mail_mail.search(cr, uid, [('subject', '=', _subject)]) for mail_mail in self.mail_mail.browse(cr, uid, mail_mail_ids): self.assertEqual(set([p.id for p in mail_mail.recipient_ids]), set([p_c_id, p_d_id]), 'compose wizard: mail_mail mass mailing: mail.mail in mass mail incorrect recipients') # check logged messages group_pigs.refresh() group_bird.refresh() message1 = group_pigs.message_ids[0] message2 = group_bird.message_ids[0] # Test: Pigs and Bird did receive their message test_msg_ids = self.mail_message.search(cr, uid, [], limit=2) self.assertIn(message1.id, test_msg_ids, 'compose wizard: Pigs did not receive its mass mailing message') self.assertIn(message2.id, test_msg_ids, 'compose wizard: Bird did not receive its mass mailing message') # Test: mail.message: subject, body, subtype, notified partners (nobody + specific recipients) self.assertEqual(message1.subject, _subject, 'compose wizard: message_post: mail.message in mass mail subject incorrect') self.assertEqual(message1.body, '<p>%s</p>' % group_pigs.description, 'compose wizard: message_post: mail.message in mass mail body incorrect') # self.assertEqual(set([p.id for p in message1.notified_partner_ids]), set([p_c_id, p_d_id]), # 'compose wizard: message_post: mail.message in mass mail incorrect notified partners') self.assertEqual(message2.subject, _subject, 'compose wizard: message_post: mail.message in mass mail subject incorrect') self.assertEqual(message2.body, '<p>%s</p>' % group_bird.description, 'compose wizard: message_post: mail.message in mass mail body incorrect') # self.assertEqual(set([p.id for p in message2.notified_partner_ids]), set([p_c_id, p_d_id]), # 'compose wizard: message_post: mail.message in mass mail incorrect notified partners') # Test: mail.group followers: author not added as follower in mass mail mode pigs_pids = [p.id for p in 
group_pigs.message_follower_ids] test_pids = [self.partner_admin_id, p_b_id, p_c_id, p_d_id] self.assertEqual(set(pigs_pids), set(test_pids), 'compose wizard: mail_post_autofollow and mail_create_nosubscribe context keys not correctly taken into account') bird_pids = [p.id for p in group_bird.message_follower_ids] test_pids = [self.partner_admin_id] self.assertEqual(set(bird_pids), set(test_pids), 'compose wizard: mail_post_autofollow and mail_create_nosubscribe context keys not correctly taken into account') # Do: Compose in mass_mail, coming from list_view, we have an active_domain that should be supported compose_id = mail_compose.create(cr, user_raoul.id, { 'subject': _subject, 'body': '${object.description}', 'partner_ids': [(4, p_c_id), (4, p_d_id)], }, context={ 'default_composition_mode': 'mass_mail', 'default_model': 'mail.group', 'default_res_id': False, 'active_ids': [self.group_pigs_id], 'active_domain': [('name', 'in', ['Pigs', 'Bird'])], }) compose = mail_compose.browse(cr, uid, compose_id) # Do: Post the comment, get created message for each group mail_compose.send_mail( cr, user_raoul.id, [compose_id], context={ 'default_res_id': -1, 'active_ids': [self.group_pigs_id, group_bird_id] }) group_pigs.refresh() group_bird.refresh() message1 = group_pigs.message_ids[0] message2 = group_bird.message_ids[0] # Test: Pigs and Bird did receive their message test_msg_ids = self.mail_message.search(cr, uid, [], limit=2) self.assertIn(message1.id, test_msg_ids, 'compose wizard: Pigs did not receive its mass mailing message') self.assertIn(message2.id, test_msg_ids, 'compose wizard: Bird did not receive its mass mailing message') def test_30_needaction(self): """ Tests for mail.message needaction. 
""" cr, uid, user_admin, user_raoul, group_pigs = self.cr, self.uid, self.user_admin, self.user_raoul, self.group_pigs na_admin_base = self.mail_message._needaction_count(cr, uid, domain=[]) na_demo_base = self.mail_message._needaction_count(cr, user_raoul.id, domain=[]) # Test: number of unread notification = needaction on mail.message notif_ids = self.mail_notification.search(cr, uid, [ ('partner_id', '=', user_admin.partner_id.id), ('is_read', '=', False) ]) na_count = self.mail_message._needaction_count(cr, uid, domain=[]) self.assertEqual(len(notif_ids), na_count, 'unread notifications count does not match needaction count') # Do: post 2 message on group_pigs as admin, 3 messages as demo user for dummy in range(2): group_pigs.message_post(body='My Body', subtype='mt_comment') raoul_pigs = group_pigs.sudo(user_raoul) for dummy in range(3): raoul_pigs.message_post(body='My Demo Body', subtype='mt_comment') # Test: admin has 3 new notifications (from demo), and 3 new needaction notif_ids = self.mail_notification.search(cr, uid, [ ('partner_id', '=', user_admin.partner_id.id), ('is_read', '=', False) ]) self.assertEqual(len(notif_ids), na_admin_base + 3, 'Admin should have 3 new unread notifications') na_admin = self.mail_message._needaction_count(cr, uid, domain=[]) na_admin_group = self.mail_message._needaction_count(cr, uid, domain=[('model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id)]) self.assertEqual(na_admin, na_admin_base + 3, 'Admin should have 3 new needaction') self.assertEqual(na_admin_group, 3, 'Admin should have 3 needaction related to Pigs') # Test: demo has 0 new notifications (not a follower, not receiving its own messages), and 0 new needaction notif_ids = self.mail_notification.search(cr, uid, [ ('partner_id', '=', user_raoul.partner_id.id), ('is_read', '=', False) ]) self.assertEqual(len(notif_ids), na_demo_base + 0, 'Demo should have 0 new unread notifications') na_demo = self.mail_message._needaction_count(cr, user_raoul.id, 
domain=[]) na_demo_group = self.mail_message._needaction_count(cr, user_raoul.id, domain=[('model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id)]) self.assertEqual(na_demo, na_demo_base + 0, 'Demo should have 0 new needaction') self.assertEqual(na_demo_group, 0, 'Demo should have 0 needaction related to Pigs') def test_40_track_field(self): """ Testing auto tracking of fields. """ def _strip_string_spaces(body): return body.replace(' ', '').replace('\n', '') # Data: subscribe Raoul to Pigs, because he will change the public attribute and may loose access to the record cr, uid = self.cr, self.uid self.mail_group.message_subscribe_users(cr, uid, [self.group_pigs_id], [self.user_raoul_id]) # Data: res.users.group, to test group_public_id automatic logging group_system_ref = self.registry('ir.model.data').get_object_reference(cr, uid, 'base', 'group_system') group_system_id = group_system_ref and group_system_ref[1] or False # Data: custom subtypes mt_private_id = self.mail_message_subtype.create(cr, uid, {'name': 'private', 'description': 'Private public'}) self.ir_model_data.create(cr, uid, {'name': 'mt_private', 'model': 'mail.message.subtype', 'module': 'mail', 'res_id': mt_private_id}) mt_name_supername_id = self.mail_message_subtype.create(cr, uid, {'name': 'name_supername', 'description': 'Supername name'}) self.ir_model_data.create(cr, uid, {'name': 'mt_name_supername', 'model': 'mail.message.subtype', 'module': 'mail', 'res_id': mt_name_supername_id}) mt_group_public_set_id = self.mail_message_subtype.create(cr, uid, {'name': 'group_public_set', 'description': 'Group set'}) self.ir_model_data.create(cr, uid, {'name': 'mt_group_public_set', 'model': 'mail.message.subtype', 'module': 'mail', 'res_id': mt_group_public_set_id}) mt_group_public_id = self.mail_message_subtype.create(cr, uid, {'name': 'group_public', 'description': 'Group changed'}) self.ir_model_data.create(cr, uid, {'name': 'mt_group_public', 'model': 'mail.message.subtype', 'module': 
'mail', 'res_id': mt_group_public_id}) # Data: alter mail_group model for testing purposes (test on classic, selection and many2one fields) cls = type(self.mail_group) self.assertNotIn('_track', cls.__dict__) cls._track = { 'public': { 'mail.mt_private': lambda self, cr, uid, obj, ctx=None: obj.public == 'private', }, 'name': { 'mail.mt_name_supername': lambda self, cr, uid, obj, ctx=None: obj.name == 'supername', }, 'group_public_id': { 'mail.mt_group_public_set': lambda self, cr, uid, obj, ctx=None: obj.group_public_id, 'mail.mt_group_public': lambda self, cr, uid, obj, ctx=None: True, }, } visibility = {'public': 'onchange', 'name': 'always', 'group_public_id': 'onchange'} for key in visibility: self.assertFalse(hasattr(getattr(cls, key), 'track_visibility')) getattr(cls, key).track_visibility = visibility[key] @self.addCleanup def cleanup(): delattr(cls, '_track') for key in visibility: del getattr(cls, key).track_visibility # Test: change name -> always tracked, not related to a subtype self.mail_group.write(cr, self.user_raoul_id, [self.group_pigs_id], {'public': 'public'}) self.group_pigs.refresh() self.assertEqual(len(self.group_pigs.message_ids), 1, 'tracked: a message should have been produced') # Test: first produced message: no subtype, name change tracked last_msg = self.group_pigs.message_ids[-1] self.assertFalse(last_msg.subtype_id, 'tracked: message should not have been linked to a subtype') self.assertIn(u'SelectedGroupOnly\u2192Public', _strip_string_spaces(last_msg.body), 'tracked: message body incorrect') self.assertIn('Pigs', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold always tracked field') # Test: change name as supername, public as private -> 2 subtypes self.mail_group.write(cr, self.user_raoul_id, [self.group_pigs_id], {'name': 'supername', 'public': 'private'}) self.group_pigs.refresh() self.assertEqual(len(self.group_pigs.message_ids), 3, 'tracked: two messages should have been produced') # Test: first 
produced message: mt_name_supername last_msg = self.group_pigs.message_ids[-2] self.assertEqual(last_msg.subtype_id.id, mt_private_id, 'tracked: message should be linked to mt_private subtype') self.assertIn('Private public', last_msg.body, 'tracked: message body does not hold the subtype description') self.assertIn(u'Pigs\u2192supername', _strip_string_spaces(last_msg.body), 'tracked: message body incorrect') # Test: second produced message: mt_name_supername last_msg = self.group_pigs.message_ids[-3] self.assertEqual(last_msg.subtype_id.id, mt_name_supername_id, 'tracked: message should be linked to mt_name_supername subtype') self.assertIn('Supername name', last_msg.body, 'tracked: message body does not hold the subtype description') self.assertIn(u'Public\u2192Private', _strip_string_spaces(last_msg.body), 'tracked: message body incorrect') self.assertIn(u'Pigs\u2192supername', _strip_string_spaces(last_msg.body), 'tracked feature: message body does not hold always tracked field') # Test: change public as public, group_public_id -> 2 subtypes, name always tracked self.mail_group.write(cr, self.user_raoul_id, [self.group_pigs_id], {'public': 'public', 'group_public_id': group_system_id}) self.group_pigs.refresh() self.assertEqual(len(self.group_pigs.message_ids), 5, 'tracked: one message should have been produced') # Test: first produced message: mt_group_public_set_id, with name always tracked, public tracked on change last_msg = self.group_pigs.message_ids[-4] self.assertEqual(last_msg.subtype_id.id, mt_group_public_set_id, 'tracked: message should be linked to mt_group_public_set_id') self.assertIn('Group set', last_msg.body, 'tracked: message body does not hold the subtype description') self.assertIn(u'Private\u2192Public', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold changed tracked field') self.assertIn(u'HumanResources/Employee\u2192Administration/Settings', _strip_string_spaces(last_msg.body), 'tracked: message body does not 
hold always tracked field') # Test: second produced message: mt_group_public_id, with name always tracked, public tracked on change last_msg = self.group_pigs.message_ids[-5] self.assertEqual(last_msg.subtype_id.id, mt_group_public_id, 'tracked: message should be linked to mt_group_public_id') self.assertIn('Group changed', last_msg.body, 'tracked: message body does not hold the subtype description') self.assertIn(u'Private\u2192Public', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold changed tracked field') self.assertIn(u'HumanResources/Employee\u2192Administration/Settings', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold always tracked field') # Test: change group_public_id to False -> 1 subtype, name always tracked self.mail_group.write(cr, self.user_raoul_id, [self.group_pigs_id], {'group_public_id': False}) self.group_pigs.refresh() self.assertEqual(len(self.group_pigs.message_ids), 6, 'tracked: one message should have been produced') # Test: first produced message: mt_group_public_set_id, with name always tracked, public tracked on change last_msg = self.group_pigs.message_ids[-6] self.assertEqual(last_msg.subtype_id.id, mt_group_public_id, 'tracked: message should be linked to mt_group_public_id') self.assertIn('Group changed', last_msg.body, 'tracked: message body does not hold the subtype description') self.assertIn(u'Administration/Settings\u2192', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold always tracked field') # Test: change not tracked field, no tracking message self.mail_group.write(cr, self.user_raoul_id, [self.group_pigs_id], {'description': 'Dummy'}) self.group_pigs.refresh() self.assertEqual(len(self.group_pigs.message_ids), 6, 'tracked: No message should have been produced')
wraiden/spacewalk
refs/heads/master
backend/server/importlib/test/test_ks_tree.py
14
#
# Copyright (c) 2008--2015 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
"""Manual test: import a kickstartable tree through the Oracle backend."""

from spacewalk.server import rhnSQL
from spacewalk.server.importlib.importLib import KickstartableTree, KickstartFile
from spacewalk.server.importlib.kickstartImport import KickstartableTreeImport
from spacewalk.server.importlib.backendOracle import OracleBackend

# Fixture: one kickstartable tree with three files.  Checksums and
# timestamps are dummy values; only the import plumbing is exercised.
ks_trees = [
    KickstartableTree().populate({
        'channel': 'redhat-linux-i386-8.0',
        'base_path': 'foo/bar/baz',
        'label': 'redhat-linux-i386-8.0',
        'boot_image': 'ks-rh',
        'files': [
            KickstartFile().populate({
                'relative_path': 'foo/foo1',
                'checksum_type': 'md5',
                'checksum': 'axbycz',
                'last_modified': '2003-10-11 12:13:14',
                'file_size': 12345,
            }),
            KickstartFile().populate({
                'relative_path': 'foo/foo4',
                'checksum_type': 'md5',
                'checksum': 'axbycz',
                'last_modified': '2003-10-11 12:13:14',
                'file_size': 123456,
            }),
            KickstartFile().populate({
                'relative_path': 'foo/foo3',
                'checksum_type': 'md5',
                'checksum': 'axbycz',
                'last_modified': '2003-10-11 12:13:14',
                'file_size': 1234567,
            }),
        ],
    }),
]


def main():
    """Connect to the database and run the kickstart tree import.

    Previously this ran at module import time, which opened a DB
    connection as a side effect of merely importing the file; the
    executable part is now behind a main() guard so the module can be
    imported (e.g. by test collectors) without touching the database.
    """
    rhnSQL.initDB()
    backend = OracleBackend()
    backend.init()
    ki = KickstartableTreeImport(ks_trees, backend)
    ki.run()


if __name__ == '__main__':
    main()
partofthething/home-assistant
refs/heads/dev
homeassistant/components/mullvad/__init__.py
1
"""The Mullvad VPN integration."""
import asyncio
from datetime import timedelta
import logging

import async_timeout
from mullvad_api import MullvadAPI

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import update_coordinator

from .const import DOMAIN

# Entity platforms this integration forwards config entries to.
PLATFORMS = ["binary_sensor"]


async def async_setup(hass: HomeAssistant, config: dict) -> bool:
    """Set up the Mullvad VPN integration."""
    # Nothing to do for YAML setup; everything happens per config entry.
    return True


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up Mullvad VPN integration."""

    async def async_get_mullvad_api_data():
        # MullvadAPI performs blocking network I/O in its constructor,
        # so it is instantiated in an executor thread; the whole call is
        # bounded by a 10 s timeout.
        with async_timeout.timeout(10):
            api = await hass.async_add_executor_job(MullvadAPI)
            return api.data

    # Poll the Mullvad API once a minute and share the result with all
    # platform entities via a single DataUpdateCoordinator.
    coordinator = update_coordinator.DataUpdateCoordinator(
        hass,
        logging.getLogger(__name__),
        name=DOMAIN,
        update_method=async_get_mullvad_api_data,
        update_interval=timedelta(minutes=1),
    )
    await coordinator.async_refresh()

    # If the first refresh failed, raise so Home Assistant retries the
    # entry setup later instead of loading a broken integration.
    if not coordinator.last_update_success:
        raise ConfigEntryNotReady

    hass.data[DOMAIN] = coordinator

    for platform in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, platform)
        )

    return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    # Unload all platforms concurrently; only drop the shared
    # coordinator if every platform unloaded cleanly.
    unload_ok = all(
        await asyncio.gather(
            *[
                hass.config_entries.async_forward_entry_unload(entry, platform)
                for platform in PLATFORMS
            ]
        )
    )
    if unload_ok:
        del hass.data[DOMAIN]

    return unload_ok
pfnet/chainercv
refs/heads/master
chainercv/evaluations/eval_detection_voc.py
3
from __future__ import division from collections import defaultdict import itertools import numpy as np import six from chainercv.utils.bbox.bbox_iou import bbox_iou def eval_detection_voc( pred_bboxes, pred_labels, pred_scores, gt_bboxes, gt_labels, gt_difficults=None, iou_thresh=0.5, use_07_metric=False): """Calculate average precisions based on evaluation code of PASCAL VOC. This function evaluates predicted bounding boxes obtained from a dataset which has :math:`N` images by using average precision for each class. The code is based on the evaluation code used in PASCAL VOC Challenge. Args: pred_bboxes (iterable of numpy.ndarray): See the table below. pred_labels (iterable of numpy.ndarray): See the table below. pred_scores (iterable of numpy.ndarray): See the table below. gt_bboxes (iterable of numpy.ndarray): See the table below. gt_labels (iterable of numpy.ndarray): See the table below. gt_difficults (iterable of numpy.ndarray): See the table below. By default, this is :obj:`None`. In that case, this function considers all bounding boxes to be not difficult. iou_thresh (float): A prediction is correct if its Intersection over Union with the ground truth is above this value. use_07_metric (bool): Whether to use PASCAL VOC 2007 evaluation metric for calculating average precision. The default value is :obj:`False`. .. csv-table:: :header: name, shape, dtype, format :obj:`pred_bboxes`, ":math:`[(R, 4)]`", :obj:`float32`, \ ":math:`(y_{min}, x_{min}, y_{max}, x_{max})`" :obj:`pred_labels`, ":math:`[(R,)]`", :obj:`int32`, \ ":math:`[0, \#fg\_class - 1]`" :obj:`pred_scores`, ":math:`[(R,)]`", :obj:`float32`, \ -- :obj:`gt_bboxes`, ":math:`[(R, 4)]`", :obj:`float32`, \ ":math:`(y_{min}, x_{min}, y_{max}, x_{max})`" :obj:`gt_labels`, ":math:`[(R,)]`", :obj:`int32`, \ ":math:`[0, \#fg\_class - 1]`" :obj:`gt_difficults`, ":math:`[(R,)]`", :obj:`bool`, -- Returns: dict: The keys, value-types and the description of the values are listed below. 
* **ap** (*numpy.ndarray*): An array of average precisions. \ The :math:`l`-th value corresponds to the average precision \ for class :math:`l`. If class :math:`l` does not exist in \ either :obj:`pred_labels` or :obj:`gt_labels`, the corresponding \ value is set to :obj:`numpy.nan`. * **map** (*float*): The average of Average Precisions over classes. """ prec, rec = calc_detection_voc_prec_rec( pred_bboxes, pred_labels, pred_scores, gt_bboxes, gt_labels, gt_difficults, iou_thresh=iou_thresh) ap = calc_detection_voc_ap(prec, rec, use_07_metric=use_07_metric) return {'ap': ap, 'map': np.nanmean(ap)} def calc_detection_voc_prec_rec( pred_bboxes, pred_labels, pred_scores, gt_bboxes, gt_labels, gt_difficults=None, iou_thresh=0.5): """Calculate precision and recall based on evaluation code of PASCAL VOC. This function calculates precision and recall of predicted bounding boxes obtained from a dataset which has :math:`N` images. The code is based on the evaluation code used in PASCAL VOC Challenge. Args: pred_bboxes (iterable of numpy.ndarray): See the table in :func:`chainercv.evaluations.eval_detection_voc`. pred_labels (iterable of numpy.ndarray): See the table in :func:`chainercv.evaluations.eval_detection_voc`. pred_scores (iterable of numpy.ndarray): See the table in :func:`chainercv.evaluations.eval_detection_voc`. gt_bboxes (iterable of numpy.ndarray): See the table in :func:`chainercv.evaluations.eval_detection_voc`. gt_labels (iterable of numpy.ndarray): See the table in :func:`chainercv.evaluations.eval_detection_voc`. gt_difficults (iterable of numpy.ndarray): See the table in :func:`chainercv.evaluations.eval_detection_voc`. By default, this is :obj:`None`. In that case, this function considers all bounding boxes to be not difficult. iou_thresh (float): A prediction is correct if its Intersection over Union with the ground truth is above this value.. Returns: tuple of two lists: This function returns two lists: :obj:`prec` and :obj:`rec`. 
* :obj:`prec`: A list of arrays. :obj:`prec[l]` is precision \ for class :math:`l`. If class :math:`l` does not exist in \ either :obj:`pred_labels` or :obj:`gt_labels`, :obj:`prec[l]` is \ set to :obj:`None`. * :obj:`rec`: A list of arrays. :obj:`rec[l]` is recall \ for class :math:`l`. If class :math:`l` that is not marked as \ difficult does not exist in \ :obj:`gt_labels`, :obj:`rec[l]` is \ set to :obj:`None`. """ pred_bboxes = iter(pred_bboxes) pred_labels = iter(pred_labels) pred_scores = iter(pred_scores) gt_bboxes = iter(gt_bboxes) gt_labels = iter(gt_labels) if gt_difficults is None: gt_difficults = itertools.repeat(None) else: gt_difficults = iter(gt_difficults) n_pos = defaultdict(int) score = defaultdict(list) match = defaultdict(list) for pred_bbox, pred_label, pred_score, gt_bbox, gt_label, gt_difficult in \ six.moves.zip( pred_bboxes, pred_labels, pred_scores, gt_bboxes, gt_labels, gt_difficults): if gt_difficult is None: gt_difficult = np.zeros(gt_bbox.shape[0], dtype=bool) for l in np.unique(np.concatenate((pred_label, gt_label)).astype(int)): pred_mask_l = pred_label == l pred_bbox_l = pred_bbox[pred_mask_l] pred_score_l = pred_score[pred_mask_l] # sort by score order = pred_score_l.argsort()[::-1] pred_bbox_l = pred_bbox_l[order] pred_score_l = pred_score_l[order] gt_mask_l = gt_label == l gt_bbox_l = gt_bbox[gt_mask_l] gt_difficult_l = gt_difficult[gt_mask_l] n_pos[l] += np.logical_not(gt_difficult_l).sum() score[l].extend(pred_score_l) if len(pred_bbox_l) == 0: continue if len(gt_bbox_l) == 0: match[l].extend((0,) * pred_bbox_l.shape[0]) continue # VOC evaluation follows integer typed bounding boxes. 
pred_bbox_l = pred_bbox_l.copy() pred_bbox_l[:, 2:] += 1 gt_bbox_l = gt_bbox_l.copy() gt_bbox_l[:, 2:] += 1 iou = bbox_iou(pred_bbox_l, gt_bbox_l) gt_index = iou.argmax(axis=1) # set -1 if there is no matching ground truth gt_index[iou.max(axis=1) < iou_thresh] = -1 del iou selec = np.zeros(gt_bbox_l.shape[0], dtype=bool) for gt_idx in gt_index: if gt_idx >= 0: if gt_difficult_l[gt_idx]: match[l].append(-1) else: if not selec[gt_idx]: match[l].append(1) else: match[l].append(0) selec[gt_idx] = True else: match[l].append(0) for iter_ in ( pred_bboxes, pred_labels, pred_scores, gt_bboxes, gt_labels, gt_difficults): if next(iter_, None) is not None: raise ValueError('Length of input iterables need to be same.') n_fg_class = max(n_pos.keys()) + 1 prec = [None] * n_fg_class rec = [None] * n_fg_class for l in n_pos.keys(): score_l = np.array(score[l]) match_l = np.array(match[l], dtype=np.int8) order = score_l.argsort()[::-1] match_l = match_l[order] tp = np.cumsum(match_l == 1) fp = np.cumsum(match_l == 0) # If an element of fp + tp is 0, # the corresponding element of prec[l] is nan. prec[l] = tp / (fp + tp) # If n_pos[l] is 0, rec[l] is None. if n_pos[l] > 0: rec[l] = tp / n_pos[l] return prec, rec def calc_detection_voc_ap(prec, rec, use_07_metric=False): """Calculate average precisions based on evaluation code of PASCAL VOC. This function calculates average precisions from given precisions and recalls. The code is based on the evaluation code used in PASCAL VOC Challenge. Args: prec (list of numpy.array): A list of arrays. :obj:`prec[l]` indicates precision for class :math:`l`. If :obj:`prec[l]` is :obj:`None`, this function returns :obj:`numpy.nan` for class :math:`l`. rec (list of numpy.array): A list of arrays. :obj:`rec[l]` indicates recall for class :math:`l`. If :obj:`rec[l]` is :obj:`None`, this function returns :obj:`numpy.nan` for class :math:`l`. use_07_metric (bool): Whether to use PASCAL VOC 2007 evaluation metric for calculating average precision. 
The default value is :obj:`False`. Returns: ~numpy.ndarray: This function returns an array of average precisions. The :math:`l`-th value corresponds to the average precision for class :math:`l`. If :obj:`prec[l]` or :obj:`rec[l]` is :obj:`None`, the corresponding value is set to :obj:`numpy.nan`. """ n_fg_class = len(prec) ap = np.empty(n_fg_class) for l in six.moves.range(n_fg_class): if prec[l] is None or rec[l] is None: ap[l] = np.nan continue if use_07_metric: # 11 point metric ap[l] = 0 for t in np.arange(0., 1.1, 0.1): if np.sum(rec[l] >= t) == 0: p = 0 else: p = np.max(np.nan_to_num(prec[l])[rec[l] >= t]) ap[l] += p / 11 else: # correct AP calculation # first append sentinel values at the end mpre = np.concatenate(([0], np.nan_to_num(prec[l]), [0])) mrec = np.concatenate(([0], rec[l], [1])) mpre = np.maximum.accumulate(mpre[::-1])[::-1] # to calculate area under PR curve, look for points # where X axis (recall) changes value i = np.where(mrec[1:] != mrec[:-1])[0] # and sum (\Delta recall) * prec ap[l] = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1]) return ap
Teamxrtc/webrtc-streaming-node
refs/heads/master
third_party/depot_tools/third_party/pylint/lint.py
46
# Copyright (c) 2003-2014 LOGILAB S.A. (Paris, FRANCE). # http://www.logilab.fr/ -- mailto:[email protected] # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software # Foundation; either version 2 of the License, or (at your option) any later # version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """ %prog [options] module_or_package Check that a module satisfies a coding standard (and more !). %prog --help Display this help message and exit. %prog --help-msg <msg-id>[,<msg-id>] Display help messages about given message identifiers and exit. 
""" from __future__ import print_function import collections import contextlib import itertools import operator import os try: import multiprocessing except ImportError: multiprocessing = None import sys import tokenize import warnings import astroid from astroid.__pkginfo__ import version as astroid_version from astroid import modutils from logilab.common import configuration from logilab.common import optik_ext from logilab.common import interface from logilab.common import textutils from logilab.common import ureports from logilab.common.__pkginfo__ import version as common_version import six from pylint import checkers from pylint import interfaces from pylint import reporters from pylint import utils from pylint import config from pylint.__pkginfo__ import version MANAGER = astroid.MANAGER def _get_new_args(message): location = ( message.abspath, message.path, message.module, message.obj, message.line, message.column, ) return ( message.msg_id, message.symbol, location, message.msg, message.confidence, ) def _get_python_path(filepath): dirname = os.path.realpath(os.path.expanduser(filepath)) if not os.path.isdir(dirname): dirname = os.path.dirname(dirname) while True: if not os.path.exists(os.path.join(dirname, "__init__.py")): return dirname old_dirname = dirname dirname = os.path.dirname(dirname) if old_dirname == dirname: return os.getcwd() def _merge_stats(stats): merged = {} for stat in stats: for key, item in six.iteritems(stat): if key not in merged: merged[key] = item else: if isinstance(item, dict): merged[key].update(item) else: merged[key] = merged[key] + item return merged # Python Linter class ######################################################### MSGS = { 'F0001': ('%s', 'fatal', 'Used when an error occurred preventing the analysis of a \ module (unable to find it for instance).'), 'F0002': ('%s: %s', 'astroid-error', 'Used when an unexpected error occurred while building the ' 'Astroid representation. 
This is usually accompanied by a ' 'traceback. Please report such errors !'), 'F0003': ('ignored builtin module %s', 'ignored-builtin-module', 'Used to indicate that the user asked to analyze a builtin ' 'module which has been skipped.'), 'F0010': ('error while code parsing: %s', 'parse-error', 'Used when an exception occured while building the Astroid ' 'representation which could be handled by astroid.'), 'I0001': ('Unable to run raw checkers on built-in module %s', 'raw-checker-failed', 'Used to inform that a built-in module has not been checked ' 'using the raw checkers.'), 'I0010': ('Unable to consider inline option %r', 'bad-inline-option', 'Used when an inline option is either badly formatted or can\'t ' 'be used inside modules.'), 'I0011': ('Locally disabling %s (%s)', 'locally-disabled', 'Used when an inline option disables a message or a messages ' 'category.'), 'I0012': ('Locally enabling %s (%s)', 'locally-enabled', 'Used when an inline option enables a message or a messages ' 'category.'), 'I0013': ('Ignoring entire file', 'file-ignored', 'Used to inform that the file will not be checked'), 'I0020': ('Suppressed %s (from line %d)', 'suppressed-message', 'A message was triggered on a line, but suppressed explicitly ' 'by a disable= comment in the file. This message is not ' 'generated for messages that are ignored due to configuration ' 'settings.'), 'I0021': ('Useless suppression of %s', 'useless-suppression', 'Reported when a message is explicitly disabled for a line or ' 'a block of code, but never triggered.'), 'I0022': ('Pragma "%s" is deprecated, use "%s" instead', 'deprecated-pragma', 'Some inline pylint options have been renamed or reworked, ' 'only the most recent form should be used. 
' 'NOTE:skip-all is only available with pylint >= 0.26', {'old_names': [('I0014', 'deprecated-disable-all')]}), 'E0001': ('%s', 'syntax-error', 'Used when a syntax error is raised for a module.'), 'E0011': ('Unrecognized file option %r', 'unrecognized-inline-option', 'Used when an unknown inline option is encountered.'), 'E0012': ('Bad option value %r', 'bad-option-value', 'Used when a bad value for an inline option is encountered.'), } def _deprecated_option(shortname, opt_type): def _warn_deprecated(option, optname, *args): # pylint: disable=unused-argument sys.stderr.write('Warning: option %s is deprecated and ignored.\n' % (optname,)) return {'short': shortname, 'help': 'DEPRECATED', 'hide': True, 'type': opt_type, 'action': 'callback', 'callback': _warn_deprecated} if multiprocessing is not None: class ChildLinter(multiprocessing.Process): # pylint: disable=no-member def run(self): tasks_queue, results_queue, self._config = self._args # pylint: disable=no-member self._config["jobs"] = 1 # Child does not parallelize any further. # Run linter for received files/modules. for file_or_module in iter(tasks_queue.get, 'STOP'): result = self._run_linter(file_or_module[0]) try: results_queue.put(result) except Exception as ex: print("internal error with sending report for module %s" % file_or_module, file=sys.stderr) print(ex, file=sys.stderr) results_queue.put({}) def _run_linter(self, file_or_module): linter = PyLinter() # Register standard checkers. linter.load_default_plugins() # Load command line plugins. # TODO linter.load_plugin_modules(self._plugins) linter.load_configuration(**self._config) linter.set_reporter(reporters.CollectingReporter()) # Run the checks. 
linter.check(file_or_module) msgs = [_get_new_args(m) for m in linter.reporter.messages] return (file_or_module, linter.file_state.base_name, linter.current_name, msgs, linter.stats, linter.msg_status) class PyLinter(configuration.OptionsManagerMixIn, utils.MessagesHandlerMixIn, utils.ReportsHandlerMixIn, checkers.BaseTokenChecker): """lint Python modules using external checkers. This is the main checker controlling the other ones and the reports generation. It is itself both a raw checker and an astroid checker in order to: * handle message activation / deactivation at the module level * handle some basic but necessary stats'data (number of classes, methods...) IDE plugins developpers: you may have to call `astroid.builder.MANAGER.astroid_cache.clear()` accross run if you want to ensure the latest code version is actually checked. """ __implements__ = (interfaces.ITokenChecker, ) name = 'master' priority = 0 level = 0 msgs = MSGS @staticmethod def make_options(): return (('ignore', {'type' : 'csv', 'metavar' : '<file>[,<file>...]', 'dest' : 'black_list', 'default' : ('CVS',), 'help' : 'Add files or directories to the blacklist. ' 'They should be base names, not paths.'}), ('persistent', {'default': True, 'type' : 'yn', 'metavar' : '<y_or_n>', 'level': 1, 'help' : 'Pickle collected data for later comparisons.'}), ('load-plugins', {'type' : 'csv', 'metavar' : '<modules>', 'default' : (), 'level': 1, 'help' : 'List of plugins (as comma separated values of ' 'python modules names) to load, usually to register ' 'additional checkers.'}), ('output-format', {'default': 'text', 'type': 'string', 'metavar' : '<format>', 'short': 'f', 'group': 'Reports', 'help' : 'Set the output format. Available formats are text,' ' parseable, colorized, msvs (visual studio) and html. You ' 'can also give a reporter class, eg mypackage.mymodule.' 
'MyReporterClass.'}), ('files-output', {'default': 0, 'type' : 'yn', 'metavar' : '<y_or_n>', 'group': 'Reports', 'level': 1, 'help' : 'Put messages in a separate file for each module / ' 'package specified on the command line instead of printing ' 'them on stdout. Reports (if any) will be written in a file ' 'name "pylint_global.[txt|html]".'}), ('reports', {'default': 1, 'type' : 'yn', 'metavar' : '<y_or_n>', 'short': 'r', 'group': 'Reports', 'help' : 'Tells whether to display a full report or only the ' 'messages'}), ('evaluation', {'type' : 'string', 'metavar' : '<python_expression>', 'group': 'Reports', 'level': 1, 'default': '10.0 - ((float(5 * error + warning + refactor + ' 'convention) / statement) * 10)', 'help' : 'Python expression which should return a note less ' 'than 10 (10 is the highest note). You have access ' 'to the variables errors warning, statement which ' 'respectively contain the number of errors / ' 'warnings messages and the total number of ' 'statements analyzed. This is used by the global ' 'evaluation report (RP0004).'}), ('comment', {'default': 0, 'type' : 'yn', 'metavar' : '<y_or_n>', 'group': 'Reports', 'level': 1, 'help' : 'Add a comment according to your evaluation note. ' 'This is used by the global evaluation report (RP0004).'}), ('confidence', {'type' : 'multiple_choice', 'metavar': '<levels>', 'default': '', 'choices': [c.name for c in interfaces.CONFIDENCE_LEVELS], 'group': 'Messages control', 'help' : 'Only show warnings with the listed confidence levels.' ' Leave empty to show all. Valid levels: %s' % ( ', '.join(c.name for c in interfaces.CONFIDENCE_LEVELS),)}), ('enable', {'type' : 'csv', 'metavar': '<msg ids>', 'short': 'e', 'group': 'Messages control', 'help' : 'Enable the message, report, category or checker with the ' 'given id(s). You can either give multiple identifier ' 'separated by comma (,) or put this option multiple time. ' 'See also the "--disable" option for examples. 
'}), ('disable', {'type' : 'csv', 'metavar': '<msg ids>', 'short': 'd', 'group': 'Messages control', 'help' : 'Disable the message, report, category or checker ' 'with the given id(s). You can either give multiple identifiers' ' separated by comma (,) or put this option multiple times ' '(only on the command line, not in the configuration file ' 'where it should appear only once).' 'You can also use "--disable=all" to disable everything first ' 'and then reenable specific checks. For example, if you want ' 'to run only the similarities checker, you can use ' '"--disable=all --enable=similarities". ' 'If you want to run only the classes checker, but have no ' 'Warning level messages displayed, use' '"--disable=all --enable=classes --disable=W"'}), ('msg-template', {'type' : 'string', 'metavar': '<template>', 'group': 'Reports', 'help' : ('Template used to display messages. ' 'This is a python new-style format string ' 'used to format the message information. ' 'See doc for all details') }), ('include-ids', _deprecated_option('i', 'yn')), ('symbols', _deprecated_option('s', 'yn')), ('jobs', {'type' : 'int', 'metavar': '<n-processes>', 'short': 'j', 'default': 1, 'help' : '''Use multiple processes to speed up Pylint.''', }), ('unsafe-load-any-extension', {'type': 'yn', 'metavar': '<yn>', 'default': False, 'hide': True, 'help': ('Allow loading of arbitrary C extensions. Extensions' ' are imported into the active Python interpreter and' ' may run arbitrary code.')}), ('extension-pkg-whitelist', {'type': 'csv', 'metavar': '<pkg[,pkg]>', 'default': [], 'help': ('A comma-separated list of package or module names' ' from where C extensions may be loaded. 
Extensions are' ' loading into the active Python interpreter and may run' ' arbitrary code')} ), ) option_groups = ( ('Messages control', 'Options controling analysis messages'), ('Reports', 'Options related to output formating and reporting'), ) def __init__(self, options=(), reporter=None, option_groups=(), pylintrc=None): # some stuff has to be done before ancestors initialization... # # messages store / checkers / reporter / astroid manager self.msgs_store = utils.MessagesStore() self.reporter = None self._reporter_name = None self._reporters = {} self._checkers = collections.defaultdict(list) self._pragma_lineno = {} self._ignore_file = False # visit variables self.file_state = utils.FileState() self.current_name = None self.current_file = None self.stats = None # init options self._external_opts = options self.options = options + PyLinter.make_options() self.option_groups = option_groups + PyLinter.option_groups self._options_methods = { 'enable': self.enable, 'disable': self.disable} self._bw_options_methods = {'disable-msg': self.disable, 'enable-msg': self.enable} full_version = '%%prog %s, \nastroid %s, common %s\nPython %s' % ( version, astroid_version, common_version, sys.version) configuration.OptionsManagerMixIn.__init__( self, usage=__doc__, version=full_version, config_file=pylintrc or config.PYLINTRC) utils.MessagesHandlerMixIn.__init__(self) utils.ReportsHandlerMixIn.__init__(self) checkers.BaseTokenChecker.__init__(self) # provided reports self.reports = (('RP0001', 'Messages by category', report_total_messages_stats), ('RP0002', '% errors / warnings by module', report_messages_by_module_stats), ('RP0003', 'Messages', report_messages_stats), ('RP0004', 'Global evaluation', self.report_evaluation), ) self.register_checker(self) self._dynamic_plugins = set() self.load_provider_defaults() if reporter: self.set_reporter(reporter) def load_default_plugins(self): checkers.initialize(self) reporters.initialize(self) # Make sure to load the default 
reporter, because # the option has been set before the plugins had been loaded. if not self.reporter: self._load_reporter() def load_plugin_modules(self, modnames): """take a list of module names which are pylint plugins and load and register them """ for modname in modnames: if modname in self._dynamic_plugins: continue self._dynamic_plugins.add(modname) module = modutils.load_module_from_name(modname) module.register(self) def _load_reporter(self): name = self._reporter_name.lower() if name in self._reporters: self.set_reporter(self._reporters[name]()) else: qname = self._reporter_name module = modutils.load_module_from_name( modutils.get_module_part(qname)) class_name = qname.split('.')[-1] reporter_class = getattr(module, class_name) self.set_reporter(reporter_class()) def set_reporter(self, reporter): """set the reporter used to display messages and reports""" self.reporter = reporter reporter.linter = self def set_option(self, optname, value, action=None, optdict=None): """overridden from configuration.OptionsProviderMixin to handle some special options """ if optname in self._options_methods or \ optname in self._bw_options_methods: if value: try: meth = self._options_methods[optname] except KeyError: meth = self._bw_options_methods[optname] warnings.warn('%s is deprecated, replace it by %s' % ( optname, optname.split('-')[0]), DeprecationWarning) value = optik_ext.check_csv(None, optname, value) if isinstance(value, (list, tuple)): for _id in value: meth(_id, ignore_unknown=True) else: meth(value) return # no need to call set_option, disable/enable methods do it elif optname == 'output-format': self._reporter_name = value # If the reporters are already available, load # the reporter class. 
if self._reporters: self._load_reporter() try: checkers.BaseTokenChecker.set_option(self, optname, value, action, optdict) except configuration.UnsupportedAction: print('option %s can\'t be read from config file' % \ optname, file=sys.stderr) def register_reporter(self, reporter_class): self._reporters[reporter_class.name] = reporter_class def report_order(self): reports = sorted(self._reports, key=lambda x: getattr(x, 'name', '')) try: # Remove the current reporter and add it # at the end of the list. reports.pop(reports.index(self)) except ValueError: pass else: reports.append(self) return reports # checkers manipulation methods ############################################ def register_checker(self, checker): """register a new checker checker is an object implementing IRawChecker or / and IAstroidChecker """ assert checker.priority <= 0, 'checker priority can\'t be >= 0' self._checkers[checker.name].append(checker) for r_id, r_title, r_cb in checker.reports: self.register_report(r_id, r_title, r_cb, checker) self.register_options_provider(checker) if hasattr(checker, 'msgs'): self.msgs_store.register_messages(checker) checker.load_defaults() # Register the checker, but disable all of its messages. # TODO(cpopa): we should have a better API for this. 
if not getattr(checker, 'enabled', True): self.disable(checker.name) def disable_noerror_messages(self): for msgcat, msgids in six.iteritems(self.msgs_store._msgs_by_category): if msgcat == 'E': for msgid in msgids: self.enable(msgid) else: for msgid in msgids: self.disable(msgid) def disable_reporters(self): """disable all reporters""" for reporters in six.itervalues(self._reports): for report_id, _, _ in reporters: self.disable_report(report_id) def error_mode(self): """error mode: enable only errors; no reports, no persistent""" self.disable_noerror_messages() self.disable('miscellaneous') self.set_option('reports', False) self.set_option('persistent', False) # block level option handling ############################################# # # see func_block_disable_msg.py test case for expected behaviour def process_tokens(self, tokens): """process tokens from the current module to search for module/block level options """ control_pragmas = {'disable', 'enable'} for (tok_type, content, start, _, _) in tokens: if tok_type != tokenize.COMMENT: continue match = utils.OPTION_RGX.search(content) if match is None: continue if match.group(1).strip() == "disable-all" or \ match.group(1).strip() == 'skip-file': if match.group(1).strip() == "disable-all": self.add_message('deprecated-pragma', line=start[0], args=('disable-all', 'skip-file')) self.add_message('file-ignored', line=start[0]) self._ignore_file = True return try: opt, value = match.group(1).split('=', 1) except ValueError: self.add_message('bad-inline-option', args=match.group(1).strip(), line=start[0]) continue opt = opt.strip() if opt in self._options_methods or opt in self._bw_options_methods: try: meth = self._options_methods[opt] except KeyError: meth = self._bw_options_methods[opt] # found a "(dis|en)able-msg" pragma deprecated suppresssion self.add_message('deprecated-pragma', line=start[0], args=(opt, opt.replace('-msg', ''))) for msgid in textutils.splitstrip(value): # Add the line where a control pragma 
was encountered. if opt in control_pragmas: self._pragma_lineno[msgid] = start[0] try: if (opt, msgid) == ('disable', 'all'): self.add_message('deprecated-pragma', line=start[0], args=('disable=all', 'skip-file')) self.add_message('file-ignored', line=start[0]) self._ignore_file = True return meth(msgid, 'module', start[0]) except utils.UnknownMessage: self.add_message('bad-option-value', args=msgid, line=start[0]) else: self.add_message('unrecognized-inline-option', args=opt, line=start[0]) # code checking methods ################################################### def get_checkers(self): """return all available checkers as a list""" return [self] + [c for checkers in six.itervalues(self._checkers) for c in checkers if c is not self] def prepare_checkers(self): """return checkers needed for activated messages and reports""" if not self.config.reports: self.disable_reporters() # get needed checkers neededcheckers = [self] for checker in self.get_checkers()[1:]: # fatal errors should not trigger enable / disabling a checker messages = set(msg for msg in checker.msgs if msg[0] != 'F' and self.is_message_enabled(msg)) if (messages or any(self.report_is_enabled(r[0]) for r in checker.reports)): neededcheckers.append(checker) # Sort checkers by priority neededcheckers = sorted(neededcheckers, key=operator.attrgetter('priority'), reverse=True) return neededcheckers def should_analyze_file(self, modname, path): # pylint: disable=unused-argument, no-self-use """Returns whether or not a module should be checked. This implementation returns True for all python source file, indicating that all files should be linted. Subclasses may override this method to indicate that modules satisfying certain conditions should not be linted. :param str modname: The name of the module to be checked. :param str path: The full path to the source code of the module. :returns: True if the module should be checked. 
:rtype: bool """ return path.endswith('.py') def check(self, files_or_modules): """main checking entry: check a list of files or modules from their name. """ # initialize msgs_state now that all messages have been registered into # the store for msg in self.msgs_store.messages: if not msg.may_be_emitted(): self._msgs_state[msg.msgid] = False if not isinstance(files_or_modules, (list, tuple)): files_or_modules = (files_or_modules,) if self.config.jobs == 1: with fix_import_path(files_or_modules): self._do_check(files_or_modules) else: # Hack that permits running pylint, on Windows, with -m switch # and with --jobs, as in 'python -2 -m pylint .. --jobs'. # For more details why this is needed, # see Python issue http://bugs.python.org/issue10845. mock_main = __name__ != '__main__' # -m switch if mock_main: sys.modules['__main__'] = sys.modules[__name__] try: self._parallel_check(files_or_modules) finally: if mock_main: sys.modules.pop('__main__') def _parallel_task(self, files_or_modules): # Prepare configuration for child linters. 
filter_options = {'symbols', 'include-ids', 'long-help'} filter_options.update([opt_name for opt_name, _ in self._external_opts]) config = {} for opt_providers in six.itervalues(self._all_options): for optname, optdict, val in opt_providers.options_and_values(): if optname not in filter_options: config[optname] = configuration.format_option_value(optdict, val) childs = [] manager = multiprocessing.Manager() # pylint: disable=no-member tasks_queue = manager.Queue() # pylint: disable=no-member results_queue = manager.Queue() # pylint: disable=no-member for _ in range(self.config.jobs): cl = ChildLinter(args=(tasks_queue, results_queue, config)) cl.start() # pylint: disable=no-member childs.append(cl) # send files to child linters for files_or_module in files_or_modules: tasks_queue.put([files_or_module]) # collect results from child linters failed = False for _ in files_or_modules: try: result = results_queue.get() except Exception as ex: print("internal error while receiving results from child linter", file=sys.stderr) print(ex, file=sys.stderr) failed = True break yield result # Stop child linters and wait for their completion. for _ in range(self.config.jobs): tasks_queue.put('STOP') for cl in childs: cl.join() if failed: print("Error occured, stopping the linter.", file=sys.stderr) sys.exit(32) def _parallel_check(self, files_or_modules): # Reset stats. self.open() all_stats = [] for result in self._parallel_task(files_or_modules): ( file_or_module, self.file_state.base_name, module, messages, stats, msg_status ) = result if file_or_module == files_or_modules[-1]: last_module = module for msg in messages: msg = utils.Message(*msg) self.set_current_module(module) self.reporter.handle_message(msg) all_stats.append(stats) self.msg_status |= msg_status self.stats = _merge_stats(itertools.chain(all_stats, [self.stats])) self.current_name = last_module # Insert stats data to local checkers. 
for checker in self.get_checkers(): if checker is not self: checker.stats = self.stats def _do_check(self, files_or_modules): walker = utils.PyLintASTWalker(self) checkers = self.prepare_checkers() tokencheckers = [c for c in checkers if interface.implements(c, interfaces.ITokenChecker) and c is not self] rawcheckers = [c for c in checkers if interface.implements(c, interfaces.IRawChecker)] # notify global begin for checker in checkers: checker.open() if interface.implements(checker, interfaces.IAstroidChecker): walker.add_checker(checker) # build ast and check modules or packages for descr in self.expand_files(files_or_modules): modname, filepath = descr['name'], descr['path'] if not descr['isarg'] and not self.should_analyze_file(modname, filepath): continue if self.config.files_output: reportfile = 'pylint_%s.%s' % (modname, self.reporter.extension) self.reporter.set_output(open(reportfile, 'w')) self.set_current_module(modname, filepath) # get the module representation ast_node = self.get_ast(filepath, modname) if ast_node is None: continue # XXX to be correct we need to keep module_msgs_state for every # analyzed module (the problem stands with localized messages which # are only detected in the .close step) self.file_state = utils.FileState(descr['basename']) self._ignore_file = False # fix the current file (if the source file was not available or # if it's actually a c extension) self.current_file = ast_node.file # pylint: disable=maybe-no-member self.check_astroid_module(ast_node, walker, rawcheckers, tokencheckers) # warn about spurious inline messages handling for msgid, line, args in self.file_state.iter_spurious_suppression_messages(self.msgs_store): self.add_message(msgid, line, None, args) # notify global end self.stats['statement'] = walker.nbstatements checkers.reverse() for checker in checkers: checker.close() def expand_files(self, modules): """get modules and errors from a list of modules and handle errors """ result, errors = 
utils.expand_modules(modules, self.config.black_list) for error in errors: message = modname = error["mod"] key = error["key"] self.set_current_module(modname) if key == "fatal": message = str(error["ex"]).replace(os.getcwd() + os.sep, '') self.add_message(key, args=message) return result def set_current_module(self, modname, filepath=None): """set the name of the currently analyzed module and init statistics for it """ if not modname and filepath is None: return self.reporter.on_set_current_module(modname, filepath) self.current_name = modname self.current_file = filepath or modname self.stats['by_module'][modname] = {} self.stats['by_module'][modname]['statement'] = 0 for msg_cat in six.itervalues(utils.MSG_TYPES): self.stats['by_module'][modname][msg_cat] = 0 def get_ast(self, filepath, modname): """return a ast(roid) representation for a module""" try: return MANAGER.ast_from_file(filepath, modname, source=True) except SyntaxError as ex: self.add_message('syntax-error', line=ex.lineno, args=ex.msg) except astroid.AstroidBuildingException as ex: self.add_message('parse-error', args=ex) except Exception as ex: # pylint: disable=broad-except import traceback traceback.print_exc() self.add_message('astroid-error', args=(ex.__class__, ex)) def check_astroid_module(self, ast_node, walker, rawcheckers, tokencheckers): """Check a module from its astroid representation.""" try: tokens = utils.tokenize_module(ast_node) except tokenize.TokenError as ex: self.add_message('syntax-error', line=ex.args[1][0], args=ex.args[0]) return if not ast_node.pure_python: self.add_message('raw-checker-failed', args=ast_node.name) else: #assert astroid.file.endswith('.py') # invoke ITokenChecker interface on self to fetch module/block # level options self.process_tokens(tokens) if self._ignore_file: return False # walk ast to collect line numbers self.file_state.collect_block_lines(self.msgs_store, ast_node) # run raw and tokens checkers for checker in rawcheckers: 
checker.process_module(ast_node) for checker in tokencheckers: checker.process_tokens(tokens) # generate events to astroid checkers walker.walk(ast_node) return True # IAstroidChecker interface ################################################# def open(self): """initialize counters""" self.stats = {'by_module' : {}, 'by_msg' : {}, } MANAGER.always_load_extensions = self.config.unsafe_load_any_extension MANAGER.extension_package_whitelist.update( self.config.extension_pkg_whitelist) for msg_cat in six.itervalues(utils.MSG_TYPES): self.stats[msg_cat] = 0 def generate_reports(self): """close the whole package /module, it's time to make reports ! if persistent run, pickle results for later comparison """ if self.file_state.base_name is not None: # load previous results if any previous_stats = config.load_results(self.file_state.base_name) # XXX code below needs refactoring to be more reporter agnostic self.reporter.on_close(self.stats, previous_stats) if self.config.reports: sect = self.make_reports(self.stats, previous_stats) if self.config.files_output: filename = 'pylint_global.' + self.reporter.extension self.reporter.set_output(open(filename, 'w')) else: sect = ureports.Section() if self.config.reports or self.config.output_format == 'html': self.reporter.display_results(sect) # save results if persistent run if self.config.persistent: config.save_results(self.stats, self.file_state.base_name) else: if self.config.output_format == 'html': # No output will be emitted for the html # reporter if the file doesn't exist, so emit # the results here. 
self.reporter.display_results(ureports.Section()) self.reporter.on_close(self.stats, {}) # specific reports ######################################################## def report_evaluation(self, sect, stats, previous_stats): """make the global evaluation report""" # check with at least check 1 statements (usually 0 when there is a # syntax error preventing pylint from further processing) if stats['statement'] == 0: raise utils.EmptyReport() # get a global note for the code evaluation = self.config.evaluation try: note = eval(evaluation, {}, self.stats) # pylint: disable=eval-used except Exception as ex: # pylint: disable=broad-except msg = 'An exception occurred while rating: %s' % ex else: stats['global_note'] = note msg = 'Your code has been rated at %.2f/10' % note pnote = previous_stats.get('global_note') if pnote is not None: msg += ' (previous run: %.2f/10, %+.2f)' % (pnote, note - pnote) if self.config.comment: msg = '%s\n%s' % (msg, config.get_note_message(note)) sect.append(ureports.Text(msg)) # some reporting functions #################################################### def report_total_messages_stats(sect, stats, previous_stats): """make total errors / warnings report""" lines = ['type', 'number', 'previous', 'difference'] lines += checkers.table_lines_from_stats(stats, previous_stats, ('convention', 'refactor', 'warning', 'error')) sect.append(ureports.Table(children=lines, cols=4, rheaders=1)) def report_messages_stats(sect, stats, _): """make messages type report""" if not stats['by_msg']: # don't print this report when we didn't detected any errors raise utils.EmptyReport() in_order = sorted([(value, msg_id) for msg_id, value in six.iteritems(stats['by_msg']) if not msg_id.startswith('I')]) in_order.reverse() lines = ('message id', 'occurrences') for value, msg_id in in_order: lines += (msg_id, str(value)) sect.append(ureports.Table(children=lines, cols=2, rheaders=1)) def report_messages_by_module_stats(sect, stats, _): """make errors / warnings by 
modules report""" if len(stats['by_module']) == 1: # don't print this report when we are analysing a single module raise utils.EmptyReport() by_mod = collections.defaultdict(dict) for m_type in ('fatal', 'error', 'warning', 'refactor', 'convention'): total = stats[m_type] for module in six.iterkeys(stats['by_module']): mod_total = stats['by_module'][module][m_type] if total == 0: percent = 0 else: percent = float((mod_total)*100) / total by_mod[module][m_type] = percent sorted_result = [] for module, mod_info in six.iteritems(by_mod): sorted_result.append((mod_info['error'], mod_info['warning'], mod_info['refactor'], mod_info['convention'], module)) sorted_result.sort() sorted_result.reverse() lines = ['module', 'error', 'warning', 'refactor', 'convention'] for line in sorted_result: # Don't report clean modules. if all(entry == 0 for entry in line[:-1]): continue lines.append(line[-1]) for val in line[:-1]: lines.append('%.2f' % val) if len(lines) == 5: raise utils.EmptyReport() sect.append(ureports.Table(children=lines, cols=5, rheaders=1)) # utilities ################################################################### class ArgumentPreprocessingError(Exception): """Raised if an error occurs during argument preprocessing.""" def preprocess_options(args, search_for): """look for some options (keys of <search_for>) which have to be processed before others values of <search_for> are callback functions to call when the option is found """ i = 0 while i < len(args): arg = args[i] if arg.startswith('--'): try: option, val = arg[2:].split('=', 1) except ValueError: option, val = arg[2:], None try: cb, takearg = search_for[option] except KeyError: i += 1 else: del args[i] if takearg and val is None: if i >= len(args) or args[i].startswith('-'): msg = 'Option %s expects a value' % option raise ArgumentPreprocessingError(msg) val = args[i] del args[i] elif not takearg and val is not None: msg = "Option %s doesn't expects a value" % option raise 
ArgumentPreprocessingError(msg) cb(option, val) else: i += 1 @contextlib.contextmanager def fix_import_path(args): """Prepare sys.path for running the linter checks. Within this context, each of the given arguments is importable. Paths are added to sys.path in corresponding order to the arguments. We avoid adding duplicate directories to sys.path. `sys.path` is reset to its original value upon exitign this context. """ orig = list(sys.path) changes = [] for arg in args: path = _get_python_path(arg) if path in changes: continue else: changes.append(path) sys.path[:] = changes + sys.path try: yield finally: sys.path[:] = orig class Run(object): """helper class to use as main for pylint : run(*sys.argv[1:]) """ LinterClass = PyLinter option_groups = ( ('Commands', 'Options which are actually commands. Options in this \ group are mutually exclusive.'), ) def __init__(self, args, reporter=None, exit=True): self._rcfile = None self._plugins = [] try: preprocess_options(args, { # option: (callback, takearg) 'init-hook': (cb_init_hook, True), 'rcfile': (self.cb_set_rcfile, True), 'load-plugins': (self.cb_add_plugins, True), }) except ArgumentPreprocessingError as ex: print(ex, file=sys.stderr) sys.exit(32) self.linter = linter = self.LinterClass(( ('rcfile', {'action' : 'callback', 'callback' : lambda *args: 1, 'type': 'string', 'metavar': '<file>', 'help' : 'Specify a configuration file.'}), ('init-hook', {'action' : 'callback', 'callback' : lambda *args: 1, 'type' : 'string', 'metavar': '<code>', 'level': 1, 'help' : 'Python code to execute, usually for sys.path ' 'manipulation such as pygtk.require().'}), ('help-msg', {'action' : 'callback', 'type' : 'string', 'metavar': '<msg-id>', 'callback' : self.cb_help_message, 'group': 'Commands', 'help' : 'Display a help message for the given message id and ' 'exit. 
The value may be a comma separated list of message ids.'}), ('list-msgs', {'action' : 'callback', 'metavar': '<msg-id>', 'callback' : self.cb_list_messages, 'group': 'Commands', 'level': 1, 'help' : "Generate pylint's messages."}), ('list-conf-levels', {'action' : 'callback', 'callback' : cb_list_confidence_levels, 'group': 'Commands', 'level': 1, 'help' : "Generate pylint's messages."}), ('full-documentation', {'action' : 'callback', 'metavar': '<msg-id>', 'callback' : self.cb_full_documentation, 'group': 'Commands', 'level': 1, 'help' : "Generate pylint's full documentation."}), ('generate-rcfile', {'action' : 'callback', 'callback' : self.cb_generate_config, 'group': 'Commands', 'help' : 'Generate a sample configuration file according to ' 'the current configuration. You can put other options ' 'before this one to get them in the generated ' 'configuration.'}), ('generate-man', {'action' : 'callback', 'callback' : self.cb_generate_manpage, 'group': 'Commands', 'help' : "Generate pylint's man page.", 'hide': True}), ('errors-only', {'action' : 'callback', 'callback' : self.cb_error_mode, 'short': 'E', 'help' : 'In error mode, checkers without error messages are ' 'disabled and for others, only the ERROR messages are ' 'displayed, and no reports are done by default'''}), ('py3k', {'action' : 'callback', 'callback' : self.cb_python3_porting_mode, 'help' : 'In Python 3 porting mode, all checkers will be ' 'disabled and only messages emitted by the porting ' 'checker will be displayed'}), ('profile', {'type' : 'yn', 'metavar' : '<y_or_n>', 'default': False, 'hide': True, 'help' : 'Profiled execution.'}), ), option_groups=self.option_groups, pylintrc=self._rcfile) # register standard checkers linter.load_default_plugins() # load command line plugins linter.load_plugin_modules(self._plugins) # add some help section linter.add_help_section('Environment variables', config.ENV_HELP, level=1) # pylint: disable=bad-continuation linter.add_help_section('Output', 'Using the 
default text output, the message format is : \n' ' \n' ' MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE \n' ' \n' 'There are 5 kind of message types : \n' ' * (C) convention, for programming standard violation \n' ' * (R) refactor, for bad code smell \n' ' * (W) warning, for python specific problems \n' ' * (E) error, for probable bugs in the code \n' ' * (F) fatal, if an error occurred which prevented pylint from doing further\n' 'processing.\n' , level=1) linter.add_help_section('Output status code', 'Pylint should leave with following status code: \n' ' * 0 if everything went fine \n' ' * 1 if a fatal message was issued \n' ' * 2 if an error message was issued \n' ' * 4 if a warning message was issued \n' ' * 8 if a refactor message was issued \n' ' * 16 if a convention message was issued \n' ' * 32 on usage error \n' ' \n' 'status 1 to 16 will be bit-ORed so you can know which different categories has\n' 'been issued by analysing pylint output status code\n', level=1) # read configuration linter.disable('pointless-except') linter.disable('suppressed-message') linter.disable('useless-suppression') linter.read_config_file() config_parser = linter.cfgfile_parser # run init hook, if present, before loading plugins if config_parser.has_option('MASTER', 'init-hook'): cb_init_hook('init-hook', textutils.unquote(config_parser.get('MASTER', 'init-hook'))) # is there some additional plugins in the file configuration, in if config_parser.has_option('MASTER', 'load-plugins'): plugins = textutils.splitstrip( config_parser.get('MASTER', 'load-plugins')) linter.load_plugin_modules(plugins) # now we can load file config and command line, plugins (which can # provide options) have been registered linter.load_config_file() if reporter: # if a custom reporter is provided as argument, it may be overridden # by file parameters, so re-set it here, but before command line # parsing so it's still overrideable by command line option linter.set_reporter(reporter) try: args = 
linter.load_command_line_configuration(args) except SystemExit as exc: if exc.code == 2: # bad options exc.code = 32 raise if not args: print(linter.help()) sys.exit(32) if linter.config.jobs < 0: print("Jobs number (%d) should be greater than 0" % linter.config.jobs, file=sys.stderr) sys.exit(32) if linter.config.jobs > 1 or linter.config.jobs == 0: if multiprocessing is None: print("Multiprocessing library is missing, " "fallback to single process", file=sys.stderr) linter.set_option("jobs", 1) else: if linter.config.jobs == 0: linter.config.jobs = multiprocessing.cpu_count() # insert current working directory to the python path to have a correct # behaviour if self.linter.config.profile: with fix_import_path(args): print('** profiled run', file=sys.stderr) import cProfile, pstats cProfile.runctx('linter.check(%r)' % args, globals(), locals(), 'stones.prof') data = pstats.Stats('stones.prof') data.strip_dirs() data.sort_stats('time', 'calls') data.print_stats(30) else: linter.check(args) linter.generate_reports() if exit: sys.exit(self.linter.msg_status) def cb_set_rcfile(self, name, value): """callback for option preprocessing (i.e. before option parsing)""" self._rcfile = value def cb_add_plugins(self, name, value): """callback for option preprocessing (i.e. 
before option parsing)""" self._plugins.extend(textutils.splitstrip(value)) def cb_error_mode(self, *args, **kwargs): """error mode: * disable all but error messages * disable the 'miscellaneous' checker which can be safely deactivated in debug * disable reports * do not save execution information """ self.linter.error_mode() def cb_generate_config(self, *args, **kwargs): """optik callback for sample config file generation""" self.linter.generate_config(skipsections=('COMMANDS',)) sys.exit(0) def cb_generate_manpage(self, *args, **kwargs): """optik callback for sample config file generation""" from pylint import __pkginfo__ self.linter.generate_manpage(__pkginfo__) sys.exit(0) def cb_help_message(self, option, optname, value, parser): """optik callback for printing some help about a particular message""" self.linter.msgs_store.help_message(textutils.splitstrip(value)) sys.exit(0) def cb_full_documentation(self, option, optname, value, parser): """optik callback for printing full documentation""" self.linter.print_full_documentation() sys.exit(0) def cb_list_messages(self, option, optname, value, parser): # FIXME """optik callback for printing available messages""" self.linter.msgs_store.list_messages() sys.exit(0) def cb_python3_porting_mode(self, *args, **kwargs): """Activate only the python3 porting checker.""" self.linter.disable('all') self.linter.enable('python3') def cb_list_confidence_levels(option, optname, value, parser): for level in interfaces.CONFIDENCE_LEVELS: print('%-18s: %s' % level) sys.exit(0) def cb_init_hook(optname, value): """exec arbitrary code to set sys.path for instance""" exec(value) # pylint: disable=exec-used if __name__ == '__main__': Run(sys.argv[1:])
bigswitch/tempest
refs/heads/master
tempest/api/compute/servers/test_list_server_filters.py
5
# Copyright 2012 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from tempest.api.compute import base from tempest.common import fixed_network from tempest.common.utils import data_utils from tempest.common import waiters from tempest import config from tempest.lib import decorators from tempest.lib import exceptions as lib_exc from tempest import test CONF = config.CONF class ListServerFiltersTestJSON(base.BaseV2ComputeTest): @classmethod def setup_credentials(cls): cls.set_network_resources(network=True, subnet=True, dhcp=True) super(ListServerFiltersTestJSON, cls).setup_credentials() @classmethod def setup_clients(cls): super(ListServerFiltersTestJSON, cls).setup_clients() cls.client = cls.servers_client @classmethod def resource_setup(cls): super(ListServerFiltersTestJSON, cls).resource_setup() # Check to see if the alternate image ref actually exists... images_client = cls.compute_images_client images = images_client.list_images()['images'] if cls.image_ref != cls.image_ref_alt and \ any([image for image in images if image['id'] == cls.image_ref_alt]): cls.multiple_images = True else: cls.image_ref_alt = cls.image_ref # Do some sanity checks here. If one of the images does # not exist, fail early since the tests won't work... try: cls.compute_images_client.show_image(cls.image_ref) except lib_exc.NotFound: raise RuntimeError("Image %s (image_ref) was not found!" 
% cls.image_ref) try: cls.compute_images_client.show_image(cls.image_ref_alt) except lib_exc.NotFound: raise RuntimeError("Image %s (image_ref_alt) was not found!" % cls.image_ref_alt) network = cls.get_tenant_network() if network: cls.fixed_network_name = network.get('name') else: cls.fixed_network_name = None network_kwargs = fixed_network.set_networks_kwarg(network) cls.s1_name = data_utils.rand_name(cls.__name__ + '-instance') cls.s1 = cls.create_test_server(name=cls.s1_name, wait_until='ACTIVE', **network_kwargs) cls.s2_name = data_utils.rand_name(cls.__name__ + '-instance') cls.s2 = cls.create_test_server(name=cls.s2_name, image_id=cls.image_ref_alt, wait_until='ACTIVE') cls.s3_name = data_utils.rand_name(cls.__name__ + '-instance') cls.s3 = cls.create_test_server(name=cls.s3_name, flavor=cls.flavor_ref_alt, wait_until='ACTIVE') @test.idempotent_id('05e8a8e7-9659-459a-989d-92c2f501f4ba') @decorators.skip_unless_attr('multiple_images', 'Only one image found') def test_list_servers_filter_by_image(self): # Filter the list of servers by image params = {'image': self.image_ref} body = self.client.list_servers(**params) servers = body['servers'] self.assertIn(self.s1['id'], map(lambda x: x['id'], servers)) self.assertNotIn(self.s2['id'], map(lambda x: x['id'], servers)) self.assertIn(self.s3['id'], map(lambda x: x['id'], servers)) @test.idempotent_id('573637f5-7325-47bb-9144-3476d0416908') def test_list_servers_filter_by_flavor(self): # Filter the list of servers by flavor params = {'flavor': self.flavor_ref_alt} body = self.client.list_servers(**params) servers = body['servers'] self.assertNotIn(self.s1['id'], map(lambda x: x['id'], servers)) self.assertNotIn(self.s2['id'], map(lambda x: x['id'], servers)) self.assertIn(self.s3['id'], map(lambda x: x['id'], servers)) @test.idempotent_id('9b067a7b-7fee-4f6a-b29c-be43fe18fc5a') def test_list_servers_filter_by_server_name(self): # Filter the list of servers by server name params = {'name': self.s1_name} body = 
self.client.list_servers(**params) servers = body['servers'] self.assertIn(self.s1_name, map(lambda x: x['name'], servers)) self.assertNotIn(self.s2_name, map(lambda x: x['name'], servers)) self.assertNotIn(self.s3_name, map(lambda x: x['name'], servers)) @test.idempotent_id('ca78e20e-fddb-4ce6-b7f7-bcbf8605e66e') def test_list_servers_filter_by_server_status(self): # Filter the list of servers by server status params = {'status': 'active'} body = self.client.list_servers(**params) servers = body['servers'] self.assertIn(self.s1['id'], map(lambda x: x['id'], servers)) self.assertIn(self.s2['id'], map(lambda x: x['id'], servers)) self.assertIn(self.s3['id'], map(lambda x: x['id'], servers)) @test.idempotent_id('451dbbb2-f330-4a9f-b0e1-5f5d2cb0f34c') def test_list_servers_filter_by_shutoff_status(self): # Filter the list of servers by server shutoff status params = {'status': 'shutoff'} self.client.stop_server(self.s1['id']) waiters.wait_for_server_status(self.client, self.s1['id'], 'SHUTOFF') body = self.client.list_servers(**params) self.client.start_server(self.s1['id']) waiters.wait_for_server_status(self.client, self.s1['id'], 'ACTIVE') servers = body['servers'] self.assertIn(self.s1['id'], map(lambda x: x['id'], servers)) self.assertNotIn(self.s2['id'], map(lambda x: x['id'], servers)) self.assertNotIn(self.s3['id'], map(lambda x: x['id'], servers)) @test.idempotent_id('614cdfc1-d557-4bac-915b-3e67b48eee76') def test_list_servers_filter_by_limit(self): # Verify only the expected number of servers are returned params = {'limit': 1} servers = self.client.list_servers(**params) self.assertEqual(1, len([x for x in servers['servers'] if 'id' in x])) @test.idempotent_id('b1495414-2d93-414c-8019-849afe8d319e') def test_list_servers_filter_by_zero_limit(self): # Verify only the expected number of servers are returned params = {'limit': 0} servers = self.client.list_servers(**params) self.assertEqual(0, len(servers['servers'])) 
@test.idempotent_id('37791bbd-90c0-4de0-831e-5f38cba9c6b3') def test_list_servers_filter_by_exceed_limit(self): # Verify only the expected number of servers are returned params = {'limit': 100000} servers = self.client.list_servers(**params) all_servers = self.client.list_servers() self.assertEqual(len([x for x in all_servers['servers'] if 'id' in x]), len([x for x in servers['servers'] if 'id' in x])) @test.idempotent_id('b3304c3b-97df-46d2-8cd3-e2b6659724e7') @decorators.skip_unless_attr('multiple_images', 'Only one image found') def test_list_servers_detailed_filter_by_image(self): # Filter the detailed list of servers by image params = {'image': self.image_ref} body = self.client.list_servers(detail=True, **params) servers = body['servers'] self.assertIn(self.s1['id'], map(lambda x: x['id'], servers)) self.assertNotIn(self.s2['id'], map(lambda x: x['id'], servers)) self.assertIn(self.s3['id'], map(lambda x: x['id'], servers)) @test.idempotent_id('80c574cc-0925-44ba-8602-299028357dd9') def test_list_servers_detailed_filter_by_flavor(self): # Filter the detailed list of servers by flavor params = {'flavor': self.flavor_ref_alt} body = self.client.list_servers(detail=True, **params) servers = body['servers'] self.assertNotIn(self.s1['id'], map(lambda x: x['id'], servers)) self.assertNotIn(self.s2['id'], map(lambda x: x['id'], servers)) self.assertIn(self.s3['id'], map(lambda x: x['id'], servers)) @test.idempotent_id('f9eb2b70-735f-416c-b260-9914ac6181e4') def test_list_servers_detailed_filter_by_server_name(self): # Filter the detailed list of servers by server name params = {'name': self.s1_name} body = self.client.list_servers(detail=True, **params) servers = body['servers'] self.assertIn(self.s1_name, map(lambda x: x['name'], servers)) self.assertNotIn(self.s2_name, map(lambda x: x['name'], servers)) self.assertNotIn(self.s3_name, map(lambda x: x['name'], servers)) @test.idempotent_id('de2612ab-b7dd-4044-b0b1-d2539601911f') def 
test_list_servers_detailed_filter_by_server_status(self): # Filter the detailed list of servers by server status params = {'status': 'active'} body = self.client.list_servers(detail=True, **params) servers = body['servers'] test_ids = [s['id'] for s in (self.s1, self.s2, self.s3)] self.assertIn(self.s1['id'], map(lambda x: x['id'], servers)) self.assertIn(self.s2['id'], map(lambda x: x['id'], servers)) self.assertIn(self.s3['id'], map(lambda x: x['id'], servers)) self.assertEqual(['ACTIVE'] * 3, [x['status'] for x in servers if x['id'] in test_ids]) @test.idempotent_id('e9f624ee-92af-4562-8bec-437945a18dcb') def test_list_servers_filtered_by_name_wildcard(self): # List all servers that contains '-instance' in name params = {'name': '-instance'} body = self.client.list_servers(**params) servers = body['servers'] self.assertIn(self.s1_name, map(lambda x: x['name'], servers)) self.assertIn(self.s2_name, map(lambda x: x['name'], servers)) self.assertIn(self.s3_name, map(lambda x: x['name'], servers)) # Let's take random part of name and try to search it part_name = self.s1_name[6:-1] params = {'name': part_name} body = self.client.list_servers(**params) servers = body['servers'] self.assertIn(self.s1_name, map(lambda x: x['name'], servers)) self.assertNotIn(self.s2_name, map(lambda x: x['name'], servers)) self.assertNotIn(self.s3_name, map(lambda x: x['name'], servers)) @test.idempotent_id('24a89b0c-0d55-4a28-847f-45075f19b27b') def test_list_servers_filtered_by_name_regex(self): # list of regex that should match s1, s2 and s3 regexes = ['^.*\-instance\-[0-9]+$', '^.*\-instance\-.*$'] for regex in regexes: params = {'name': regex} body = self.client.list_servers(**params) servers = body['servers'] self.assertIn(self.s1_name, map(lambda x: x['name'], servers)) self.assertIn(self.s2_name, map(lambda x: x['name'], servers)) self.assertIn(self.s3_name, map(lambda x: x['name'], servers)) # Let's take random part of name and try to search it part_name = self.s1_name[-10:] 
params = {'name': part_name} body = self.client.list_servers(**params) servers = body['servers'] self.assertIn(self.s1_name, map(lambda x: x['name'], servers)) self.assertNotIn(self.s2_name, map(lambda x: x['name'], servers)) self.assertNotIn(self.s3_name, map(lambda x: x['name'], servers)) @test.idempotent_id('43a1242e-7b31-48d1-88f2-3f72aa9f2077') def test_list_servers_filtered_by_ip(self): # Filter servers by ip # Here should be listed 1 server if not self.fixed_network_name: msg = 'fixed_network_name needs to be configured to run this test' raise self.skipException(msg) self.s1 = self.client.show_server(self.s1['id'])['server'] for addr_spec in self.s1['addresses'][self.fixed_network_name]: ip = addr_spec['addr'] if addr_spec['version'] == 4: params = {'ip': ip} break else: msg = "Skipped until bug 1450859 is resolved" raise self.skipException(msg) body = self.client.list_servers(**params) servers = body['servers'] self.assertIn(self.s1_name, map(lambda x: x['name'], servers)) self.assertNotIn(self.s2_name, map(lambda x: x['name'], servers)) self.assertNotIn(self.s3_name, map(lambda x: x['name'], servers)) @decorators.skip_because(bug="1540645") @test.idempotent_id('a905e287-c35e-42f2-b132-d02b09f3654a') def test_list_servers_filtered_by_ip_regex(self): # Filter servers by regex ip # List all servers filtered by part of ip address. 
# Here should be listed all servers if not self.fixed_network_name: msg = 'fixed_network_name needs to be configured to run this test' raise self.skipException(msg) self.s1 = self.client.show_server(self.s1['id'])['server'] addr_spec = self.s1['addresses'][self.fixed_network_name][0] ip = addr_spec['addr'][0:-3] if addr_spec['version'] == 4: params = {'ip': ip} else: params = {'ip6': ip} # capture all servers in case something goes wrong all_servers = self.client.list_servers(detail=True) body = self.client.list_servers(**params) servers = body['servers'] self.assertIn(self.s1_name, map(lambda x: x['name'], servers), "%s not found in %s, all servers %s" % (self.s1_name, servers, all_servers)) self.assertIn(self.s2_name, map(lambda x: x['name'], servers), "%s not found in %s, all servers %s" % (self.s2_name, servers, all_servers)) self.assertIn(self.s3_name, map(lambda x: x['name'], servers), "%s not found in %s, all servers %s" % (self.s3_name, servers, all_servers)) @test.idempotent_id('67aec2d0-35fe-4503-9f92-f13272b867ed') def test_list_servers_detailed_limit_results(self): # Verify only the expected number of detailed results are returned params = {'limit': 1} servers = self.client.list_servers(detail=True, **params) self.assertEqual(1, len(servers['servers']))
nimbusproject/kazoo
refs/heads/master
kazoo/__init__.py
1
import os from kazoo.zkclient import ZooKeeperClient from kazoo.client import KazooClient __all__ = ['ZooKeeperClient', 'KazooClient'] # ZK C client likes to spew log info to STDERR. disable that unless an # env is present. def disable_zookeeper_log(): import zookeeper zookeeper.set_log_stream(open('/dev/null')) if not "KAZOO_LOG_ENABLED" in os.environ: disable_zookeeper_log() def patch_extras(): # workaround for http://code.google.com/p/gevent/issues/detail?id=112 # gevent isn't patching threading._sleep which causes problems # for Condition objects from gevent import sleep import threading threading._sleep = sleep if "KAZOO_TEST_GEVENT_PATCH" in os.environ: from gevent import monkey; monkey.patch_all() patch_extras()
rvalyi/OpenUpgrade
refs/heads/master
addons/account_anglo_saxon/product.py
384
############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, osv class product_category(osv.osv): _inherit = "product.category" _columns = { 'property_account_creditor_price_difference_categ': fields.property( type='many2one', relation='account.account', string="Price Difference Account", help="This account will be used to value price difference between purchase price and cost price."), #Redefine fields to change help text for anglo saxon methodology. 
'property_account_income_categ': fields.property( type='many2one', relation='account.account', string="Income Account", help="This account will be used to value outgoing stock using sale price."), 'property_account_expense_categ': fields.property( type='many2one', relation='account.account', string="Expense Account", help="This account will be used to value outgoing stock using cost price."), } class product_template(osv.osv): _inherit = "product.template" _columns = { 'property_account_creditor_price_difference': fields.property( type='many2one', relation='account.account', string="Price Difference Account", help="This account will be used to value price difference between purchase price and cost price."), #Redefine fields to change help text for anglo saxon methodology. 'property_account_income': fields.property( type='many2one', relation='account.account', string="Income Account", help="This account will be used to value outgoing stock using sale price."), 'property_account_expense': fields.property( type='many2one', relation='account.account', string="Expense Account", help="This account will be used to value outgoing stock using cost price."), } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
johankaito/fufuka
refs/heads/master
microblog/flask/venv/lib/python2.7/site-packages/celery/tests/backends/test_cache.py
6
from __future__ import absolute_import import sys import types from contextlib import contextmanager from kombu.utils.encoding import str_to_bytes from celery import signature from celery import states from celery import group from celery.backends.cache import CacheBackend, DummyClient from celery.exceptions import ImproperlyConfigured from celery.five import items, string, text_t from celery.utils import uuid from celery.tests.case import ( AppCase, Mock, mask_modules, patch, reset_modules, ) PY3 = sys.version_info[0] == 3 class SomeClass(object): def __init__(self, data): self.data = data class test_CacheBackend(AppCase): def setup(self): self.tb = CacheBackend(backend='memory://', app=self.app) self.tid = uuid() def test_no_backend(self): self.app.conf.CELERY_CACHE_BACKEND = None with self.assertRaises(ImproperlyConfigured): CacheBackend(backend=None, app=self.app) def test_mark_as_done(self): self.assertEqual(self.tb.get_status(self.tid), states.PENDING) self.assertIsNone(self.tb.get_result(self.tid)) self.tb.mark_as_done(self.tid, 42) self.assertEqual(self.tb.get_status(self.tid), states.SUCCESS) self.assertEqual(self.tb.get_result(self.tid), 42) def test_is_pickled(self): result = {'foo': 'baz', 'bar': SomeClass(12345)} self.tb.mark_as_done(self.tid, result) # is serialized properly. 
rindb = self.tb.get_result(self.tid) self.assertEqual(rindb.get('foo'), 'baz') self.assertEqual(rindb.get('bar').data, 12345) def test_mark_as_failure(self): try: raise KeyError('foo') except KeyError as exception: self.tb.mark_as_failure(self.tid, exception) self.assertEqual(self.tb.get_status(self.tid), states.FAILURE) self.assertIsInstance(self.tb.get_result(self.tid), KeyError) def test_apply_chord(self): tb = CacheBackend(backend='memory://', app=self.app) gid, res = uuid(), [self.app.AsyncResult(uuid()) for _ in range(3)] tb.apply_chord(group(app=self.app), (), gid, {}, result=res) @patch('celery.result.GroupResult.restore') def test_on_chord_part_return(self, restore): tb = CacheBackend(backend='memory://', app=self.app) deps = Mock() deps.__len__ = Mock() deps.__len__.return_value = 2 restore.return_value = deps task = Mock() task.name = 'foobarbaz' self.app.tasks['foobarbaz'] = task task.request.chord = signature(task) gid, res = uuid(), [self.app.AsyncResult(uuid()) for _ in range(3)] task.request.group = gid tb.apply_chord(group(app=self.app), (), gid, {}, result=res) self.assertFalse(deps.join_native.called) tb.on_chord_part_return(task, 'SUCCESS', 10) self.assertFalse(deps.join_native.called) tb.on_chord_part_return(task, 'SUCCESS', 10) deps.join_native.assert_called_with(propagate=True, timeout=3.0) deps.delete.assert_called_with() def test_mget(self): self.tb.set('foo', 1) self.tb.set('bar', 2) self.assertDictEqual(self.tb.mget(['foo', 'bar']), {'foo': 1, 'bar': 2}) def test_forget(self): self.tb.mark_as_done(self.tid, {'foo': 'bar'}) x = self.app.AsyncResult(self.tid, backend=self.tb) x.forget() self.assertIsNone(x.result) def test_process_cleanup(self): self.tb.process_cleanup() def test_expires_as_int(self): tb = CacheBackend(backend='memory://', expires=10, app=self.app) self.assertEqual(tb.expires, 10) def test_unknown_backend_raises_ImproperlyConfigured(self): with self.assertRaises(ImproperlyConfigured): CacheBackend(backend='unknown://', 
app=self.app) class MyMemcachedStringEncodingError(Exception): pass class MemcachedClient(DummyClient): def set(self, key, value, *args, **kwargs): if PY3: key_t, must_be, not_be, cod = bytes, 'string', 'bytes', 'decode' else: key_t, must_be, not_be, cod = text_t, 'bytes', 'string', 'encode' if isinstance(key, key_t): raise MyMemcachedStringEncodingError( 'Keys must be {0}, not {1}. Convert your ' 'strings using mystring.{2}(charset)!'.format( must_be, not_be, cod)) return super(MemcachedClient, self).set(key, value, *args, **kwargs) class MockCacheMixin(object): @contextmanager def mock_memcache(self): memcache = types.ModuleType('memcache') memcache.Client = MemcachedClient memcache.Client.__module__ = memcache.__name__ prev, sys.modules['memcache'] = sys.modules.get('memcache'), memcache try: yield True finally: if prev is not None: sys.modules['memcache'] = prev @contextmanager def mock_pylibmc(self): pylibmc = types.ModuleType('pylibmc') pylibmc.Client = MemcachedClient pylibmc.Client.__module__ = pylibmc.__name__ prev = sys.modules.get('pylibmc') sys.modules['pylibmc'] = pylibmc try: yield True finally: if prev is not None: sys.modules['pylibmc'] = prev class test_get_best_memcache(AppCase, MockCacheMixin): def test_pylibmc(self): with self.mock_pylibmc(): with reset_modules('celery.backends.cache'): from celery.backends import cache cache._imp = [None] self.assertEqual(cache.get_best_memcache()[0].__module__, 'pylibmc') def test_memcache(self): with self.mock_memcache(): with reset_modules('celery.backends.cache'): with mask_modules('pylibmc'): from celery.backends import cache cache._imp = [None] self.assertEqual(cache.get_best_memcache()[0]().__module__, 'memcache') def test_no_implementations(self): with mask_modules('pylibmc', 'memcache'): with reset_modules('celery.backends.cache'): from celery.backends import cache cache._imp = [None] with self.assertRaises(ImproperlyConfigured): cache.get_best_memcache() def test_cached(self): with 
self.mock_pylibmc(): with reset_modules('celery.backends.cache'): from celery.backends import cache cache._imp = [None] cache.get_best_memcache()[0](behaviors={'foo': 'bar'}) self.assertTrue(cache._imp[0]) cache.get_best_memcache()[0]() def test_backends(self): from celery.backends.cache import backends with self.mock_memcache(): for name, fun in items(backends): self.assertTrue(fun()) class test_memcache_key(AppCase, MockCacheMixin): def test_memcache_unicode_key(self): with self.mock_memcache(): with reset_modules('celery.backends.cache'): with mask_modules('pylibmc'): from celery.backends import cache cache._imp = [None] task_id, result = string(uuid()), 42 b = cache.CacheBackend(backend='memcache', app=self.app) b.store_result(task_id, result, status=states.SUCCESS) self.assertEqual(b.get_result(task_id), result) def test_memcache_bytes_key(self): with self.mock_memcache(): with reset_modules('celery.backends.cache'): with mask_modules('pylibmc'): from celery.backends import cache cache._imp = [None] task_id, result = str_to_bytes(uuid()), 42 b = cache.CacheBackend(backend='memcache', app=self.app) b.store_result(task_id, result, status=states.SUCCESS) self.assertEqual(b.get_result(task_id), result) def test_pylibmc_unicode_key(self): with reset_modules('celery.backends.cache'): with self.mock_pylibmc(): from celery.backends import cache cache._imp = [None] task_id, result = string(uuid()), 42 b = cache.CacheBackend(backend='memcache', app=self.app) b.store_result(task_id, result, status=states.SUCCESS) self.assertEqual(b.get_result(task_id), result) def test_pylibmc_bytes_key(self): with reset_modules('celery.backends.cache'): with self.mock_pylibmc(): from celery.backends import cache cache._imp = [None] task_id, result = str_to_bytes(uuid()), 42 b = cache.CacheBackend(backend='memcache', app=self.app) b.store_result(task_id, result, status=states.SUCCESS) self.assertEqual(b.get_result(task_id), result)
Domatix/account-financial-tools
refs/heads/8.0
account_asset_management/__init__.py
35
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # Copyright (C) 2010-2012 OpenERP s.a. (<http://openerp.com>). # Copyright (c) 2014 Noviat nv/sa (www.noviat.com). All rights reserved. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from . import account_asset from . import account_asset_invoice from . import account from . import account_move from . import wizard from . import report from . import res_config # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
wmvanvliet/mne-python
refs/heads/master
mne/tests/test_ola.py
14
import numpy as np from numpy.testing import assert_allclose import pytest from mne._ola import _COLA, _Interp2, _Storer def test_interp_2pt(): """Test our two-point interpolator.""" n_pts = 200 assert n_pts % 50 == 0 feeds = [ # test a bunch of feeds to make sure they don't break things [n_pts], [50] * (n_pts // 50), [10] * (n_pts // 10), [5] * (n_pts // 5), [2] * (n_pts // 2), [1] * n_pts, ] # ZOH values = np.array([10, -10]) expected = np.full(n_pts, 10) for feed in feeds: expected[-1] = 10 interp = _Interp2([0, n_pts], values, 'zero') out = np.concatenate([interp.feed(f)[0] for f in feed]) assert_allclose(out, expected) interp = _Interp2([0, n_pts - 1], values, 'zero') expected[-1] = -10 out = np.concatenate([interp.feed(f)[0] for f in feed]) assert_allclose(out, expected) # linear and inputs of different sizes values = [np.arange(2)[:, np.newaxis, np.newaxis], np.array([20, 10])] expected = [ np.linspace(0, 1, n_pts, endpoint=False)[np.newaxis, np.newaxis, :], np.linspace(20, 10, n_pts, endpoint=False)] for feed in feeds: interp = _Interp2([0, n_pts], values, 'linear') outs = [interp.feed(f) for f in feed] outs = [np.concatenate([o[0] for o in outs], axis=-1), np.concatenate([o[1] for o in outs], axis=-1)] assert_allclose(outs[0], expected[0], atol=1e-7) assert_allclose(outs[1], expected[1], atol=1e-7) # cos**2 and more interesting bounds values = np.array([10, -10]) expected = np.full(n_pts, 10.) 
expected[-5:] = -10 cos = np.cos(np.linspace(0, np.pi / 2., n_pts - 9, endpoint=False)) expected[4:-5] = cos ** 2 * 20 - 10 for feed in feeds: interp = _Interp2([4, n_pts - 5], values, 'cos2') out = np.concatenate([interp.feed(f)[0] for f in feed]) assert_allclose(out, expected, atol=1e-7) out = interp.feed(10)[0] assert_allclose(out, [values[-1]] * 10, atol=1e-7) # hann and broadcasting n_hann = n_pts - 9 expected[4:-5] = np.hanning(2 * n_hann + 1)[n_hann:-1] * 20 - 10 expected = np.array([expected, expected[::-1] * 0.5]) values = np.array([values, values[::-1] * 0.5]).T for feed in feeds: interp = _Interp2([4, n_pts - 5], values, 'hann') out = np.concatenate([interp.feed(f)[0] for f in feed], axis=-1) assert_allclose(out, expected, atol=1e-7) # one control point and None support values = [np.array([10]), None] for start in [0, 50, 99, 100, 1000]: interp = _Interp2([start], values, 'zero') out, none = interp.feed(n_pts) assert none is None expected = np.full(n_pts, 10.) assert_allclose(out, expected) @pytest.mark.parametrize('ndim', (1, 2, 3)) def test_cola(ndim): """Test COLA processing.""" sfreq = 1000. 
rng = np.random.RandomState(0) def processor(x): return (x / 2.,) # halve the signal for n_total in (999, 1000, 1001): signal = rng.randn(n_total) out = rng.randn(n_total) # shouldn't matter for _ in range(ndim - 1): signal = signal[np.newaxis] out = out[np.newaxis] for n_samples in (99, 100, 101, 102, n_total - n_total // 2 + 1, n_total): for window in ('hann', 'bartlett', 'boxcar', 'triang'): # A few example COLA possibilities n_overlaps = () if window in ('hann', 'bartlett') or n_samples % 2 == 0: n_overlaps += ((n_samples + 1) // 2,) if window == 'boxcar': n_overlaps += (0,) for n_overlap in n_overlaps: # can pass callable or ndarray for storer in (out, _Storer(out)): cola = _COLA(processor, storer, n_total, n_samples, n_overlap, sfreq, window) n_input = 0 # feed data in an annoying way while n_input < n_total: next_len = min(rng.randint(1, 30), n_total - n_input) cola.feed(signal[..., n_input:n_input + next_len]) n_input += next_len assert_allclose(out, signal / 2., atol=1e-7)
VioletRed/script.module.urlresolver
refs/heads/master
lib/urlresolver/plugins/donevideo.py
1
''' Donevideo urlresolver plugin Copyright (C) 2013 Vinnydude This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ''' from t0mm0.common.net import Net from urlresolver.plugnplay.interfaces import UrlResolver from urlresolver.plugnplay.interfaces import PluginSettings from urlresolver.plugnplay import Plugin import re, xbmcgui from urlresolver import common from lib import jsunpack from lib import captcha_lib net = Net() class DonevideoResolver(Plugin, UrlResolver, PluginSettings): implements = [UrlResolver, PluginSettings] name = "donevideo" domains = [ "donevideo.com" ] def __init__(self): p = self.get_setting('priority') or 100 self.priority = int(p) self.net = Net() def get_media_url(self, host, media_id): try: url = self.get_url(host, media_id) html = self.net.http_GET(url).content dialog = xbmcgui.DialogProgress() dialog.create('Resolving', 'Resolving Donevideo Link...') dialog.update(0) data = {} r = re.findall(r'type="(?:hidden|submit)?" 
name="(.+?)"\s* value="?(.+?)">', html) for name, value in r: data[name] = value html = net.http_POST(url, data).content r = re.findall(r'type="hidden" name="(.+?)" value="(.+?)">', html) for name, value in r: data[name] = value data.update(captcha_lib.do_captcha(html)) html = net.http_POST(url, data).content sPattern = '<script type=(?:"|\')text/javascript(?:"|\')>(eval\(' sPattern += 'function\(p,a,c,k,e,d\)(?!.+player_ads.+).+np_vid.+?)' sPattern += '\s+?</script>' r = re.search(sPattern, html, re.DOTALL + re.IGNORECASE) if r: sJavascript = r.group(1) sUnpacked = jsunpack.unpack(sJavascript) sPattern = '<embed id="np_vid"type="video/divx"src="(.+?)' sPattern += '"custommode=' r = re.search(sPattern, sUnpacked) if r: dialog.update(100) dialog.close() return r.group(1) else: num = re.compile('donevideo\|(.+?)\|http').findall(html) pre = 'http://'+num[0]+'.donevideo.com:182/d/' preb = re.compile('image\|(.+?)\|video\|(.+?)\|').findall(html) for ext, link in preb: r = pre+link+'/video.'+ext dialog.update(100) dialog.close() return r except Exception, e: common.addon.log('**** Donevideo Error occured: %s' % e) common.addon.show_small_popup('Error', str(e), 5000, '') return self.unresolvable(code=0, msg=e) def get_url(self, host, media_id): return 'http://www.donevideo.com/%s' % media_id def get_host_and_id(self, url): r = re.search('//(.+?)/([0-9a-zA-Z]+)',url) if r: return r.groups() else: return False return('host', 'media_id') def valid_url(self, url, host): if self.get_setting('enabled') == 'false': return False return (re.match('http://(www.)?donevideo.com/' + '[0-9A-Za-z]+', url) or 'donevideo' in host)
chouseknecht/ansible
refs/heads/devel
lib/ansible/modules/network/fortios/fortios_ips_global.py
14
#!/usr/bin/python from __future__ import (absolute_import, division, print_function) # Copyright 2019 Fortinet, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <https://www.gnu.org/licenses/>. __metaclass__ = type ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.1'} DOCUMENTATION = ''' --- module: fortios_ips_global short_description: Configure IPS global parameter in Fortinet's FortiOS and FortiGate. description: - This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the user to set and modify ips feature and global category. Examples include all parameters and values need to be adjusted to datasources before usage. Tested with FOS v6.0.5 version_added: "2.8" author: - Miguel Angel Munoz (@mamunozgonzalez) - Nicolas Thomas (@thomnico) notes: - Requires fortiosapi library developed by Fortinet - Run as a local_action in your playbook requirements: - fortiosapi>=0.9.8 options: host: description: - FortiOS or FortiGate IP address. type: str required: false username: description: - FortiOS or FortiGate username. type: str required: false password: description: - FortiOS or FortiGate password. type: str default: "" vdom: description: - Virtual domain, among those defined previously. A vdom is a virtual instance of the FortiGate that can be configured and used as a different unit. 
type: str default: root https: description: - Indicates if the requests towards FortiGate must use HTTPS protocol. type: bool default: true ssl_verify: description: - Ensures FortiGate certificate must be verified by a proper CA. type: bool default: true version_added: 2.9 ips_global: description: - Configure IPS global parameter. default: null type: dict suboptions: anomaly_mode: description: - Global blocking mode for rate-based anomalies. type: str choices: - periodical - continuous database: description: - Regular or extended IPS database. Regular protects against the latest common and in-the-wild attacks. Extended includes protection from legacy attacks. type: str choices: - regular - extended deep_app_insp_db_limit: description: - Limit on number of entries in deep application inspection database (1 - 2147483647, 0 = use recommended setting) type: int deep_app_insp_timeout: description: - Timeout for Deep application inspection (1 - 2147483647 sec., 0 = use recommended setting). type: int engine_count: description: - Number of IPS engines running. If set to the default value of 0, FortiOS sets the number to optimize performance depending on the number of CPU cores. type: int exclude_signatures: description: - Excluded signatures. type: str choices: - none - industrial fail_open: description: - Enable to allow traffic if the IPS process crashes. Default is disable and IPS traffic is blocked when the IPS process crashes. type: str choices: - enable - disable intelligent_mode: description: - Enable/disable IPS adaptive scanning (intelligent mode). Intelligent mode optimizes the scanning method for the type of traffic. type: str choices: - enable - disable session_limit_mode: description: - Method of counting concurrent sessions used by session limit anomalies. Choose between greater accuracy (accurate) or improved performance (heuristics). 
type: str choices: - accurate - heuristic skype_client_public_ipaddr: description: - Public IP addresses of your network that receive Skype sessions. Helps identify Skype sessions. Separate IP addresses with commas. type: str socket_size: description: - IPS socket buffer size (0 - 256 MB). Default depends on available memory. Can be changed to tune performance. type: int sync_session_ttl: description: - Enable/disable use of kernel session TTL for IPS sessions. type: str choices: - enable - disable traffic_submit: description: - Enable/disable submitting attack data found by this FortiGate to FortiGuard. type: str choices: - enable - disable ''' EXAMPLES = ''' - hosts: localhost vars: host: "192.168.122.40" username: "admin" password: "" vdom: "root" ssl_verify: "False" tasks: - name: Configure IPS global parameter. fortios_ips_global: host: "{{ host }}" username: "{{ username }}" password: "{{ password }}" vdom: "{{ vdom }}" https: "False" ips_global: anomaly_mode: "periodical" database: "regular" deep_app_insp_db_limit: "5" deep_app_insp_timeout: "6" engine_count: "7" exclude_signatures: "none" fail_open: "enable" intelligent_mode: "enable" session_limit_mode: "accurate" skype_client_public_ipaddr: "<your_own_value>" socket_size: "13" sync_session_ttl: "enable" traffic_submit: "enable" ''' RETURN = ''' build: description: Build number of the fortigate image returned: always type: str sample: '1547' http_method: description: Last method used to provision the content into FortiGate returned: always type: str sample: 'PUT' http_status: description: Last result given by FortiGate on last operation applied returned: always type: str sample: "200" mkey: description: Master key (id) used in the last call to FortiGate returned: success type: str sample: "id" name: description: Name of the table used to fulfill the request returned: always type: str sample: "urlfilter" path: description: Path of the table used to fulfill the request returned: always type: str sample: 
"webfilter" revision: description: Internal revision number returned: always type: str sample: "17.0.2.10658" serial: description: Serial number of the unit returned: always type: str sample: "FGVMEVYYQT3AB5352" status: description: Indication of the operation's result returned: always type: str sample: "success" vdom: description: Virtual domain used returned: always type: str sample: "root" version: description: Version of the FortiGate returned: always type: str sample: "v5.6.3" ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.connection import Connection from ansible.module_utils.network.fortios.fortios import FortiOSHandler from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG def login(data, fos): host = data['host'] username = data['username'] password = data['password'] ssl_verify = data['ssl_verify'] fos.debug('on') if 'https' in data and not data['https']: fos.https('off') else: fos.https('on') fos.login(host, username, password, verify=ssl_verify) def filter_ips_global_data(json): option_list = ['anomaly_mode', 'database', 'deep_app_insp_db_limit', 'deep_app_insp_timeout', 'engine_count', 'exclude_signatures', 'fail_open', 'intelligent_mode', 'session_limit_mode', 'skype_client_public_ipaddr', 'socket_size', 'sync_session_ttl', 'traffic_submit'] dictionary = {} for attribute in option_list: if attribute in json and json[attribute] is not None: dictionary[attribute] = json[attribute] return dictionary def underscore_to_hyphen(data): if isinstance(data, list): for elem in data: elem = underscore_to_hyphen(elem) elif isinstance(data, dict): new_data = {} for k, v in data.items(): new_data[k.replace('_', '-')] = underscore_to_hyphen(v) data = new_data return data def ips_global(data, fos): vdom = data['vdom'] ips_global_data = data['ips_global'] filtered_data = underscore_to_hyphen(filter_ips_global_data(ips_global_data)) return fos.set('ips', 'global', data=filtered_data, vdom=vdom) def 
is_successful_status(status): return status['status'] == "success" or \ status['http_method'] == "DELETE" and status['http_status'] == 404 def fortios_ips(data, fos): if data['ips_global']: resp = ips_global(data, fos) return not is_successful_status(resp), \ resp['status'] == "success", \ resp def main(): fields = { "host": {"required": False, "type": "str"}, "username": {"required": False, "type": "str"}, "password": {"required": False, "type": "str", "default": "", "no_log": True}, "vdom": {"required": False, "type": "str", "default": "root"}, "https": {"required": False, "type": "bool", "default": True}, "ssl_verify": {"required": False, "type": "bool", "default": True}, "ips_global": { "required": False, "type": "dict", "default": None, "options": { "anomaly_mode": {"required": False, "type": "str", "choices": ["periodical", "continuous"]}, "database": {"required": False, "type": "str", "choices": ["regular", "extended"]}, "deep_app_insp_db_limit": {"required": False, "type": "int"}, "deep_app_insp_timeout": {"required": False, "type": "int"}, "engine_count": {"required": False, "type": "int"}, "exclude_signatures": {"required": False, "type": "str", "choices": ["none", "industrial"]}, "fail_open": {"required": False, "type": "str", "choices": ["enable", "disable"]}, "intelligent_mode": {"required": False, "type": "str", "choices": ["enable", "disable"]}, "session_limit_mode": {"required": False, "type": "str", "choices": ["accurate", "heuristic"]}, "skype_client_public_ipaddr": {"required": False, "type": "str"}, "socket_size": {"required": False, "type": "int"}, "sync_session_ttl": {"required": False, "type": "str", "choices": ["enable", "disable"]}, "traffic_submit": {"required": False, "type": "str", "choices": ["enable", "disable"]} } } } module = AnsibleModule(argument_spec=fields, supports_check_mode=False) # legacy_mode refers to using fortiosapi instead of HTTPAPI legacy_mode = 'host' in module.params and module.params['host'] is not None and \ 
'username' in module.params and module.params['username'] is not None and \ 'password' in module.params and module.params['password'] is not None if not legacy_mode: if module._socket_path: connection = Connection(module._socket_path) fos = FortiOSHandler(connection) is_error, has_changed, result = fortios_ips(module.params, fos) else: module.fail_json(**FAIL_SOCKET_MSG) else: try: from fortiosapi import FortiOSAPI except ImportError: module.fail_json(msg="fortiosapi module is required") fos = FortiOSAPI() login(module.params, fos) is_error, has_changed, result = fortios_ips(module.params, fos) fos.logout() if not is_error: module.exit_json(changed=has_changed, meta=result) else: module.fail_json(msg="Error in repo", meta=result) if __name__ == '__main__': main()
SummerLW/Perf-Insight-Report
refs/heads/test
telemetry/third_party/modulegraph/modulegraph_tests/testdata/syspath/mymodule3.py
26
""" fake module """
sachinkum/Bal-Aveksha
refs/heads/master
WebServer/BalAvekshaEnv/lib/python3.5/site-packages/django/views/debug.py
17
from __future__ import unicode_literals

import re
import sys
import types

from django.conf import settings
from django.http import HttpResponse, HttpResponseNotFound
from django.template import Context, Engine, TemplateDoesNotExist
from django.template.defaultfilters import force_escape, pprint
from django.urls import Resolver404, resolve
from django.utils import lru_cache, six, timezone
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_bytes, force_text, smart_text
from django.utils.module_loading import import_string
from django.utils.translation import ugettext as _

# Minimal Django templates engine to render the error templates
# regardless of the project's TEMPLATES setting.
DEBUG_ENGINE = Engine(debug=True)

# Any setting whose name matches one of these words is blanked out in the
# debug page (case-insensitive substring match).
HIDDEN_SETTINGS = re.compile('API|TOKEN|KEY|SECRET|PASS|SIGNATURE', flags=re.IGNORECASE)

# Replacement shown in place of any cleansed (sensitive) value.
CLEANSED_SUBSTITUTE = '********************'


class CallableSettingWrapper(object):
    """
    Object to wrap callable appearing in settings

    * Not to call in the debug page (#21345).
    * Not to break the debug page if the callable forbidding to set attributes (#23070).
    """
    def __init__(self, callable_setting):
        self._wrapped = callable_setting

    def __repr__(self):
        return repr(self._wrapped)


def cleanse_setting(key, value):
    """Cleanse an individual setting key/value of sensitive content.

    If the value is a dictionary, recursively cleanse the keys in
    that dictionary.
    """
    try:
        if HIDDEN_SETTINGS.search(key):
            cleansed = CLEANSED_SUBSTITUTE
        else:
            if isinstance(value, dict):
                cleansed = {k: cleanse_setting(k, v) for k, v in value.items()}
            else:
                cleansed = value
    except TypeError:
        # If the key isn't regex-able, just return as-is.
        cleansed = value

    if callable(cleansed):
        # For fixing #21345 and #23070
        cleansed = CallableSettingWrapper(cleansed)

    return cleansed


def get_safe_settings():
    "Returns a dictionary of the settings module, with sensitive settings blurred out."
    settings_dict = {}
    # Upper-case names are the Django convention for real settings.
    for k in dir(settings):
        if k.isupper():
            settings_dict[k] = cleanse_setting(k, getattr(settings, k))
    return settings_dict


def technical_500_response(request, exc_type, exc_value, tb, status_code=500):
    """
    Create a technical server error response. The last three arguments are
    the values returned from sys.exc_info() and friends.
    """
    reporter = ExceptionReporter(request, exc_type, exc_value, tb)
    if request.is_ajax():
        # AJAX callers get a plain-text traceback instead of the HTML page.
        text = reporter.get_traceback_text()
        return HttpResponse(text, status=status_code, content_type='text/plain')
    else:
        html = reporter.get_traceback_html()
        return HttpResponse(html, status=status_code, content_type='text/html')


@lru_cache.lru_cache()
def get_default_exception_reporter_filter():
    # Instantiate the default filter for the first time and cache it.
    return import_string(settings.DEFAULT_EXCEPTION_REPORTER_FILTER)()


def get_exception_reporter_filter(request):
    # A per-request filter (if middleware attached one) overrides the default.
    default_filter = get_default_exception_reporter_filter()
    return getattr(request, 'exception_reporter_filter', default_filter)


class ExceptionReporterFilter(object):
    """
    Base for all exception reporter filter classes. All overridable hooks
    contain lenient default behaviors.
    """

    def get_post_parameters(self, request):
        if request is None:
            return {}
        else:
            return request.POST

    def get_traceback_frame_variables(self, request, tb_frame):
        return list(tb_frame.f_locals.items())


class SafeExceptionReporterFilter(ExceptionReporterFilter):
    """
    Use annotations made by the sensitive_post_parameters and
    sensitive_variables decorators to filter out sensitive information.
    """

    def is_active(self, request):
        """
        This filter is to add safety in production environments (i.e. DEBUG
        is False). If DEBUG is True then your site is not safe anyway.
        This hook is provided as a convenience to easily activate or
        deactivate the filter on a per request basis.
""" return settings.DEBUG is False def get_cleansed_multivaluedict(self, request, multivaluedict): """ Replaces the keys in a MultiValueDict marked as sensitive with stars. This mitigates leaking sensitive POST parameters if something like request.POST['nonexistent_key'] throws an exception (#21098). """ sensitive_post_parameters = getattr(request, 'sensitive_post_parameters', []) if self.is_active(request) and sensitive_post_parameters: multivaluedict = multivaluedict.copy() for param in sensitive_post_parameters: if param in multivaluedict: multivaluedict[param] = CLEANSED_SUBSTITUTE return multivaluedict def get_post_parameters(self, request): """ Replaces the values of POST parameters marked as sensitive with stars (*********). """ if request is None: return {} else: sensitive_post_parameters = getattr(request, 'sensitive_post_parameters', []) if self.is_active(request) and sensitive_post_parameters: cleansed = request.POST.copy() if sensitive_post_parameters == '__ALL__': # Cleanse all parameters. for k, v in cleansed.items(): cleansed[k] = CLEANSED_SUBSTITUTE return cleansed else: # Cleanse only the specified parameters. for param in sensitive_post_parameters: if param in cleansed: cleansed[param] = CLEANSED_SUBSTITUTE return cleansed else: return request.POST def cleanse_special_types(self, request, value): try: # If value is lazy or a complex object of another kind, this check # might raise an exception. isinstance checks that lazy # MultiValueDicts will have a return value. is_multivalue_dict = isinstance(value, MultiValueDict) except Exception as e: return '{!r} while evaluating {!r}'.format(e, value) if is_multivalue_dict: # Cleanse MultiValueDicts (request.POST is the one we usually care about) value = self.get_cleansed_multivaluedict(request, value) return value def get_traceback_frame_variables(self, request, tb_frame): """ Replaces the values of variables marked as sensitive with stars (*********). 
""" # Loop through the frame's callers to see if the sensitive_variables # decorator was used. current_frame = tb_frame.f_back sensitive_variables = None while current_frame is not None: if (current_frame.f_code.co_name == 'sensitive_variables_wrapper' and 'sensitive_variables_wrapper' in current_frame.f_locals): # The sensitive_variables decorator was used, so we take note # of the sensitive variables' names. wrapper = current_frame.f_locals['sensitive_variables_wrapper'] sensitive_variables = getattr(wrapper, 'sensitive_variables', None) break current_frame = current_frame.f_back cleansed = {} if self.is_active(request) and sensitive_variables: if sensitive_variables == '__ALL__': # Cleanse all variables for name, value in tb_frame.f_locals.items(): cleansed[name] = CLEANSED_SUBSTITUTE else: # Cleanse specified variables for name, value in tb_frame.f_locals.items(): if name in sensitive_variables: value = CLEANSED_SUBSTITUTE else: value = self.cleanse_special_types(request, value) cleansed[name] = value else: # Potentially cleanse the request and any MultiValueDicts if they # are one of the frame variables. for name, value in tb_frame.f_locals.items(): cleansed[name] = self.cleanse_special_types(request, value) if (tb_frame.f_code.co_name == 'sensitive_variables_wrapper' and 'sensitive_variables_wrapper' in tb_frame.f_locals): # For good measure, obfuscate the decorated function's arguments in # the sensitive_variables decorator's frame, in case the variables # associated with those arguments were meant to be obfuscated from # the decorated function's frame. cleansed['func_args'] = CLEANSED_SUBSTITUTE cleansed['func_kwargs'] = CLEANSED_SUBSTITUTE return cleansed.items() class ExceptionReporter(object): """ A class to organize and coordinate reporting on exceptions. 
""" def __init__(self, request, exc_type, exc_value, tb, is_email=False): self.request = request self.filter = get_exception_reporter_filter(self.request) self.exc_type = exc_type self.exc_value = exc_value self.tb = tb self.is_email = is_email self.template_info = getattr(self.exc_value, 'template_debug', None) self.template_does_not_exist = False self.postmortem = None # Handle deprecated string exceptions if isinstance(self.exc_type, six.string_types): self.exc_value = Exception('Deprecated String Exception: %r' % self.exc_type) self.exc_type = type(self.exc_value) def get_traceback_data(self): """Return a dictionary containing traceback information.""" if self.exc_type and issubclass(self.exc_type, TemplateDoesNotExist): self.template_does_not_exist = True self.postmortem = self.exc_value.chain or [self.exc_value] frames = self.get_traceback_frames() for i, frame in enumerate(frames): if 'vars' in frame: frame_vars = [] for k, v in frame['vars']: v = pprint(v) # The force_escape filter assume unicode, make sure that works if isinstance(v, six.binary_type): v = v.decode('utf-8', 'replace') # don't choke on non-utf-8 input # Trim large blobs of data if len(v) > 4096: v = '%s... <trimmed %d bytes string>' % (v[0:4096], len(v)) frame_vars.append((k, force_escape(v))) frame['vars'] = frame_vars frames[i] = frame unicode_hint = '' if self.exc_type and issubclass(self.exc_type, UnicodeError): start = getattr(self.exc_value, 'start', None) end = getattr(self.exc_value, 'end', None) if start is not None and end is not None: unicode_str = self.exc_value.args[1] unicode_hint = smart_text( unicode_str[max(start - 5, 0):min(end + 5, len(unicode_str))], 'ascii', errors='replace' ) from django import get_version if self.request is None: user_str = None else: try: user_str = force_text(self.request.user) except Exception: # request.user may raise OperationalError if the database is # unavailable, for example. 
user_str = '[unable to retrieve the current user]' c = { 'is_email': self.is_email, 'unicode_hint': unicode_hint, 'frames': frames, 'request': self.request, 'filtered_POST': self.filter.get_post_parameters(self.request), 'user_str': user_str, 'settings': get_safe_settings(), 'sys_executable': sys.executable, 'sys_version_info': '%d.%d.%d' % sys.version_info[0:3], 'server_time': timezone.now(), 'django_version_info': get_version(), 'sys_path': sys.path, 'template_info': self.template_info, 'template_does_not_exist': self.template_does_not_exist, 'postmortem': self.postmortem, } # Check whether exception info is available if self.exc_type: c['exception_type'] = self.exc_type.__name__ if self.exc_value: c['exception_value'] = smart_text(self.exc_value, errors='replace') if frames: c['lastframe'] = frames[-1] return c def get_traceback_html(self): "Return HTML version of debug 500 HTTP error page." t = DEBUG_ENGINE.from_string(TECHNICAL_500_TEMPLATE) c = Context(self.get_traceback_data(), use_l10n=False) return t.render(c) def get_traceback_text(self): "Return plain text version of debug 500 HTTP error page." t = DEBUG_ENGINE.from_string(TECHNICAL_500_TEXT_TEMPLATE) c = Context(self.get_traceback_data(), autoescape=False, use_l10n=False) return t.render(c) def _get_lines_from_file(self, filename, lineno, context_lines, loader=None, module_name=None): """ Returns context_lines before and after lineno from file. Returns (pre_context_lineno, pre_context, context_line, post_context). 
""" source = None if loader is not None and hasattr(loader, "get_source"): try: source = loader.get_source(module_name) except ImportError: pass if source is not None: source = source.splitlines() if source is None: try: with open(filename, 'rb') as fp: source = fp.read().splitlines() except (OSError, IOError): pass if source is None: return None, [], None, [] # If we just read the source from a file, or if the loader did not # apply tokenize.detect_encoding to decode the source into a Unicode # string, then we should do that ourselves. if isinstance(source[0], six.binary_type): encoding = 'ascii' for line in source[:2]: # File coding may be specified. Match pattern from PEP-263 # (http://www.python.org/dev/peps/pep-0263/) match = re.search(br'coding[:=]\s*([-\w.]+)', line) if match: encoding = match.group(1).decode('ascii') break source = [six.text_type(sline, encoding, 'replace') for sline in source] lower_bound = max(0, lineno - context_lines) upper_bound = lineno + context_lines pre_context = source[lower_bound:lineno] context_line = source[lineno] post_context = source[lineno + 1:upper_bound] return lower_bound, pre_context, context_line, post_context def get_traceback_frames(self): def explicit_or_implicit_cause(exc_value): explicit = getattr(exc_value, '__cause__', None) implicit = getattr(exc_value, '__context__', None) return explicit or implicit # Get the exception and all its causes exceptions = [] exc_value = self.exc_value while exc_value: exceptions.append(exc_value) exc_value = explicit_or_implicit_cause(exc_value) frames = [] # No exceptions were supplied to ExceptionReporter if not exceptions: return frames # In case there's just one exception (always in Python 2, # sometimes in Python 3), take the traceback from self.tb (Python 2 # doesn't have a __traceback__ attribute on Exception) exc_value = exceptions.pop() tb = self.tb if six.PY2 or not exceptions else exc_value.__traceback__ while tb is not None: # Support for __traceback_hide__ which is 
used by a few libraries # to hide internal frames. if tb.tb_frame.f_locals.get('__traceback_hide__'): tb = tb.tb_next continue filename = tb.tb_frame.f_code.co_filename function = tb.tb_frame.f_code.co_name lineno = tb.tb_lineno - 1 loader = tb.tb_frame.f_globals.get('__loader__') module_name = tb.tb_frame.f_globals.get('__name__') or '' pre_context_lineno, pre_context, context_line, post_context = self._get_lines_from_file( filename, lineno, 7, loader, module_name, ) if pre_context_lineno is not None: frames.append({ 'exc_cause': explicit_or_implicit_cause(exc_value), 'exc_cause_explicit': getattr(exc_value, '__cause__', True), 'tb': tb, 'type': 'django' if module_name.startswith('django.') else 'user', 'filename': filename, 'function': function, 'lineno': lineno + 1, 'vars': self.filter.get_traceback_frame_variables(self.request, tb.tb_frame), 'id': id(tb), 'pre_context': pre_context, 'context_line': context_line, 'post_context': post_context, 'pre_context_lineno': pre_context_lineno + 1, }) # If the traceback for current exception is consumed, try the # other exception. if six.PY2: tb = tb.tb_next elif not tb.tb_next and exceptions: exc_value = exceptions.pop() tb = exc_value.__traceback__ else: tb = tb.tb_next return frames def format_exception(self): """ Return the same data as from traceback.format_exception. """ import traceback frames = self.get_traceback_frames() tb = [(f['filename'], f['lineno'], f['function'], f['context_line']) for f in frames] list = ['Traceback (most recent call last):\n'] list += traceback.format_list(tb) list += traceback.format_exception_only(self.exc_type, self.exc_value) return list def technical_404_response(request, exception): "Create a technical 404 error response. The exception should be the Http404." 
    try:
        error_url = exception.args[0]['path']
    except (IndexError, TypeError, KeyError):
        error_url = request.path_info[1:]  # Trim leading slash

    try:
        tried = exception.args[0]['tried']
    except (IndexError, TypeError, KeyError):
        tried = []
    else:
        if (not tried or (                  # empty URLconf
            request.path == '/' and
            len(tried) == 1 and             # default URLconf
            len(tried[0]) == 1 and
            getattr(tried[0][0], 'app_name', '') == getattr(tried[0][0], 'namespace', '') == 'admin'
        )):
            # A fresh project with no URLs configured gets the welcome page.
            return default_urlconf(request)

    urlconf = getattr(request, 'urlconf', settings.ROOT_URLCONF)
    if isinstance(urlconf, types.ModuleType):
        urlconf = urlconf.__name__

    caller = ''
    try:
        resolver_match = resolve(request.path)
    except Resolver404:
        pass
    else:
        obj = resolver_match.func

        # Derive a dotted-path name for the view that raised, if any.
        if hasattr(obj, '__name__'):
            caller = obj.__name__
        elif hasattr(obj, '__class__') and hasattr(obj.__class__, '__name__'):
            caller = obj.__class__.__name__

        if hasattr(obj, '__module__'):
            module = obj.__module__
            caller = '%s.%s' % (module, caller)

    t = DEBUG_ENGINE.from_string(TECHNICAL_404_TEMPLATE)
    c = Context({
        'urlconf': urlconf,
        'root_urlconf': settings.ROOT_URLCONF,
        'request_path': error_url,
        'urlpatterns': tried,
        'reason': force_bytes(exception, errors='replace'),
        'request': request,
        'settings': get_safe_settings(),
        'raising_view_name': caller,
    })
    return HttpResponseNotFound(t.render(c), content_type='text/html')


def default_urlconf(request):
    "Create an empty URLconf 404 error response."
    t = DEBUG_ENGINE.from_string(DEFAULT_URLCONF_TEMPLATE)
    c = Context({
        "title": _("Welcome to Django"),
        "heading": _("It worked!"),
        "subheading": _("Congratulations on your first Django-powered page."),
        "instructions": _(
            "Of course, you haven't actually done any work yet. "
            "Next, start your first app by running <code>python manage.py startapp [app_label]</code>."
        ),
        "explanation": _(
            "You're seeing this message because you have <code>DEBUG = True</code> in your "
            "Django settings file and you haven't configured any URLs. Get to work!"
), }) return HttpResponse(t.render(c), content_type='text/html') # # Templates are embedded in the file so that we know the error handler will # always work even if the template loader is broken. # TECHNICAL_500_TEMPLATE = (""" <!DOCTYPE html> <html lang="en"> <head> <meta http-equiv="content-type" content="text/html; charset=utf-8"> <meta name="robots" content="NONE,NOARCHIVE"> <title>{% if exception_type %}{{ exception_type }}{% else %}Report{% endif %}""" """{% if request %} at {{ request.path_info|escape }}{% endif %}</title> <style type="text/css"> html * { padding:0; margin:0; } body * { padding:10px 20px; } body * * { padding:0; } body { font:small sans-serif; } body>div { border-bottom:1px solid #ddd; } h1 { font-weight:normal; } h2 { margin-bottom:.8em; } h2 span { font-size:80%; color:#666; font-weight:normal; } h3 { margin:1em 0 .5em 0; } h4 { margin:0 0 .5em 0; font-weight: normal; } code, pre { font-size: 100%; white-space: pre-wrap; } table { border:1px solid #ccc; border-collapse: collapse; width:100%; background:white; } tbody td, tbody th { vertical-align:top; padding:2px 3px; } thead th { padding:1px 6px 1px 3px; background:#fefefe; text-align:left; font-weight:normal; font-size:11px; border:1px solid #ddd; } tbody th { width:12em; text-align:right; color:#666; padding-right:.5em; } table.vars { margin:5px 0 2px 40px; } table.vars td, table.req td { font-family:monospace; } table td.code { width:100%; } table td.code pre { overflow:hidden; } table.source th { color:#666; } table.source td { font-family:monospace; white-space:pre; border-bottom:1px solid #eee; } ul.traceback { list-style-type:none; color: #222; } ul.traceback li.frame { padding-bottom:1em; color:#666; } ul.traceback li.user { background-color:#e0e0e0; color:#000 } div.context { padding:10px 0; overflow:hidden; } div.context ol { padding-left:30px; margin:0 10px; list-style-position: inside; } div.context ol li { font-family:monospace; white-space:pre; color:#777; cursor:pointer; 
padding-left: 2px; } div.context ol li pre { display:inline; } div.context ol.context-line li { color:#505050; background-color:#dfdfdf; padding: 3px 2px; } div.context ol.context-line li span { position:absolute; right:32px; } .user div.context ol.context-line li { background-color:#bbb; color:#000; } .user div.context ol li { color:#666; } div.commands { margin-left: 40px; } div.commands a { color:#555; text-decoration:none; } .user div.commands a { color: black; } #summary { background: #ffc; } #summary h2 { font-weight: normal; color: #666; } #explanation { background:#eee; } #template, #template-not-exist { background:#f6f6f6; } #template-not-exist ul { margin: 0 0 10px 20px; } #template-not-exist .postmortem-section { margin-bottom: 3px; } #unicode-hint { background:#eee; } #traceback { background:#eee; } #requestinfo { background:#f6f6f6; padding-left:120px; } #summary table { border:none; background:transparent; } #requestinfo h2, #requestinfo h3 { position:relative; margin-left:-100px; } #requestinfo h3 { margin-bottom:-1em; } .error { background: #ffc; } .specific { color:#cc3300; font-weight:bold; } h2 span.commands { font-size:.7em;} span.commands a:link {color:#5E5694;} pre.exception_value { font-family: sans-serif; color: #666; font-size: 1.5em; margin: 10px 0 10px 0; } .append-bottom { margin-bottom: 10px; } </style> {% if not is_email %} <script type="text/javascript"> //<!-- function getElementsByClassName(oElm, strTagName, strClassName){ // Written by Jonathan Snook, http://www.snook.ca/jon; Add-ons by Robert Nyman, http://www.robertnyman.com var arrElements = (strTagName == "*" && document.all)? 
document.all : oElm.getElementsByTagName(strTagName); var arrReturnElements = new Array(); strClassName = strClassName.replace(/\-/g, "\\-"); var oRegExp = new RegExp("(^|\\s)" + strClassName + "(\\s|$)"); var oElement; for(var i=0; i<arrElements.length; i++){ oElement = arrElements[i]; if(oRegExp.test(oElement.className)){ arrReturnElements.push(oElement); } } return (arrReturnElements) } function hideAll(elems) { for (var e = 0; e < elems.length; e++) { elems[e].style.display = 'none'; } } window.onload = function() { hideAll(getElementsByClassName(document, 'table', 'vars')); hideAll(getElementsByClassName(document, 'ol', 'pre-context')); hideAll(getElementsByClassName(document, 'ol', 'post-context')); hideAll(getElementsByClassName(document, 'div', 'pastebin')); } function toggle() { for (var i = 0; i < arguments.length; i++) { var e = document.getElementById(arguments[i]); if (e) { e.style.display = e.style.display == 'none' ? 'block': 'none'; } } return false; } function varToggle(link, id) { toggle('v' + id); var s = link.getElementsByTagName('span')[0]; var uarr = String.fromCharCode(0x25b6); var darr = String.fromCharCode(0x25bc); s.textContent = s.textContent == uarr ? darr : uarr; return false; } function switchPastebinFriendly(link) { s1 = "Switch to copy-and-paste view"; s2 = "Switch back to interactive view"; link.textContent = link.textContent.trim() == s1 ? 
s2: s1; toggle('browserTraceback', 'pastebinTraceback'); return false; } //--> </script> {% endif %} </head> <body> <div id="summary"> <h1>{% if exception_type %}{{ exception_type }}{% else %}Report{% endif %}""" """{% if request %} at {{ request.path_info|escape }}{% endif %}</h1> <pre class="exception_value">""" """{% if exception_value %}{{ exception_value|force_escape }}{% else %}No exception message supplied{% endif %}""" """</pre> <table class="meta"> {% if request %} <tr> <th>Request Method:</th> <td>{{ request.META.REQUEST_METHOD }}</td> </tr> <tr> <th>Request URL:</th> <td>{{ request.get_raw_uri|escape }}</td> </tr> {% endif %} <tr> <th>Django Version:</th> <td>{{ django_version_info }}</td> </tr> {% if exception_type %} <tr> <th>Exception Type:</th> <td>{{ exception_type }}</td> </tr> {% endif %} {% if exception_type and exception_value %} <tr> <th>Exception Value:</th> <td><pre>{{ exception_value|force_escape }}</pre></td> </tr> {% endif %} {% if lastframe %} <tr> <th>Exception Location:</th> <td>{{ lastframe.filename|escape }} in {{ lastframe.function|escape }}, line {{ lastframe.lineno }}</td> </tr> {% endif %} <tr> <th>Python Executable:</th> <td>{{ sys_executable|escape }}</td> </tr> <tr> <th>Python Version:</th> <td>{{ sys_version_info }}</td> </tr> <tr> <th>Python Path:</th> <td><pre>{{ sys_path|pprint }}</pre></td> </tr> <tr> <th>Server time:</th> <td>{{server_time|date:"r"}}</td> </tr> </table> </div> {% if unicode_hint %} <div id="unicode-hint"> <h2>Unicode error hint</h2> <p>The string that could not be encoded/decoded was: <strong>{{ unicode_hint|force_escape }}</strong></p> </div> {% endif %} {% if template_does_not_exist %} <div id="template-not-exist"> <h2>Template-loader postmortem</h2> {% if postmortem %} <p class="append-bottom">Django tried loading these templates, in this order:</p> {% for entry in postmortem %} <p class="postmortem-section">Using engine <code>{{ entry.backend.name }}</code>:</p> <ul> {% if entry.tried %} {% for 
attempt in entry.tried %} <li><code>{{ attempt.0.loader_name }}</code>: {{ attempt.0.name }} ({{ attempt.1 }})</li> {% endfor %} {% else %} <li>This engine did not provide a list of tried templates.</li> {% endif %} </ul> {% endfor %} {% else %} <p>No templates were found because your 'TEMPLATES' setting is not configured.</p> {% endif %} </div> {% endif %} {% if template_info %} <div id="template"> <h2>Error during template rendering</h2> <p>In template <code>{{ template_info.name }}</code>, error at line <strong>{{ template_info.line }}</strong></p> <h3>{{ template_info.message }}</h3> <table class="source{% if template_info.top %} cut-top{% endif %} {% if template_info.bottom != template_info.total %} cut-bottom{% endif %}"> {% for source_line in template_info.source_lines %} {% if source_line.0 == template_info.line %} <tr class="error"><th>{{ source_line.0 }}</th> <td>{{ template_info.before }}""" """<span class="specific">{{ template_info.during }}</span>""" """{{ template_info.after }}</td> </tr> {% else %} <tr><th>{{ source_line.0 }}</th> <td>{{ source_line.1 }}</td></tr> {% endif %} {% endfor %} </table> </div> {% endif %} {% if frames %} <div id="traceback"> <h2>Traceback <span class="commands">{% if not is_email %}<a href="#" onclick="return switchPastebinFriendly(this);"> Switch to copy-and-paste view</a></span>{% endif %} </h2> {% autoescape off %} <div id="browserTraceback"> <ul class="traceback"> {% for frame in frames %} {% ifchanged frame.exc_cause %}{% if frame.exc_cause %} <li><h3> {% if frame.exc_cause_explicit %} The above exception ({{ frame.exc_cause }}) was the direct cause of the following exception: {% else %} During handling of the above exception ({{ frame.exc_cause }}), another exception occurred: {% endif %} </h3></li> {% endif %}{% endifchanged %} <li class="frame {{ frame.type }}"> <code>{{ frame.filename|escape }}</code> in <code>{{ frame.function|escape }}</code> {% if frame.context_line %} <div class="context" id="c{{ frame.id 
}}"> {% if frame.pre_context and not is_email %} <ol start="{{ frame.pre_context_lineno }}" class="pre-context" id="pre{{ frame.id }}"> {% for line in frame.pre_context %} <li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')"><pre>{{ line|escape }}</pre></li> {% endfor %} </ol> {% endif %} <ol start="{{ frame.lineno }}" class="context-line"> <li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')"><pre> """ """{{ frame.context_line|escape }}</pre>{% if not is_email %} <span>...</span>{% endif %}</li></ol> {% if frame.post_context and not is_email %} <ol start='{{ frame.lineno|add:"1" }}' class="post-context" id="post{{ frame.id }}"> {% for line in frame.post_context %} <li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')"><pre>{{ line|escape }}</pre></li> {% endfor %} </ol> {% endif %} </div> {% endif %} {% if frame.vars %} <div class="commands"> {% if is_email %} <h2>Local Vars</h2> {% else %} <a href="#" onclick="return varToggle(this, '{{ frame.id }}')"><span>&#x25b6;</span> Local vars</a> {% endif %} </div> <table class="vars" id="v{{ frame.id }}"> <thead> <tr> <th>Variable</th> <th>Value</th> </tr> </thead> <tbody> {% for var in frame.vars|dictsort:0 %} <tr> <td>{{ var.0|force_escape }}</td> <td class="code"><pre>{{ var.1 }}</pre></td> </tr> {% endfor %} </tbody> </table> {% endif %} </li> {% endfor %} </ul> </div> {% endautoescape %} <form action="http://dpaste.com/" name="pasteform" id="pasteform" method="post"> {% if not is_email %} <div id="pastebinTraceback" class="pastebin"> <input type="hidden" name="language" value="PythonConsole"> <input type="hidden" name="title" value="{{ exception_type|escape }}{% if request %} at {{ request.path_info|escape }}{% endif %}"> <input type="hidden" name="source" value="Django Dpaste Agent"> <input type="hidden" name="poster" value="Django"> <textarea name="content" id="traceback_area" cols="140" rows="25"> Environment: {% if request %} Request Method: {{ request.META.REQUEST_METHOD }} Request 
URL: {{ request.get_raw_uri|escape }} {% endif %} Django Version: {{ django_version_info }} Python Version: {{ sys_version_info }} Installed Applications: {{ settings.INSTALLED_APPS|pprint }} Installed Middleware: {% if settings.MIDDLEWARE is not None %}{{ settings.MIDDLEWARE|pprint }}""" """{% else %}{{ settings.MIDDLEWARE_CLASSES|pprint }}{% endif %} {% if template_does_not_exist %}Template loader postmortem {% if postmortem %}Django tried loading these templates, in this order: {% for entry in postmortem %} Using engine {{ entry.backend.name }}: {% if entry.tried %}{% for attempt in entry.tried %}""" """ * {{ attempt.0.loader_name }}: {{ attempt.0.name }} ({{ attempt.1 }}) {% endfor %}{% else %} This engine did not provide a list of tried templates. {% endif %}{% endfor %} {% else %}No templates were found because your 'TEMPLATES' setting is not configured. {% endif %}{% endif %}{% if template_info %} Template error: In template {{ template_info.name }}, error at line {{ template_info.line }} {{ template_info.message }}""" "{% for source_line in template_info.source_lines %}" "{% if source_line.0 == template_info.line %}" " {{ source_line.0 }} : {{ template_info.before }} {{ template_info.during }} {{ template_info.after }}" "{% else %}" " {{ source_line.0 }} : {{ source_line.1 }}" """{% endif %}{% endfor %}{% endif %} Traceback:{% for frame in frames %} {% ifchanged frame.exc_cause %}{% if frame.exc_cause %}{% if frame.exc_cause_explicit %} The above exception ({{ frame.exc_cause }}) was the direct cause of the following exception: {% else %} During handling of the above exception ({{ frame.exc_cause }}), another exception occurred: {% endif %}{% endif %}{% endifchanged %} File "{{ frame.filename|escape }}" in {{ frame.function|escape }} {% if frame.context_line %} {{ frame.lineno }}. 
{{ frame.context_line|escape }}{% endif %}{% endfor %} Exception Type: {{ exception_type|escape }}{% if request %} at {{ request.path_info|escape }}{% endif %} Exception Value: {{ exception_value|force_escape }} </textarea> <br><br> <input type="submit" value="Share this traceback on a public website"> </div> </form> </div> {% endif %} {% endif %} <div id="requestinfo"> <h2>Request information</h2> {% if request %} {% if user_str %} <h3 id="user-info">USER</h3> <p>{{ user_str }}</p> {% endif %} <h3 id="get-info">GET</h3> {% if request.GET %} <table class="req"> <thead> <tr> <th>Variable</th> <th>Value</th> </tr> </thead> <tbody> {% for var in request.GET.items %} <tr> <td>{{ var.0 }}</td> <td class="code"><pre>{{ var.1|pprint }}</pre></td> </tr> {% endfor %} </tbody> </table> {% else %} <p>No GET data</p> {% endif %} <h3 id="post-info">POST</h3> {% if filtered_POST %} <table class="req"> <thead> <tr> <th>Variable</th> <th>Value</th> </tr> </thead> <tbody> {% for var in filtered_POST.items %} <tr> <td>{{ var.0 }}</td> <td class="code"><pre>{{ var.1|pprint }}</pre></td> </tr> {% endfor %} </tbody> </table> {% else %} <p>No POST data</p> {% endif %} <h3 id="files-info">FILES</h3> {% if request.FILES %} <table class="req"> <thead> <tr> <th>Variable</th> <th>Value</th> </tr> </thead> <tbody> {% for var in request.FILES.items %} <tr> <td>{{ var.0 }}</td> <td class="code"><pre>{{ var.1|pprint }}</pre></td> </tr> {% endfor %} </tbody> </table> {% else %} <p>No FILES data</p> {% endif %} <h3 id="cookie-info">COOKIES</h3> {% if request.COOKIES %} <table class="req"> <thead> <tr> <th>Variable</th> <th>Value</th> </tr> </thead> <tbody> {% for var in request.COOKIES.items %} <tr> <td>{{ var.0 }}</td> <td class="code"><pre>{{ var.1|pprint }}</pre></td> </tr> {% endfor %} </tbody> </table> {% else %} <p>No cookie data</p> {% endif %} <h3 id="meta-info">META</h3> <table class="req"> <thead> <tr> <th>Variable</th> <th>Value</th> </tr> </thead> <tbody> {% for var in 
request.META.items|dictsort:0 %} <tr> <td>{{ var.0 }}</td> <td class="code"><pre>{{ var.1|pprint }}</pre></td> </tr> {% endfor %} </tbody> </table> {% else %} <p>Request data not supplied</p> {% endif %} <h3 id="settings-info">Settings</h3> <h4>Using settings module <code>{{ settings.SETTINGS_MODULE }}</code></h4> <table class="req"> <thead> <tr> <th>Setting</th> <th>Value</th> </tr> </thead> <tbody> {% for var in settings.items|dictsort:0 %} <tr> <td>{{ var.0 }}</td> <td class="code"><pre>{{ var.1|pprint }}</pre></td> </tr> {% endfor %} </tbody> </table> </div> {% if not is_email %} <div id="explanation"> <p> You're seeing this error because you have <code>DEBUG = True</code> in your Django settings file. Change that to <code>False</code>, and Django will display a standard page generated by the handler for this status code. </p> </div> {% endif %} </body> </html> """) # NOQA TECHNICAL_500_TEXT_TEMPLATE = ("""""" """{% firstof exception_type 'Report' %}{% if request %} at {{ request.path_info }}{% endif %} {% firstof exception_value 'No exception message supplied' %} {% if request %} Request Method: {{ request.META.REQUEST_METHOD }} Request URL: {{ request.get_raw_uri }}{% endif %} Django Version: {{ django_version_info }} Python Executable: {{ sys_executable }} Python Version: {{ sys_version_info }} Python Path: {{ sys_path }} Server time: {{server_time|date:"r"}} Installed Applications: {{ settings.INSTALLED_APPS|pprint }} Installed Middleware: {% if settings.MIDDLEWARE is not None %}{{ settings.MIDDLEWARE|pprint }}""" """{% else %}{{ settings.MIDDLEWARE_CLASSES|pprint }}{% endif %} {% if template_does_not_exist %}Template loader postmortem {% if postmortem %}Django tried loading these templates, in this order: {% for entry in postmortem %} Using engine {{ entry.backend.name }}: {% if entry.tried %}{% for attempt in entry.tried %}""" """ * {{ attempt.0.loader_name }}: {{ attempt.0.name }} ({{ attempt.1 }}) {% endfor %}{% else %} This engine did not provide a 
list of tried templates. {% endif %}{% endfor %} {% else %}No templates were found because your 'TEMPLATES' setting is not configured. {% endif %} {% endif %}{% if template_info %} Template error: In template {{ template_info.name }}, error at line {{ template_info.line }} {{ template_info.message }} {% for source_line in template_info.source_lines %}""" "{% if source_line.0 == template_info.line %}" " {{ source_line.0 }} : {{ template_info.before }} {{ template_info.during }} {{ template_info.after }}" "{% else %}" " {{ source_line.0 }} : {{ source_line.1 }}" """{% endif %}{% endfor %}{% endif %}{% if frames %} Traceback:""" "{% for frame in frames %}" "{% ifchanged frame.exc_cause %}" " {% if frame.exc_cause %}" """ {% if frame.exc_cause_explicit %} The above exception ({{ frame.exc_cause }}) was the direct cause of the following exception: {% else %} During handling of the above exception ({{ frame.exc_cause }}), another exception occurred: {% endif %} {% endif %} {% endifchanged %} File "{{ frame.filename }}" in {{ frame.function }} {% if frame.context_line %} {{ frame.lineno }}. 
{{ frame.context_line }}{% endif %} {% endfor %} {% if exception_type %}Exception Type: {{ exception_type }}{% if request %} at {{ request.path_info }}{% endif %} {% if exception_value %}Exception Value: {{ exception_value }}{% endif %}{% endif %}{% endif %} {% if request %}Request information: {% if user_str %}USER: {{ user_str }}{% endif %} GET:{% for k, v in request.GET.items %} {{ k }} = {{ v|stringformat:"r" }}{% empty %} No GET data{% endfor %} POST:{% for k, v in filtered_POST.items %} {{ k }} = {{ v|stringformat:"r" }}{% empty %} No POST data{% endfor %} FILES:{% for k, v in request.FILES.items %} {{ k }} = {{ v|stringformat:"r" }}{% empty %} No FILES data{% endfor %} COOKIES:{% for k, v in request.COOKIES.items %} {{ k }} = {{ v|stringformat:"r" }}{% empty %} No cookie data{% endfor %} META:{% for k, v in request.META.items|dictsort:0 %} {{ k }} = {{ v|stringformat:"r" }}{% endfor %} {% else %}Request data not supplied {% endif %} Settings: Using settings module {{ settings.SETTINGS_MODULE }}{% for k, v in settings.items|dictsort:0 %} {{ k }} = {{ v|stringformat:"r" }}{% endfor %} {% if not is_email %} You're seeing this error because you have DEBUG = True in your Django settings file. Change that to False, and Django will display a standard page generated by the handler for this status code. 
{% endif %} """) # NOQA TECHNICAL_404_TEMPLATE = """ <!DOCTYPE html> <html lang="en"> <head> <meta http-equiv="content-type" content="text/html; charset=utf-8"> <title>Page not found at {{ request.path_info|escape }}</title> <meta name="robots" content="NONE,NOARCHIVE"> <style type="text/css"> html * { padding:0; margin:0; } body * { padding:10px 20px; } body * * { padding:0; } body { font:small sans-serif; background:#eee; } body>div { border-bottom:1px solid #ddd; } h1 { font-weight:normal; margin-bottom:.4em; } h1 span { font-size:60%; color:#666; font-weight:normal; } table { border:none; border-collapse: collapse; width:100%; } td, th { vertical-align:top; padding:2px 3px; } th { width:12em; text-align:right; color:#666; padding-right:.5em; } #info { background:#f6f6f6; } #info ol { margin: 0.5em 4em; } #info ol li { font-family: monospace; } #summary { background: #ffc; } #explanation { background:#eee; border-bottom: 0px none; } </style> </head> <body> <div id="summary"> <h1>Page not found <span>(404)</span></h1> <table class="meta"> <tr> <th>Request Method:</th> <td>{{ request.META.REQUEST_METHOD }}</td> </tr> <tr> <th>Request URL:</th> <td>{{ request.build_absolute_uri|escape }}</td> </tr> {% if raising_view_name %} <tr> <th>Raised by:</th> <td>{{ raising_view_name }}</td> </tr> {% endif %} </table> </div> <div id="info"> {% if urlpatterns %} <p> Using the URLconf defined in <code>{{ urlconf }}</code>, Django tried these URL patterns, in this order: </p> <ol> {% for pattern in urlpatterns %} <li> {% for pat in pattern %} {{ pat.regex.pattern }} {% if forloop.last and pat.name %}[name='{{ pat.name }}']{% endif %} {% endfor %} </li> {% endfor %} </ol> <p>The current URL, <code>{{ request_path|escape }}</code>, didn't match any of these.</p> {% else %} <p>{{ reason }}</p> {% endif %} </div> <div id="explanation"> <p> You're seeing this error because you have <code>DEBUG = True</code> in your Django settings file. 
Change that to <code>False</code>, and Django will display a standard 404 page. </p> </div> </body> </html> """ DEFAULT_URLCONF_TEMPLATE = """ <!DOCTYPE html> <html lang="en"><head> <meta http-equiv="content-type" content="text/html; charset=utf-8"> <meta name="robots" content="NONE,NOARCHIVE"><title>{{ title }}</title> <style type="text/css"> html * { padding:0; margin:0; } body * { padding:10px 20px; } body * * { padding:0; } body { font:small sans-serif; } body>div { border-bottom:1px solid #ddd; } h1 { font-weight:normal; } h2 { margin-bottom:.8em; } h2 span { font-size:80%; color:#666; font-weight:normal; } h3 { margin:1em 0 .5em 0; } h4 { margin:0 0 .5em 0; font-weight: normal; } table { border:1px solid #ccc; border-collapse: collapse; width:100%; background:white; } tbody td, tbody th { vertical-align:top; padding:2px 3px; } thead th { padding:1px 6px 1px 3px; background:#fefefe; text-align:left; font-weight:normal; font-size:11px; border:1px solid #ddd; } tbody th { width:12em; text-align:right; color:#666; padding-right:.5em; } #summary { background: #e0ebff; } #summary h2 { font-weight: normal; color: #666; } #explanation { background:#eee; } #instructions { background:#f6f6f6; } #summary table { border:none; background:transparent; } </style> </head> <body> <div id="summary"> <h1>{{ heading }}</h1> <h2>{{ subheading }}</h2> </div> <div id="instructions"> <p> {{ instructions|safe }} </p> </div> <div id="explanation"> <p> {{ explanation|safe }} </p> </div> </body></html> """
isyippee/ceilometer
refs/heads/master
ceilometer/storage/sqlalchemy/migrate_repo/versions/006_counter_volume_is_float.py
9
# -*- coding: utf-8 -*-
#
# Copyright 2013 eNovance SAS <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from sqlalchemy import Float
from sqlalchemy import MetaData
from sqlalchemy import Table


def upgrade(migrate_engine):
    """Widen meter.counter_volume to a floating-point column.

    Reflects the existing ``meter`` table from the live database and
    alters its ``counter_volume`` column to ``Float(53)`` so fractional
    sample volumes can be stored (53 is the precision argument passed to
    the backend's float type — on most backends this maps to a double).
    """
    # Bind the metadata to the engine so that reflection (autoload) and
    # the ALTER statement both run against the database being migrated.
    metadata = MetaData(bind=migrate_engine)
    meter_table = Table('meter', metadata, autoload=True)
    meter_table.c.counter_volume.alter(type=Float(53))
vitaly4uk/django
refs/heads/master
tests/forms_tests/tests/test_fields.py
134
# -*- coding: utf-8 -*- """ ########## # Fields # ########## Each Field class does some sort of validation. Each Field has a clean() method, which either raises django.forms.ValidationError or returns the "clean" data -- usually a Unicode object, but, in some rare cases, a list. Each Field's __init__() takes at least these parameters: required -- Boolean that specifies whether the field is required. True by default. widget -- A Widget class, or instance of a Widget class, that should be used for this Field when displaying it. Each Field has a default Widget that it'll use if you don't specify this. In most cases, the default widget is TextInput. label -- A verbose name for this field, for use in displaying this field in a form. By default, Django will use a "pretty" version of the form field name, if the Field is part of a Form. initial -- A value to use in this Field's initial display. This value is *not* used as a fallback if data isn't given. Other than that, the Field subclasses have class-specific options for __init__(). For example, CharField has a max_length option. 
""" from __future__ import unicode_literals import datetime import os import pickle import re import uuid from decimal import Decimal from unittest import skipIf from django.core.files.uploadedfile import SimpleUploadedFile from django.forms import ( BooleanField, CharField, ChoiceField, ComboField, DateField, DateTimeField, DecimalField, DurationField, EmailField, Field, FileField, FilePathField, FloatField, Form, GenericIPAddressField, HiddenInput, ImageField, IntegerField, MultipleChoiceField, NullBooleanField, NumberInput, PasswordInput, RadioSelect, RegexField, SlugField, SplitDateTimeField, Textarea, TextInput, TimeField, TypedChoiceField, TypedMultipleChoiceField, URLField, UUIDField, ValidationError, Widget, forms, ) from django.test import SimpleTestCase, ignore_warnings from django.utils import formats, six, translation from django.utils._os import upath from django.utils.deprecation import RemovedInDjango110Warning from django.utils.duration import duration_string try: from PIL import Image except ImportError: Image = None def fix_os_paths(x): if isinstance(x, six.string_types): return x.replace('\\', '/') elif isinstance(x, tuple): return tuple(fix_os_paths(list(x))) elif isinstance(x, list): return [fix_os_paths(y) for y in x] else: return x class FieldsTests(SimpleTestCase): def assertWidgetRendersTo(self, field, to): class _Form(Form): f = field self.assertHTMLEqual(str(_Form()['f']), to) def test_field_sets_widget_is_required(self): self.assertTrue(Field(required=True).widget.is_required) self.assertFalse(Field(required=False).widget.is_required) def test_cooperative_multiple_inheritance(self): class A(object): def __init__(self): self.class_a_var = True super(A, self).__init__() class ComplexField(Field, A): def __init__(self): super(ComplexField, self).__init__() f = ComplexField() self.assertTrue(f.class_a_var) # CharField ################################################################### def test_charfield_1(self): f = CharField() 
self.assertEqual('1', f.clean(1)) self.assertEqual('hello', f.clean('hello')) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertEqual('[1, 2, 3]', f.clean([1, 2, 3])) self.assertEqual(f.max_length, None) self.assertEqual(f.min_length, None) def test_charfield_2(self): f = CharField(required=False) self.assertEqual('1', f.clean(1)) self.assertEqual('hello', f.clean('hello')) self.assertEqual('', f.clean(None)) self.assertEqual('', f.clean('')) self.assertEqual('[1, 2, 3]', f.clean([1, 2, 3])) self.assertEqual(f.max_length, None) self.assertEqual(f.min_length, None) def test_charfield_3(self): f = CharField(max_length=10, required=False) self.assertEqual('12345', f.clean('12345')) self.assertEqual('1234567890', f.clean('1234567890')) self.assertRaisesMessage(ValidationError, "'Ensure this value has at most 10 characters (it has 11).'", f.clean, '1234567890a') self.assertEqual(f.max_length, 10) self.assertEqual(f.min_length, None) def test_charfield_4(self): f = CharField(min_length=10, required=False) self.assertEqual('', f.clean('')) self.assertRaisesMessage(ValidationError, "'Ensure this value has at least 10 characters (it has 5).'", f.clean, '12345') self.assertEqual('1234567890', f.clean('1234567890')) self.assertEqual('1234567890a', f.clean('1234567890a')) self.assertEqual(f.max_length, None) self.assertEqual(f.min_length, 10) def test_charfield_5(self): f = CharField(min_length=10, required=True) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'Ensure this value has at least 10 characters (it has 5).'", f.clean, '12345') self.assertEqual('1234567890', f.clean('1234567890')) self.assertEqual('1234567890a', f.clean('1234567890a')) self.assertEqual(f.max_length, None) self.assertEqual(f.min_length, 10) def test_charfield_length_not_int(self): """ Ensure 
that setting min_length or max_length to something that is not a number returns an exception. """ self.assertRaises(ValueError, CharField, min_length='a') self.assertRaises(ValueError, CharField, max_length='a') self.assertRaises(ValueError, CharField, 'a') def test_charfield_widget_attrs(self): """ Ensure that CharField.widget_attrs() always returns a dictionary. Refs #15912 """ # Return an empty dictionary if max_length is None f = CharField() self.assertEqual(f.widget_attrs(TextInput()), {}) self.assertEqual(f.widget_attrs(Textarea()), {}) # Otherwise, return a maxlength attribute equal to max_length f = CharField(max_length=10) self.assertEqual(f.widget_attrs(TextInput()), {'maxlength': '10'}) self.assertEqual(f.widget_attrs(PasswordInput()), {'maxlength': '10'}) self.assertEqual(f.widget_attrs(Textarea()), {'maxlength': '10'}) def test_charfield_strip(self): """ Ensure that values have whitespace stripped and that strip=False works. """ f = CharField() self.assertEqual(f.clean(' 1'), '1') self.assertEqual(f.clean('1 '), '1') f = CharField(strip=False) self.assertEqual(f.clean(' 1'), ' 1') self.assertEqual(f.clean('1 '), '1 ') def test_charfield_disabled(self): f = CharField(disabled=True) self.assertWidgetRendersTo(f, '<input type="text" name="f" id="id_f" disabled />') # IntegerField ################################################################ def test_integerfield_1(self): f = IntegerField() self.assertWidgetRendersTo(f, '<input type="number" name="f" id="id_f" />') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertEqual(1, f.clean('1')) self.assertIsInstance(f.clean('1'), int) self.assertEqual(23, f.clean('23')) self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, 'a') self.assertEqual(42, f.clean(42)) self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, 3.14) 
self.assertEqual(1, f.clean('1 ')) self.assertEqual(1, f.clean(' 1')) self.assertEqual(1, f.clean(' 1 ')) self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, '1a') self.assertEqual(f.max_value, None) self.assertEqual(f.min_value, None) def test_integerfield_2(self): f = IntegerField(required=False) self.assertIsNone(f.clean('')) self.assertEqual('None', repr(f.clean(''))) self.assertIsNone(f.clean(None)) self.assertEqual('None', repr(f.clean(None))) self.assertEqual(1, f.clean('1')) self.assertIsInstance(f.clean('1'), int) self.assertEqual(23, f.clean('23')) self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, 'a') self.assertEqual(1, f.clean('1 ')) self.assertEqual(1, f.clean(' 1')) self.assertEqual(1, f.clean(' 1 ')) self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, '1a') self.assertEqual(f.max_value, None) self.assertEqual(f.min_value, None) def test_integerfield_3(self): f = IntegerField(max_value=10) self.assertWidgetRendersTo(f, '<input max="10" type="number" name="f" id="id_f" />') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertEqual(1, f.clean(1)) self.assertEqual(10, f.clean(10)) self.assertRaisesMessage(ValidationError, "'Ensure this value is less than or equal to 10.'", f.clean, 11) self.assertEqual(10, f.clean('10')) self.assertRaisesMessage(ValidationError, "'Ensure this value is less than or equal to 10.'", f.clean, '11') self.assertEqual(f.max_value, 10) self.assertEqual(f.min_value, None) def test_integerfield_4(self): f = IntegerField(min_value=10) self.assertWidgetRendersTo(f, '<input id="id_f" type="number" name="f" min="10" />') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertRaisesMessage(ValidationError, "'Ensure this value is greater than or equal to 10.'", f.clean, 1) self.assertEqual(10, f.clean(10)) self.assertEqual(11, f.clean(11)) self.assertEqual(10, 
f.clean('10')) self.assertEqual(11, f.clean('11')) self.assertEqual(f.max_value, None) self.assertEqual(f.min_value, 10) def test_integerfield_5(self): f = IntegerField(min_value=10, max_value=20) self.assertWidgetRendersTo(f, '<input id="id_f" max="20" type="number" name="f" min="10" />') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertRaisesMessage(ValidationError, "'Ensure this value is greater than or equal to 10.'", f.clean, 1) self.assertEqual(10, f.clean(10)) self.assertEqual(11, f.clean(11)) self.assertEqual(10, f.clean('10')) self.assertEqual(11, f.clean('11')) self.assertEqual(20, f.clean(20)) self.assertRaisesMessage(ValidationError, "'Ensure this value is less than or equal to 20.'", f.clean, 21) self.assertEqual(f.max_value, 20) self.assertEqual(f.min_value, 10) def test_integerfield_localized(self): """ Make sure localized IntegerField's widget renders to a text input with no number input specific attributes. """ f1 = IntegerField(localize=True) self.assertWidgetRendersTo(f1, '<input id="id_f" name="f" type="text" />') def test_integerfield_float(self): f = IntegerField() self.assertEqual(1, f.clean(1.0)) self.assertEqual(1, f.clean('1.0')) self.assertEqual(1, f.clean(' 1.0 ')) self.assertEqual(1, f.clean('1.')) self.assertEqual(1, f.clean(' 1. ')) self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, '1.5') self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, '…') def test_integerfield_big_num(self): f = IntegerField() self.assertEqual(9223372036854775808, f.clean(9223372036854775808)) self.assertEqual(9223372036854775808, f.clean('9223372036854775808')) self.assertEqual(9223372036854775808, f.clean('9223372036854775808.0')) def test_integerfield_subclass(self): """ Test that class-defined widget is not overwritten by __init__ (#22245). 
""" class MyIntegerField(IntegerField): widget = Textarea f = MyIntegerField() self.assertEqual(f.widget.__class__, Textarea) f = MyIntegerField(localize=True) self.assertEqual(f.widget.__class__, Textarea) # FloatField ################################################################## def test_floatfield_1(self): f = FloatField() self.assertWidgetRendersTo(f, '<input step="any" type="number" name="f" id="id_f" />') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertEqual(1.0, f.clean('1')) self.assertIsInstance(f.clean('1'), float) self.assertEqual(23.0, f.clean('23')) self.assertEqual(3.1400000000000001, f.clean('3.14')) self.assertEqual(3.1400000000000001, f.clean(3.14)) self.assertEqual(42.0, f.clean(42)) self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'a') self.assertEqual(1.0, f.clean('1.0 ')) self.assertEqual(1.0, f.clean(' 1.0')) self.assertEqual(1.0, f.clean(' 1.0 ')) self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, '1.0a') self.assertEqual(f.max_value, None) self.assertEqual(f.min_value, None) self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'Infinity') self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'NaN') self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, '-Inf') def test_floatfield_2(self): f = FloatField(required=False) self.assertIsNone(f.clean('')) self.assertIsNone(f.clean(None)) self.assertEqual(1.0, f.clean('1')) self.assertEqual(f.max_value, None) self.assertEqual(f.min_value, None) def test_floatfield_3(self): f = FloatField(max_value=1.5, min_value=0.5) self.assertWidgetRendersTo(f, '<input step="any" name="f" min="0.5" max="1.5" type="number" id="id_f" />') self.assertRaisesMessage(ValidationError, "'Ensure this value is less than or equal to 1.5.'", f.clean, '1.6') 
self.assertRaisesMessage(ValidationError, "'Ensure this value is greater than or equal to 0.5.'", f.clean, '0.4') self.assertEqual(1.5, f.clean('1.5')) self.assertEqual(0.5, f.clean('0.5')) self.assertEqual(f.max_value, 1.5) self.assertEqual(f.min_value, 0.5) def test_floatfield_widget_attrs(self): f = FloatField(widget=NumberInput(attrs={'step': 0.01, 'max': 1.0, 'min': 0.0})) self.assertWidgetRendersTo(f, '<input step="0.01" name="f" min="0.0" max="1.0" type="number" id="id_f" />') def test_floatfield_localized(self): """ Make sure localized FloatField's widget renders to a text input with no number input specific attributes. """ f = FloatField(localize=True) self.assertWidgetRendersTo(f, '<input id="id_f" name="f" type="text" />') def test_floatfield_changed(self): f = FloatField() n = 4.35 self.assertFalse(f.has_changed(n, '4.3500')) with translation.override('fr'), self.settings(USE_L10N=True): f = FloatField(localize=True) localized_n = formats.localize_input(n) # -> '4,35' in French self.assertFalse(f.has_changed(n, localized_n)) # DecimalField ################################################################ def test_decimalfield_1(self): f = DecimalField(max_digits=4, decimal_places=2) self.assertWidgetRendersTo(f, '<input id="id_f" step="0.01" type="number" name="f" />') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertEqual(f.clean('1'), Decimal("1")) self.assertIsInstance(f.clean('1'), Decimal) self.assertEqual(f.clean('23'), Decimal("23")) self.assertEqual(f.clean('3.14'), Decimal("3.14")) self.assertEqual(f.clean(3.14), Decimal("3.14")) self.assertEqual(f.clean(Decimal('3.14')), Decimal("3.14")) self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'NaN') self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'Inf') self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 
'-Inf') self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'a') self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'łąść') self.assertEqual(f.clean('1.0 '), Decimal("1.0")) self.assertEqual(f.clean(' 1.0'), Decimal("1.0")) self.assertEqual(f.clean(' 1.0 '), Decimal("1.0")) self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, '1.0a') self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 4 digits in total.'", f.clean, '123.45') self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 2 decimal places.'", f.clean, '1.234') self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 2 digits before the decimal point.'", f.clean, '123.4') self.assertEqual(f.clean('-12.34'), Decimal("-12.34")) self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 4 digits in total.'", f.clean, '-123.45') self.assertEqual(f.clean('-.12'), Decimal("-0.12")) self.assertEqual(f.clean('-00.12'), Decimal("-0.12")) self.assertEqual(f.clean('-000.12'), Decimal("-0.12")) self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 2 decimal places.'", f.clean, '-000.123') self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 4 digits in total.'", f.clean, '-000.12345') self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, '--0.12') self.assertEqual(f.max_digits, 4) self.assertEqual(f.decimal_places, 2) self.assertEqual(f.max_value, None) self.assertEqual(f.min_value, None) def test_decimalfield_2(self): f = DecimalField(max_digits=4, decimal_places=2, required=False) self.assertIsNone(f.clean('')) self.assertIsNone(f.clean(None)) self.assertEqual(f.clean('1'), Decimal("1")) self.assertEqual(f.max_digits, 4) self.assertEqual(f.decimal_places, 2) self.assertEqual(f.max_value, None) self.assertEqual(f.min_value, None) def test_decimalfield_3(self): f = DecimalField(max_digits=4, 
decimal_places=2, max_value=Decimal('1.5'), min_value=Decimal('0.5')) self.assertWidgetRendersTo(f, '<input step="0.01" name="f" min="0.5" max="1.5" type="number" id="id_f" />') self.assertRaisesMessage(ValidationError, "'Ensure this value is less than or equal to 1.5.'", f.clean, '1.6') self.assertRaisesMessage(ValidationError, "'Ensure this value is greater than or equal to 0.5.'", f.clean, '0.4') self.assertEqual(f.clean('1.5'), Decimal("1.5")) self.assertEqual(f.clean('0.5'), Decimal("0.5")) self.assertEqual(f.clean('.5'), Decimal("0.5")) self.assertEqual(f.clean('00.50'), Decimal("0.50")) self.assertEqual(f.max_digits, 4) self.assertEqual(f.decimal_places, 2) self.assertEqual(f.max_value, Decimal('1.5')) self.assertEqual(f.min_value, Decimal('0.5')) def test_decimalfield_4(self): f = DecimalField(decimal_places=2) self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 2 decimal places.'", f.clean, '0.00000001') def test_decimalfield_5(self): f = DecimalField(max_digits=3) # Leading whole zeros "collapse" to one digit. self.assertEqual(f.clean('0000000.10'), Decimal("0.1")) # But a leading 0 before the . doesn't count towards max_digits self.assertEqual(f.clean('0000000.100'), Decimal("0.100")) # Only leading whole zeros "collapse" to one digit. 
self.assertEqual(f.clean('000000.02'), Decimal('0.02')) self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 3 digits in total.'", f.clean, '000000.0002') self.assertEqual(f.clean('.002'), Decimal("0.002")) def test_decimalfield_6(self): f = DecimalField(max_digits=2, decimal_places=2) self.assertEqual(f.clean('.01'), Decimal(".01")) self.assertRaisesMessage(ValidationError, "'Ensure that there are no more than 0 digits before the decimal point.'", f.clean, '1.1') def test_decimalfield_scientific(self): f = DecimalField(max_digits=2, decimal_places=2) self.assertEqual(f.clean('1E+2'), Decimal('1E+2')) self.assertEqual(f.clean('1e+2'), Decimal('1E+2')) with self.assertRaisesMessage(ValidationError, "Ensure that there are no more"): f.clean('0.546e+2') def test_decimalfield_widget_attrs(self): f = DecimalField(max_digits=6, decimal_places=2) self.assertEqual(f.widget_attrs(Widget()), {}) self.assertEqual(f.widget_attrs(NumberInput()), {'step': '0.01'}) f = DecimalField(max_digits=10, decimal_places=0) self.assertEqual(f.widget_attrs(NumberInput()), {'step': '1'}) f = DecimalField(max_digits=19, decimal_places=19) self.assertEqual(f.widget_attrs(NumberInput()), {'step': '1e-19'}) f = DecimalField(max_digits=20) self.assertEqual(f.widget_attrs(NumberInput()), {'step': 'any'}) f = DecimalField(max_digits=6, widget=NumberInput(attrs={'step': '0.01'})) self.assertWidgetRendersTo(f, '<input step="0.01" name="f" type="number" id="id_f" />') def test_decimalfield_localized(self): """ Make sure localized DecimalField's widget renders to a text input with no number input specific attributes. 
""" f = DecimalField(localize=True) self.assertWidgetRendersTo(f, '<input id="id_f" name="f" type="text" />') def test_decimalfield_changed(self): f = DecimalField(max_digits=2, decimal_places=2) d = Decimal("0.1") self.assertFalse(f.has_changed(d, '0.10')) self.assertTrue(f.has_changed(d, '0.101')) with translation.override('fr'), self.settings(USE_L10N=True): f = DecimalField(max_digits=2, decimal_places=2, localize=True) localized_d = formats.localize_input(d) # -> '0,1' in French self.assertFalse(f.has_changed(d, localized_d)) # DateField ################################################################### def test_datefield_1(self): f = DateField() self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.date(2006, 10, 25))) self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.datetime(2006, 10, 25, 14, 30))) self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59))) self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59, 200))) self.assertEqual(datetime.date(2006, 10, 25), f.clean('2006-10-25')) self.assertEqual(datetime.date(2006, 10, 25), f.clean('10/25/2006')) self.assertEqual(datetime.date(2006, 10, 25), f.clean('10/25/06')) self.assertEqual(datetime.date(2006, 10, 25), f.clean('Oct 25 2006')) self.assertEqual(datetime.date(2006, 10, 25), f.clean('October 25 2006')) self.assertEqual(datetime.date(2006, 10, 25), f.clean('October 25, 2006')) self.assertEqual(datetime.date(2006, 10, 25), f.clean('25 October 2006')) self.assertEqual(datetime.date(2006, 10, 25), f.clean('25 October, 2006')) self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '2006-4-31') self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '200a-10-25') self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '25/10/06') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) def test_datefield_2(self): 
        # required=False: empty values clean to None.
        f = DateField(required=False)
        self.assertIsNone(f.clean(None))
        self.assertEqual('None', repr(f.clean(None)))
        self.assertIsNone(f.clean(''))
        self.assertEqual('None', repr(f.clean('')))

    def test_datefield_3(self):
        # Explicit input_formats replaces (not extends) the default list.
        f = DateField(input_formats=['%Y %m %d'])
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.date(2006, 10, 25)))
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.datetime(2006, 10, 25, 14, 30)))
        self.assertEqual(datetime.date(2006, 10, 25), f.clean('2006 10 25'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '2006-10-25')
        self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '10/25/2006')
        self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '10/25/06')

    def test_datefield_4(self):
        # Test whitespace stripping behavior (#5714)
        f = DateField()
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(' 10/25/2006 '))
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(' 10/25/06 '))
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(' Oct 25 2006 '))
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(' October 25 2006 '))
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(' October 25, 2006 '))
        self.assertEqual(datetime.date(2006, 10, 25), f.clean(' 25 October 2006 '))
        self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, ' ')

    def test_datefield_5(self):
        # Test null bytes (#18982)
        f = DateField()
        self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, 'a\x00b')

    @ignore_warnings(category=RemovedInDjango110Warning)  # for _has_changed
    def test_datefield_changed(self):
        format = '%d/%m/%Y'
        f = DateField(input_formats=[format])
        d = datetime.date(2007, 9, 17)
        self.assertFalse(f.has_changed(d, '17/09/2007'))
        # Test for deprecated behavior _has_changed
        self.assertFalse(f._has_changed(d, '17/09/2007'))

    def test_datefield_strptime(self):
        """Test that field.strptime doesn't raise an UnicodeEncodeError (#16123)"""
        f = DateField()
        try:
            f.strptime('31 мая 2011', '%d-%b-%y')
        except Exception as e:
            # assertIsInstance or assertRaises cannot be used because UnicodeEncodeError
            # is a subclass of ValueError
            self.assertEqual(e.__class__, ValueError)

    # TimeField ###################################################################

    def test_timefield_1(self):
        # Default formats: '%H:%M:%S' and '%H:%M'; 12-hour strings rejected.
        f = TimeField()
        self.assertEqual(datetime.time(14, 25), f.clean(datetime.time(14, 25)))
        self.assertEqual(datetime.time(14, 25, 59), f.clean(datetime.time(14, 25, 59)))
        self.assertEqual(datetime.time(14, 25), f.clean('14:25'))
        self.assertEqual(datetime.time(14, 25, 59), f.clean('14:25:59'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, 'hello')
        self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, '1:24 p.m.')

    def test_timefield_2(self):
        # Custom 12-hour format replaces the defaults entirely.
        f = TimeField(input_formats=['%I:%M %p'])
        self.assertEqual(datetime.time(14, 25), f.clean(datetime.time(14, 25)))
        self.assertEqual(datetime.time(14, 25, 59), f.clean(datetime.time(14, 25, 59)))
        self.assertEqual(datetime.time(4, 25), f.clean('4:25 AM'))
        self.assertEqual(datetime.time(16, 25), f.clean('4:25 PM'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, '14:30:45')

    def test_timefield_3(self):
        f = TimeField()
        # Test whitespace stripping behavior (#5714)
        self.assertEqual(datetime.time(14, 25), f.clean(' 14:25 '))
        self.assertEqual(datetime.time(14, 25, 59), f.clean(' 14:25:59 '))
        self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, ' ')

    def test_timefield_changed(self):
        t1 = datetime.time(12, 51, 34, 482548)
        t2 = datetime.time(12, 51)
        f = TimeField(input_formats=['%H:%M', '%H:%M %p'])
        self.assertTrue(f.has_changed(t1, '12:51'))
        self.assertFalse(f.has_changed(t2, '12:51'))
        self.assertFalse(f.has_changed(t2, '12:51 PM'))

    # DateTimeField ###############################################################

    def test_datetimefield_1(self):
        # Default input formats: objects pass through; many string spellings
        # (ISO-ish and US-style, with and without fractional seconds) accepted.
        f = DateTimeField()
        self.assertEqual(datetime.datetime(2006, 10, 25, 0,
                                           0), f.clean(datetime.date(2006, 10, 25)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean(datetime.datetime(2006, 10, 25, 14, 30)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 59), f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 59, 200), f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59, 200)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45, 200), f.clean('2006-10-25 14:30:45.000200'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45, 200), f.clean('2006-10-25 14:30:45.0002'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean('2006-10-25 14:30:45'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('2006-10-25 14:30:00'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('2006-10-25 14:30'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean('2006-10-25'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45, 200), f.clean('10/25/2006 14:30:45.000200'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean('10/25/2006 14:30:45'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('10/25/2006 14:30:00'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('10/25/2006 14:30'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean('10/25/2006'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45, 200), f.clean('10/25/06 14:30:45.000200'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean('10/25/06 14:30:45'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('10/25/06 14:30:00'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('10/25/06 14:30'))
        self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean('10/25/06'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid date/time.'", f.clean, 'hello')
        self.assertRaisesMessage(ValidationError, "'Enter a valid date/time.'", f.clean, '2006-10-25 4:30 p.m.')

    def test_datetimefield_2(self):
        # Custom input_formats replaces the default list entirely.
        f = DateTimeField(input_formats=['%Y %m %d %I:%M %p'])
        self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean(datetime.date(2006, 10, 25)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean(datetime.datetime(2006, 10, 25, 14, 30)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 59), f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 59, 200), f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59, 200)))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('2006 10 25 2:30 PM'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid date/time.'", f.clean, '2006-10-25 14:30:45')

    def test_datetimefield_3(self):
        # required=False: empty values clean to None.
        f = DateTimeField(required=False)
        self.assertIsNone(f.clean(None))
        self.assertEqual('None', repr(f.clean(None)))
        self.assertIsNone(f.clean(''))
        self.assertEqual('None', repr(f.clean('')))

    def test_datetimefield_4(self):
        f = DateTimeField()
        # Test whitespace stripping behavior (#5714)
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean(' 2006-10-25 14:30:45 '))
        self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean(' 2006-10-25 '))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean(' 10/25/2006 14:30:45 '))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean(' 10/25/2006 14:30 '))
        self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean(' 10/25/2006 '))
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean(' 10/25/06 14:30:45 '))
        self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean(' 10/25/06 '))
        self.assertRaisesMessage(ValidationError, "'Enter a valid date/time.'", f.clean, ' ')

    def test_datetimefield_5(self):
        # Fractional seconds via %f in a custom format.
        f = DateTimeField(input_formats=['%Y.%m.%d %H:%M:%S.%f'])
        self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45, 200), f.clean('2006.10.25 14:30:45.0002'))

    def test_datetimefield_changed(self):
        format = '%Y %m %d %I:%M %p'
        f = DateTimeField(input_formats=[format])
        d = datetime.datetime(2006, 9, 17, 14, 30, 0)
        self.assertFalse(f.has_changed(d, '2006 09 17 2:30 PM'))

    # DurationField ###########################################################

    def test_durationfield_1(self):
        # Accepts 'DD HH:MM:SS.uuuuuu'-style strings at varying precision.
        f = DurationField()
        self.assertEqual(datetime.timedelta(seconds=30), f.clean('30'))
        self.assertEqual(
            datetime.timedelta(minutes=15, seconds=30), f.clean('15:30')
        )
        self.assertEqual(
            datetime.timedelta(hours=1, minutes=15, seconds=30), f.clean('1:15:30')
        )
        self.assertEqual(
            datetime.timedelta(
                days=1, hours=1, minutes=15, seconds=30, milliseconds=300),
            f.clean('1 1:15:30.3')
        )

    def test_durationfield_2(self):
        # Initial timedelta values render in duration-string form.
        class DurationForm(Form):
            duration = DurationField(initial=datetime.timedelta(hours=1))
        f = DurationForm()
        self.assertHTMLEqual(
            '<input id="id_duration" type="text" name="duration" value="01:00:00">',
            str(f['duration'])
        )

    def test_durationfield_prepare_value(self):
        # prepare_value() stringifies timedeltas, passes other values through.
        field = DurationField()
        td = datetime.timedelta(minutes=15, seconds=30)
        self.assertEqual(field.prepare_value(td), duration_string(td))
        self.assertEqual(field.prepare_value('arbitrary'), 'arbitrary')
        self.assertIsNone(field.prepare_value(None))

    # RegexField ##################################################################

    def test_regexfield_1(self):
        # Pattern given as a string; leading/trailing spaces are not stripped
        # before matching, so they cause a validation error.
        f = RegexField('^[0-9][A-F][0-9]$')
        self.assertEqual('2A2', f.clean('2A2'))
        self.assertEqual('3F3', f.clean('3F3'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '3G3')
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, ' 2A2')
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '2A2 ')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')

    def test_regexfield_2(self):
        # required=False: empty string is allowed and returned unchanged.
        f = RegexField('^[0-9][A-F][0-9]$', required=False)
        self.assertEqual('2A2', f.clean('2A2'))
        self.assertEqual('3F3', f.clean('3F3'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '3G3')
        self.assertEqual('', f.clean(''))

    def test_regexfield_3(self):
        # The pattern may also be supplied pre-compiled.
        f = RegexField(re.compile('^[0-9][A-F][0-9]$'))
        self.assertEqual('2A2', f.clean('2A2'))
        self.assertEqual('3F3', f.clean('3F3'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '3G3')
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, ' 2A2')
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '2A2 ')

    @ignore_warnings(category=RemovedInDjango110Warning)  # error_message deprecation
    def test_regexfield_4(self):
        # Deprecated error_message kwarg overrides the default message.
        f = RegexField('^[0-9][0-9][0-9][0-9]$', error_message='Enter a four-digit number.')
        self.assertEqual('1234', f.clean('1234'))
        self.assertRaisesMessage(ValidationError, "'Enter a four-digit number.'", f.clean, '123')
        self.assertRaisesMessage(ValidationError, "'Enter a four-digit number.'", f.clean, 'abcd')

    def test_regexfield_5(self):
        # min_length/max_length validators run alongside the regex validator;
        # a value can fail both at once (second assertion below).
        f = RegexField('^[0-9]+$', min_length=5, max_length=10)
        self.assertRaisesMessage(ValidationError, "'Ensure this value has at least 5 characters (it has 3).'", f.clean, '123')
        six.assertRaisesRegex(self, ValidationError, "'Ensure this value has at least 5 characters \(it has 3\)\.', u?'Enter a valid value\.'", f.clean, 'abc')
        self.assertEqual('12345', f.clean('12345'))
        self.assertEqual('1234567890', f.clean('1234567890'))
        self.assertRaisesMessage(ValidationError, "'Ensure this value has at most 10 characters (it has 11).'", f.clean, '12345678901')
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '12345a')

    def test_regexfield_6(self):
        """
        Ensure that it works with unicode characters.
        Refs #.
        """
        # NOTE(review): the pattern should be a raw string (r'^\w+$') to avoid
        # the invalid-escape deprecation; left unchanged here.
        f = RegexField('^\w+$')
        self.assertEqual('éèøçÎÎ你好', f.clean('éèøçÎÎ你好'))

    def test_change_regex_after_init(self):
        # Assigning to f.regex after construction replaces the validator too.
        f = RegexField('^[a-z]+$')
        f.regex = '^[0-9]+$'
        self.assertEqual('1234', f.clean('1234'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, 'abcd')

    # EmailField ##################################################################
    # See also validators tests for validate_email specific tests

    def test_emailfield_1(self):
        f = EmailField()
        self.assertWidgetRendersTo(f, '<input type="email" name="f" id="id_f" />')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
        self.assertEqual('[email protected]', f.clean('[email protected]'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid email address.'", f.clean, 'foo')
        # IDN domain labels are preserved (not punycoded) by clean().
        self.assertEqual('[email protected]\xe4\xf6\xfc\xdfabc.part.com', f.clean('[email protected]äöüßabc.part.com'))

    def test_email_regexp_for_performance(self):
        f = EmailField()
        # Check for runaway regex security problem. This will take for-freeking-ever
        # if the security fix isn't in place.
        addr = '[email protected]'
        self.assertEqual(addr, f.clean(addr))

    def test_emailfield_not_required(self):
        # required=False: empty values clean to ''; surrounding whitespace
        # (including tabs) is stripped before validation.
        f = EmailField(required=False)
        self.assertEqual('', f.clean(''))
        self.assertEqual('', f.clean(None))
        self.assertEqual('[email protected]', f.clean('[email protected]'))
        self.assertEqual('[email protected]', f.clean(' [email protected] \t \t '))
        self.assertRaisesMessage(ValidationError, "'Enter a valid email address.'", f.clean, 'foo')

    def test_emailfield_min_max_length(self):
        # max_length is also rendered as the widget's maxlength attribute.
        f = EmailField(min_length=10, max_length=15)
        self.assertWidgetRendersTo(f, '<input id="id_f" type="email" name="f" maxlength="15" />')
        self.assertRaisesMessage(ValidationError, "'Ensure this value has at least 10 characters (it has 9).'", f.clean, '[email protected]')
        self.assertEqual('[email protected]', f.clean('[email protected]'))
        self.assertRaisesMessage(ValidationError, "'Ensure this value has at most 15 characters (it has 20).'", f.clean, '[email protected]')

    # FileField ##################################################################

    def test_filefield_1(self):
        # clean() takes (data, initial); with no upload, a non-empty initial
        # value satisfies the requiredness check and is returned as-is.
        f = FileField()
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '', '')
        self.assertEqual('files/test1.pdf', f.clean('', 'files/test1.pdf'))
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None, '')
        self.assertEqual('files/test2.pdf', f.clean(None, 'files/test2.pdf'))
        self.assertRaisesMessage(ValidationError, "'No file was submitted. Check the encoding type on the form.'", f.clean, SimpleUploadedFile('', b''))
        self.assertRaisesMessage(ValidationError, "'No file was submitted. Check the encoding type on the form.'", f.clean, SimpleUploadedFile('', b''), '')
        self.assertEqual('files/test3.pdf', f.clean(None, 'files/test3.pdf'))
        self.assertRaisesMessage(ValidationError, "'No file was submitted. Check the encoding type on the form.'", f.clean, 'some content that is not a file')
        self.assertRaisesMessage(ValidationError, "'The submitted file is empty.'", f.clean, SimpleUploadedFile('name', None))
        self.assertRaisesMessage(ValidationError, "'The submitted file is empty.'", f.clean, SimpleUploadedFile('name', b''))
        self.assertEqual(SimpleUploadedFile, type(f.clean(SimpleUploadedFile('name', b'Some File Content'))))
        # Non-ASCII filenames and content are accepted.
        self.assertEqual(SimpleUploadedFile, type(f.clean(SimpleUploadedFile('我隻氣墊船裝滿晒鱔.txt', 'मेरी मँडराने वाली नाव सर्पमीनों से भरी ह'.encode('utf-8')))))
        self.assertEqual(SimpleUploadedFile, type(f.clean(SimpleUploadedFile('name', b'Some File Content'), 'files/test4.pdf')))

    def test_filefield_2(self):
        # max_length applies to the uploaded *filename*, not the content.
        f = FileField(max_length=5)
        self.assertRaisesMessage(ValidationError, "'Ensure this filename has at most 5 characters (it has 18).'", f.clean, SimpleUploadedFile('test_maxlength.txt', b'hello world'))
        self.assertEqual('files/test1.pdf', f.clean('', 'files/test1.pdf'))
        self.assertEqual('files/test2.pdf', f.clean(None, 'files/test2.pdf'))
        self.assertEqual(SimpleUploadedFile, type(f.clean(SimpleUploadedFile('name', b'Some File Content'))))

    def test_filefield_3(self):
        # allow_empty_file=True lets a zero-byte upload through.
        f = FileField(allow_empty_file=True)
        self.assertEqual(SimpleUploadedFile, type(f.clean(SimpleUploadedFile('name', b''))))

    def test_filefield_changed(self):
        '''
        Test for the behavior of has_changed for FileField. The value of data will
        more than likely come from request.FILES. The value of initial data will
        likely be a filename stored in the database. Since its value is of no use
        to a FileField it is ignored.
        '''
        f = FileField()

        # No file was uploaded and no initial data.
        self.assertFalse(f.has_changed('', None))

        # A file was uploaded and no initial data.
        self.assertTrue(f.has_changed('', {'filename': 'resume.txt', 'content': 'My resume'}))

        # A file was not uploaded, but there is initial data
        self.assertFalse(f.has_changed('resume.txt', None))

        # A file was uploaded and there is initial data (file identity is not dealt
        # with here)
        self.assertTrue(f.has_changed('resume.txt', {'filename': 'resume.txt', 'content': 'My resume'}))

    # ImageField ##################################################################

    @skipIf(Image is None, "Pillow is required to test ImageField")
    def test_imagefield_annotate_with_image_after_clean(self):
        # clean() sniffs the actual image type and corrects a bogus
        # client-supplied content_type.
        f = ImageField()
        img_path = os.path.dirname(upath(__file__)) + '/filepath_test_files/1x1.png'
        with open(img_path, 'rb') as img_file:
            img_data = img_file.read()

        img_file = SimpleUploadedFile('1x1.png', img_data)
        img_file.content_type = 'text/plain'

        uploaded_file = f.clean(img_file)
        self.assertEqual('PNG', uploaded_file.image.format)
        self.assertEqual('image/png', uploaded_file.content_type)

    @skipIf(Image is None, "Pillow is required to test ImageField")
    def test_imagefield_annotate_with_bitmap_image_after_clean(self):
        """
        This also tests the situation when Pillow doesn't detect the MIME type
        of the image (#24948).
        """
        from PIL.BmpImagePlugin import BmpImageFile
        try:
            # Temporarily unregister the BMP MIME type so Pillow can't report it.
            Image.register_mime(BmpImageFile.format, None)
            f = ImageField()
            img_path = os.path.dirname(upath(__file__)) + '/filepath_test_files/1x1.bmp'
            with open(img_path, 'rb') as img_file:
                img_data = img_file.read()

            img_file = SimpleUploadedFile('1x1.bmp', img_data)
            img_file.content_type = 'text/plain'

            uploaded_file = f.clean(img_file)
            self.assertEqual('BMP', uploaded_file.image.format)
            self.assertIsNone(uploaded_file.content_type)
        finally:
            # Restore the registration for other tests.
            Image.register_mime(BmpImageFile.format, 'image/bmp')

    # URLField ##################################################################

    def test_urlfield_1(self):
        # Scheme-less inputs get 'http://' prepended; malformed hosts rejected.
        f = URLField()
        self.assertWidgetRendersTo(f, '<input type="url" name="f" id="id_f" />')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
        self.assertEqual('http://localhost', f.clean('http://localhost'))
        self.assertEqual('http://example.com', f.clean('http://example.com'))
        self.assertEqual('http://example.com.', f.clean('http://example.com.'))
        self.assertEqual('http://www.example.com', f.clean('http://www.example.com'))
        self.assertEqual('http://www.example.com:8000/test', f.clean('http://www.example.com:8000/test'))
        self.assertEqual('http://valid-with-hyphens.com', f.clean('valid-with-hyphens.com'))
        self.assertEqual('http://subdomain.domain.com', f.clean('subdomain.domain.com'))
        self.assertEqual('http://200.8.9.10', f.clean('http://200.8.9.10'))
        self.assertEqual('http://200.8.9.10:8000/test', f.clean('http://200.8.9.10:8000/test'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'foo')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://example')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://example.')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'com.')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, '.')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://.com')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://invalid-.com')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://-invalid.com')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://inv-.alid-.com')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://inv-.-alid.com')
        self.assertEqual('http://valid-----hyphens.com', f.clean('http://valid-----hyphens.com'))
        # IDN hosts are preserved as-is by clean().
        self.assertEqual('http://some.idn.xyz\xe4\xf6\xfc\xdfabc.domain.com:123/blah', f.clean('http://some.idn.xyzäöüßabc.domain.com:123/blah'))
        self.assertEqual('http://www.example.com/s/http://code.djangoproject.com/ticket/13804', f.clean('www.example.com/s/http://code.djangoproject.com/ticket/13804'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, '[a')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://[a')

    def test_url_regex_ticket11198(self):
        f = URLField()
        # hangs "forever" if catastrophic backtracking in ticket:#11198 not fixed
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://%s' % ("X" * 200,))

        # a second test, to make sure the problem is really addressed, even on
        # domains that don't fail the domain label length check in the regex
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://%s' % ("X" * 60,))

    def test_urlfield_2(self):
        # required=False: empty values clean to ''.
        f = URLField(required=False)
        self.assertEqual('', f.clean(''))
        self.assertEqual('', f.clean(None))
        self.assertEqual('http://example.com', f.clean('http://example.com'))
        self.assertEqual('http://www.example.com', f.clean('http://www.example.com'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'foo')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://example')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://example.')
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://.com')

    def test_urlfield_5(self):
        # Length validators count the value *after* scheme normalization.
        f = URLField(min_length=15, max_length=20)
        self.assertWidgetRendersTo(f, '<input id="id_f" type="url" name="f" maxlength="20" />')
        self.assertRaisesMessage(ValidationError, "'Ensure this value has at least 15 characters (it has 12).'", f.clean, 'http://f.com')
        self.assertEqual('http://example.com', f.clean('http://example.com'))
        self.assertRaisesMessage(ValidationError, "'Ensure this value has at most 20 characters (it has 37).'", f.clean, 'http://abcdefghijklmnopqrstuvwxyz.com')

    def test_urlfield_6(self):
        # Missing scheme defaults to http; an explicit scheme is kept.
        f = URLField(required=False)
        self.assertEqual('http://example.com', f.clean('example.com'))
        self.assertEqual('', f.clean(''))
        self.assertEqual('https://example.com', f.clean('https://example.com'))

    def test_urlfield_7(self):
        # Paths and query strings survive cleaning untouched.
        f = URLField()
        self.assertEqual('http://example.com', f.clean('http://example.com'))
        self.assertEqual('http://example.com/test', f.clean('http://example.com/test'))
        self.assertEqual('http://example.com?some_param=some_value', f.clean('http://example.com?some_param=some_value'))

    def test_urlfield_9(self):
        f = URLField()
        urls = (
            'http://עברית.idn.icann.org/',
            'http://sãopaulo.com/',
            'http://sãopaulo.com.br/',
            'http://пример.испытание/',
            'http://مثال.إختبار/',
            'http://例子.测试/',
            'http://例子.測試/',
            'http://उदाहरण.परीक्षा/',
            'http://例え.テスト/',
            'http://مثال.آزمایشی/',
            'http://실례.테스트/',
            'http://العربية.idn.icann.org/',
        )
        for url in urls:
            # Valid IDN
            self.assertEqual(url, f.clean(url))

    def test_urlfield_10(self):
        """Test URLField correctly validates IPv6 (#18779)."""
        f = URLField()
        urls = (
            'http://[12:34::3a53]/',
            'http://[a34:9238::]:8080/',
        )
        for url in urls:
            self.assertEqual(url, f.clean(url))

    def test_urlfield_not_string(self):
        # Non-string input fails validation rather than raising a TypeError.
        f = URLField(required=False)
        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 23)

    def test_urlfield_normalization(self):
        # Trailing whitespace is stripped during normalization.
        f = URLField()
        self.assertEqual(f.clean('http://example.com/ '), 'http://example.com/')

    # BooleanField ################################################################

    def test_booleanfield_1(self):
        # A required BooleanField treats every falsy value (including the
        # string 'False') as missing.
        f = BooleanField()
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
        self.assertEqual(True, f.clean(True))
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, False)
        self.assertEqual(True, f.clean(1))
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, 0)
        self.assertEqual(True, f.clean('Django rocks'))
        self.assertEqual(True, f.clean('True'))
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, 'False')

    def test_booleanfield_2(self):
        # required=False: 'False'/'false'/'FaLsE' and '0' clean to False;
        # any other non-empty string is truthy.
        f = BooleanField(required=False)
        self.assertEqual(False, f.clean(''))
        self.assertEqual(False, f.clean(None))
        self.assertEqual(True, f.clean(True))
        self.assertEqual(False, f.clean(False))
        self.assertEqual(True, f.clean(1))
        self.assertEqual(False, f.clean(0))
        self.assertEqual(True, f.clean('1'))
        self.assertEqual(False, f.clean('0'))
        self.assertEqual(True, f.clean('Django rocks'))
        self.assertEqual(False, f.clean('False'))
        self.assertEqual(False, f.clean('false'))
        self.assertEqual(False, f.clean('FaLsE'))

    def test_boolean_picklable(self):
        self.assertIsInstance(pickle.loads(pickle.dumps(BooleanField())), BooleanField)

    def test_booleanfield_changed(self):
        f = BooleanField()
        self.assertFalse(f.has_changed(None, None))
        self.assertFalse(f.has_changed(None, ''))
        self.assertFalse(f.has_changed('', None))
        self.assertFalse(f.has_changed('', ''))
        self.assertTrue(f.has_changed(False, 'on'))
        self.assertFalse(f.has_changed(True, 'on'))
        self.assertTrue(f.has_changed(True, ''))
        # Initial value may have mutated to a string due to show_hidden_initial (#19537)
        self.assertTrue(f.has_changed('False', 'on'))

    # ChoiceField #################################################################

    def test_choicefield_1(self):
        # Values are coerced to text before comparison, so the int 1 matches
        # the choice '1'.
        f = ChoiceField(choices=[('1', 'One'), ('2', 'Two')])
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
        self.assertEqual('1', f.clean(1))
        self.assertEqual('1', f.clean('1'))
        self.assertRaisesMessage(ValidationError, "'Select a valid choice. 3 is not one of the available choices.'", f.clean, '3')

    def test_choicefield_2(self):
        # required=False: empty values clean to ''.
        f = ChoiceField(choices=[('1', 'One'), ('2', 'Two')], required=False)
        self.assertEqual('', f.clean(''))
        self.assertEqual('', f.clean(None))
        self.assertEqual('1', f.clean(1))
        self.assertEqual('1', f.clean('1'))
        self.assertRaisesMessage(ValidationError, "'Select a valid choice. 3 is not one of the available choices.'", f.clean, '3')

    def test_choicefield_3(self):
        # Only choice *values* are valid input, never the display labels.
        f = ChoiceField(choices=[('J', 'John'), ('P', 'Paul')])
        self.assertEqual('J', f.clean('J'))
        self.assertRaisesMessage(ValidationError, "'Select a valid choice. John is not one of the available choices.'", f.clean, 'John')

    def test_choicefield_4(self):
        # Grouped (optgroup-style) choices are flattened for validation.
        f = ChoiceField(choices=[('Numbers', (('1', 'One'), ('2', 'Two'))), ('Letters', (('3', 'A'), ('4', 'B'))), ('5', 'Other')])
        self.assertEqual('1', f.clean(1))
        self.assertEqual('1', f.clean('1'))
        self.assertEqual('3', f.clean(3))
        self.assertEqual('3', f.clean('3'))
        self.assertEqual('5', f.clean(5))
        self.assertEqual('5', f.clean('5'))
        self.assertRaisesMessage(ValidationError, "'Select a valid choice. 6 is not one of the available choices.'", f.clean, '6')

    def test_choicefield_callable(self):
        # choices may be a callable returning the choice list.
        choices = lambda: [('J', 'John'), ('P', 'Paul')]
        f = ChoiceField(choices=choices)
        self.assertEqual('J', f.clean('J'))

    def test_choicefield_callable_may_evaluate_to_different_values(self):
        # A callable choices source is re-evaluated for each form instance.
        choices = []

        def choices_as_callable():
            return choices

        class ChoiceFieldForm(Form):
            choicefield = ChoiceField(choices=choices_as_callable)

        choices = [('J', 'John')]
        form = ChoiceFieldForm()
        self.assertEqual([('J', 'John')], list(form.fields['choicefield'].choices))

        choices = [('P', 'Paul')]
        form = ChoiceFieldForm()
        self.assertEqual([('P', 'Paul')], list(form.fields['choicefield'].choices))

    def test_choicefield_disabled(self):
        # disabled=True is rendered as the widget's disabled attribute.
        f = ChoiceField(choices=[('J', 'John'), ('P', 'Paul')], disabled=True)
        self.assertWidgetRendersTo(f, '<select id="id_f" name="f" disabled><option value="J">John</option>'
            '<option value="P">Paul</option></select>')

    # TypedChoiceField ############################################################
    # TypedChoiceField is just like ChoiceField, except that coerced types will
    # be returned:

    def test_typedchoicefield_1(self):
        f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int)
        self.assertEqual(1, f.clean('1'))
        self.assertRaisesMessage(ValidationError, "'Select a valid choice. 2 is not one of the available choices.'", f.clean, '2')

    def test_typedchoicefield_2(self):
        # Different coercion, same validation.
        f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=float)
        self.assertEqual(1.0, f.clean('1'))

    def test_typedchoicefield_3(self):
        # This can also cause weirdness: be careful (bool(-1) == True, remember)
        f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=bool)
        self.assertEqual(True, f.clean('-1'))

    def test_typedchoicefield_4(self):
        # Even more weirdness: if you have a valid choice but your coercion function
        # can't coerce, you'll still get a validation error. Don't do this!
        f = TypedChoiceField(choices=[('A', 'A'), ('B', 'B')], coerce=int)
        self.assertRaisesMessage(ValidationError, "'Select a valid choice. B is not one of the available choices.'", f.clean, 'B')
        # Required fields require values
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')

    def test_typedchoicefield_5(self):
        # Non-required fields aren't required
        f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=False)
        self.assertEqual('', f.clean(''))
        # If you want cleaning an empty value to return a different type, tell the field

    def test_typedchoicefield_6(self):
        # empty_value controls what an empty submission cleans to.
        f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=False, empty_value=None)
        self.assertIsNone(f.clean(''))

    def test_typedchoicefield_has_changed(self):
        # has_changed should not trigger required validation
        f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=True)
        self.assertFalse(f.has_changed(None, ''))
        self.assertFalse(f.has_changed(1, '1'))
        self.assertFalse(f.has_changed('1', '1'))

    def test_typedchoicefield_special_coerce(self):
        """
        Test a coerce function which results in a value not present in choices.
        Refs #21397.
        """
        def coerce_func(val):
            return Decimal('1.%s' % val)

        f = TypedChoiceField(choices=[(1, "1"), (2, "2")], coerce=coerce_func, required=True)
        self.assertEqual(Decimal('1.2'), f.clean('2'))
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
        self.assertRaisesMessage(ValidationError, "'Select a valid choice. 3 is not one of the available choices.'", f.clean, '3')

    # NullBooleanField ############################################################

    def test_nullbooleanfield_1(self):
        # Three-state boolean: unrecognized strings clean to None, not False.
        f = NullBooleanField()
        self.assertIsNone(f.clean(''))
        self.assertEqual(True, f.clean(True))
        self.assertEqual(False, f.clean(False))
        self.assertIsNone(f.clean(None))
        self.assertEqual(False, f.clean('0'))
        self.assertEqual(True, f.clean('1'))
        self.assertIsNone(f.clean('2'))
        self.assertIsNone(f.clean('3'))
        self.assertIsNone(f.clean('hello'))
        self.assertEqual(True, f.clean('true'))
        self.assertEqual(False, f.clean('false'))

    def test_nullbooleanfield_2(self):
        # Make sure that the internal value is preserved if using HiddenInput (#7753)
        class HiddenNullBooleanForm(Form):
            hidden_nullbool1 = NullBooleanField(widget=HiddenInput, initial=True)
            hidden_nullbool2 = NullBooleanField(widget=HiddenInput, initial=False)
        f = HiddenNullBooleanForm()
        self.assertHTMLEqual('<input type="hidden" name="hidden_nullbool1" value="True" id="id_hidden_nullbool1" /><input type="hidden" name="hidden_nullbool2" value="False" id="id_hidden_nullbool2" />', str(f))

    def test_nullbooleanfield_3(self):
        # Round-trip: the 'True'/'False' strings produced by HiddenInput
        # clean back to real booleans.
        class HiddenNullBooleanForm(Form):
            hidden_nullbool1 = NullBooleanField(widget=HiddenInput, initial=True)
            hidden_nullbool2 = NullBooleanField(widget=HiddenInput, initial=False)
        f = HiddenNullBooleanForm({'hidden_nullbool1': 'True', 'hidden_nullbool2': 'False'})
        self.assertIsNone(f.full_clean())
        self.assertEqual(True, f.cleaned_data['hidden_nullbool1'])
        self.assertEqual(False, f.cleaned_data['hidden_nullbool2'])

    def test_nullbooleanfield_4(self):
        # Make sure we're compatible with MySQL, which uses 0 and 1 for its boolean
        # values. (#9609)
        NULLBOOL_CHOICES = (('1', 'Yes'), ('0', 'No'), ('', 'Unknown'))

        class MySQLNullBooleanForm(Form):
            nullbool0 = NullBooleanField(widget=RadioSelect(choices=NULLBOOL_CHOICES))
            nullbool1 = NullBooleanField(widget=RadioSelect(choices=NULLBOOL_CHOICES))
            nullbool2 = NullBooleanField(widget=RadioSelect(choices=NULLBOOL_CHOICES))
        f = MySQLNullBooleanForm({'nullbool0': '1', 'nullbool1': '0', 'nullbool2': ''})
        self.assertIsNone(f.full_clean())
        self.assertEqual(True, f.cleaned_data['nullbool0'])
        self.assertEqual(False, f.cleaned_data['nullbool1'])
        self.assertIsNone(f.cleaned_data['nullbool2'])

    def test_nullbooleanfield_changed(self):
        f = NullBooleanField()
        self.assertTrue(f.has_changed(False, None))
        self.assertTrue(f.has_changed(None, False))
        self.assertFalse(f.has_changed(None, None))
        self.assertFalse(f.has_changed(False, False))
        self.assertTrue(f.has_changed(True, False))
        self.assertTrue(f.has_changed(True, None))
        self.assertTrue(f.has_changed(True, False))

    # MultipleChoiceField #########################################################

    def test_multiplechoicefield_1(self):
        # Accepts lists/tuples of values; items coerced to text; an empty
        # list counts as missing for a required field.
        f = MultipleChoiceField(choices=[('1', 'One'), ('2', 'Two')])
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
        self.assertEqual(['1'], f.clean([1]))
        self.assertEqual(['1'], f.clean(['1']))
        self.assertEqual(['1', '2'], f.clean(['1', '2']))
        self.assertEqual(['1', '2'], f.clean([1, '2']))
        self.assertEqual(['1', '2'], f.clean((1, '2')))
        self.assertRaisesMessage(ValidationError, "'Enter a list of values.'", f.clean, 'hello')
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, [])
        self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, ())
        self.assertRaisesMessage(ValidationError, "'Select a valid choice. 3 is not one of the available choices.'", f.clean, ['3'])

    def test_multiplechoicefield_2(self):
        # required=False: empty inputs clean to [].
        f = MultipleChoiceField(choices=[('1', 'One'), ('2', 'Two')], required=False)
        self.assertEqual([], f.clean(''))
        self.assertEqual([], f.clean(None))
        self.assertEqual(['1'], f.clean([1]))
        self.assertEqual(['1'], f.clean(['1']))
        self.assertEqual(['1', '2'], f.clean(['1', '2']))
        self.assertEqual(['1', '2'], f.clean([1, '2']))
        self.assertEqual(['1', '2'], f.clean((1, '2')))
        self.assertRaisesMessage(ValidationError, "'Enter a list of values.'", f.clean, 'hello')
        self.assertEqual([], f.clean([]))
        self.assertEqual([], f.clean(()))
        self.assertRaisesMessage(ValidationError, "'Select a valid choice. 3 is not one of the available choices.'", f.clean, ['3'])

    def test_multiplechoicefield_3(self):
        # Grouped choices are flattened for validation, as for ChoiceField.
        f = MultipleChoiceField(choices=[('Numbers', (('1', 'One'), ('2', 'Two'))), ('Letters', (('3', 'A'), ('4', 'B'))), ('5', 'Other')])
        self.assertEqual(['1'], f.clean([1]))
        self.assertEqual(['1'], f.clean(['1']))
        self.assertEqual(['1', '5'], f.clean([1, 5]))
        self.assertEqual(['1', '5'], f.clean([1, '5']))
        self.assertEqual(['1', '5'], f.clean(['1', 5]))
        self.assertEqual(['1', '5'], f.clean(['1', '5']))
        self.assertRaisesMessage(ValidationError, "'Select a valid choice. 6 is not one of the available choices.'", f.clean, ['6'])
        self.assertRaisesMessage(ValidationError, "'Select a valid choice. 
6 is not one of the available choices.'", f.clean, ['1', '6']) def test_multiplechoicefield_changed(self): f = MultipleChoiceField(choices=[('1', 'One'), ('2', 'Two'), ('3', 'Three')]) self.assertFalse(f.has_changed(None, None)) self.assertFalse(f.has_changed([], None)) self.assertTrue(f.has_changed(None, ['1'])) self.assertFalse(f.has_changed([1, 2], ['1', '2'])) self.assertFalse(f.has_changed([2, 1], ['1', '2'])) self.assertTrue(f.has_changed([1, 2], ['1'])) self.assertTrue(f.has_changed([1, 2], ['1', '3'])) # TypedMultipleChoiceField ############################################################ # TypedMultipleChoiceField is just like MultipleChoiceField, except that coerced types # will be returned: def test_typedmultiplechoicefield_1(self): f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int) self.assertEqual([1], f.clean(['1'])) self.assertRaisesMessage(ValidationError, "'Select a valid choice. 2 is not one of the available choices.'", f.clean, ['2']) def test_typedmultiplechoicefield_2(self): # Different coercion, same validation. f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=float) self.assertEqual([1.0], f.clean(['1'])) def test_typedmultiplechoicefield_3(self): # This can also cause weirdness: be careful (bool(-1) == True, remember) f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=bool) self.assertEqual([True], f.clean(['-1'])) def test_typedmultiplechoicefield_4(self): f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int) self.assertEqual([1, -1], f.clean(['1', '-1'])) self.assertRaisesMessage(ValidationError, "'Select a valid choice. 2 is not one of the available choices.'", f.clean, ['1', '2']) def test_typedmultiplechoicefield_5(self): # Even more weirdness: if you have a valid choice but your coercion function # can't coerce, you'll still get a validation error. Don't do this! 
f = TypedMultipleChoiceField(choices=[('A', 'A'), ('B', 'B')], coerce=int) self.assertRaisesMessage(ValidationError, "'Select a valid choice. B is not one of the available choices.'", f.clean, ['B']) # Required fields require values self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, []) def test_typedmultiplechoicefield_6(self): # Non-required fields aren't required f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=False) self.assertEqual([], f.clean([])) def test_typedmultiplechoicefield_7(self): # If you want cleaning an empty value to return a different type, tell the field f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=False, empty_value=None) self.assertIsNone(f.clean([])) def test_typedmultiplechoicefield_has_changed(self): # has_changed should not trigger required validation f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=True) self.assertFalse(f.has_changed(None, '')) def test_typedmultiplechoicefield_special_coerce(self): """ Test a coerce function which results in a value not present in choices. Refs #21397. """ def coerce_func(val): return Decimal('1.%s' % val) f = TypedMultipleChoiceField( choices=[(1, "1"), (2, "2")], coerce=coerce_func, required=True) self.assertEqual([Decimal('1.2')], f.clean(['2'])) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, []) self.assertRaisesMessage(ValidationError, "'Select a valid choice. 
3 is not one of the available choices.'", f.clean, ['3']) # ComboField ################################################################## def test_combofield_1(self): f = ComboField(fields=[CharField(max_length=20), EmailField()]) self.assertEqual('[email protected]', f.clean('[email protected]')) self.assertRaisesMessage(ValidationError, "'Ensure this value has at most 20 characters (it has 28).'", f.clean, '[email protected]') self.assertRaisesMessage(ValidationError, "'Enter a valid email address.'", f.clean, 'not an email') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) def test_combofield_2(self): f = ComboField(fields=[CharField(max_length=20), EmailField()], required=False) self.assertEqual('[email protected]', f.clean('[email protected]')) self.assertRaisesMessage(ValidationError, "'Ensure this value has at most 20 characters (it has 28).'", f.clean, '[email protected]') self.assertRaisesMessage(ValidationError, "'Enter a valid email address.'", f.clean, 'not an email') self.assertEqual('', f.clean('')) self.assertEqual('', f.clean(None)) # FilePathField ############################################################### def test_filepathfield_1(self): path = os.path.abspath(upath(forms.__file__)) path = os.path.dirname(path) + '/' self.assertTrue(fix_os_paths(path).endswith('/django/forms/')) def test_filepathfield_2(self): path = upath(forms.__file__) path = os.path.dirname(os.path.abspath(path)) + '/' f = FilePathField(path=path) f.choices = [p for p in f.choices if p[0].endswith('.py')] f.choices.sort() expected = [ ('/django/forms/__init__.py', '__init__.py'), ('/django/forms/fields.py', 'fields.py'), ('/django/forms/forms.py', 'forms.py'), ('/django/forms/formsets.py', 'formsets.py'), ('/django/forms/models.py', 'models.py'), ('/django/forms/utils.py', 'utils.py'), ('/django/forms/widgets.py', 'widgets.py') ] for exp, got in 
zip(expected, fix_os_paths(f.choices)): self.assertEqual(exp[1], got[1]) self.assertTrue(got[0].endswith(exp[0])) self.assertRaisesMessage(ValidationError, "'Select a valid choice. fields.py is not one of the available choices.'", f.clean, 'fields.py') assert fix_os_paths(f.clean(path + 'fields.py')).endswith('/django/forms/fields.py') def test_filepathfield_3(self): path = upath(forms.__file__) path = os.path.dirname(os.path.abspath(path)) + '/' f = FilePathField(path=path, match='^.*?\.py$') f.choices.sort() expected = [ ('/django/forms/__init__.py', '__init__.py'), ('/django/forms/fields.py', 'fields.py'), ('/django/forms/forms.py', 'forms.py'), ('/django/forms/formsets.py', 'formsets.py'), ('/django/forms/models.py', 'models.py'), ('/django/forms/utils.py', 'utils.py'), ('/django/forms/widgets.py', 'widgets.py') ] for exp, got in zip(expected, fix_os_paths(f.choices)): self.assertEqual(exp[1], got[1]) self.assertTrue(got[0].endswith(exp[0])) def test_filepathfield_4(self): path = os.path.abspath(upath(forms.__file__)) path = os.path.dirname(path) + '/' f = FilePathField(path=path, recursive=True, match='^.*?\.py$') f.choices.sort() expected = [ ('/django/forms/__init__.py', '__init__.py'), ('/django/forms/extras/__init__.py', 'extras/__init__.py'), ('/django/forms/extras/widgets.py', 'extras/widgets.py'), ('/django/forms/fields.py', 'fields.py'), ('/django/forms/forms.py', 'forms.py'), ('/django/forms/formsets.py', 'formsets.py'), ('/django/forms/models.py', 'models.py'), ('/django/forms/utils.py', 'utils.py'), ('/django/forms/widgets.py', 'widgets.py') ] for exp, got in zip(expected, fix_os_paths(f.choices)): self.assertEqual(exp[1], got[1]) self.assertTrue(got[0].endswith(exp[0])) def test_filepathfield_folders(self): path = os.path.dirname(upath(__file__)) + '/filepath_test_files/' f = FilePathField(path=path, allow_folders=True, allow_files=False) f.choices.sort() expected = [ ('/tests/forms_tests/tests/filepath_test_files/directory', 'directory'), ] for 
exp, got in zip(expected, fix_os_paths(f.choices)): self.assertEqual(exp[1], got[1]) self.assertTrue(got[0].endswith(exp[0])) f = FilePathField(path=path, allow_folders=True, allow_files=True) f.choices.sort() expected = [ ('/tests/forms_tests/tests/filepath_test_files/.dot-file', '.dot-file'), ('/tests/forms_tests/tests/filepath_test_files/1x1.bmp', '1x1.bmp'), ('/tests/forms_tests/tests/filepath_test_files/1x1.png', '1x1.png'), ('/tests/forms_tests/tests/filepath_test_files/directory', 'directory'), ('/tests/forms_tests/tests/filepath_test_files/fake-image.jpg', 'fake-image.jpg'), ('/tests/forms_tests/tests/filepath_test_files/real-text-file.txt', 'real-text-file.txt'), ] actual = fix_os_paths(f.choices) self.assertEqual(len(expected), len(actual)) for exp, got in zip(expected, actual): self.assertEqual(exp[1], got[1]) self.assertTrue(got[0].endswith(exp[0])) # SplitDateTimeField ########################################################## def test_splitdatetimefield_1(self): from django.forms.widgets import SplitDateTimeWidget f = SplitDateTimeField() self.assertIsInstance(f.widget, SplitDateTimeWidget) self.assertEqual(datetime.datetime(2006, 1, 10, 7, 30), f.clean([datetime.date(2006, 1, 10), datetime.time(7, 30)])) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'Enter a list of values.'", f.clean, 'hello') six.assertRaisesRegex(self, ValidationError, "'Enter a valid date\.', u?'Enter a valid time\.'", f.clean, ['hello', 'there']) self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, ['2006-01-10', 'there']) self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, ['hello', '07:30']) def test_splitdatetimefield_2(self): f = SplitDateTimeField(required=False) self.assertEqual(datetime.datetime(2006, 1, 10, 7, 30), f.clean([datetime.date(2006, 1, 10), 
datetime.time(7, 30)])) self.assertEqual(datetime.datetime(2006, 1, 10, 7, 30), f.clean(['2006-01-10', '07:30'])) self.assertIsNone(f.clean(None)) self.assertIsNone(f.clean('')) self.assertIsNone(f.clean([''])) self.assertIsNone(f.clean(['', ''])) self.assertRaisesMessage(ValidationError, "'Enter a list of values.'", f.clean, 'hello') six.assertRaisesRegex(self, ValidationError, "'Enter a valid date\.', u?'Enter a valid time\.'", f.clean, ['hello', 'there']) self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, ['2006-01-10', 'there']) self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, ['hello', '07:30']) self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, ['2006-01-10', '']) self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, ['2006-01-10']) self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, ['', '07:30']) def test_splitdatetimefield_changed(self): f = SplitDateTimeField(input_date_formats=['%d/%m/%Y']) self.assertFalse(f.has_changed(['11/01/2012', '09:18:15'], ['11/01/2012', '09:18:15'])) self.assertTrue(f.has_changed(datetime.datetime(2008, 5, 6, 12, 40, 00), ['2008-05-06', '12:40:00'])) self.assertFalse(f.has_changed(datetime.datetime(2008, 5, 6, 12, 40, 00), ['06/05/2008', '12:40'])) self.assertTrue(f.has_changed(datetime.datetime(2008, 5, 6, 12, 40, 00), ['06/05/2008', '12:41'])) # GenericIPAddressField ####################################################### def test_generic_ipaddress_invalid_arguments(self): self.assertRaises(ValueError, GenericIPAddressField, protocol="hamster") self.assertRaises(ValueError, GenericIPAddressField, protocol="ipv4", unpack_ipv4=True) def test_generic_ipaddress_as_generic(self): # The edge cases of the IPv6 validation code are not deeply tested # here, they are covered in the tests for django.utils.ipv6 f = GenericIPAddressField() self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, 
'') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertEqual(f.clean(' 127.0.0.1 '), '127.0.0.1') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 or IPv6 address.'", f.clean, 'foo') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 or IPv6 address.'", f.clean, '127.0.0.') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 or IPv6 address.'", f.clean, '1.2.3.4.5') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 or IPv6 address.'", f.clean, '256.125.1.5') self.assertEqual(f.clean(' fe80::223:6cff:fe8a:2e8a '), 'fe80::223:6cff:fe8a:2e8a') self.assertEqual(f.clean(' 2a02::223:6cff:fe8a:2e8a '), '2a02::223:6cff:fe8a:2e8a') self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '12345:2:3:4') self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1::2:3::4') self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, 'foo::223:6cff:fe8a:2e8a') self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1::2:3:4:5:6:7:8') self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1:2') def test_generic_ipaddress_as_ipv4_only(self): f = GenericIPAddressField(protocol="IPv4") self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertEqual(f.clean(' 127.0.0.1 '), '127.0.0.1') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 address.'", f.clean, 'foo') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 address.'", f.clean, '127.0.0.') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 address.'", f.clean, '1.2.3.4.5') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 address.'", f.clean, '256.125.1.5') self.assertRaisesMessage(ValidationError, 
"'Enter a valid IPv4 address.'", f.clean, 'fe80::223:6cff:fe8a:2e8a') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 address.'", f.clean, '2a02::223:6cff:fe8a:2e8a') def test_generic_ipaddress_as_ipv6_only(self): f = GenericIPAddressField(protocol="IPv6") self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '') self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None) self.assertRaisesMessage(ValidationError, "'Enter a valid IPv6 address.'", f.clean, '127.0.0.1') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv6 address.'", f.clean, 'foo') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv6 address.'", f.clean, '127.0.0.') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv6 address.'", f.clean, '1.2.3.4.5') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv6 address.'", f.clean, '256.125.1.5') self.assertEqual(f.clean(' fe80::223:6cff:fe8a:2e8a '), 'fe80::223:6cff:fe8a:2e8a') self.assertEqual(f.clean(' 2a02::223:6cff:fe8a:2e8a '), '2a02::223:6cff:fe8a:2e8a') self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '12345:2:3:4') self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1::2:3::4') self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, 'foo::223:6cff:fe8a:2e8a') self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1::2:3:4:5:6:7:8') self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1:2') def test_generic_ipaddress_as_generic_not_required(self): f = GenericIPAddressField(required=False) self.assertEqual(f.clean(''), '') self.assertEqual(f.clean(None), '') self.assertEqual(f.clean('127.0.0.1'), '127.0.0.1') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 or IPv6 address.'", f.clean, 'foo') self.assertRaisesMessage(ValidationError, "'Enter a 
valid IPv4 or IPv6 address.'", f.clean, '127.0.0.') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 or IPv6 address.'", f.clean, '1.2.3.4.5') self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 or IPv6 address.'", f.clean, '256.125.1.5') self.assertEqual(f.clean(' fe80::223:6cff:fe8a:2e8a '), 'fe80::223:6cff:fe8a:2e8a') self.assertEqual(f.clean(' 2a02::223:6cff:fe8a:2e8a '), '2a02::223:6cff:fe8a:2e8a') self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '12345:2:3:4') self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1::2:3::4') self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, 'foo::223:6cff:fe8a:2e8a') self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1::2:3:4:5:6:7:8') self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1:2') def test_generic_ipaddress_normalization(self): # Test the normalizing code f = GenericIPAddressField() self.assertEqual(f.clean(' ::ffff:0a0a:0a0a '), '::ffff:10.10.10.10') self.assertEqual(f.clean(' ::ffff:10.10.10.10 '), '::ffff:10.10.10.10') self.assertEqual(f.clean(' 2001:000:a:0000:0:fe:fe:beef '), '2001:0:a::fe:fe:beef') self.assertEqual(f.clean(' 2001::a:0000:0:fe:fe:beef '), '2001:0:a::fe:fe:beef') f = GenericIPAddressField(unpack_ipv4=True) self.assertEqual(f.clean(' ::ffff:0a0a:0a0a'), '10.10.10.10') # SlugField ################################################################### def test_slugfield_normalization(self): f = SlugField() self.assertEqual(f.clean(' aa-bb-cc '), 'aa-bb-cc') def test_slugfield_unicode_normalization(self): f = SlugField(allow_unicode=True) self.assertEqual(f.clean('a'), 'a') self.assertEqual(f.clean('1'), '1') self.assertEqual(f.clean('a1'), 'a1') self.assertEqual(f.clean('你好'), '你好') self.assertEqual(f.clean(' 你-好 '), '你-好') self.assertEqual(f.clean('ıçğüş'), 'ıçğüş') 
self.assertEqual(f.clean('foo-ıç-bar'), 'foo-ıç-bar') # UUIDField ################################################################### def test_uuidfield_1(self): field = UUIDField() value = field.clean('550e8400e29b41d4a716446655440000') self.assertEqual(value, uuid.UUID('550e8400e29b41d4a716446655440000')) def test_uuidfield_2(self): field = UUIDField(required=False) value = field.clean('') self.assertEqual(value, None) def test_uuidfield_3(self): field = UUIDField() with self.assertRaises(ValidationError) as cm: field.clean('550e8400') self.assertEqual(cm.exception.messages[0], 'Enter a valid UUID.') def test_uuidfield_4(self): field = UUIDField() value = field.prepare_value(uuid.UUID('550e8400e29b41d4a716446655440000')) self.assertEqual(value, '550e8400e29b41d4a716446655440000')
JPWKU/unix-agent
refs/heads/master
src/dcm/agent/tests/unit/messaging/__init__.py
51
# # Copyright (C) 2014 Dell, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #
plum-umd/java-sketch
refs/heads/master
java_sk/glob2/fnmatch.py
13
"""Filename matching with shell patterns. fnmatch(FILENAME, PATTERN) matches according to the local convention. fnmatchcase(FILENAME, PATTERN) always takes case in account. The functions operate by translating the pattern into a regular expression. They cache the compiled regular expressions for speed. The function translate(PATTERN) returns a regular expression corresponding to PATTERN. (It does not compile it.) """ import os import posixpath import re try: from functools import lru_cache except ImportError: from .compat import lru_cache __all__ = ["filter", "fnmatch", "fnmatchcase", "translate"] def fnmatch(name, pat): """Test whether FILENAME matches PATTERN. Patterns are Unix shell style: * matches everything ? matches any single character [seq] matches any character in seq [!seq] matches any char not in seq An initial period in FILENAME is not special. Both FILENAME and PATTERN are first case-normalized if the operating system requires it. If you don't want this, use fnmatchcase(FILENAME, PATTERN). """ name = os.path.normcase(name) pat = os.path.normcase(pat) return fnmatchcase(name, pat) lru_cache(maxsize=256, typed=True) def _compile_pattern(pat): if isinstance(pat, bytes): pat_str = pat.decode('ISO-8859-1') res_str = translate(pat_str) res = res_str.encode('ISO-8859-1') else: res = translate(pat) return re.compile(res).match def filter(names, pat): """Return the subset of the list NAMES that match PAT.""" result = [] pat = os.path.normcase(pat) match = _compile_pattern(pat) if os.path is posixpath: # normcase on posix is NOP. Optimize it away from the loop. for name in names: m = match(name) if m: result.append((name, m.groups())) else: for name in names: m = match(os.path.normcase(name)) if m: result.append((name, m.groups())) return result def fnmatchcase(name, pat): """Test whether FILENAME matches PATTERN, including case. This is a version of fnmatch() which doesn't case-normalize its arguments. 
""" match = _compile_pattern(pat) return match(name) is not None def translate(pat): """Translate a shell PATTERN to a regular expression. There is no way to quote meta-characters. """ i, n = 0, len(pat) res = '' while i < n: c = pat[i] i = i+1 if c == '*': res = res + '(.*)' elif c == '?': res = res + '(.)' elif c == '[': j = i if j < n and pat[j] == '!': j = j+1 if j < n and pat[j] == ']': j = j+1 while j < n and pat[j] != ']': j = j+1 if j >= n: res = res + '\\[' else: stuff = pat[i:j].replace('\\','\\\\') i = j+1 if stuff[0] == '!': stuff = '^' + stuff[1:] elif stuff[0] == '^': stuff = '\\' + stuff res = '%s([%s])' % (res, stuff) else: res = res + re.escape(c) return res + '\Z(?ms)'
flashycud/timestack
refs/heads/master
django/http/multipartparser.py
87
""" Multi-part parsing for file uploads. Exposes one class, ``MultiPartParser``, which feeds chunks of uploaded data to file upload handlers for processing. """ import cgi from django.conf import settings from django.core.exceptions import SuspiciousOperation from django.utils.datastructures import MultiValueDict from django.utils.encoding import force_unicode from django.utils.text import unescape_entities from django.core.files.uploadhandler import StopUpload, SkipFile, StopFutureHandlers __all__ = ('MultiPartParser', 'MultiPartParserError', 'InputStreamExhausted') class MultiPartParserError(Exception): pass class InputStreamExhausted(Exception): """ No more reads are allowed from this device. """ pass RAW = "raw" FILE = "file" FIELD = "field" class MultiPartParser(object): """ A rfc2388 multipart/form-data parser. ``MultiValueDict.parse()`` reads the input stream in ``chunk_size`` chunks and returns a tuple of ``(MultiValueDict(POST), MultiValueDict(FILES))``. If """ def __init__(self, META, input_data, upload_handlers, encoding=None): """ Initialize the MultiPartParser object. :META: The standard ``META`` dictionary in Django request objects. :input_data: The raw post data, as a file-like object. :upload_handler: An UploadHandler instance that performs operations on the uploaded data. :encoding: The encoding with which to treat the incoming data. """ # # Content-Type should containt multipart and the boundary information. # content_type = META.get('HTTP_CONTENT_TYPE', META.get('CONTENT_TYPE', '')) if not content_type.startswith('multipart/'): raise MultiPartParserError('Invalid Content-Type: %s' % content_type) # Parse the header to get the boundary to split the parts. ctypes, opts = parse_header(content_type) boundary = opts.get('boundary') if not boundary or not cgi.valid_boundary(boundary): raise MultiPartParserError('Invalid boundary in multipart: %s' % boundary) # # Content-Length should contain the length of the body we are about # to receive. 
# try: content_length = int(META.get('HTTP_CONTENT_LENGTH', META.get('CONTENT_LENGTH',0))) except (ValueError, TypeError): # For now set it to 0; we'll try again later on down. content_length = 0 if content_length <= 0: # This means we shouldn't continue...raise an error. raise MultiPartParserError("Invalid content length: %r" % content_length) self._boundary = boundary self._input_data = input_data # For compatibility with low-level network APIs (with 32-bit integers), # the chunk size should be < 2^31, but still divisible by 4. possible_sizes = [x.chunk_size for x in upload_handlers if x.chunk_size] self._chunk_size = min([2**31-4] + possible_sizes) self._meta = META self._encoding = encoding or settings.DEFAULT_CHARSET self._content_length = content_length self._upload_handlers = upload_handlers def parse(self): """ Parse the POST data and break it into a FILES MultiValueDict and a POST MultiValueDict. Returns a tuple containing the POST and FILES dictionary, respectively. """ # We have to import QueryDict down here to avoid a circular import. from django.http import QueryDict encoding = self._encoding handlers = self._upload_handlers limited_input_data = LimitBytes(self._input_data, self._content_length) # See if the handler will want to take care of the parsing. # This allows overriding everything if somebody wants it. for handler in handlers: result = handler.handle_raw_input(limited_input_data, self._meta, self._content_length, self._boundary, encoding) if result is not None: return result[0], result[1] # Create the data structures to be used later. self._post = QueryDict('', mutable=True) self._files = MultiValueDict() # Instantiate the parser and stream: stream = LazyStream(ChunkIter(limited_input_data, self._chunk_size)) # Whether or not to signal a file-completion at the beginning of the loop. 
old_field_name = None counters = [0] * len(handlers) try: for item_type, meta_data, field_stream in Parser(stream, self._boundary): if old_field_name: # We run this at the beginning of the next loop # since we cannot be sure a file is complete until # we hit the next boundary/part of the multipart content. self.handle_file_complete(old_field_name, counters) old_field_name = None try: disposition = meta_data['content-disposition'][1] field_name = disposition['name'].strip() except (KeyError, IndexError, AttributeError): continue transfer_encoding = meta_data.get('content-transfer-encoding') field_name = force_unicode(field_name, encoding, errors='replace') if item_type == FIELD: # This is a post field, we can just set it in the post if transfer_encoding == 'base64': raw_data = field_stream.read() try: data = str(raw_data).decode('base64') except: data = raw_data else: data = field_stream.read() self._post.appendlist(field_name, force_unicode(data, encoding, errors='replace')) elif item_type == FILE: # This is a file, use the handler... 
file_name = disposition.get('filename') if not file_name: continue file_name = force_unicode(file_name, encoding, errors='replace') file_name = self.IE_sanitize(unescape_entities(file_name)) content_type = meta_data.get('content-type', ('',))[0].strip() content_type_extra = meta_data.get('content-type', (0,{}))[1] if content_type_extra is None: content_type_extra = {} try: charset = content_type_extra.get('charset', None) except: charset = None try: content_length = int(meta_data.get('content-length')[0]) except (IndexError, TypeError, ValueError): content_length = None counters = [0] * len(handlers) try: for handler in handlers: try: handler.new_file(field_name, file_name, content_type, content_length, charset, content_type_extra.copy()) except StopFutureHandlers: break for chunk in field_stream: if transfer_encoding == 'base64': # We only special-case base64 transfer encoding try: chunk = str(chunk).decode('base64') except Exception, e: # Since this is only a chunk, any error is an unfixable error. raise MultiPartParserError("Could not decode base64 data: %r" % e) for i, handler in enumerate(handlers): chunk_length = len(chunk) chunk = handler.receive_data_chunk(chunk, counters[i]) counters[i] += chunk_length if chunk is None: # If the chunk received by the handler is None, then don't continue. break except SkipFile, e: # Just use up the rest of this file... exhaust(field_stream) else: # Handle file upload completions on next iteration. old_field_name = field_name else: # If this is neither a FIELD or a FILE, just exhaust the stream. exhaust(stream) except StopUpload, e: if not e.connection_reset: exhaust(limited_input_data) else: # Make sure that the request data is all fed exhaust(limited_input_data) # Signal that the upload has completed. 
for handler in handlers: retval = handler.upload_complete() if retval: break return self._post, self._files def handle_file_complete(self, old_field_name, counters): """ Handle all the signalling that takes place when a file is complete. """ for i, handler in enumerate(self._upload_handlers): file_obj = handler.file_complete(counters[i]) if file_obj: # If it returns a file object, then set the files dict. self._files.appendlist(force_unicode(old_field_name, self._encoding, errors='replace'), file_obj) break def IE_sanitize(self, filename): """Cleanup filename from Internet Explorer full paths.""" return filename and filename[filename.rfind("\\")+1:].strip() class LazyStream(object): """ The LazyStream wrapper allows one to get and "unget" bytes from a stream. Given a producer object (an iterator that yields bytestrings), the LazyStream object will support iteration, reading, and keeping a "look-back" variable in case you need to "unget" some bytes. """ def __init__(self, producer, length=None): """ Every LazyStream must have a producer when instantiated. A producer is an iterable that returns a string each time it is called. """ self._producer = producer self._empty = False self._leftover = '' self.length = length self.position = 0 self._remaining = length self._unget_history = [] def tell(self): return self.position def read(self, size=None): def parts(): remaining = (size is not None and [size] or [self._remaining])[0] # do the whole thing in one shot if no limit was provided. 
if remaining is None: yield ''.join(self) return # otherwise do some bookkeeping to return exactly enough # of the stream and stashing any extra content we get from # the producer while remaining != 0: assert remaining > 0, 'remaining bytes to read should never go negative' chunk = self.next() emitting = chunk[:remaining] self.unget(chunk[remaining:]) remaining -= len(emitting) yield emitting out = ''.join(parts()) return out def next(self): """ Used when the exact number of bytes to read is unimportant. This procedure just returns whatever is chunk is conveniently returned from the iterator instead. Useful to avoid unnecessary bookkeeping if performance is an issue. """ if self._leftover: output = self._leftover self._leftover = '' else: output = self._producer.next() self._unget_history = [] self.position += len(output) return output def close(self): """ Used to invalidate/disable this lazy stream. Replaces the producer with an empty list. Any leftover bytes that have already been read will still be reported upon read() and/or next(). """ self._producer = [] def __iter__(self): return self def unget(self, bytes): """ Places bytes back onto the front of the lazy stream. Future calls to read() will return those bytes first. The stream position and thus tell() will be rewound. """ if not bytes: return self._update_unget_history(len(bytes)) self.position -= len(bytes) self._leftover = ''.join([bytes, self._leftover]) def _update_unget_history(self, num_bytes): """ Updates the unget history as a sanity check to see if we've pushed back the same number of bytes in one chunk. If we keep ungetting the same number of bytes many times (here, 50), we're mostly likely in an infinite loop of some sort. This is usually caused by a maliciously-malformed MIME request. 
""" self._unget_history = [num_bytes] + self._unget_history[:49] number_equal = len([current_number for current_number in self._unget_history if current_number == num_bytes]) if number_equal > 40: raise SuspiciousOperation( "The multipart parser got stuck, which shouldn't happen with" " normal uploaded files. Check for malicious upload activity;" " if there is none, report this to the Django developers." ) class ChunkIter(object): """ An iterable that will yield chunks of data. Given a file-like object as the constructor, this object will yield chunks of read operations from that object. """ def __init__(self, flo, chunk_size=64 * 1024): self.flo = flo self.chunk_size = chunk_size def next(self): try: data = self.flo.read(self.chunk_size) except InputStreamExhausted: raise StopIteration() if data: return data else: raise StopIteration() def __iter__(self): return self class LimitBytes(object): """ Limit bytes for a file object. """ def __init__(self, fileobject, length): self._file = fileobject self.remaining = length def read(self, num_bytes=None): """ Read data from the underlying file. If you ask for too much or there isn't anything left, this will raise an InputStreamExhausted error. """ if self.remaining <= 0: raise InputStreamExhausted() if num_bytes is None: num_bytes = self.remaining else: num_bytes = min(num_bytes, self.remaining) self.remaining -= num_bytes return self._file.read(num_bytes) class InterBoundaryIter(object): """ A Producer that will iterate over boundaries. """ def __init__(self, stream, boundary): self._stream = stream self._boundary = boundary def __iter__(self): return self def next(self): try: return LazyStream(BoundaryIter(self._stream, self._boundary)) except InputStreamExhausted: raise StopIteration() class BoundaryIter(object): """ A Producer that is sensitive to boundaries. Will happily yield bytes until a boundary is found. 
Will yield the bytes before the boundary, throw away the boundary bytes themselves, and push the post-boundary bytes back on the stream. The future calls to .next() after locating the boundary will raise a StopIteration exception. """ def __init__(self, stream, boundary): self._stream = stream self._boundary = boundary self._done = False # rollback an additional six bytes because the format is like # this: CRLF<boundary>[--CRLF] self._rollback = len(boundary) + 6 # Try to use mx fast string search if available. Otherwise # use Python find. Wrap the latter for consistency. unused_char = self._stream.read(1) if not unused_char: raise InputStreamExhausted() self._stream.unget(unused_char) try: from mx.TextTools import FS self._fs = FS(boundary).find except ImportError: self._fs = lambda data: data.find(boundary) def __iter__(self): return self def next(self): if self._done: raise StopIteration() stream = self._stream rollback = self._rollback bytes_read = 0 chunks = [] for bytes in stream: bytes_read += len(bytes) chunks.append(bytes) if bytes_read > rollback: break if not bytes: break else: self._done = True if not chunks: raise StopIteration() chunk = ''.join(chunks) boundary = self._find_boundary(chunk, len(chunk) < self._rollback) if boundary: end, next = boundary stream.unget(chunk[next:]) self._done = True return chunk[:end] else: # make sure we dont treat a partial boundary (and # its separators) as data if not chunk[:-rollback]:# and len(chunk) >= (len(self._boundary) + 6): # There's nothing left, we should just return and mark as done. self._done = True return chunk else: stream.unget(chunk[-rollback:]) return chunk[:-rollback] def _find_boundary(self, data, eof = False): """ Finds a multipart boundary in data. Should no boundry exist in the data None is returned instead. 
Otherwise a tuple containing the indices of the following are returned: * the end of current encapsulation * the start of the next encapsulation """ index = self._fs(data) if index < 0: return None else: end = index next = index + len(self._boundary) # backup over CRLF if data[max(0,end-1)] == '\n': end -= 1 if data[max(0,end-1)] == '\r': end -= 1 return end, next def exhaust(stream_or_iterable): """ Completely exhausts an iterator or stream. Raise a MultiPartParserError if the argument is not a stream or an iterable. """ iterator = None try: iterator = iter(stream_or_iterable) except TypeError: iterator = ChunkIter(stream_or_iterable, 16384) if iterator is None: raise MultiPartParserError('multipartparser.exhaust() was passed a non-iterable or stream parameter') for __ in iterator: pass def parse_boundary_stream(stream, max_header_size): """ Parses one and exactly one stream that encapsulates a boundary. """ # Stream at beginning of header, look for end of header # and parse it if found. The header must fit within one # chunk. chunk = stream.read(max_header_size) # 'find' returns the top of these four bytes, so we'll # need to munch them later to prevent them from polluting # the payload. header_end = chunk.find('\r\n\r\n') def _parse_header(line): main_value_pair, params = parse_header(line) try: name, value = main_value_pair.split(':', 1) except: raise ValueError("Invalid header: %r" % line) return name, (value, params) if header_end == -1: # we find no header, so we just mark this fact and pass on # the stream verbatim stream.unget(chunk) return (RAW, {}, stream) header = chunk[:header_end] # here we place any excess chunk back onto the stream, as # well as throwing away the CRLFCRLF bytes from above. stream.unget(chunk[header_end + 4:]) TYPE = RAW outdict = {} # Eliminate blank lines for line in header.split('\r\n'): # This terminology ("main value" and "dictionary of # parameters") is from the Python docs. 
try: name, (value, params) = _parse_header(line) except: continue if name == 'content-disposition': TYPE = FIELD if params.get('filename'): TYPE = FILE outdict[name] = value, params if TYPE == RAW: stream.unget(chunk) return (TYPE, outdict, stream) class Parser(object): def __init__(self, stream, boundary): self._stream = stream self._separator = '--' + boundary def __iter__(self): boundarystream = InterBoundaryIter(self._stream, self._separator) for sub_stream in boundarystream: # Iterate over each part yield parse_boundary_stream(sub_stream, 1024) def parse_header(line): """ Parse the header into a key-value. """ plist = _parse_header_params(';' + line) key = plist.pop(0).lower() pdict = {} for p in plist: i = p.find('=') if i >= 0: name = p[:i].strip().lower() value = p[i+1:].strip() if len(value) >= 2 and value[0] == value[-1] == '"': value = value[1:-1] value = value.replace('\\\\', '\\').replace('\\"', '"') pdict[name] = value return key, pdict def _parse_header_params(s): plist = [] while s[:1] == ';': s = s[1:] end = s.find(';') while end > 0 and s.count('"', 0, end) % 2: end = s.find(';', end + 1) if end < 0: end = len(s) f = s[:end] plist.append(f.strip()) s = s[end:] return plist
seberg/numpy
refs/heads/ufunc-refactor-2021-normal
numpy/f2py/tests/test_module_doc.py
10
import os import sys import pytest import textwrap from . import util from numpy.testing import assert_equal, IS_PYPY def _path(*a): return os.path.join(*((os.path.dirname(__file__),) + a)) class TestModuleDocString(util.F2PyTest): sources = [_path('src', 'module_data', 'module_data_docstring.f90')] @pytest.mark.skipif(sys.platform=='win32', reason='Fails with MinGW64 Gfortran (Issue #9673)') @pytest.mark.xfail(IS_PYPY, reason="PyPy cannot modify tp_doc after PyType_Ready") def test_module_docstring(self): assert_equal(self.module.mod.__doc__, textwrap.dedent('''\ i : 'i'-scalar x : 'i'-array(4) a : 'f'-array(2,3) b : 'f'-array(-1,-1), not allocated\x00 foo()\n Wrapper for ``foo``.\n\n''') )
richtier/alexa-browser-client
refs/heads/master
alexa_browser_client/config/routing.py
1
from channels.routing import ProtocolTypeRouter from channels.sessions import SessionMiddlewareStack from alexa_browser_client import consumers application = ProtocolTypeRouter({ 'websocket': SessionMiddlewareStack(consumers.AlexaConsumer) })
olasitarska/django
refs/heads/master
django/conf/locale/hi/formats.py
619
# -*- encoding: utf-8 -*- # This file is distributed under the same license as the Django package. # from __future__ import unicode_literals # The *_FORMAT strings use the Django date format syntax, # see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date DATE_FORMAT = 'j F Y' TIME_FORMAT = 'g:i A' # DATETIME_FORMAT = # YEAR_MONTH_FORMAT = MONTH_DAY_FORMAT = 'j F' SHORT_DATE_FORMAT = 'd-m-Y' # SHORT_DATETIME_FORMAT = # FIRST_DAY_OF_WEEK = # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior # DATE_INPUT_FORMATS = # TIME_INPUT_FORMATS = # DATETIME_INPUT_FORMATS = DECIMAL_SEPARATOR = '.' THOUSAND_SEPARATOR = ',' # NUMBER_GROUPING =
wfxiang08/django190
refs/heads/master
tests/null_queries/tests.py
290
from __future__ import unicode_literals from django.core.exceptions import FieldError from django.test import TestCase from .models import Choice, Inner, OuterA, OuterB, Poll class NullQueriesTests(TestCase): def test_none_as_null(self): """ Regression test for the use of None as a query value. None is interpreted as an SQL NULL, but only in __exact and __iexact queries. Set up some initial polls and choices """ p1 = Poll(question='Why?') p1.save() c1 = Choice(poll=p1, choice='Because.') c1.save() c2 = Choice(poll=p1, choice='Why Not?') c2.save() # Exact query with value None returns nothing ("is NULL" in sql, # but every 'id' field has a value). self.assertQuerysetEqual(Choice.objects.filter(choice__exact=None), []) # The same behavior for iexact query. self.assertQuerysetEqual(Choice.objects.filter(choice__iexact=None), []) # Excluding the previous result returns everything. self.assertQuerysetEqual( Choice.objects.exclude(choice=None).order_by('id'), [ '<Choice: Choice: Because. in poll Q: Why? >', '<Choice: Choice: Why Not? in poll Q: Why? >' ] ) # Valid query, but fails because foo isn't a keyword self.assertRaises(FieldError, Choice.objects.filter, foo__exact=None) # Can't use None on anything other than __exact and __iexact self.assertRaises(ValueError, Choice.objects.filter, id__gt=None) # Related managers use __exact=None implicitly if the object hasn't been saved. p2 = Poll(question="How?") self.assertEqual(repr(p2.choice_set.all()), '[]') def test_reverse_relations(self): """ Querying across reverse relations and then another relation should insert outer joins correctly so as not to exclude results. 
""" obj = OuterA.objects.create() self.assertQuerysetEqual( OuterA.objects.filter(inner__third=None), ['<OuterA: OuterA object>'] ) self.assertQuerysetEqual( OuterA.objects.filter(inner__third__data=None), ['<OuterA: OuterA object>'] ) Inner.objects.create(first=obj) self.assertQuerysetEqual( Inner.objects.filter(first__inner__third=None), ['<Inner: Inner object>'] ) # Ticket #13815: check if <reverse>_isnull=False does not produce # faulty empty lists OuterB.objects.create(data="reverse") self.assertQuerysetEqual( OuterB.objects.filter(inner__isnull=False), [] ) Inner.objects.create(first=obj) self.assertQuerysetEqual( OuterB.objects.exclude(inner__isnull=False), ['<OuterB: OuterB object>'] )
mfwarren/FreeCoding
refs/heads/master
2014/10/fc_2014_10_14.py
1
#!/usr/bin/env python #imports go here import datetime import smtplib from email.mime.text import MIMEText from github import Github # # Free Coding session for 2014-10-14 # Written by Matt Warren # EMAIL = '[email protected]' hub = Github() hub_user = hub.get_user('mfwarren') event = hub_user.get_public_events()[0] # the most recent public event last_event_time = event.created_at last_event_time = last_event_time + datetime.timedelta(hours=-6) # GMT To Mountain Time today = datetime.datetime.now() + datetime.timedelta(hours=-6) if last_event_time.day != today.day: msg = MIMEText("You haven't made any commits to GitHub yet today!") msg['Subject'] = "GITHUB ALERT!" msg['From'] = EMAIL msg['To'] = EMAIL s = smtplib.SMTP('localhost') s.sendmail(EMAIL, [EMAIL], msg.as_string()) s.quit()
savoirfairelinux/shinken
refs/heads/master
shinken/easter.py
17
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (C) 2009-2014: # Gabes Jean, [email protected] # Gerhard Lausser, [email protected] # Gregory Starck, [email protected] # Hartmut Goebel, [email protected] # # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>. from shinken.log import logger def episode_iv(): hst = 'towel.blinkenlights.nl' from telnetlib import Telnet t = Telnet(hst) while True: buf = t.read_until('mesfesses', 0.1) logger.info(buf) def perdu(): import urllib f = urllib.urlopen("http://www.perdu.com") logger.info(f.read()) def myip(): import urllib f = urllib.urlopen("http://whatismyip.org/") logger.info(f.read()) def naheulbeuk(): import os import urllib2 from cStringIO import StringIO from PIL import Image import aalib if os.getenv('TERM') == 'linux': screen = aalib.LinuxScreen else: screen = aalib.AnsiScreen screen = screen(width=128, height=128) fp = StringIO(urllib2.urlopen( 'http://www.penofchaos.com/warham/bd/images/NBK-win7portrait-Nain02.JPG').read()) image = Image.open(fp).convert('L').resize(screen.virtual_size) screen.put_image((0, 0), image) logger.info(screen.render()) def what_it_make_me_think(subject): import hashlib if hashlib.md5(subject.lower()).hexdigest() == '6376e9755f8047391621b577ae03966a': print "Thanks to %s now I feel like this: https://youtu.be/efTZslkr5Fs?t=60" % subject def dark(): r""" .-. 
|_:_| /(_Y_)\ ( \/M\/ ) '. _.'-/'-'\-'._ ': _/.--'[[[[]'--.\_ ': /_' : |::"| : '.\ ': // ./ |oUU| \.' :\ ': _:'..' \_|___|_/ : :| ':. .' |_[___]_| :.':\ [::\ | : | | : ; : \ '-' \/'.| |.' \ .;.' | |\_ \ '-' : | | \ \ .: : | | | \ | '. : \ | / \ :. .; | / | | :__/ : \\ | | | \: | \ | || / \ : : |: / |__| /| snd | : : :_/_| /'._\ '--|_\ /___.-/_|-' \ \ '-' """ logger.info(dark.__doc__) def get_coffee(): r""" ( ) ( ___...(-------)-....___ .-"" ) ( ""-. .-'``'|-._ ) _.-| / .--.| `""---...........---""` | / / | | | | | | \ \ | | `\ `\ | | `\ `| | _/ /\ / (__/ \ / _..---""` \ /`""---.._ .-' \ / '-. : `-.__ __.-' : : ) ""---...---"" ( : '._ `"--...___...--"` _.' jgs \""--..__ __..--""/ '._ "'"----.....______.....----"'" _.' `""--..,,_____ _____,,..--""` `"'"----"'"` """ logger.info(get_coffee.__doc__)
yannrouillard/weboob
refs/heads/master
modules/apivie/pages.py
2
# -*- coding: utf-8 -*- # Copyright(C) 2013 Romain Bignon # # This file is part of weboob. # # weboob is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # weboob is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with weboob. If not, see <http://www.gnu.org/licenses/>. from decimal import Decimal from weboob.capabilities.bank import Account from weboob.tools.browser import BasePage from weboob.tools.capabilities.bank.transactions import FrenchTransaction __all__ = ['LoginPage', 'AccountsPage', 'OperationsPage'] class LoginPage(BasePage): def login(self, username, password): self.browser.select_form(nr=0) self.browser['_58_login'] = username.encode('utf-8') self.browser['_58_password'] = password.encode('utf-8') self.browser.submit(nologin=True) class AccountsPage(BasePage): COL_LABEL = 0 COL_OWNER = 1 COL_ID = 2 COL_AMOUNT = 3 def iter_accounts(self): for line in self.document.xpath('//table[@summary="informations contrat"]/tbody/tr'): yield self._get_account(line) def _get_account(self, line): tds = line.findall('td') account = Account() account.id = self.parser.tocleanstring(tds[self.COL_ID]) account.label = self.parser.tocleanstring(tds[self.COL_LABEL]) balance_str = self.parser.tocleanstring(tds[self.COL_AMOUNT]) account.balance = Decimal(FrenchTransaction.clean_amount(balance_str)) account.currency = account.get_currency(balance_str) return account class Transaction(FrenchTransaction): pass class OperationsPage(BasePage): COL_DATE = 0 COL_LABEL = 1 COL_AMOUNT = 2 def iter_history(self): for line in 
self.document.xpath('//table[@role="treegrid"]/tbody/tr'): tds = line.findall('td') operation = Transaction(int(line.attrib['data-rk'])) date = self.parser.tocleanstring(tds[self.COL_DATE]) label = self.parser.tocleanstring(tds[self.COL_LABEL]) amount = self.parser.tocleanstring(tds[self.COL_AMOUNT]) if len(amount) == 0: continue color = tds[self.COL_AMOUNT].find('span').attrib['class'] if color == 'black': continue operation.parse(date, label) operation.set_amount(amount) if color == 'red' and operation.amount > 0: operation.amount = - operation.amount yield operation
dios-game/dios
refs/heads/master
src/oslibs/google_breakpad/google-breakpad-read-only/src/tools/gyp/test/exclusion/gyptest-exclusion.py
363
#!/usr/bin/env python # Copyright (c) 2010 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Verifies that exclusions (e.g. sources!) are respected. Excluded sources that do not exist should not prevent the build from succeeding. """ import TestGyp test = TestGyp.TestGyp() test.run_gyp('exclusion.gyp') test.build('exclusion.gyp') # executables test.built_file_must_exist('hello' + test._exe, test.EXECUTABLE, bare=True) test.pass_test()
Zex0n/django-simple-cms
refs/heads/master
shop/migrations/0030_item_variation_in_stock.py
1
# -*- coding: utf-8 -*- # Generated by Django 1.10.5 on 2018-09-11 09:07 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('shop', '0029_category_code1c'), ] operations = [ migrations.AddField( model_name='item_variation', name='in_stock', field=models.IntegerField(default=0, verbose_name='Количество на складе (шт.)'), ), ]
eoogbe/api-client-staging
refs/heads/master
generated/python/proto-google-cloud-vision-v1/google/cloud/proto/vision/v1/text_annotation_pb2.py
8
# Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/proto/vision/v1/text_annotation.proto import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.cloud.proto.vision.v1 import geometry_pb2 as google_dot_cloud_dot_proto_dot_vision_dot_v1_dot_geometry__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='google/cloud/proto/vision/v1/text_annotation.proto', package='google.cloud.vision.v1', syntax='proto3', serialized_pb=_b('\n2google/cloud/proto/vision/v1/text_annotation.proto\x12\x16google.cloud.vision.v1\x1a\x1cgoogle/api/annotations.proto\x1a+google/cloud/proto/vision/v1/geometry.proto\"\x96\x04\n\x0eTextAnnotation\x12+\n\x05pages\x18\x01 \x03(\x0b\x32\x1c.google.cloud.vision.v1.Page\x12\x0c\n\x04text\x18\x02 \x01(\t\x1a=\n\x10\x44\x65tectedLanguage\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x1a\xd5\x01\n\rDetectedBreak\x12L\n\x04type\x18\x01 \x01(\x0e\x32>.google.cloud.vision.v1.TextAnnotation.DetectedBreak.BreakType\x12\x11\n\tis_prefix\x18\x02 \x01(\x08\"c\n\tBreakType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\t\n\x05SPACE\x10\x01\x12\x0e\n\nSURE_SPACE\x10\x02\x12\x12\n\x0e\x45OL_SURE_SPACE\x10\x03\x12\n\n\x06HYPHEN\x10\x04\x12\x0e\n\nLINE_BREAK\x10\x05\x1a\xb1\x01\n\x0cTextProperty\x12S\n\x12\x64\x65tected_languages\x18\x01 \x03(\x0b\x32\x37.google.cloud.vision.v1.TextAnnotation.DetectedLanguage\x12L\n\x0e\x64\x65tected_break\x18\x02 \x01(\x0b\x32\x34.google.cloud.vision.v1.TextAnnotation.DetectedBreak\"\x9b\x01\n\x04Page\x12\x45\n\x08property\x18\x01 
\x01(\x0b\x32\x33.google.cloud.vision.v1.TextAnnotation.TextProperty\x12\r\n\x05width\x18\x02 \x01(\x05\x12\x0e\n\x06height\x18\x03 \x01(\x05\x12-\n\x06\x62locks\x18\x04 \x03(\x0b\x32\x1d.google.cloud.vision.v1.Block\"\xd2\x02\n\x05\x42lock\x12\x45\n\x08property\x18\x01 \x01(\x0b\x32\x33.google.cloud.vision.v1.TextAnnotation.TextProperty\x12:\n\x0c\x62ounding_box\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12\x35\n\nparagraphs\x18\x03 \x03(\x0b\x32!.google.cloud.vision.v1.Paragraph\x12;\n\nblock_type\x18\x04 \x01(\x0e\x32\'.google.cloud.vision.v1.Block.BlockType\"R\n\tBlockType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x08\n\x04TEXT\x10\x01\x12\t\n\x05TABLE\x10\x02\x12\x0b\n\x07PICTURE\x10\x03\x12\t\n\x05RULER\x10\x04\x12\x0b\n\x07\x42\x41RCODE\x10\x05\"\xbb\x01\n\tParagraph\x12\x45\n\x08property\x18\x01 \x01(\x0b\x32\x33.google.cloud.vision.v1.TextAnnotation.TextProperty\x12:\n\x0c\x62ounding_box\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12+\n\x05words\x18\x03 \x03(\x0b\x32\x1c.google.cloud.vision.v1.Word\"\xba\x01\n\x04Word\x12\x45\n\x08property\x18\x01 \x01(\x0b\x32\x33.google.cloud.vision.v1.TextAnnotation.TextProperty\x12:\n\x0c\x62ounding_box\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12/\n\x07symbols\x18\x03 \x03(\x0b\x32\x1e.google.cloud.vision.v1.Symbol\"\x99\x01\n\x06Symbol\x12\x45\n\x08property\x18\x01 \x01(\x0b\x32\x33.google.cloud.vision.v1.TextAnnotation.TextProperty\x12:\n\x0c\x62ounding_box\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12\x0c\n\x04text\x18\x03 \x01(\tBt\n\x1a\x63om.google.cloud.vision.v1B\x13TextAnnotationProtoP\x01Z<google.golang.org/genproto/googleapis/cloud/vision/v1;vision\xf8\x01\x01\x62\x06proto3') , dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_cloud_dot_proto_dot_vision_dot_v1_dot_geometry__pb2.DESCRIPTOR,]) _sym_db.RegisterFileDescriptor(DESCRIPTOR) _TEXTANNOTATION_DETECTEDBREAK_BREAKTYPE = _descriptor.EnumDescriptor( name='BreakType', 
full_name='google.cloud.vision.v1.TextAnnotation.DetectedBreak.BreakType', filename=None, file=DESCRIPTOR, values=[ _descriptor.EnumValueDescriptor( name='UNKNOWN', index=0, number=0, options=None, type=None), _descriptor.EnumValueDescriptor( name='SPACE', index=1, number=1, options=None, type=None), _descriptor.EnumValueDescriptor( name='SURE_SPACE', index=2, number=2, options=None, type=None), _descriptor.EnumValueDescriptor( name='EOL_SURE_SPACE', index=3, number=3, options=None, type=None), _descriptor.EnumValueDescriptor( name='HYPHEN', index=4, number=4, options=None, type=None), _descriptor.EnumValueDescriptor( name='LINE_BREAK', index=5, number=5, options=None, type=None), ], containing_type=None, options=None, serialized_start=409, serialized_end=508, ) _sym_db.RegisterEnumDescriptor(_TEXTANNOTATION_DETECTEDBREAK_BREAKTYPE) _BLOCK_BLOCKTYPE = _descriptor.EnumDescriptor( name='BlockType', full_name='google.cloud.vision.v1.Block.BlockType', filename=None, file=DESCRIPTOR, values=[ _descriptor.EnumValueDescriptor( name='UNKNOWN', index=0, number=0, options=None, type=None), _descriptor.EnumValueDescriptor( name='TEXT', index=1, number=1, options=None, type=None), _descriptor.EnumValueDescriptor( name='TABLE', index=2, number=2, options=None, type=None), _descriptor.EnumValueDescriptor( name='PICTURE', index=3, number=3, options=None, type=None), _descriptor.EnumValueDescriptor( name='RULER', index=4, number=4, options=None, type=None), _descriptor.EnumValueDescriptor( name='BARCODE', index=5, number=5, options=None, type=None), ], containing_type=None, options=None, serialized_start=1105, serialized_end=1187, ) _sym_db.RegisterEnumDescriptor(_BLOCK_BLOCKTYPE) _TEXTANNOTATION_DETECTEDLANGUAGE = _descriptor.Descriptor( name='DetectedLanguage', full_name='google.cloud.vision.v1.TextAnnotation.DetectedLanguage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='language_code', 
full_name='google.cloud.vision.v1.TextAnnotation.DetectedLanguage.language_code', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='confidence', full_name='google.cloud.vision.v1.TextAnnotation.DetectedLanguage.confidence', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=231, serialized_end=292, ) _TEXTANNOTATION_DETECTEDBREAK = _descriptor.Descriptor( name='DetectedBreak', full_name='google.cloud.vision.v1.TextAnnotation.DetectedBreak', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='type', full_name='google.cloud.vision.v1.TextAnnotation.DetectedBreak.type', index=0, number=1, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='is_prefix', full_name='google.cloud.vision.v1.TextAnnotation.DetectedBreak.is_prefix', index=1, number=2, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ _TEXTANNOTATION_DETECTEDBREAK_BREAKTYPE, ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=295, serialized_end=508, ) _TEXTANNOTATION_TEXTPROPERTY = _descriptor.Descriptor( name='TextProperty', 
full_name='google.cloud.vision.v1.TextAnnotation.TextProperty', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='detected_languages', full_name='google.cloud.vision.v1.TextAnnotation.TextProperty.detected_languages', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='detected_break', full_name='google.cloud.vision.v1.TextAnnotation.TextProperty.detected_break', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=511, serialized_end=688, ) _TEXTANNOTATION = _descriptor.Descriptor( name='TextAnnotation', full_name='google.cloud.vision.v1.TextAnnotation', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='pages', full_name='google.cloud.vision.v1.TextAnnotation.pages', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='text', full_name='google.cloud.vision.v1.TextAnnotation.text', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[_TEXTANNOTATION_DETECTEDLANGUAGE, _TEXTANNOTATION_DETECTEDBREAK, _TEXTANNOTATION_TEXTPROPERTY, ], enum_types=[ ], options=None, is_extendable=False, 
syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=154, serialized_end=688, ) _PAGE = _descriptor.Descriptor( name='Page', full_name='google.cloud.vision.v1.Page', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='property', full_name='google.cloud.vision.v1.Page.property', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='width', full_name='google.cloud.vision.v1.Page.width', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='height', full_name='google.cloud.vision.v1.Page.height', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='blocks', full_name='google.cloud.vision.v1.Page.blocks', index=3, number=4, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=691, serialized_end=846, ) _BLOCK = _descriptor.Descriptor( name='Block', full_name='google.cloud.vision.v1.Block', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='property', full_name='google.cloud.vision.v1.Block.property', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='bounding_box', full_name='google.cloud.vision.v1.Block.bounding_box', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='paragraphs', full_name='google.cloud.vision.v1.Block.paragraphs', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='block_type', full_name='google.cloud.vision.v1.Block.block_type', index=3, number=4, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ _BLOCK_BLOCKTYPE, ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=849, serialized_end=1187, ) _PARAGRAPH = _descriptor.Descriptor( name='Paragraph', full_name='google.cloud.vision.v1.Paragraph', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='property', full_name='google.cloud.vision.v1.Paragraph.property', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='bounding_box', full_name='google.cloud.vision.v1.Paragraph.bounding_box', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), 
_descriptor.FieldDescriptor( name='words', full_name='google.cloud.vision.v1.Paragraph.words', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1190, serialized_end=1377, ) _WORD = _descriptor.Descriptor( name='Word', full_name='google.cloud.vision.v1.Word', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='property', full_name='google.cloud.vision.v1.Word.property', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='bounding_box', full_name='google.cloud.vision.v1.Word.bounding_box', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='symbols', full_name='google.cloud.vision.v1.Word.symbols', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1380, serialized_end=1566, ) _SYMBOL = _descriptor.Descriptor( name='Symbol', full_name='google.cloud.vision.v1.Symbol', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='property', full_name='google.cloud.vision.v1.Symbol.property', index=0, 
number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='bounding_box', full_name='google.cloud.vision.v1.Symbol.bounding_box', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='text', full_name='google.cloud.vision.v1.Symbol.text', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1569, serialized_end=1722, ) _TEXTANNOTATION_DETECTEDLANGUAGE.containing_type = _TEXTANNOTATION _TEXTANNOTATION_DETECTEDBREAK.fields_by_name['type'].enum_type = _TEXTANNOTATION_DETECTEDBREAK_BREAKTYPE _TEXTANNOTATION_DETECTEDBREAK.containing_type = _TEXTANNOTATION _TEXTANNOTATION_DETECTEDBREAK_BREAKTYPE.containing_type = _TEXTANNOTATION_DETECTEDBREAK _TEXTANNOTATION_TEXTPROPERTY.fields_by_name['detected_languages'].message_type = _TEXTANNOTATION_DETECTEDLANGUAGE _TEXTANNOTATION_TEXTPROPERTY.fields_by_name['detected_break'].message_type = _TEXTANNOTATION_DETECTEDBREAK _TEXTANNOTATION_TEXTPROPERTY.containing_type = _TEXTANNOTATION _TEXTANNOTATION.fields_by_name['pages'].message_type = _PAGE _PAGE.fields_by_name['property'].message_type = _TEXTANNOTATION_TEXTPROPERTY _PAGE.fields_by_name['blocks'].message_type = _BLOCK _BLOCK.fields_by_name['property'].message_type = _TEXTANNOTATION_TEXTPROPERTY _BLOCK.fields_by_name['bounding_box'].message_type = 
google_dot_cloud_dot_proto_dot_vision_dot_v1_dot_geometry__pb2._BOUNDINGPOLY _BLOCK.fields_by_name['paragraphs'].message_type = _PARAGRAPH _BLOCK.fields_by_name['block_type'].enum_type = _BLOCK_BLOCKTYPE _BLOCK_BLOCKTYPE.containing_type = _BLOCK _PARAGRAPH.fields_by_name['property'].message_type = _TEXTANNOTATION_TEXTPROPERTY _PARAGRAPH.fields_by_name['bounding_box'].message_type = google_dot_cloud_dot_proto_dot_vision_dot_v1_dot_geometry__pb2._BOUNDINGPOLY _PARAGRAPH.fields_by_name['words'].message_type = _WORD _WORD.fields_by_name['property'].message_type = _TEXTANNOTATION_TEXTPROPERTY _WORD.fields_by_name['bounding_box'].message_type = google_dot_cloud_dot_proto_dot_vision_dot_v1_dot_geometry__pb2._BOUNDINGPOLY _WORD.fields_by_name['symbols'].message_type = _SYMBOL _SYMBOL.fields_by_name['property'].message_type = _TEXTANNOTATION_TEXTPROPERTY _SYMBOL.fields_by_name['bounding_box'].message_type = google_dot_cloud_dot_proto_dot_vision_dot_v1_dot_geometry__pb2._BOUNDINGPOLY DESCRIPTOR.message_types_by_name['TextAnnotation'] = _TEXTANNOTATION DESCRIPTOR.message_types_by_name['Page'] = _PAGE DESCRIPTOR.message_types_by_name['Block'] = _BLOCK DESCRIPTOR.message_types_by_name['Paragraph'] = _PARAGRAPH DESCRIPTOR.message_types_by_name['Word'] = _WORD DESCRIPTOR.message_types_by_name['Symbol'] = _SYMBOL TextAnnotation = _reflection.GeneratedProtocolMessageType('TextAnnotation', (_message.Message,), dict( DetectedLanguage = _reflection.GeneratedProtocolMessageType('DetectedLanguage', (_message.Message,), dict( DESCRIPTOR = _TEXTANNOTATION_DETECTEDLANGUAGE, __module__ = 'google.cloud.proto.vision.v1.text_annotation_pb2' # @@protoc_insertion_point(class_scope:google.cloud.vision.v1.TextAnnotation.DetectedLanguage) )) , DetectedBreak = _reflection.GeneratedProtocolMessageType('DetectedBreak', (_message.Message,), dict( DESCRIPTOR = _TEXTANNOTATION_DETECTEDBREAK, __module__ = 'google.cloud.proto.vision.v1.text_annotation_pb2' # 
@@protoc_insertion_point(class_scope:google.cloud.vision.v1.TextAnnotation.DetectedBreak) )) , TextProperty = _reflection.GeneratedProtocolMessageType('TextProperty', (_message.Message,), dict( DESCRIPTOR = _TEXTANNOTATION_TEXTPROPERTY, __module__ = 'google.cloud.proto.vision.v1.text_annotation_pb2' # @@protoc_insertion_point(class_scope:google.cloud.vision.v1.TextAnnotation.TextProperty) )) , DESCRIPTOR = _TEXTANNOTATION, __module__ = 'google.cloud.proto.vision.v1.text_annotation_pb2' # @@protoc_insertion_point(class_scope:google.cloud.vision.v1.TextAnnotation) )) _sym_db.RegisterMessage(TextAnnotation) _sym_db.RegisterMessage(TextAnnotation.DetectedLanguage) _sym_db.RegisterMessage(TextAnnotation.DetectedBreak) _sym_db.RegisterMessage(TextAnnotation.TextProperty) Page = _reflection.GeneratedProtocolMessageType('Page', (_message.Message,), dict( DESCRIPTOR = _PAGE, __module__ = 'google.cloud.proto.vision.v1.text_annotation_pb2' # @@protoc_insertion_point(class_scope:google.cloud.vision.v1.Page) )) _sym_db.RegisterMessage(Page) Block = _reflection.GeneratedProtocolMessageType('Block', (_message.Message,), dict( DESCRIPTOR = _BLOCK, __module__ = 'google.cloud.proto.vision.v1.text_annotation_pb2' # @@protoc_insertion_point(class_scope:google.cloud.vision.v1.Block) )) _sym_db.RegisterMessage(Block) Paragraph = _reflection.GeneratedProtocolMessageType('Paragraph', (_message.Message,), dict( DESCRIPTOR = _PARAGRAPH, __module__ = 'google.cloud.proto.vision.v1.text_annotation_pb2' # @@protoc_insertion_point(class_scope:google.cloud.vision.v1.Paragraph) )) _sym_db.RegisterMessage(Paragraph) Word = _reflection.GeneratedProtocolMessageType('Word', (_message.Message,), dict( DESCRIPTOR = _WORD, __module__ = 'google.cloud.proto.vision.v1.text_annotation_pb2' # @@protoc_insertion_point(class_scope:google.cloud.vision.v1.Word) )) _sym_db.RegisterMessage(Word) Symbol = _reflection.GeneratedProtocolMessageType('Symbol', (_message.Message,), dict( DESCRIPTOR = _SYMBOL, __module__ = 
'google.cloud.proto.vision.v1.text_annotation_pb2' # @@protoc_insertion_point(class_scope:google.cloud.vision.v1.Symbol) )) _sym_db.RegisterMessage(Symbol) DESCRIPTOR.has_options = True DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\032com.google.cloud.vision.v1B\023TextAnnotationProtoP\001Z<google.golang.org/genproto/googleapis/cloud/vision/v1;vision\370\001\001')) try: # THESE ELEMENTS WILL BE DEPRECATED. # Please use the generated *_pb2_grpc.py files instead. import grpc from grpc.framework.common import cardinality from grpc.framework.interfaces.face import utilities as face_utilities from grpc.beta import implementations as beta_implementations from grpc.beta import interfaces as beta_interfaces except ImportError: pass # @@protoc_insertion_point(module_scope)
c-rhodes/hack2014
refs/heads/master
hack2014/category/tests.py
24123
from django.test import TestCase # Create your tests here.
drawcode/deployee-flask
refs/heads/master
app/main/views.py
2
from flask import Blueprint, request, render_template, flash, g, session, redirect, url_for from werkzeug import check_password_hash, generate_password_hash from app import db main = Blueprint('main', __name__) @main.route('/') def main_root(): return render_template("main.html") @main.route('/home') def main_home(): return render_template("main.html")
mavlab2015/paparazzi
refs/heads/indoor
sw/lib/python/pprz_math/__init__.py
79
__all__ = ["geodetic", "algebra"]
toshywoshy/ansible
refs/heads/devel
test/units/modules/network/fortios/test_fortios_firewall_DoS_policy.py
21
# Copyright 2019 Fortinet, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <https://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import json import pytest from mock import ANY from ansible.module_utils.network.fortios.fortios import FortiOSHandler try: from ansible.modules.network.fortios import fortios_firewall_DoS_policy except ImportError: pytest.skip("Could not load required modules for testing", allow_module_level=True) @pytest.fixture(autouse=True) def connection_mock(mocker): connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_firewall_DoS_policy.Connection') return connection_class_mock fos_instance = FortiOSHandler(connection_mock) def test_firewall_DoS_policy_creation(mocker): schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema') set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200} set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result) input_data = { 'username': 'admin', 'state': 'present', 'firewall_DoS_policy': {'comments': 'test_value_3', 'interface': 'test_value_4', 'policyid': '5', 'status': 'enable' }, 'vdom': 'root'} is_error, changed, response = fortios_firewall_DoS_policy.fortios_firewall(input_data, fos_instance) 
expected_data = {'comments': 'test_value_3', 'interface': 'test_value_4', 'policyid': '5', 'status': 'enable' } set_method_mock.assert_called_with('firewall', 'DoS-policy', data=expected_data, vdom='root') schema_method_mock.assert_not_called() assert not is_error assert changed assert response['status'] == 'success' assert response['http_status'] == 200 def test_firewall_DoS_policy_creation_fails(mocker): schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema') set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500} set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result) input_data = { 'username': 'admin', 'state': 'present', 'firewall_DoS_policy': {'comments': 'test_value_3', 'interface': 'test_value_4', 'policyid': '5', 'status': 'enable' }, 'vdom': 'root'} is_error, changed, response = fortios_firewall_DoS_policy.fortios_firewall(input_data, fos_instance) expected_data = {'comments': 'test_value_3', 'interface': 'test_value_4', 'policyid': '5', 'status': 'enable' } set_method_mock.assert_called_with('firewall', 'DoS-policy', data=expected_data, vdom='root') schema_method_mock.assert_not_called() assert is_error assert not changed assert response['status'] == 'error' assert response['http_status'] == 500 def test_firewall_DoS_policy_removal(mocker): schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema') delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200} delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result) input_data = { 'username': 'admin', 'state': 'absent', 'firewall_DoS_policy': {'comments': 'test_value_3', 'interface': 'test_value_4', 'policyid': '5', 'status': 'enable' }, 'vdom': 'root'} is_error, changed, response = 
fortios_firewall_DoS_policy.fortios_firewall(input_data, fos_instance) delete_method_mock.assert_called_with('firewall', 'DoS-policy', mkey=ANY, vdom='root') schema_method_mock.assert_not_called() assert not is_error assert changed assert response['status'] == 'success' assert response['http_status'] == 200 def test_firewall_DoS_policy_deletion_fails(mocker): schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema') delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500} delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result) input_data = { 'username': 'admin', 'state': 'absent', 'firewall_DoS_policy': {'comments': 'test_value_3', 'interface': 'test_value_4', 'policyid': '5', 'status': 'enable' }, 'vdom': 'root'} is_error, changed, response = fortios_firewall_DoS_policy.fortios_firewall(input_data, fos_instance) delete_method_mock.assert_called_with('firewall', 'DoS-policy', mkey=ANY, vdom='root') schema_method_mock.assert_not_called() assert is_error assert not changed assert response['status'] == 'error' assert response['http_status'] == 500 def test_firewall_DoS_policy_idempotent(mocker): schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema') set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404} set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result) input_data = { 'username': 'admin', 'state': 'present', 'firewall_DoS_policy': {'comments': 'test_value_3', 'interface': 'test_value_4', 'policyid': '5', 'status': 'enable' }, 'vdom': 'root'} is_error, changed, response = fortios_firewall_DoS_policy.fortios_firewall(input_data, fos_instance) expected_data = {'comments': 'test_value_3', 'interface': 'test_value_4', 'policyid': '5', 'status': 'enable' } 
set_method_mock.assert_called_with('firewall', 'DoS-policy', data=expected_data, vdom='root') schema_method_mock.assert_not_called() assert not is_error assert not changed assert response['status'] == 'error' assert response['http_status'] == 404 def test_firewall_DoS_policy_filter_foreign_attributes(mocker): schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema') set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200} set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result) input_data = { 'username': 'admin', 'state': 'present', 'firewall_DoS_policy': { 'random_attribute_not_valid': 'tag', 'comments': 'test_value_3', 'interface': 'test_value_4', 'policyid': '5', 'status': 'enable' }, 'vdom': 'root'} is_error, changed, response = fortios_firewall_DoS_policy.fortios_firewall(input_data, fos_instance) expected_data = {'comments': 'test_value_3', 'interface': 'test_value_4', 'policyid': '5', 'status': 'enable' } set_method_mock.assert_called_with('firewall', 'DoS-policy', data=expected_data, vdom='root') schema_method_mock.assert_not_called() assert not is_error assert changed assert response['status'] == 'success' assert response['http_status'] == 200
tedder/ansible
refs/heads/devel
test/units/modules/network/f5/test_bigip_profile_persistence_src_addr.py
21
# -*- coding: utf-8 -*- # # Copyright: (c) 2017, F5 Networks Inc. # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import json import pytest import sys if sys.version_info < (2, 7): pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7") from ansible.module_utils.basic import AnsibleModule try: from library.modules.bigip_profile_persistence_src_addr import ApiParameters from library.modules.bigip_profile_persistence_src_addr import ModuleParameters from library.modules.bigip_profile_persistence_src_addr import ModuleManager from library.modules.bigip_profile_persistence_src_addr import ArgumentSpec # In Ansible 2.8, Ansible changed import paths. from test.units.compat import unittest from test.units.compat.mock import Mock from test.units.compat.mock import patch from test.units.modules.utils import set_module_args except ImportError: from ansible.modules.network.f5.bigip_profile_persistence_src_addr import ApiParameters from ansible.modules.network.f5.bigip_profile_persistence_src_addr import ModuleParameters from ansible.modules.network.f5.bigip_profile_persistence_src_addr import ModuleManager from ansible.modules.network.f5.bigip_profile_persistence_src_addr import ArgumentSpec # Ansible 2.8 imports from units.compat import unittest from units.compat.mock import Mock from units.compat.mock import patch from units.modules.utils import set_module_args fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures') fixture_data = {} def load_fixture(name): path = os.path.join(fixture_path, name) if path in fixture_data: return fixture_data[path] with open(path) as f: data = f.read() try: data = json.loads(data) except Exception: pass fixture_data[path] = data return data class TestParameters(unittest.TestCase): def test_module_parameters(self): args = dict( name='foo', parent='bar', 
match_across_services=False, match_across_virtuals=True, match_across_pools=False, hash_algorithm='carp', entry_timeout=100, override_connection_limit=True ) p = ModuleParameters(params=args) assert p.name == 'foo' assert p.parent == '/Common/bar' assert p.match_across_services == 'no' assert p.match_across_virtuals == 'yes' assert p.match_across_pools == 'no' assert p.hash_algorithm == 'carp' assert p.entry_timeout == 100 assert p.override_connection_limit == 'yes' def test_api_parameters(self): args = load_fixture('load_ltm_profile_persistence_src_addr_1.json') p = ApiParameters(params=args) assert p.name == 'source_addr' assert p.match_across_pools == 'no' assert p.match_across_services == 'no' assert p.match_across_virtuals == 'no' class TestManager(unittest.TestCase): def setUp(self): self.spec = ArgumentSpec() def test_create(self, *args): # Configure the arguments that would be sent to the Ansible module set_module_args(dict( name='foo', match_across_virtuals='yes', parent='bar', password='password', server='localhost', user='admin' )) module = AnsibleModule( argument_spec=self.spec.argument_spec, supports_check_mode=self.spec.supports_check_mode ) mm = ModuleManager(module=module) # Override methods to force specific logic in the module to happen mm.exists = Mock(return_value=False) mm.create_on_device = Mock(return_value=True) results = mm.exec_module() assert results['changed'] is True assert results['match_across_virtuals'] == 'yes'
yakovenkodenis/rethinkdb
refs/heads/next
external/v8_3.30.33.16/tools/testrunner/local/progress.py
41
# Copyright 2012 the V8 project authors. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import json import os import sys import time from . import junit_output ABS_PATH_PREFIX = os.getcwd() + os.sep def EscapeCommand(command): parts = [] for part in command: if ' ' in part: # Escape spaces. We may need to escape more characters for this # to work properly. 
parts.append('"%s"' % part) else: parts.append(part) return " ".join(parts) class ProgressIndicator(object): def __init__(self): self.runner = None def Starting(self): pass def Done(self): pass def AboutToRun(self, test): pass def HasRun(self, test, has_unexpected_output): pass def PrintFailureHeader(self, test): if test.suite.IsNegativeTest(test): negative_marker = '[negative] ' else: negative_marker = '' print "=== %(label)s %(negative)s===" % { 'label': test.GetLabel(), 'negative': negative_marker } class SimpleProgressIndicator(ProgressIndicator): """Abstract base class for {Verbose,Dots}ProgressIndicator""" def Starting(self): print 'Running %i tests' % self.runner.total def Done(self): print for failed in self.runner.failed: self.PrintFailureHeader(failed) if failed.output.stderr: print "--- stderr ---" print failed.output.stderr.strip() if failed.output.stdout: print "--- stdout ---" print failed.output.stdout.strip() print "Command: %s" % EscapeCommand(self.runner.GetCommand(failed)) if failed.output.HasCrashed(): print "exit code: %d" % failed.output.exit_code print "--- CRASHED ---" if failed.output.HasTimedOut(): print "--- TIMEOUT ---" if len(self.runner.failed) == 0: print "===" print "=== All tests succeeded" print "===" else: print print "===" print "=== %i tests failed" % len(self.runner.failed) if self.runner.crashed > 0: print "=== %i tests CRASHED" % self.runner.crashed print "===" class VerboseProgressIndicator(SimpleProgressIndicator): def AboutToRun(self, test): print 'Starting %s...' 
% test.GetLabel() sys.stdout.flush() def HasRun(self, test, has_unexpected_output): if has_unexpected_output: if test.output.HasCrashed(): outcome = 'CRASH' else: outcome = 'FAIL' else: outcome = 'pass' print 'Done running %s: %s' % (test.GetLabel(), outcome) class DotsProgressIndicator(SimpleProgressIndicator): def HasRun(self, test, has_unexpected_output): total = self.runner.succeeded + len(self.runner.failed) if (total > 1) and (total % 50 == 1): sys.stdout.write('\n') if has_unexpected_output: if test.output.HasCrashed(): sys.stdout.write('C') sys.stdout.flush() elif test.output.HasTimedOut(): sys.stdout.write('T') sys.stdout.flush() else: sys.stdout.write('F') sys.stdout.flush() else: sys.stdout.write('.') sys.stdout.flush() class CompactProgressIndicator(ProgressIndicator): """Abstract base class for {Color,Monochrome}ProgressIndicator""" def __init__(self, templates): super(CompactProgressIndicator, self).__init__() self.templates = templates self.last_status_length = 0 self.start_time = time.time() def Done(self): self.PrintProgress('Done') print "" # Line break. def AboutToRun(self, test): self.PrintProgress(test.GetLabel()) def HasRun(self, test, has_unexpected_output): if has_unexpected_output: self.ClearLine(self.last_status_length) self.PrintFailureHeader(test) stdout = test.output.stdout.strip() if len(stdout): print self.templates['stdout'] % stdout stderr = test.output.stderr.strip() if len(stderr): print self.templates['stderr'] % stderr print "Command: %s" % EscapeCommand(self.runner.GetCommand(test)) if test.output.HasCrashed(): print "exit code: %d" % test.output.exit_code print "--- CRASHED ---" if test.output.HasTimedOut(): print "--- TIMEOUT ---" def Truncate(self, string, length): if length and (len(string) > (length - 3)): return string[:(length - 3)] + "..." 
else: return string def PrintProgress(self, name): self.ClearLine(self.last_status_length) elapsed = time.time() - self.start_time status = self.templates['status_line'] % { 'passed': self.runner.succeeded, 'remaining': (((self.runner.total - self.runner.remaining) * 100) // self.runner.total), 'failed': len(self.runner.failed), 'test': name, 'mins': int(elapsed) / 60, 'secs': int(elapsed) % 60 } status = self.Truncate(status, 78) self.last_status_length = len(status) print status, sys.stdout.flush() class ColorProgressIndicator(CompactProgressIndicator): def __init__(self): templates = { 'status_line': ("[%(mins)02i:%(secs)02i|" "\033[34m%%%(remaining) 4d\033[0m|" "\033[32m+%(passed) 4d\033[0m|" "\033[31m-%(failed) 4d\033[0m]: %(test)s"), 'stdout': "\033[1m%s\033[0m", 'stderr': "\033[31m%s\033[0m", } super(ColorProgressIndicator, self).__init__(templates) def ClearLine(self, last_line_length): print "\033[1K\r", class MonochromeProgressIndicator(CompactProgressIndicator): def __init__(self): templates = { 'status_line': ("[%(mins)02i:%(secs)02i|%%%(remaining) 4d|" "+%(passed) 4d|-%(failed) 4d]: %(test)s"), 'stdout': '%s', 'stderr': '%s', } super(MonochromeProgressIndicator, self).__init__(templates) def ClearLine(self, last_line_length): print ("\r" + (" " * last_line_length) + "\r"), class JUnitTestProgressIndicator(ProgressIndicator): def __init__(self, progress_indicator, junitout, junittestsuite): self.progress_indicator = progress_indicator self.outputter = junit_output.JUnitTestOutput(junittestsuite) if junitout: self.outfile = open(junitout, "w") else: self.outfile = sys.stdout def Starting(self): self.progress_indicator.runner = self.runner self.progress_indicator.Starting() def Done(self): self.progress_indicator.Done() self.outputter.FinishAndWrite(self.outfile) if self.outfile != sys.stdout: self.outfile.close() def AboutToRun(self, test): self.progress_indicator.AboutToRun(test) def HasRun(self, test, has_unexpected_output): 
self.progress_indicator.HasRun(test, has_unexpected_output) fail_text = "" if has_unexpected_output: stdout = test.output.stdout.strip() if len(stdout): fail_text += "stdout:\n%s\n" % stdout stderr = test.output.stderr.strip() if len(stderr): fail_text += "stderr:\n%s\n" % stderr fail_text += "Command: %s" % EscapeCommand(self.runner.GetCommand(test)) if test.output.HasCrashed(): fail_text += "exit code: %d\n--- CRASHED ---" % test.output.exit_code if test.output.HasTimedOut(): fail_text += "--- TIMEOUT ---" self.outputter.HasRunTest( [test.GetLabel()] + self.runner.context.mode_flags + test.flags, test.duration, fail_text) class JsonTestProgressIndicator(ProgressIndicator): def __init__(self, progress_indicator, json_test_results, arch, mode): self.progress_indicator = progress_indicator self.json_test_results = json_test_results self.arch = arch self.mode = mode self.results = [] def Starting(self): self.progress_indicator.runner = self.runner self.progress_indicator.Starting() def Done(self): self.progress_indicator.Done() complete_results = [] if os.path.exists(self.json_test_results): with open(self.json_test_results, "r") as f: # Buildbot might start out with an empty file. complete_results = json.loads(f.read() or "[]") complete_results.append({ "arch": self.arch, "mode": self.mode, "results": self.results, }) with open(self.json_test_results, "w") as f: f.write(json.dumps(complete_results)) def AboutToRun(self, test): self.progress_indicator.AboutToRun(test) def HasRun(self, test, has_unexpected_output): self.progress_indicator.HasRun(test, has_unexpected_output) if not has_unexpected_output: # Omit tests that run as expected. Passing tests of reruns after failures # will have unexpected_output to be reported here has well. 
return self.results.append({ "name": test.GetLabel(), "flags": test.flags, "command": EscapeCommand(self.runner.GetCommand(test)).replace( ABS_PATH_PREFIX, ""), "run": test.run, "stdout": test.output.stdout, "stderr": test.output.stderr, "exit_code": test.output.exit_code, "result": test.suite.GetOutcome(test), }) PROGRESS_INDICATORS = { 'verbose': VerboseProgressIndicator, 'dots': DotsProgressIndicator, 'color': ColorProgressIndicator, 'mono': MonochromeProgressIndicator }
pcchenxi/baseline
refs/heads/master
baselines/common/math_util.py
11
import numpy as np
import scipy.signal


def discount(x, gamma):
    """
    computes discounted sums along 0th dimension of x.

    inputs
    ------
    x: ndarray
    gamma: float

    outputs
    -------
    y: ndarray with same shape as x, satisfying

        y[t] = x[t] + gamma*x[t+1] + gamma^2*x[t+2] + ... + gamma^k x[t+k],
                where k = len(x) - t - 1
    """
    assert x.ndim >= 1
    # lfilter with a = [1, -gamma] computes y[t] = x[t] + gamma*y[t-1];
    # running it over the time-reversed signal (and reversing back) turns
    # that prefix recursion into the suffix-discounted sums above.
    return scipy.signal.lfilter([1], [1, -gamma], x[::-1], axis=0)[::-1]


def explained_variance(ypred, y):
    """
    Computes fraction of variance that ypred explains about y.
    Returns 1 - Var[y-ypred] / Var[y]

    interpretation:
        ev=0  =>  might as well have predicted zero
        ev=1  =>  perfect prediction
        ev<0  =>  worse than just predicting zero

    Returns NaN when Var[y] is exactly zero (the ratio is undefined).
    """
    assert y.ndim == 1 and ypred.ndim == 1
    vary = np.var(y)
    return np.nan if vary == 0 else 1 - np.var(y - ypred) / vary


def explained_variance_2d(ypred, y):
    """Column-wise explained variance for 2D (time x feature) arrays.

    Returns a 1D array with one explained-variance value per column;
    columns whose target variance is below 1e-10 are reported as 0.
    """
    assert y.ndim == 2 and ypred.ndim == 2
    vary = np.var(y, axis=0)
    # Bug fix: the residual variance must also be taken per column (axis=0).
    # The previous code used np.var(y - ypred) with no axis, collapsing the
    # residual to a single scalar and producing wrong per-column values.
    out = 1 - np.var(y - ypred, axis=0) / vary
    out[vary < 1e-10] = 0
    return out


def ncc(ypred, y):
    """Pearson correlation coefficient between ypred and y."""
    return np.corrcoef(ypred, y)[1, 0]


def flatten_arrays(arrs):
    """Concatenate the (flattened) elements of arrs into one 1D array."""
    return np.concatenate([arr.flat for arr in arrs])


def unflatten_vector(vec, shapes):
    """Inverse of flatten_arrays: split vec into arrays of the given shapes."""
    i = 0
    arrs = []
    for shape in shapes:
        size = np.prod(shape)
        arr = vec[i:i + size].reshape(shape)
        arrs.append(arr)
        i += size
    return arrs


def discount_with_boundaries(X, New, gamma):
    """
    X: 2d array of floats, time x features
    New: 2d array of bools, indicating when a new episode has started

    Like discount(), but the accumulated return is reset whenever New marks
    the start of a new episode, so sums never cross episode boundaries.
    """
    Y = np.zeros_like(X)
    T = X.shape[0]
    Y[T - 1] = X[T - 1]
    for t in range(T - 2, -1, -1):
        # (1 - New[t+1]) zeroes the carried-over return at episode starts.
        Y[t] = X[t] + gamma * Y[t + 1] * (1 - New[t + 1])
    return Y


def test_discount_with_boundaries():
    gamma = 0.9
    x = np.array([1.0, 2.0, 3.0, 4.0], 'float32')
    starts = [1.0, 0.0, 0.0, 1.0]
    y = discount_with_boundaries(x, starts, gamma)
    assert np.allclose(y, [
        1 + gamma * 2 + gamma**2 * 3,
        2 + gamma * 3,
        3,
        4
    ])
shirayu/slex
refs/heads/master
slex/tool/singleton.py
1
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Singleton metaclass.

http://python.g.hatena.ne.jp/nelnal_programing/20080225/1203927879
"""

__author__ = ''
__version__ = ""
__copyright__ = ""
__license__ = ""


class Singleton(type):
    """Metaclass caching one instance per distinct positional-argument tuple.

    Calling ``Cls(*args)`` returns the cached instance for ``args`` if one
    exists, otherwise constructs and caches a new one.  Keyword arguments
    are forwarded to the constructor on first creation (bug fix: they were
    previously accepted but silently dropped); note the cache key remains
    the positional arguments only.
    """

    def __init__(self, *args, **kwargs):
        type.__init__(self, *args, **kwargs)
        # Per-class cache: positional-argument tuple -> instance.
        self._instances = {}

    def __call__(self, *args, **kwargs):
        if args not in self._instances:
            # Bug fix: pass **kwargs through; the original called
            # type.__call__(self, *args) and discarded keyword arguments.
            self._instances[args] = type.__call__(self, *args, **kwargs)
        return self._instances[args]
patrickhartling/maestro
refs/heads/master
maestro/gui/environment.py
2
# Maestro is Copyright (C) 2006-2008 by Infiscape Corporation
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

import socket

import maestro.core.environment as env
import stanzastore
import connection


class GuiEnvironment(env.Environment):
    """GUI-side environment: adds the connection manager and stanza store
    on top of the core maestro Environment."""

    def __init__(self):
        # Only set up our attributes on first construction.
        # NOTE(review): assumes env.Environment sets _init_called in its
        # __init__ (singleton-style reuse) -- confirm against
        # maestro.core.environment.
        if not hasattr(self, '_init_called'):
            self.mViewPluginsHolder = None
            self.mStanzaStore = None

        env.Environment.__init__(self)

    def initialize(self, settings, opts=None, progressCB=None):
        """Initialize the environment.

        settings   -- application settings, forwarded to the base class.
        opts       -- optional parsed command-line options; if it exposes a
                      non-empty .stanzas list, those stanza files are loaded.
        progressCB -- optional progress callback forwarded to loading/scanning.
        """
        env.Environment.initialize(self, settings, opts, progressCB)
        ip_address = socket.gethostbyname(socket.gethostname())
        self.mConnectionMgr = connection.ConnectionManager(ip_address,
                                                           self.mEventManager)

        # Create a stanza store and scan for files.
        self.mStanzaStore = stanzastore.StanzaStore()
        # Bug fix: opts defaults to None, but the original dereferenced
        # opts.stanzas unconditionally and raised AttributeError when
        # initialize() was called without options.
        if opts is not None and opts.stanzas is not None and len(opts.stanzas) > 0:
            self.mStanzaStore.loadStanzas(opts.stanzas, progressCB)
        self.mStanzaStore.scan(progressCB)

        # -- Initialize the plugin holders -- #
        #self.mViewPluginsHolder = lucid.core.ViewPluginsHolder()
        #self.mViewPluginsHolder.scan()
elkingtonmcb/django
refs/heads/master
docs/_ext/djangodocs.py
321
""" Sphinx plugins for Django documentation. """ import json import os import re from docutils import nodes from docutils.parsers.rst import directives from sphinx import __version__ as sphinx_ver, addnodes from sphinx.builders.html import StandaloneHTMLBuilder from sphinx.util.compat import Directive from sphinx.util.console import bold from sphinx.util.nodes import set_source_info from sphinx.writers.html import SmartyPantsHTMLTranslator # RE for option descriptions without a '--' prefix simple_option_desc_re = re.compile( r'([-_a-zA-Z0-9]+)(\s*.*?)(?=,\s+(?:/|-|--)|$)') def setup(app): app.add_crossref_type( directivename="setting", rolename="setting", indextemplate="pair: %s; setting", ) app.add_crossref_type( directivename="templatetag", rolename="ttag", indextemplate="pair: %s; template tag" ) app.add_crossref_type( directivename="templatefilter", rolename="tfilter", indextemplate="pair: %s; template filter" ) app.add_crossref_type( directivename="fieldlookup", rolename="lookup", indextemplate="pair: %s; field lookup type", ) app.add_description_unit( directivename="django-admin", rolename="djadmin", indextemplate="pair: %s; django-admin command", parse_node=parse_django_admin_node, ) app.add_description_unit( directivename="django-admin-option", rolename="djadminopt", indextemplate="pair: %s; django-admin command-line option", parse_node=parse_django_adminopt_node, ) app.add_config_value('django_next_version', '0.0', True) app.add_directive('versionadded', VersionDirective) app.add_directive('versionchanged', VersionDirective) app.add_builder(DjangoStandaloneHTMLBuilder) # register the snippet directive app.add_directive('snippet', SnippetWithFilename) # register a node for snippet directive so that the xml parser # knows how to handle the enter/exit parsing event app.add_node(snippet_with_filename, html=(visit_snippet, depart_snippet_literal), latex=(visit_snippet_latex, depart_snippet_latex), man=(visit_snippet_literal, depart_snippet_literal), 
text=(visit_snippet_literal, depart_snippet_literal), texinfo=(visit_snippet_literal, depart_snippet_literal)) class snippet_with_filename(nodes.literal_block): """ Subclass the literal_block to override the visit/depart event handlers """ pass def visit_snippet_literal(self, node): """ default literal block handler """ self.visit_literal_block(node) def depart_snippet_literal(self, node): """ default literal block handler """ self.depart_literal_block(node) def visit_snippet(self, node): """ HTML document generator visit handler """ lang = self.highlightlang linenos = node.rawsource.count('\n') >= self.highlightlinenothreshold - 1 fname = node['filename'] highlight_args = node.get('highlight_args', {}) if 'language' in node: # code-block directives lang = node['language'] highlight_args['force'] = True if 'linenos' in node: linenos = node['linenos'] def warner(msg): self.builder.warn(msg, (self.builder.current_docname, node.line)) highlighted = self.highlighter.highlight_block(node.rawsource, lang, warn=warner, linenos=linenos, **highlight_args) starttag = self.starttag(node, 'div', suffix='', CLASS='highlight-%s' % lang) self.body.append(starttag) self.body.append('<div class="snippet-filename">%s</div>\n''' % (fname,)) self.body.append(highlighted) self.body.append('</div>\n') raise nodes.SkipNode def visit_snippet_latex(self, node): """ Latex document generator visit handler """ self.verbatim = '' def depart_snippet_latex(self, node): """ Latex document generator depart handler. 
""" code = self.verbatim.rstrip('\n') lang = self.hlsettingstack[-1][0] linenos = code.count('\n') >= self.hlsettingstack[-1][1] - 1 fname = node['filename'] highlight_args = node.get('highlight_args', {}) if 'language' in node: # code-block directives lang = node['language'] highlight_args['force'] = True if 'linenos' in node: linenos = node['linenos'] def warner(msg): self.builder.warn(msg, (self.curfilestack[-1], node.line)) hlcode = self.highlighter.highlight_block(code, lang, warn=warner, linenos=linenos, **highlight_args) self.body.append('\n{\\colorbox[rgb]{0.9,0.9,0.9}' '{\\makebox[\\textwidth][l]' '{\\small\\texttt{%s}}}}\n' % (fname,)) if self.table: hlcode = hlcode.replace('\\begin{Verbatim}', '\\begin{OriginalVerbatim}') self.table.has_problematic = True self.table.has_verbatim = True hlcode = hlcode.rstrip()[:-14] # strip \end{Verbatim} hlcode = hlcode.rstrip() + '\n' self.body.append('\n' + hlcode + '\\end{%sVerbatim}\n' % (self.table and 'Original' or '')) self.verbatim = None class SnippetWithFilename(Directive): """ The 'snippet' directive that allows to add the filename (optional) of a code snippet in the document. This is modeled after CodeBlock. """ has_content = True optional_arguments = 1 option_spec = {'filename': directives.unchanged_required} def run(self): code = '\n'.join(self.content) literal = snippet_with_filename(code, code) if self.arguments: literal['language'] = self.arguments[0] literal['filename'] = self.options['filename'] set_source_info(self, literal) return [literal] class VersionDirective(Directive): has_content = True required_arguments = 1 optional_arguments = 1 final_argument_whitespace = True option_spec = {} def run(self): if len(self.arguments) > 1: msg = """Only one argument accepted for directive '{directive_name}::'. 
Comments should be provided as content, not as an extra argument.""".format(directive_name=self.name) raise self.error(msg) env = self.state.document.settings.env ret = [] node = addnodes.versionmodified() ret.append(node) if self.arguments[0] == env.config.django_next_version: node['version'] = "Development version" else: node['version'] = self.arguments[0] node['type'] = self.name if self.content: self.state.nested_parse(self.content, self.content_offset, node) env.note_versionchange(node['type'], node['version'], node, self.lineno) return ret class DjangoHTMLTranslator(SmartyPantsHTMLTranslator): """ Django-specific reST to HTML tweaks. """ # Don't use border=1, which docutils does by default. def visit_table(self, node): self.context.append(self.compact_p) self.compact_p = True self._table_row_index = 0 # Needed by Sphinx self.body.append(self.starttag(node, 'table', CLASS='docutils')) def depart_table(self, node): self.compact_p = self.context.pop() self.body.append('</table>\n') def visit_desc_parameterlist(self, node): self.body.append('(') # by default sphinx puts <big> around the "(" self.first_param = 1 self.optional_param_level = 0 self.param_separator = node.child_text_separator self.required_params_left = sum([isinstance(c, addnodes.desc_parameter) for c in node.children]) def depart_desc_parameterlist(self, node): self.body.append(')') if sphinx_ver < '1.0.8': # # Don't apply smartypants to literal blocks # def visit_literal_block(self, node): self.no_smarty += 1 SmartyPantsHTMLTranslator.visit_literal_block(self, node) def depart_literal_block(self, node): SmartyPantsHTMLTranslator.depart_literal_block(self, node) self.no_smarty -= 1 # # Turn the "new in version" stuff (versionadded/versionchanged) into a # better callout -- the Sphinx default is just a little span, # which is a bit less obvious that I'd like. # # FIXME: these messages are all hardcoded in English. 
We need to change # that to accommodate other language docs, but I can't work out how to make # that work. # version_text = { 'versionchanged': 'Changed in Django %s', 'versionadded': 'New in Django %s', } def visit_versionmodified(self, node): self.body.append( self.starttag(node, 'div', CLASS=node['type']) ) version_text = self.version_text.get(node['type']) if version_text: title = "%s%s" % ( version_text % node['version'], ":" if len(node) else "." ) self.body.append('<span class="title">%s</span> ' % title) def depart_versionmodified(self, node): self.body.append("</div>\n") # Give each section a unique ID -- nice for custom CSS hooks def visit_section(self, node): old_ids = node.get('ids', []) node['ids'] = ['s-' + i for i in old_ids] node['ids'].extend(old_ids) SmartyPantsHTMLTranslator.visit_section(self, node) node['ids'] = old_ids def parse_django_admin_node(env, sig, signode): command = sig.split(' ')[0] env._django_curr_admin_command = command title = "django-admin %s" % sig signode += addnodes.desc_name(title, title) return sig def parse_django_adminopt_node(env, sig, signode): """A copy of sphinx.directives.CmdoptionDesc.parse_signature()""" from sphinx.domains.std import option_desc_re count = 0 firstname = '' for m in option_desc_re.finditer(sig): optname, args = m.groups() if count: signode += addnodes.desc_addname(', ', ', ') signode += addnodes.desc_name(optname, optname) signode += addnodes.desc_addname(args, args) if not count: firstname = optname count += 1 if not count: for m in simple_option_desc_re.finditer(sig): optname, args = m.groups() if count: signode += addnodes.desc_addname(', ', ', ') signode += addnodes.desc_name(optname, optname) signode += addnodes.desc_addname(args, args) if not count: firstname = optname count += 1 if not firstname: raise ValueError return firstname class DjangoStandaloneHTMLBuilder(StandaloneHTMLBuilder): """ Subclass to add some extra things we need. 
""" name = 'djangohtml' def finish(self): super(DjangoStandaloneHTMLBuilder, self).finish() self.info(bold("writing templatebuiltins.js...")) xrefs = self.env.domaindata["std"]["objects"] templatebuiltins = { "ttags": [n for ((t, n), (l, a)) in xrefs.items() if t == "templatetag" and l == "ref/templates/builtins"], "tfilters": [n for ((t, n), (l, a)) in xrefs.items() if t == "templatefilter" and l == "ref/templates/builtins"], } outfilename = os.path.join(self.outdir, "templatebuiltins.js") with open(outfilename, 'w') as fp: fp.write('var django_template_builtins = ') json.dump(templatebuiltins, fp) fp.write(';\n')
dysya92/monkeys
refs/heads/master
flask/lib/python2.7/site-packages/pbr/tests/testpackage/setup.py
139
#!/usr/bin/env python # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import setuptools setuptools.setup( setup_requires=['pbr'], pbr=True, )