Dataset schema (one row per source file; ranges as reported by the dump):

  code        string   lengths 2 to 1.05M
  repo_name   string   lengths 5 to 104
  path        string   lengths 4 to 251
  language    string   1 class (Python in every row shown)
  license     string   15 classes
  size        int32    values 2 to 1.05M
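The rows below follow this schema in column order: code first, then repo_name, path, language, license, size. Assuming this dump is a Hugging Face `datasets` export (the schema summary matches that viewer's format, but this is an assumption), a minimal iteration sketch looks like the following; the dataset ID is a hypothetical placeholder:

from datasets import load_dataset

# "user/code-dataset" is a hypothetical ID standing in for this dataset.
ds = load_dataset("user/code-dataset", split="train")
for row in ds.select(range(3)):
    # Field names mirror the schema above.
    print(row["repo_name"], row["path"], row["license"], row["size"])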
from Source import Source
from Components.Element import cached
from Components.SystemInfo import SystemInfo
from enigma import eServiceReference

StreamServiceList = []


class StreamService(Source):
    def __init__(self, navcore):
        Source.__init__(self)
        self.ref = None
        self.__service = None
        self.navcore = navcore

    def serviceEvent(self, event):
        pass

    @cached
    def getService(self):
        return self.__service

    service = property(getService)

    def handleCommand(self, cmd):
        print "[StreamService] handle command", cmd
        self.ref = eServiceReference(cmd)

    def recordEvent(self, service, event):
        if service is self.__service:
            return
        print "[StreamService] RECORD event for us:", service
        self.changed((self.CHANGED_ALL, ))

    def execBegin(self):
        if self.ref is None:
            print "[StreamService] has no service ref set"
            return
        print "[StreamService] execBegin", self.ref.toString()
        if SystemInfo["CanNotDoSimultaneousTranscodeAndPIP"]:
            # Boxes that cannot transcode and run PiP at the same time need
            # PiP torn down before the stream starts.
            from Screens.InfoBar import InfoBar
            if InfoBar.instance and hasattr(InfoBar.instance.session, 'pipshown') and InfoBar.instance.session.pipshown:
                hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP()
                print "[StreamService] try to disable pip before start stream"
                if hasattr(InfoBar.instance.session, 'pip'):
                    del InfoBar.instance.session.pip
                InfoBar.instance.session.pipshown = False
        self.__service = self.navcore.recordService(self.ref)
        self.navcore.record_event.append(self.recordEvent)
        if self.__service is not None:
            if self.__service.__deref__() not in StreamServiceList:
                StreamServiceList.append(self.__service.__deref__())
            self.__service.prepareStreaming()
            self.__service.start()

    def execEnd(self):
        print "[StreamService] execEnd", self.ref.toString()
        self.navcore.record_event.remove(self.recordEvent)
        if self.__service is not None:
            if self.__service.__deref__() in StreamServiceList:
                StreamServiceList.remove(self.__service.__deref__())
            self.navcore.stopRecordService(self.__service)
            self.__service = None
        self.ref = None
repo_name: Dima73/enigma2
path: lib/python/Components/Sources/StreamService.py
language: Python
license: gpl-2.0
size: 2,074
#!/usr/bin/python # -*- coding: utf-8 -*- # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = r''' --- module: aci_tenant_span_dst_group short_description: Manage SPAN destination groups (span:DestGrp) description: - Manage SPAN destination groups on Cisco ACI fabrics. notes: - The C(tenant) used must exist before using this module in your playbook. The M(aci_tenant) module can be used for this. - More information about the internal APIC class B(span:DestGrp) from L(the APIC Management Information Model reference,https://developer.cisco.com/docs/apic-mim-ref/). author: - Dag Wieers (@dagwieers) version_added: '2.4' options: dst_group: description: - The name of the SPAN destination group. required: yes aliases: [ name ] description: description: - The description of the SPAN destination group. aliases: [ descr ] tenant: description: - The name of the tenant. required: yes aliases: [ tenant_name ] state: description: - Use C(present) or C(absent) for adding or removing. - Use C(query) for listing an object or multiple objects. choices: [ absent, present, query ] default: present extends_documentation_fragment: aci ''' # FIXME: Add more, better examples EXAMPLES = r''' - aci_tenant_span_dst_group: host: apic username: admin password: SomeSecretPassword dst_group: '{{ dst_group }}' description: '{{ descr }}' tenant: '{{ tenant }}' ''' RETURN = r''' current: description: The existing configuration from the APIC after the module has finished returned: success type: list sample: [ { "fvTenant": { "attributes": { "descr": "Production environment", "dn": "uni/tn-production", "name": "production", "nameAlias": "", "ownerKey": "", "ownerTag": "" } } } ] error: description: The error information as returned from the APIC returned: failure type: dict sample: { "code": "122", "text": "unknown managed object class foo" } raw: description: The raw output returned by the APIC REST API (xml or json) returned: parse error type: string sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>' sent: description: The actual/minimal configuration pushed to the APIC returned: info type: list sample: { "fvTenant": { "attributes": { "descr": "Production environment" } } } previous: description: The original configuration from the APIC before the module has started returned: info type: list sample: [ { "fvTenant": { "attributes": { "descr": "Production", "dn": "uni/tn-production", "name": "production", "nameAlias": "", "ownerKey": "", "ownerTag": "" } } } ] proposed: description: The assembled configuration from the user-provided parameters returned: info type: dict sample: { "fvTenant": { "attributes": { "descr": "Production environment", "name": "production" } } } filter_string: description: The filter string used for the request returned: failure or debug type: string sample: ?rsp-prop-include=config-only method: description: The HTTP method used for the request to the APIC returned: failure or debug type: string sample: POST response: description: The HTTP response from the APIC returned: failure or debug type: string sample: OK (30 bytes) status: description: The HTTP status from the APIC returned: failure or debug type: int sample: 200 url: description: The HTTP url used for the request to 
the APIC returned: failure or debug type: string sample: https://10.11.12.13/api/mo/uni/tn-production.json ''' from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec from ansible.module_utils.basic import AnsibleModule def main(): argument_spec = aci_argument_spec() argument_spec.update( dst_group=dict(type='str', required=False, aliases=['name']), # Not required for querying all objects tenant=dict(type='str', required=False, aliases=['tenant_name']), # Not required for querying all objects description=dict(type='str', aliases=['descr']), state=dict(type='str', default='present', choices=['absent', 'present', 'query']), ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, required_if=[ ['state', 'absent', ['dst_group', 'tenant']], ['state', 'present', ['dst_group', 'tenant']], ], ) dst_group = module.params['dst_group'] description = module.params['description'] state = module.params['state'] tenant = module.params['tenant'] aci = ACIModule(module) aci.construct_url( root_class=dict( aci_class='fvTenant', aci_rn='tn-{0}'.format(tenant), filter_target='eq(fvTenant.name, "{0}")'.format(tenant), module_object=tenant, ), subclass_1=dict( aci_class='spanDestGrp', aci_rn='destgrp-{0}'.format(dst_group), filter_target='eq(spanDestGrp.name, "{0}")'.format(dst_group), module_object=dst_group, ), ) aci.get_existing() if state == 'present': aci.payload( aci_class='spanDestGrp', class_config=dict( name=dst_group, descr=description, ), ) aci.get_diff(aci_class='spanDestGrp') aci.post_config() elif state == 'absent': aci.delete_config() aci.exit_json() if __name__ == "__main__": main()
repo_name: hryamzik/ansible
path: lib/ansible/modules/network/aci/aci_tenant_span_dst_group.py
language: Python
license: gpl-3.0
size: 6,414
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo import api, fields, models


class ResConfigSettings(models.TransientModel):
    _inherit = "res.config.settings"

    google_drive_authorization_code = fields.Char(string='Authorization Code')
    google_drive_uri = fields.Char(compute='_compute_drive_uri', string='URI',
                                   help="The URL to generate the authorization code from Google")

    @api.depends('google_drive_authorization_code')
    def _compute_drive_uri(self):
        google_drive_uri = self.env['google.service']._get_google_token_uri(
            'drive', scope=self.env['google.drive.config'].get_google_scope())
        for config in self:
            config.google_drive_uri = google_drive_uri

    @api.model
    def get_values(self):
        res = super(ResConfigSettings, self).get_values()
        res.update(
            google_drive_authorization_code=self.env['ir.config_parameter'].sudo().get_param('google_drive_authorization_code'),
        )
        return res

    def set_values(self):
        super(ResConfigSettings, self).set_values()
        params = self.env['ir.config_parameter'].sudo()
        authorization_code = self.google_drive_authorization_code
        refresh_token = False
        # Only exchange the code for a new refresh token when it changed.
        if authorization_code and authorization_code != params.get_param('google_drive_authorization_code'):
            refresh_token = self.env['google.service'].generate_refresh_token('drive', authorization_code)
        params.set_param('google_drive_authorization_code', authorization_code)
        params.set_param('google_drive_refresh_token', refresh_token)
repo_name: Aravinthu/odoo
path: addons/google_drive/models/res_config_settings.py
language: Python
license: agpl-3.0
size: 1,634
#  Copyright 2008-2015 Nokia Solutions and Networks
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.

"""Implementation of the public test library logging API.

This is exposed via :py:mod:`robot.api.logger`. Implementation must reside
here to avoid cyclic imports.
"""

import sys
import threading

from robot.errors import DataError
from robot.utils import unic, encode_output

from .logger import LOGGER
from .loggerhelper import Message


LOGGING_THREADS = ('MainThread', 'RobotFrameworkTimeoutThread')


def write(msg, level, html=False):
    # Callable messages allow lazy logging internally, but we don't want to
    # expose this functionality publicly. See the following issue for details:
    # http://code.google.com/p/robotframework/issues/detail?id=1505
    if callable(msg):
        msg = unic(msg)
    if level.upper() not in ('TRACE', 'DEBUG', 'INFO', 'HTML', 'WARN', 'ERROR'):
        raise DataError("Invalid log level '%s'." % level)
    if threading.currentThread().getName() in LOGGING_THREADS:
        LOGGER.log_message(Message(msg, level, html))


def trace(msg, html=False):
    write(msg, 'TRACE', html)


def debug(msg, html=False):
    write(msg, 'DEBUG', html)


def info(msg, html=False, also_console=False):
    write(msg, 'INFO', html)
    if also_console:
        console(msg)


def warn(msg, html=False):
    write(msg, 'WARN', html)


def error(msg, html=False):
    write(msg, 'ERROR', html)


def console(msg, newline=True, stream='stdout'):
    msg = unic(msg)
    if newline:
        msg += '\n'
    stream = sys.__stdout__ if stream.lower() != 'stderr' else sys.__stderr__
    stream.write(encode_output(msg))
    stream.flush()
repo_name: caio2k/RIDE
path: src/robotide/lib/robot/output/librarylogger.py
language: Python
license: apache-2.0
size: 2,175
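Since the file above backs the public robot.api.logger interface, a short usage sketch may help; inside a running Robot Framework test library, each of these calls routes through the write() implementation shown above (a sketch, assuming Robot Framework is installed):

from robot.api import logger

logger.info('visible in the log file')
logger.info('also echoed to stdout', also_console=True)  # routes via console()
logger.debug('low-level detail')
logger.console('written straight to the console, bypassing the log')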
import warnings from django.conf import settings from django.contrib.auth.models import User, Group, Permission, AnonymousUser from django.contrib.contenttypes.models import ContentType from django.test import TestCase class BackendTest(TestCase): backend = 'django.contrib.auth.backends.ModelBackend' def setUp(self): self.curr_auth = settings.AUTHENTICATION_BACKENDS settings.AUTHENTICATION_BACKENDS = (self.backend,) User.objects.create_user('test', '[email protected]', 'test') def tearDown(self): settings.AUTHENTICATION_BACKENDS = self.curr_auth def test_has_perm(self): user = User.objects.get(username='test') self.assertEqual(user.has_perm('auth.test'), False) user.is_staff = True user.save() self.assertEqual(user.has_perm('auth.test'), False) user.is_superuser = True user.save() self.assertEqual(user.has_perm('auth.test'), True) user.is_staff = False user.is_superuser = False user.save() self.assertEqual(user.has_perm('auth.test'), False) user.is_staff = True user.is_superuser = True user.is_active = False user.save() self.assertEqual(user.has_perm('auth.test'), False) def test_custom_perms(self): user = User.objects.get(username='test') content_type=ContentType.objects.get_for_model(Group) perm = Permission.objects.create(name='test', content_type=content_type, codename='test') user.user_permissions.add(perm) user.save() # reloading user to purge the _perm_cache user = User.objects.get(username='test') self.assertEqual(user.get_all_permissions() == set([u'auth.test']), True) self.assertEqual(user.get_group_permissions(), set([])) self.assertEqual(user.has_module_perms('Group'), False) self.assertEqual(user.has_module_perms('auth'), True) perm = Permission.objects.create(name='test2', content_type=content_type, codename='test2') user.user_permissions.add(perm) user.save() perm = Permission.objects.create(name='test3', content_type=content_type, codename='test3') user.user_permissions.add(perm) user.save() user = User.objects.get(username='test') self.assertEqual(user.get_all_permissions(), set([u'auth.test2', u'auth.test', u'auth.test3'])) self.assertEqual(user.has_perm('test'), False) self.assertEqual(user.has_perm('auth.test'), True) self.assertEqual(user.has_perms(['auth.test2', 'auth.test3']), True) perm = Permission.objects.create(name='test_group', content_type=content_type, codename='test_group') group = Group.objects.create(name='test_group') group.permissions.add(perm) group.save() user.groups.add(group) user = User.objects.get(username='test') exp = set([u'auth.test2', u'auth.test', u'auth.test3', u'auth.test_group']) self.assertEqual(user.get_all_permissions(), exp) self.assertEqual(user.get_group_permissions(), set([u'auth.test_group'])) self.assertEqual(user.has_perms(['auth.test3', 'auth.test_group']), True) user = AnonymousUser() self.assertEqual(user.has_perm('test'), False) self.assertEqual(user.has_perms(['auth.test2', 'auth.test3']), False) def test_has_no_object_perm(self): """Regressiontest for #12462""" user = User.objects.get(username='test') content_type=ContentType.objects.get_for_model(Group) perm = Permission.objects.create(name='test', content_type=content_type, codename='test') user.user_permissions.add(perm) user.save() self.assertEqual(user.has_perm('auth.test', 'object'), False) self.assertEqual(user.get_all_permissions('object'), set([])) self.assertEqual(user.has_perm('auth.test'), True) self.assertEqual(user.get_all_permissions(), set(['auth.test'])) class TestObj(object): pass class SimpleRowlevelBackend(object): supports_object_permissions = True # 
This class also supports tests for anonymous user permissions, # via subclasses which just set the 'supports_anonymous_user' attribute. def has_perm(self, user, perm, obj=None): if not obj: return # We only support row level perms if isinstance(obj, TestObj): if user.username == 'test2': return True elif user.is_anonymous() and perm == 'anon': # not reached due to supports_anonymous_user = False return True return False def has_module_perms(self, user, app_label): return app_label == "app1" def get_all_permissions(self, user, obj=None): if not obj: return [] # We only support row level perms if not isinstance(obj, TestObj): return ['none'] if user.is_anonymous(): return ['anon'] if user.username == 'test2': return ['simple', 'advanced'] else: return ['simple'] def get_group_permissions(self, user, obj=None): if not obj: return # We only support row level perms if not isinstance(obj, TestObj): return ['none'] if 'test_group' in [group.name for group in user.groups.all()]: return ['group_perm'] else: return ['none'] class RowlevelBackendTest(TestCase): """ Tests for auth backend that supports object level permissions """ backend = 'django.contrib.auth.tests.auth_backends.SimpleRowlevelBackend' def setUp(self): self.curr_auth = settings.AUTHENTICATION_BACKENDS settings.AUTHENTICATION_BACKENDS = tuple(self.curr_auth) + (self.backend,) self.user1 = User.objects.create_user('test', '[email protected]', 'test') self.user2 = User.objects.create_user('test2', '[email protected]', 'test') self.user3 = User.objects.create_user('test3', '[email protected]', 'test') self.save_warnings_state() warnings.filterwarnings('ignore', category=DeprecationWarning, module='django.contrib.auth') def tearDown(self): settings.AUTHENTICATION_BACKENDS = self.curr_auth self.restore_warnings_state() def test_has_perm(self): self.assertEqual(self.user1.has_perm('perm', TestObj()), False) self.assertEqual(self.user2.has_perm('perm', TestObj()), True) self.assertEqual(self.user2.has_perm('perm'), False) self.assertEqual(self.user2.has_perms(['simple', 'advanced'], TestObj()), True) self.assertEqual(self.user3.has_perm('perm', TestObj()), False) self.assertEqual(self.user3.has_perm('anon', TestObj()), False) self.assertEqual(self.user3.has_perms(['simple', 'advanced'], TestObj()), False) def test_get_all_permissions(self): self.assertEqual(self.user1.get_all_permissions(TestObj()), set(['simple'])) self.assertEqual(self.user2.get_all_permissions(TestObj()), set(['simple', 'advanced'])) self.assertEqual(self.user2.get_all_permissions(), set([])) def test_get_group_permissions(self): content_type=ContentType.objects.get_for_model(Group) group = Group.objects.create(name='test_group') self.user3.groups.add(group) self.assertEqual(self.user3.get_group_permissions(TestObj()), set(['group_perm'])) class AnonymousUserBackend(SimpleRowlevelBackend): supports_anonymous_user = True class NoAnonymousUserBackend(SimpleRowlevelBackend): supports_anonymous_user = False class AnonymousUserBackendTest(TestCase): """ Tests for AnonymousUser delegating to backend if it has 'supports_anonymous_user' = True """ backend = 'django.contrib.auth.tests.auth_backends.AnonymousUserBackend' def setUp(self): self.curr_auth = settings.AUTHENTICATION_BACKENDS settings.AUTHENTICATION_BACKENDS = (self.backend,) self.user1 = AnonymousUser() def tearDown(self): settings.AUTHENTICATION_BACKENDS = self.curr_auth def test_has_perm(self): self.assertEqual(self.user1.has_perm('perm', TestObj()), False) self.assertEqual(self.user1.has_perm('anon', TestObj()), 
True) def test_has_perms(self): self.assertEqual(self.user1.has_perms(['anon'], TestObj()), True) self.assertEqual(self.user1.has_perms(['anon', 'perm'], TestObj()), False) def test_has_module_perms(self): self.assertEqual(self.user1.has_module_perms("app1"), True) self.assertEqual(self.user1.has_module_perms("app2"), False) def test_get_all_permissions(self): self.assertEqual(self.user1.get_all_permissions(TestObj()), set(['anon'])) class NoAnonymousUserBackendTest(TestCase): """ Tests that AnonymousUser does not delegate to backend if it has 'supports_anonymous_user' = False """ backend = 'django.contrib.auth.tests.auth_backends.NoAnonymousUserBackend' def setUp(self): self.curr_auth = settings.AUTHENTICATION_BACKENDS settings.AUTHENTICATION_BACKENDS = tuple(self.curr_auth) + (self.backend,) self.user1 = AnonymousUser() def tearDown(self): settings.AUTHENTICATION_BACKENDS = self.curr_auth def test_has_perm(self): self.assertEqual(self.user1.has_perm('perm', TestObj()), False) self.assertEqual(self.user1.has_perm('anon', TestObj()), False) def test_has_perms(self): self.assertEqual(self.user1.has_perms(['anon'], TestObj()), False) def test_has_module_perms(self): self.assertEqual(self.user1.has_module_perms("app1"), False) self.assertEqual(self.user1.has_module_perms("app2"), False) def test_get_all_permissions(self): self.assertEqual(self.user1.get_all_permissions(TestObj()), set())
repo_name: towerjoo/mindsbook
path: django/contrib/auth/tests/auth_backends.py
language: Python
license: bsd-3-clause
size: 9,952
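The row-level backend protocol these tests exercise is small; below is a minimal sketch of a custom backend following the same old-style convention as SimpleRowlevelBackend above. ReadOnlyBackend and its 'view' rule are hypothetical, not part of the source:

class ReadOnlyBackend(object):
    # Advertise object-permission support, as SimpleRowlevelBackend does.
    supports_object_permissions = True
    supports_anonymous_user = False

    def authenticate(self, username=None, password=None):
        # This backend only answers permission queries; it never logs anyone in.
        return None

    def has_perm(self, user, perm, obj=None):
        # Hypothetical rule: staff members may 'view' any concrete object.
        if obj is None:
            return False
        return perm == 'view' and user.is_staff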
from SimpleCV import Camera, Image, Color, TemporalColorTracker, ROI, Display
import matplotlib.pyplot as plt

cam = Camera(1)
tct = TemporalColorTracker()
img = cam.getImage()
roi = ROI(img.width * 0.45, img.height * 0.45, img.width * 0.1, img.height * 0.1, img)
tct.train(cam, roi=roi, maxFrames=250, pkWndw=20)

# Matplotlib example plotting
plotc = {'r': 'r', 'g': 'g', 'b': 'b', 'i': 'm', 'h': 'y'}
for key in tct.data.keys():
    plt.plot(tct.data[key], plotc[key])
    for pt in tct.peaks[key]:
        plt.plot(pt[0], pt[1], 'r*')
    for pt in tct.valleys[key]:
        plt.plot(pt[0], pt[1], 'b*')
plt.grid()
plt.show()

disp = Display((800, 600))
while disp.isNotDone():
    img = cam.getImage()
    result = tct.recognize(img)
    plt.plot(tct._rtData, 'r-')
    plt.grid()
    plt.savefig('temp.png')
    plt.clf()
    plotImg = Image('temp.png')
    roi = ROI(img.width * 0.45, img.height * 0.45, img.width * 0.1, img.height * 0.1, img)
    roi.draw(width=3)
    img.drawText(str(result), 20, 20, color=Color.RED, fontsize=32)
    img = img.applyLayers()
    img = img.blit(plotImg.resize(w=img.width, h=img.height), pos=(0, 0), alpha=0.5)
    img.save(disp)
repo_name: beni55/SimpleCV
path: SimpleCV/MachineLearning/TestTemporalColorTracker.py
language: Python
license: bsd-3-clause
size: 1,136
"""Extension argument processing code """ __all__ = ['Message', 'NamespaceMap', 'no_default', 'registerNamespaceAlias', 'OPENID_NS', 'BARE_NS', 'OPENID1_NS', 'OPENID2_NS', 'SREG_URI', 'IDENTIFIER_SELECT'] import copy import warnings import urllib from openid import oidutil from openid import kvform try: ElementTree = oidutil.importElementTree() except ImportError: # No elementtree found, so give up, but don't fail to import, # since we have fallbacks. ElementTree = None # This doesn't REALLY belong here, but where is better? IDENTIFIER_SELECT = 'http://specs.openid.net/auth/2.0/identifier_select' # URI for Simple Registration extension, the only commonly deployed # OpenID 1.x extension, and so a special case SREG_URI = 'http://openid.net/sreg/1.0' # The OpenID 1.X namespace URI OPENID1_NS = 'http://openid.net/signon/1.0' THE_OTHER_OPENID1_NS = 'http://openid.net/signon/1.1' OPENID1_NAMESPACES = OPENID1_NS, THE_OTHER_OPENID1_NS # The OpenID 2.0 namespace URI OPENID2_NS = 'http://specs.openid.net/auth/2.0' # The namespace consisting of pairs with keys that are prefixed with # "openid." but not in another namespace. NULL_NAMESPACE = oidutil.Symbol('Null namespace') # The null namespace, when it is an allowed OpenID namespace OPENID_NS = oidutil.Symbol('OpenID namespace') # The top-level namespace, excluding all pairs with keys that start # with "openid." BARE_NS = oidutil.Symbol('Bare namespace') # Limit, in bytes, of identity provider and return_to URLs, including # response payload. See OpenID 1.1 specification, Appendix D. OPENID1_URL_LIMIT = 2047 # All OpenID protocol fields. Used to check namespace aliases. OPENID_PROTOCOL_FIELDS = [ 'ns', 'mode', 'error', 'return_to', 'contact', 'reference', 'signed', 'assoc_type', 'session_type', 'dh_modulus', 'dh_gen', 'dh_consumer_public', 'claimed_id', 'identity', 'realm', 'invalidate_handle', 'op_endpoint', 'response_nonce', 'sig', 'assoc_handle', 'trust_root', 'openid', ] class UndefinedOpenIDNamespace(ValueError): """Raised if the generic OpenID namespace is accessed when there is no OpenID namespace set for this message.""" class InvalidOpenIDNamespace(ValueError): """Raised if openid.ns is not a recognized value. For recognized values, see L{Message.allowed_openid_namespaces} """ def __str__(self): s = "Invalid OpenID Namespace" if self.args: s += " %r" % (self.args[0],) return s # Sentinel used for Message implementation to indicate that getArg # should raise an exception instead of returning a default. no_default = object() # Global namespace / alias registration map. See # registerNamespaceAlias. registered_aliases = {} class NamespaceAliasRegistrationError(Exception): """ Raised when an alias or namespace URI has already been registered. """ pass def registerNamespaceAlias(namespace_uri, alias): """ Registers a (namespace URI, alias) mapping in a global namespace alias map. Raises NamespaceAliasRegistrationError if either the namespace URI or alias has already been registered with a different value. This function is required if you want to use a namespace with an OpenID 1 message. 
""" global registered_aliases if registered_aliases.get(alias) == namespace_uri: return if namespace_uri in registered_aliases.values(): raise NamespaceAliasRegistrationError, \ 'Namespace uri %r already registered' % (namespace_uri,) if alias in registered_aliases: raise NamespaceAliasRegistrationError, \ 'Alias %r already registered' % (alias,) registered_aliases[alias] = namespace_uri class Message(object): """ In the implementation of this object, None represents the global namespace as well as a namespace with no key. @cvar namespaces: A dictionary specifying specific namespace-URI to alias mappings that should be used when generating namespace aliases. @ivar ns_args: two-level dictionary of the values in this message, grouped by namespace URI. The first level is the namespace URI. """ allowed_openid_namespaces = [OPENID1_NS, THE_OTHER_OPENID1_NS, OPENID2_NS] def __init__(self, openid_namespace=None): """Create an empty Message. @raises InvalidOpenIDNamespace: if openid_namespace is not in L{Message.allowed_openid_namespaces} """ self.args = {} self.namespaces = NamespaceMap() if openid_namespace is None: self._openid_ns_uri = None else: implicit = openid_namespace in OPENID1_NAMESPACES self.setOpenIDNamespace(openid_namespace, implicit) def fromPostArgs(cls, args): """Construct a Message containing a set of POST arguments. """ self = cls() # Partition into "openid." args and bare args openid_args = {} for key, value in args.items(): if isinstance(value, list): raise TypeError("query dict must have one value for each key, " "not lists of values. Query is %r" % (args,)) try: prefix, rest = key.split('.', 1) except ValueError: prefix = None if prefix != 'openid': self.args[(BARE_NS, key)] = value else: openid_args[rest] = value self._fromOpenIDArgs(openid_args) return self fromPostArgs = classmethod(fromPostArgs) def fromOpenIDArgs(cls, openid_args): """Construct a Message from a parsed KVForm message. @raises InvalidOpenIDNamespace: if openid.ns is not in L{Message.allowed_openid_namespaces} """ self = cls() self._fromOpenIDArgs(openid_args) return self fromOpenIDArgs = classmethod(fromOpenIDArgs) def _fromOpenIDArgs(self, openid_args): ns_args = [] # Resolve namespaces for rest, value in openid_args.iteritems(): try: ns_alias, ns_key = rest.split('.', 1) except ValueError: ns_alias = NULL_NAMESPACE ns_key = rest if ns_alias == 'ns': self.namespaces.addAlias(value, ns_key) elif ns_alias == NULL_NAMESPACE and ns_key == 'ns': # null namespace self.setOpenIDNamespace(value, False) else: ns_args.append((ns_alias, ns_key, value)) # Implicitly set an OpenID namespace definition (OpenID 1) if not self.getOpenIDNamespace(): self.setOpenIDNamespace(OPENID1_NS, True) # Actually put the pairs into the appropriate namespaces for (ns_alias, ns_key, value) in ns_args: ns_uri = self.namespaces.getNamespaceURI(ns_alias) if ns_uri is None: # we found a namespaced arg without a namespace URI defined ns_uri = self._getDefaultNamespace(ns_alias) if ns_uri is None: ns_uri = self.getOpenIDNamespace() ns_key = '%s.%s' % (ns_alias, ns_key) else: self.namespaces.addAlias(ns_uri, ns_alias, implicit=True) self.setArg(ns_uri, ns_key, value) def _getDefaultNamespace(self, mystery_alias): """OpenID 1 compatibility: look for a default namespace URI to use for this alias.""" global registered_aliases # Only try to map an alias to a default if it's an # OpenID 1.x message. 
if self.isOpenID1(): return registered_aliases.get(mystery_alias) else: return None def setOpenIDNamespace(self, openid_ns_uri, implicit): """Set the OpenID namespace URI used in this message. @raises InvalidOpenIDNamespace: if the namespace is not in L{Message.allowed_openid_namespaces} """ if openid_ns_uri not in self.allowed_openid_namespaces: raise InvalidOpenIDNamespace(openid_ns_uri) self.namespaces.addAlias(openid_ns_uri, NULL_NAMESPACE, implicit) self._openid_ns_uri = openid_ns_uri def getOpenIDNamespace(self): return self._openid_ns_uri def isOpenID1(self): return self.getOpenIDNamespace() in OPENID1_NAMESPACES def isOpenID2(self): return self.getOpenIDNamespace() == OPENID2_NS def fromKVForm(cls, kvform_string): """Create a Message from a KVForm string""" return cls.fromOpenIDArgs(kvform.kvToDict(kvform_string)) fromKVForm = classmethod(fromKVForm) def copy(self): return copy.deepcopy(self) def toPostArgs(self): """Return all arguments with openid. in front of namespaced arguments. """ args = {} # Add namespace definitions to the output for ns_uri, alias in self.namespaces.iteritems(): if self.namespaces.isImplicit(ns_uri): continue if alias == NULL_NAMESPACE: ns_key = 'openid.ns' else: ns_key = 'openid.ns.' + alias args[ns_key] = oidutil.toUnicode(ns_uri).encode('UTF-8') for (ns_uri, ns_key), value in self.args.iteritems(): key = self.getKey(ns_uri, ns_key) # Ensure the resulting value is an UTF-8 encoded bytestring. args[key] = oidutil.toUnicode(value).encode('UTF-8') return args def toArgs(self): """Return all namespaced arguments, failing if any non-namespaced arguments exist.""" # FIXME - undocumented exception post_args = self.toPostArgs() kvargs = {} for k, v in post_args.iteritems(): if not k.startswith('openid.'): raise ValueError( 'This message can only be encoded as a POST, because it ' 'contains arguments that are not prefixed with "openid."') else: kvargs[k[7:]] = v return kvargs def toFormMarkup(self, action_url, form_tag_attrs=None, submit_text=u"Continue"): """Generate HTML form markup that contains the values in this message, to be HTTP POSTed as x-www-form-urlencoded UTF-8. @param action_url: The URL to which the form will be POSTed @type action_url: str @param form_tag_attrs: Dictionary of attributes to be added to the form tag. 'accept-charset' and 'enctype' have defaults that can be overridden. If a value is supplied for 'action' or 'method', it will be replaced. @type form_tag_attrs: {unicode: unicode} @param submit_text: The text that will appear on the submit button for this form. @type submit_text: unicode @returns: A string containing (X)HTML markup for a form that encodes the values in this Message object. 
@rtype: str or unicode """ if ElementTree is None: raise RuntimeError('This function requires ElementTree.') assert action_url is not None form = ElementTree.Element(u'form') if form_tag_attrs: for name, attr in form_tag_attrs.iteritems(): form.attrib[name] = attr form.attrib[u'action'] = oidutil.toUnicode(action_url) form.attrib[u'method'] = u'post' form.attrib[u'accept-charset'] = u'UTF-8' form.attrib[u'enctype'] = u'application/x-www-form-urlencoded' for name, value in self.toPostArgs().iteritems(): attrs = {u'type': u'hidden', u'name': oidutil.toUnicode(name), u'value': oidutil.toUnicode(value)} form.append(ElementTree.Element(u'input', attrs)) submit = ElementTree.Element(u'input', {u'type':'submit', u'value':oidutil.toUnicode(submit_text)}) form.append(submit) return ElementTree.tostring(form, encoding='utf-8') def toURL(self, base_url): """Generate a GET URL with the parameters in this message attached as query parameters.""" return oidutil.appendArgs(base_url, self.toPostArgs()) def toKVForm(self): """Generate a KVForm string that contains the parameters in this message. This will fail if the message contains arguments outside of the 'openid.' prefix. """ return kvform.dictToKV(self.toArgs()) def toURLEncoded(self): """Generate an x-www-urlencoded string""" args = self.toPostArgs().items() args.sort() return urllib.urlencode(args) def _fixNS(self, namespace): """Convert an input value into the internally used values of this object @param namespace: The string or constant to convert @type namespace: str or unicode or BARE_NS or OPENID_NS """ if namespace == OPENID_NS: if self._openid_ns_uri is None: raise UndefinedOpenIDNamespace('OpenID namespace not set') else: namespace = self._openid_ns_uri if namespace != BARE_NS and type(namespace) not in [str, unicode]: raise TypeError( "Namespace must be BARE_NS, OPENID_NS or a string. got %r" % (namespace,)) if namespace != BARE_NS and ':' not in namespace: fmt = 'OpenID 2.0 namespace identifiers SHOULD be URIs. Got %r' warnings.warn(fmt % (namespace,), DeprecationWarning) if namespace == 'sreg': fmt = 'Using %r instead of "sreg" as namespace' warnings.warn(fmt % (SREG_URI,), DeprecationWarning,) return SREG_URI return namespace def hasKey(self, namespace, ns_key): namespace = self._fixNS(namespace) return (namespace, ns_key) in self.args def getKey(self, namespace, ns_key): """Get the key for a particular namespaced argument""" namespace = self._fixNS(namespace) if namespace == BARE_NS: return ns_key ns_alias = self.namespaces.getAlias(namespace) # No alias is defined, so no key can exist if ns_alias is None: return None if ns_alias == NULL_NAMESPACE: tail = ns_key else: tail = '%s.%s' % (ns_alias, ns_key) return 'openid.' + tail def getArg(self, namespace, key, default=None): """Get a value for a namespaced key. @param namespace: The namespace in the message for this key @type namespace: str @param key: The key to get within this namespace @type key: str @param default: The value to use if this key is absent from this message. Using the special value openid.message.no_default will result in this method raising a KeyError instead of returning the default. 
@rtype: str or the type of default @raises KeyError: if default is no_default @raises UndefinedOpenIDNamespace: if the message has not yet had an OpenID namespace set """ namespace = self._fixNS(namespace) args_key = (namespace, key) try: return self.args[args_key] except KeyError: if default is no_default: raise KeyError((namespace, key)) else: return default def getArgs(self, namespace): """Get the arguments that are defined for this namespace URI @returns: mapping from namespaced keys to values @returntype: dict """ namespace = self._fixNS(namespace) return dict([ (ns_key, value) for ((pair_ns, ns_key), value) in self.args.iteritems() if pair_ns == namespace ]) def updateArgs(self, namespace, updates): """Set multiple key/value pairs in one call @param updates: The values to set @type updates: {unicode:unicode} """ namespace = self._fixNS(namespace) for k, v in updates.iteritems(): self.setArg(namespace, k, v) def setArg(self, namespace, key, value): """Set a single argument in this namespace""" assert key is not None assert value is not None namespace = self._fixNS(namespace) self.args[(namespace, key)] = value if not (namespace is BARE_NS): self.namespaces.add(namespace) def delArg(self, namespace, key): namespace = self._fixNS(namespace) del self.args[(namespace, key)] def __repr__(self): return "<%s.%s %r>" % (self.__class__.__module__, self.__class__.__name__, self.args) def __eq__(self, other): return self.args == other.args def __ne__(self, other): return not (self == other) def getAliasedArg(self, aliased_key, default=None): if aliased_key == 'ns': return self.getOpenIDNamespace() if aliased_key.startswith('ns.'): uri = self.namespaces.getNamespaceURI(aliased_key[3:]) if uri is None: if default == no_default: raise KeyError else: return default else: return uri try: alias, key = aliased_key.split('.', 1) except ValueError: # need more than x values to unpack ns = None else: ns = self.namespaces.getNamespaceURI(alias) if ns is None: key = aliased_key ns = self.getOpenIDNamespace() return self.getArg(ns, key, default) class NamespaceMap(object): """Maintains a bijective map between namespace uris and aliases. """ def __init__(self): self.alias_to_namespace = {} self.namespace_to_alias = {} self.implicit_namespaces = [] def getAlias(self, namespace_uri): return self.namespace_to_alias.get(namespace_uri) def getNamespaceURI(self, alias): return self.alias_to_namespace.get(alias) def iterNamespaceURIs(self): """Return an iterator over the namespace URIs""" return iter(self.namespace_to_alias) def iterAliases(self): """Return an iterator over the aliases""" return iter(self.alias_to_namespace) def iteritems(self): """Iterate over the mapping @returns: iterator of (namespace_uri, alias) """ return self.namespace_to_alias.iteritems() def addAlias(self, namespace_uri, desired_alias, implicit=False): """Add an alias from this namespace URI to the desired alias """ # Check that desired_alias is not an openid protocol field as # per the spec. assert desired_alias not in OPENID_PROTOCOL_FIELDS, \ "%r is not an allowed namespace alias" % (desired_alias,) # Check that desired_alias does not contain a period as per # the spec. if type(desired_alias) in [str, unicode]: assert '.' 
not in desired_alias, \ "%r must not contain a dot" % (desired_alias,) # Check that there is not a namespace already defined for # the desired alias current_namespace_uri = self.alias_to_namespace.get(desired_alias) if (current_namespace_uri is not None and current_namespace_uri != namespace_uri): fmt = ('Cannot map %r to alias %r. ' '%r is already mapped to alias %r') msg = fmt % ( namespace_uri, desired_alias, current_namespace_uri, desired_alias) raise KeyError(msg) # Check that there is not already a (different) alias for # this namespace URI alias = self.namespace_to_alias.get(namespace_uri) if alias is not None and alias != desired_alias: fmt = ('Cannot map %r to alias %r. ' 'It is already mapped to alias %r') raise KeyError(fmt % (namespace_uri, desired_alias, alias)) assert (desired_alias == NULL_NAMESPACE or type(desired_alias) in [str, unicode]), repr(desired_alias) assert namespace_uri not in self.implicit_namespaces self.alias_to_namespace[desired_alias] = namespace_uri self.namespace_to_alias[namespace_uri] = desired_alias if implicit: self.implicit_namespaces.append(namespace_uri) return desired_alias def add(self, namespace_uri): """Add this namespace URI to the mapping, without caring what alias it ends up with""" # See if this namespace is already mapped to an alias alias = self.namespace_to_alias.get(namespace_uri) if alias is not None: return alias # Fall back to generating a numerical alias i = 0 while True: alias = 'ext' + str(i) try: self.addAlias(namespace_uri, alias) except KeyError: i += 1 else: return alias assert False, "Not reached" def isDefined(self, namespace_uri): return namespace_uri in self.namespace_to_alias def __contains__(self, namespace_uri): return self.isDefined(namespace_uri) def isImplicit(self, namespace_uri): return namespace_uri in self.implicit_namespaces
repo_name: gquirozbogner/contentbox-master
path: third_party/openid/message.py
language: Python
license: apache-2.0
size: 21,795
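A brief sketch of the Message API defined above; python-openid is Python 2 code, so this assumes a Python 2 interpreter with the package importable as openid:

from openid.message import Message, OPENID2_NS, OPENID_NS

msg = Message(OPENID2_NS)
msg.setArg(OPENID_NS, 'mode', 'checkid_setup')
msg.setArg(OPENID_NS, 'return_to', 'https://example.com/return')
print(msg.toPostArgs())
# Roughly: {'openid.ns': 'http://specs.openid.net/auth/2.0',
#           'openid.mode': 'checkid_setup',
#           'openid.return_to': 'https://example.com/return'}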
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os

from cpp_namespace_environment import CppNamespaceEnvironment
from model import Model, UnixName
from schema_loader import SchemaLoader


def _GenerateFilenames(full_namespace):
  # Try to find the file defining the namespace. Eg. for
  # nameSpace.sub_name_space.Type' the following heuristics looks for:
  # 1. name_space_sub_name_space.json,
  # 2. name_space_sub_name_space.idl,
  # 3. sub_name_space.json,
  # 4. sub_name_space.idl,
  # 5. etc.
  sub_namespaces = full_namespace.split('.')
  filenames = []
  basename = None
  for namespace in reversed(sub_namespaces):
    if basename is not None:
      basename = UnixName(namespace + '.' + basename)
    else:
      basename = UnixName(namespace)
    for ext in ['json', 'idl']:
      filenames.append('%s.%s' % (basename, ext))
  return filenames


class NamespaceResolver(object):
  '''Resolves a type name into the namespace the type belongs to.

  - |root| path to the root directory.
  - |path| path to the directory with the API header files, relative to
    the root.
  - |include_rules| List containing tuples with (path, cpp_namespace_pattern)
    used when searching for types.
  - |cpp_namespace_pattern| Default namespace pattern
  '''
  def __init__(self, root, path, include_rules, cpp_namespace_pattern):
    self._root = root
    self._include_rules = [(path, cpp_namespace_pattern)] + include_rules

  def ResolveNamespace(self, full_namespace):
    '''Returns the model.Namespace object associated with the
    |full_namespace|, or None if one can't be found.
    '''
    filenames = _GenerateFilenames(full_namespace)
    for path, cpp_namespace in self._include_rules:
      cpp_namespace_environment = None
      if cpp_namespace:
        cpp_namespace_environment = CppNamespaceEnvironment(cpp_namespace)
      # Most specific filename first.
      for filename in reversed(filenames):
        filepath = os.path.join(path, filename)
        if os.path.exists(os.path.join(self._root, filepath)):
          schema = SchemaLoader(self._root).LoadSchema(filepath)[0]
          return Model().AddNamespace(
              schema, filepath, environment=cpp_namespace_environment)
    return None

  def ResolveType(self, full_name, default_namespace):
    '''Returns the model.Namespace object where the type with the given
    |full_name| is defined, or None if one can't be found.
    '''
    name_parts = full_name.rsplit('.', 1)
    if len(name_parts) == 1:
      if full_name not in default_namespace.types:
        return None
      return default_namespace
    full_namespace, type_name = full_name.rsplit('.', 1)
    namespace = self.ResolveNamespace(full_namespace)
    if namespace and type_name in namespace.types:
      return namespace
    return None
repo_name: scheib/chromium
path: tools/json_schema_compiler/namespace_resolver.py
language: Python
license: bsd-3-clause
size: 2,904
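The lookup heuristic in _GenerateFilenames is easier to see standalone. This sketch re-implements the snake_case conversion locally instead of importing chromium's UnixName, so treat it as an approximation of that helper:

import re

def unix_name(name):
    # Approximation of chromium's UnixName: camelCase -> snake_case,
    # dots -> underscores.
    name = re.sub(r'(?<=[a-z0-9])([A-Z])', r'_\1', name)
    return name.replace('.', '_').lower()

def generate_filenames(full_namespace):
    filenames, basename = [], None
    for ns in reversed(full_namespace.split('.')):
        basename = unix_name(ns if basename is None else ns + '.' + basename)
        filenames += ['%s.json' % basename, '%s.idl' % basename]
    return filenames

print(generate_filenames('nameSpace.subNameSpace'))
# ['sub_name_space.json', 'sub_name_space.idl',
#  'name_space_sub_name_space.json', 'name_space_sub_name_space.idl']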
from __future__ import division, absolute_import, print_function import sys import collections import pickle import warnings from os import path import numpy as np from numpy.testing import ( TestCase, run_module_suite, assert_, assert_equal, assert_array_equal, assert_array_almost_equal, assert_raises, assert_warns ) class TestFromrecords(TestCase): def test_fromrecords(self): r = np.rec.fromrecords([[456, 'dbe', 1.2], [2, 'de', 1.3]], names='col1,col2,col3') assert_equal(r[0].item(), (456, 'dbe', 1.2)) assert_equal(r['col1'].dtype.kind, 'i') if sys.version_info[0] >= 3: assert_equal(r['col2'].dtype.kind, 'U') assert_equal(r['col2'].dtype.itemsize, 12) else: assert_equal(r['col2'].dtype.kind, 'S') assert_equal(r['col2'].dtype.itemsize, 3) assert_equal(r['col3'].dtype.kind, 'f') def test_fromrecords_0len(self): """ Verify fromrecords works with a 0-length input """ dtype = [('a', np.float), ('b', np.float)] r = np.rec.fromrecords([], dtype=dtype) assert_equal(r.shape, (0,)) def test_fromrecords_2d(self): data = [ [(1, 2), (3, 4), (5, 6)], [(6, 5), (4, 3), (2, 1)] ] expected_a = [[1, 3, 5], [6, 4, 2]] expected_b = [[2, 4, 6], [5, 3, 1]] # try with dtype r1 = np.rec.fromrecords(data, dtype=[('a', int), ('b', int)]) assert_equal(r1['a'], expected_a) assert_equal(r1['b'], expected_b) # try with names r2 = np.rec.fromrecords(data, names=['a', 'b']) assert_equal(r2['a'], expected_a) assert_equal(r2['b'], expected_b) assert_equal(r1, r2) def test_method_array(self): r = np.rec.array(b'abcdefg' * 100, formats='i2,a3,i4', shape=3, byteorder='big') assert_equal(r[1].item(), (25444, b'efg', 1633837924)) def test_method_array2(self): r = np.rec.array([(1, 11, 'a'), (2, 22, 'b'), (3, 33, 'c'), (4, 44, 'd'), (5, 55, 'ex'), (6, 66, 'f'), (7, 77, 'g')], formats='u1,f4,a1') assert_equal(r[1].item(), (2, 22.0, b'b')) def test_recarray_slices(self): r = np.rec.array([(1, 11, 'a'), (2, 22, 'b'), (3, 33, 'c'), (4, 44, 'd'), (5, 55, 'ex'), (6, 66, 'f'), (7, 77, 'g')], formats='u1,f4,a1') assert_equal(r[1::2][1].item(), (4, 44.0, b'd')) def test_recarray_fromarrays(self): x1 = np.array([1, 2, 3, 4]) x2 = np.array(['a', 'dd', 'xyz', '12']) x3 = np.array([1.1, 2, 3, 4]) r = np.rec.fromarrays([x1, x2, x3], names='a,b,c') assert_equal(r[1].item(), (2, 'dd', 2.0)) x1[1] = 34 assert_equal(r.a, np.array([1, 2, 3, 4])) def test_recarray_fromfile(self): data_dir = path.join(path.dirname(__file__), 'data') filename = path.join(data_dir, 'recarray_from_file.fits') fd = open(filename, 'rb') fd.seek(2880 * 2) r1 = np.rec.fromfile(fd, formats='f8,i4,a5', shape=3, byteorder='big') fd.seek(2880 * 2) r2 = np.rec.array(fd, formats='f8,i4,a5', shape=3, byteorder='big') fd.close() assert_equal(r1, r2) def test_recarray_from_obj(self): count = 10 a = np.zeros(count, dtype='O') b = np.zeros(count, dtype='f8') c = np.zeros(count, dtype='f8') for i in range(len(a)): a[i] = list(range(1, 10)) mine = np.rec.fromarrays([a, b, c], names='date,data1,data2') for i in range(len(a)): assert_((mine.date[i] == list(range(1, 10)))) assert_((mine.data1[i] == 0.0)) assert_((mine.data2[i] == 0.0)) def test_recarray_from_repr(self): a = np.array([(1,'ABC'), (2, "DEF")], dtype=[('foo', int), ('bar', 'S4')]) recordarr = np.rec.array(a) recarr = a.view(np.recarray) recordview = a.view(np.dtype((np.record, a.dtype))) recordarr_r = eval("numpy." + repr(recordarr), {'numpy': np}) recarr_r = eval("numpy." + repr(recarr), {'numpy': np}) recordview_r = eval("numpy." 
+ repr(recordview), {'numpy': np}) assert_equal(type(recordarr_r), np.recarray) assert_equal(recordarr_r.dtype.type, np.record) assert_equal(recordarr, recordarr_r) assert_equal(type(recarr_r), np.recarray) assert_equal(recarr_r.dtype.type, np.record) assert_equal(recarr, recarr_r) assert_equal(type(recordview_r), np.ndarray) assert_equal(recordview.dtype.type, np.record) assert_equal(recordview, recordview_r) def test_recarray_views(self): a = np.array([(1,'ABC'), (2, "DEF")], dtype=[('foo', int), ('bar', 'S4')]) b = np.array([1,2,3,4,5], dtype=np.int64) #check that np.rec.array gives right dtypes assert_equal(np.rec.array(a).dtype.type, np.record) assert_equal(type(np.rec.array(a)), np.recarray) assert_equal(np.rec.array(b).dtype.type, np.int64) assert_equal(type(np.rec.array(b)), np.recarray) #check that viewing as recarray does the same assert_equal(a.view(np.recarray).dtype.type, np.record) assert_equal(type(a.view(np.recarray)), np.recarray) assert_equal(b.view(np.recarray).dtype.type, np.int64) assert_equal(type(b.view(np.recarray)), np.recarray) #check that view to non-structured dtype preserves type=np.recarray r = np.rec.array(np.ones(4, dtype="f4,i4")) rv = r.view('f8').view('f4,i4') assert_equal(type(rv), np.recarray) assert_equal(rv.dtype.type, np.record) #check that getitem also preserves np.recarray and np.record r = np.rec.array(np.ones(4, dtype=[('a', 'i4'), ('b', 'i4'), ('c', 'i4,i4')])) assert_equal(r['c'].dtype.type, np.record) assert_equal(type(r['c']), np.recarray) # suppress deprecation warning in 1.12 (remove in 1.13) with assert_warns(FutureWarning): assert_equal(r[['a', 'b']].dtype.type, np.record) assert_equal(type(r[['a', 'b']]), np.recarray) #and that it preserves subclasses (gh-6949) class C(np.recarray): pass c = r.view(C) assert_equal(type(c['c']), C) # check that accessing nested structures keep record type, but # not for subarrays, non-void structures, non-structured voids test_dtype = [('a', 'f4,f4'), ('b', 'V8'), ('c', ('f4',2)), ('d', ('i8', 'i4,i4'))] r = np.rec.array([((1,1), b'11111111', [1,1], 1), ((1,1), b'11111111', [1,1], 1)], dtype=test_dtype) assert_equal(r.a.dtype.type, np.record) assert_equal(r.b.dtype.type, np.void) assert_equal(r.c.dtype.type, np.float32) assert_equal(r.d.dtype.type, np.int64) # check the same, but for views r = np.rec.array(np.ones(4, dtype='i4,i4')) assert_equal(r.view('f4,f4').dtype.type, np.record) assert_equal(r.view(('i4',2)).dtype.type, np.int32) assert_equal(r.view('V8').dtype.type, np.void) assert_equal(r.view(('i8', 'i4,i4')).dtype.type, np.int64) #check that we can undo the view arrs = [np.ones(4, dtype='f4,i4'), np.ones(4, dtype='f8')] for arr in arrs: rec = np.rec.array(arr) # recommended way to view as an ndarray: arr2 = rec.view(rec.dtype.fields or rec.dtype, np.ndarray) assert_equal(arr2.dtype.type, arr.dtype.type) assert_equal(type(arr2), type(arr)) def test_recarray_repr(self): # make sure non-structured dtypes also show up as rec.array a = np.array(np.ones(4, dtype='f8')) assert_(repr(np.rec.array(a)).startswith('rec.array')) # check that the 'np.record' part of the dtype isn't shown a = np.rec.array(np.ones(3, dtype='i4,i4')) assert_equal(repr(a).find('numpy.record'), -1) a = np.rec.array(np.ones(3, dtype='i4')) assert_(repr(a).find('dtype=int32') != -1) def test_recarray_from_names(self): ra = np.rec.array([ (1, 'abc', 3.7000002861022949, 0), (2, 'xy', 6.6999998092651367, 1), (0, ' ', 0.40000000596046448, 0)], names='c1, c2, c3, c4') pa = np.rec.fromrecords([ (1, 'abc', 3.7000002861022949, 0), (2, 'xy', 
6.6999998092651367, 1), (0, ' ', 0.40000000596046448, 0)], names='c1, c2, c3, c4') assert_(ra.dtype == pa.dtype) assert_(ra.shape == pa.shape) for k in range(len(ra)): assert_(ra[k].item() == pa[k].item()) def test_recarray_conflict_fields(self): ra = np.rec.array([(1, 'abc', 2.3), (2, 'xyz', 4.2), (3, 'wrs', 1.3)], names='field, shape, mean') ra.mean = [1.1, 2.2, 3.3] assert_array_almost_equal(ra['mean'], [1.1, 2.2, 3.3]) assert_(type(ra.mean) is type(ra.var)) ra.shape = (1, 3) assert_(ra.shape == (1, 3)) ra.shape = ['A', 'B', 'C'] assert_array_equal(ra['shape'], [['A', 'B', 'C']]) ra.field = 5 assert_array_equal(ra['field'], [[5, 5, 5]]) assert_(isinstance(ra.field, collections.Callable)) def test_fromrecords_with_explicit_dtype(self): a = np.rec.fromrecords([(1, 'a'), (2, 'bbb')], dtype=[('a', int), ('b', np.object)]) assert_equal(a.a, [1, 2]) assert_equal(a[0].a, 1) assert_equal(a.b, ['a', 'bbb']) assert_equal(a[-1].b, 'bbb') # ndtype = np.dtype([('a', int), ('b', np.object)]) a = np.rec.fromrecords([(1, 'a'), (2, 'bbb')], dtype=ndtype) assert_equal(a.a, [1, 2]) assert_equal(a[0].a, 1) assert_equal(a.b, ['a', 'bbb']) assert_equal(a[-1].b, 'bbb') def test_recarray_stringtypes(self): # Issue #3993 a = np.array([('abc ', 1), ('abc', 2)], dtype=[('foo', 'S4'), ('bar', int)]) a = a.view(np.recarray) assert_equal(a.foo[0] == a.foo[1], False) def test_recarray_returntypes(self): qux_fields = {'C': (np.dtype('S5'), 0), 'D': (np.dtype('S5'), 6)} a = np.rec.array([('abc ', (1,1), 1, ('abcde', 'fgehi')), ('abc', (2,3), 1, ('abcde', 'jklmn'))], dtype=[('foo', 'S4'), ('bar', [('A', int), ('B', int)]), ('baz', int), ('qux', qux_fields)]) assert_equal(type(a.foo), np.ndarray) assert_equal(type(a['foo']), np.ndarray) assert_equal(type(a.bar), np.recarray) assert_equal(type(a['bar']), np.recarray) assert_equal(a.bar.dtype.type, np.record) assert_equal(type(a['qux']), np.recarray) assert_equal(a.qux.dtype.type, np.record) assert_equal(dict(a.qux.dtype.fields), qux_fields) assert_equal(type(a.baz), np.ndarray) assert_equal(type(a['baz']), np.ndarray) assert_equal(type(a[0].bar), np.record) assert_equal(type(a[0]['bar']), np.record) assert_equal(a[0].bar.A, 1) assert_equal(a[0].bar['A'], 1) assert_equal(a[0]['bar'].A, 1) assert_equal(a[0]['bar']['A'], 1) assert_equal(a[0].qux.D, b'fgehi') assert_equal(a[0].qux['D'], b'fgehi') assert_equal(a[0]['qux'].D, b'fgehi') assert_equal(a[0]['qux']['D'], b'fgehi') def test_zero_width_strings(self): # Test for #6430, based on the test case from #1901 cols = [['test'] * 3, [''] * 3] rec = np.rec.fromarrays(cols) assert_equal(rec['f0'], ['test', 'test', 'test']) assert_equal(rec['f1'], ['', '', '']) dt = np.dtype([('f0', '|S4'), ('f1', '|S')]) rec = np.rec.fromarrays(cols, dtype=dt) assert_equal(rec.itemsize, 4) assert_equal(rec['f0'], [b'test', b'test', b'test']) assert_equal(rec['f1'], [b'', b'', b'']) class TestRecord(TestCase): def setUp(self): self.data = np.rec.fromrecords([(1, 2, 3), (4, 5, 6)], dtype=[("col1", "<i4"), ("col2", "<i4"), ("col3", "<i4")]) def test_assignment1(self): a = self.data assert_equal(a.col1[0], 1) a[0].col1 = 0 assert_equal(a.col1[0], 0) def test_assignment2(self): a = self.data assert_equal(a.col1[0], 1) a.col1[0] = 0 assert_equal(a.col1[0], 0) def test_invalid_assignment(self): a = self.data def assign_invalid_column(x): x[0].col5 = 1 self.assertRaises(AttributeError, assign_invalid_column, a) def test_nonwriteable_setfield(self): # gh-8171 r = np.rec.array([(0,), (1,)], dtype=[('f', 'i4')]) r.flags.writeable = False with 
assert_raises(ValueError): r.f = [2, 3] with assert_raises(ValueError): r.setfield([2,3], *r.dtype.fields['f']) def test_out_of_order_fields(self): """Ticket #1431.""" # this test will be invalid in 1.13 # suppress deprecation warning in 1.12 (remove in 1.13) with assert_warns(FutureWarning): x = self.data[['col1', 'col2']] y = self.data[['col2', 'col1']] assert_equal(x[0][0], y[0][1]) def test_pickle_1(self): # Issue #1529 a = np.array([(1, [])], dtype=[('a', np.int32), ('b', np.int32, 0)]) assert_equal(a, pickle.loads(pickle.dumps(a))) assert_equal(a[0], pickle.loads(pickle.dumps(a[0]))) def test_pickle_2(self): a = self.data assert_equal(a, pickle.loads(pickle.dumps(a))) assert_equal(a[0], pickle.loads(pickle.dumps(a[0]))) def test_pickle_3(self): # Issue #7140 a = self.data pa = pickle.loads(pickle.dumps(a[0])) assert_(pa.flags.c_contiguous) assert_(pa.flags.f_contiguous) assert_(pa.flags.writeable) assert_(pa.flags.aligned) def test_objview_record(self): # https://github.com/numpy/numpy/issues/2599 dt = np.dtype([('foo', 'i8'), ('bar', 'O')]) r = np.zeros((1,3), dtype=dt).view(np.recarray) r.foo = np.array([1, 2, 3]) # TypeError? # https://github.com/numpy/numpy/issues/3256 ra = np.recarray((2,), dtype=[('x', object), ('y', float), ('z', int)]) with assert_warns(FutureWarning): ra[['x','y']] # TypeError? def test_record_scalar_setitem(self): # https://github.com/numpy/numpy/issues/3561 rec = np.recarray(1, dtype=[('x', float, 5)]) rec[0].x = 1 assert_equal(rec[0].x, np.ones(5)) def test_missing_field(self): # https://github.com/numpy/numpy/issues/4806 arr = np.zeros((3,), dtype=[('x', int), ('y', int)]) assert_raises(ValueError, lambda: arr[['nofield']]) def test_find_duplicate(): l1 = [1, 2, 3, 4, 5, 6] assert_(np.rec.find_duplicate(l1) == []) l2 = [1, 2, 1, 4, 5, 6] assert_(np.rec.find_duplicate(l2) == [1]) l3 = [1, 2, 1, 4, 1, 6, 2, 3] assert_(np.rec.find_duplicate(l3) == [1, 2]) l3 = [2, 2, 1, 4, 1, 6, 2, 3] assert_(np.rec.find_duplicate(l3) == [2, 1]) if __name__ == "__main__": run_module_suite()
repo_name: mbayon/TFG-MachineLearning
path: venv/lib/python3.6/site-packages/numpy/core/tests/test_records.py
language: Python
license: mit
size: 15,635
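The np.rec API exercised by these tests can be tried directly; a quick runnable illustration mirroring test_fromrecords above:

import numpy as np

r = np.rec.fromrecords([(456, 'dbe', 1.2), (2, 'de', 1.3)],
                       names='col1,col2,col3')
print(r[0].item())   # (456, 'dbe', 1.2)
print(r.col1)        # attribute access on a recarray: array([456,   2])
print(r['col2'])     # field access works as well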
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

try:
    import ConfigParser
except ImportError:
    import configparser as ConfigParser

import logging
import mock
import os
import subprocess
from helpers import unittest
import warnings

from luigi import six

import luigi
from luigi.mock import MockTarget


class SomeTask(luigi.Task):
    n = luigi.IntParameter()

    def output(self):
        return MockTarget('/tmp/test_%d' % self.n)

    def run(self):
        f = self.output().open('w')
        f.write('done')
        f.close()


class AmbiguousClass(luigi.Task):
    pass


# Deliberately defined twice: the resulting ambiguity is exercised by
# test_cmdline_ambiguous_class below.
class AmbiguousClass(luigi.Task):
    pass


class TaskWithSameName(luigi.Task):

    def run(self):
        self.x = 42


class TaskWithSameName(luigi.Task):
    # there should be no ambiguity

    def run(self):
        self.x = 43


class WriteToFile(luigi.Task):
    filename = luigi.Parameter()

    def output(self):
        return luigi.LocalTarget(self.filename)

    def run(self):
        f = self.output().open('w')
        print('foo', file=f)
        f.close()


class FooBaseClass(luigi.Task):
    x = luigi.Parameter(default='foo_base_default')


class FooSubClass(FooBaseClass):
    pass


class CmdlineTest(unittest.TestCase):

    def setUp(self):
        MockTarget.fs.clear()

    @mock.patch("logging.getLogger")
    def test_cmdline_main_task_cls(self, logger):
        luigi.run(['--local-scheduler', '--no-lock', '--n', '100'], main_task_cls=SomeTask)
        self.assertEqual(dict(MockTarget.fs.get_all_data()), {'/tmp/test_100': b'done'})

    @mock.patch("logging.getLogger")
    def test_cmdline_local_scheduler(self, logger):
        luigi.run(['SomeTask', '--no-lock', '--n', '101'], local_scheduler=True)
        self.assertEqual(dict(MockTarget.fs.get_all_data()), {'/tmp/test_101': b'done'})

    @mock.patch("logging.getLogger")
    def test_cmdline_other_task(self, logger):
        luigi.run(['--local-scheduler', '--no-lock', 'SomeTask', '--n', '1000'])
        self.assertEqual(dict(MockTarget.fs.get_all_data()), {'/tmp/test_1000': b'done'})

    @mock.patch("logging.getLogger")
    def test_cmdline_ambiguous_class(self, logger):
        self.assertRaises(Exception, luigi.run, ['--local-scheduler', '--no-lock', 'AmbiguousClass'])

    @mock.patch("logging.getLogger")
    @mock.patch("logging.StreamHandler")
    def test_setup_interface_logging(self, handler, logger):
        handler.return_value = mock.Mock(name="stream_handler")
        with mock.patch("luigi.interface.setup_interface_logging.has_run", new=False):
            luigi.interface.setup_interface_logging()
            self.assertEqual([mock.call(handler.return_value)],
                             logger.return_value.addHandler.call_args_list)

        with mock.patch("luigi.interface.setup_interface_logging.has_run", new=False):
            if six.PY2:
                error = ConfigParser.NoSectionError
            else:
                error = KeyError
            self.assertRaises(error, luigi.interface.setup_interface_logging, '/blah')

    @mock.patch("warnings.warn")
    @mock.patch("luigi.interface.setup_interface_logging")
    def test_cmdline_logger(self, setup_mock, warn):
        with mock.patch("luigi.interface.core") as env_params:
            env_params.return_value.logging_conf_file = None
            luigi.run(['SomeTask', '--n', '7', '--local-scheduler', '--no-lock'])
            self.assertEqual([mock.call(None)], setup_mock.call_args_list)

        with mock.patch("luigi.configuration.get_config") as getconf:
            getconf.return_value.get.side_effect = ConfigParser.NoOptionError(section='foo', option='bar')
            getconf.return_value.getint.return_value = 0

            luigi.interface.setup_interface_logging.call_args_list = []
            luigi.run(['SomeTask', '--n', '42', '--local-scheduler', '--no-lock'])
            self.assertEqual([], setup_mock.call_args_list)

    @mock.patch('argparse.ArgumentParser.print_usage')
    def test_non_existent_class(self, print_usage):
        self.assertRaises(luigi.task_register.TaskClassNotFoundException,
                          luigi.run, ['--local-scheduler', '--no-lock', 'XYZ'])

    @mock.patch('argparse.ArgumentParser.print_usage')
    def test_no_task(self, print_usage):
        self.assertRaises(SystemExit, luigi.run, ['--local-scheduler', '--no-lock'])


class InvokeOverCmdlineTest(unittest.TestCase):

    def _run_cmdline(self, args):
        env = os.environ.copy()
        env['PYTHONPATH'] = env.get('PYTHONPATH', '') + ':.:test'
        p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
        stdout, stderr = p.communicate()  # Unfortunately subprocess.check_output is 2.7+
        return p.returncode, stdout, stderr

    def test_bin_luigi(self):
        t = luigi.LocalTarget(is_tmp=True)
        args = ['./bin/luigi', '--module', 'cmdline_test', 'WriteToFile', '--filename', t.path, '--local-scheduler', '--no-lock']
        self._run_cmdline(args)
        self.assertTrue(t.exists())

    def test_direct_python(self):
        t = luigi.LocalTarget(is_tmp=True)
        args = ['python', 'test/cmdline_test.py', 'WriteToFile', '--filename', t.path, '--local-scheduler', '--no-lock']
        self._run_cmdline(args)
        self.assertTrue(t.exists())

    def test_direct_python_help(self):
        returncode, stdout, stderr = self._run_cmdline(['python', 'test/cmdline_test.py', '--help'])
        self.assertTrue(stdout.find(b'--FooBaseClass-x') != -1)
        self.assertFalse(stdout.find(b'--x') != -1)

    def test_direct_python_help_class(self):
        returncode, stdout, stderr = self._run_cmdline(['python', 'test/cmdline_test.py', 'FooBaseClass', '--help'])
        self.assertTrue(stdout.find(b'--FooBaseClass-x') != -1)
        self.assertTrue(stdout.find(b'--x') != -1)

    def test_bin_luigi_help(self):
        returncode, stdout, stderr = self._run_cmdline(['./bin/luigi', '--module', 'cmdline_test', '--help'])
        self.assertTrue(stdout.find(b'--FooBaseClass-x') != -1)
        self.assertFalse(stdout.find(b'--x') != -1)

    def test_bin_luigi_help_no_module(self):
        returncode, stdout, stderr = self._run_cmdline(['./bin/luigi', '--help'])
        self.assertTrue(stdout.find(b'usage:') != -1)

    def test_bin_luigi_no_parameters(self):
        returncode, stdout, stderr = self._run_cmdline(['./bin/luigi'])
        self.assertTrue(stderr.find(b'No task specified') != -1)

    def test_bin_luigi_help_class(self):
        returncode, stdout, stderr = self._run_cmdline(['./bin/luigi', '--module', 'cmdline_test', 'FooBaseClass', '--help'])
        self.assertTrue(stdout.find(b'--FooBaseClass-x') != -1)
        self.assertTrue(stdout.find(b'--x') != -1)


class NewStyleParameters822Test(unittest.TestCase):
    # See https://github.com/spotify/luigi/issues/822

    def test_subclasses(self):
        ap = luigi.interface.ArgParseInterface()

        task, = ap.parse(['--local-scheduler', '--no-lock', 'FooSubClass', '--x', 'xyz', '--FooBaseClass-x', 'xyz'])
        self.assertEquals(task.x, 'xyz')

        # This won't work because --FooSubClass-x doesn't exist
        self.assertRaises(BaseException, ap.parse, (['--local-scheduler', '--no-lock', 'FooBaseClass', '--x', 'xyz', '--FooSubClass-x', 'xyz']))

    def test_subclasses_2(self):
        ap = luigi.interface.ArgParseInterface()

        # https://github.com/spotify/luigi/issues/822#issuecomment-77782714
        task, = ap.parse(['--local-scheduler', '--no-lock', 'FooBaseClass', '--FooBaseClass-x', 'xyz'])
        self.assertEquals(task.x, 'xyz')


if __name__ == '__main__':
    # Needed for one of the tests
    luigi.run()
percyfal/luigi
test/cmdline_test.py
Python
apache-2.0
8,267
# coding: utf-8

# In[ ]:

# Exercise 1: text encryption/decryption (first read up on ASCII codes and the
# ASCII table; see Wikipedia or Baidu Baike).
# Input: a txt file (assume it contains only English words made of letters,
# separated by single spaces, with a maximum word length of 10 letters).
# Encryption: take each word's length n, randomly generate a 9-digit number,
# and prepend n-1 to it, forming a 10-digit number that serves as the key.
# Shift each letter of the word forward by the key digit at the same position
# (e.g. if the key digit is 2 and the letter is 'a', it becomes 'c'; shifting
# past 'z' wraps around to 'A' and continues from there). Words shorter than
# 10 letters are padded with random letters up to 10 after shifting. The final
# result is a ciphertext of 10-letter words separated by single spaces, which
# is written to a file.
# Decryption: given that file and the key (10 digits), recover the original text.
# (Hint: use ord() and chr(); ord(x) returns the ASCII code of character x, and
# chr(n) returns the character whose ASCII code is the integer n. For example,
# ord('a') is 97 and chr(97) is 'a', because the ASCII code of 'a' is 97.)

fh = open(r'd:\temp\words.txt')
text = fh.read()
fh.close()

print(len(text))
print(text)
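
# The exercise above only reads the input file; the cipher itself is left
# unimplemented. Below is a minimal sketch of the scheme the comments describe,
# under two assumptions: the input is lowercase, and the wrap-around stays
# within the lowercase alphabet (the exercise's wrap-to-'A' rule is ambiguous).
# The helper names make_key, encrypt_word and decrypt_word are hypothetical.

import random
import string

def make_key(word):
    # key = (len(word) - 1) followed by a random 9-digit number
    return str(len(word) - 1) + str(random.randint(10**8, 10**9 - 1))

def encrypt_word(word, key):
    # Shift each letter forward by the key digit at the same position,
    # then pad with random letters up to 10 characters.
    out = [chr((ord(c) - ord('a') + int(k)) % 26 + ord('a'))
           for c, k in zip(word, key)]
    out += random.sample(string.ascii_lowercase, 10 - len(out))
    return ''.join(out)

def decrypt_word(cipher, key):
    n = int(key[0]) + 1  # original word length, recovered from the key
    return ''.join(chr((ord(c) - ord('a') - int(k)) % 26 + ord('a'))
                   for c, k in zip(cipher[:n], key[:n]))

key = make_key('secret')
assert decrypt_word(encrypt_word('secret', key), key) == 'secret'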
ZMMzhangmingming/liupengyuan.github.io
chapter2/homework/computer/5-10/201611680890-5.10.py
Python
mit
1,296
#!/usr/bin/env python
# File created Sept 29, 2010
from __future__ import division

__author__ = "William Walters"
__copyright__ = "Copyright 2011, The QIIME Project"
__credits__ = ["William Walters", "Greg Caporaso"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "William Walters"
__email__ = "[email protected]"

from os.path import basename

from skbio.parse.sequences import parse_fasta

from qiime.parse import parse_qual_score


def parse_fasta_file(fasta_lines):
    """ Parses fasta file, generates dict of label:seq, list of seqs order

    fasta_lines: list of lines from fasta file.
    """

    fasta_seqs = {}
    seq_order = []

    for label, seq in parse_fasta(fasta_lines):
        fasta_seqs[label.split()[0].strip()] = seq
        seq_order.append(label)

    return fasta_seqs, seq_order


def verify_equivalency(fasta_seqs, qual_scores):
    """ Tests for equivalent labels, base positions between fasta and qual file

    fasta_seqs: dict of label:seq from fasta file
    qual_scores: dict of label: qual scores
    """

    if len(fasta_seqs) != len(qual_scores):
        raise ValueError('Number of sequences not equal in input fasta ' +
                         'and qual file.')

    qual_scores_labels = set(qual_scores.keys())

    for label in fasta_seqs.keys():

        # Should have equivalent labels
        if label not in qual_scores_labels:
            raise ValueError('Fasta label %s not found in quality score ' %
                             label + 'file.')

        # should have equivalent lengths
        if len(fasta_seqs[label]) != len(qual_scores[label]):
            raise ValueError('Sequence %s does not have equivalent ' % label +
                             'base positions between fasta and quality score file.')


def truncate_seqs(fasta_seqs, qual_scores, base_pos):
    """ Truncates sequences to base position specified with base_pos

    fasta_seqs: dict of seq label: seq string
    qual_scores: dict of seq label: numpy array of int scores
    base_pos: index in sequence to truncate at
    """

    trunc_fasta_seqs = {}
    trunc_qual_scores = {}

    for seq in fasta_seqs:
        trunc_fasta_seqs[seq] = fasta_seqs[seq][:base_pos]
        trunc_qual_scores[seq] = qual_scores[seq][:base_pos]

    return trunc_fasta_seqs, trunc_qual_scores


def get_output_filepaths(output_dir, fasta_fp, qual_fp):
    """ Returns output filepaths for filtered fasta and quality files

    output_dir: output directory
    fasta_fp: input fasta filepath
    qual_fp: input quality scores filepath
    """

    if not output_dir.endswith('/'):
        output_dir += '/'

    fasta_out_fp = output_dir + basename(fasta_fp).split('.')[0] +\
        "_filtered.fasta"

    qual_out_fp = output_dir + basename(qual_fp).split('.')[0] +\
        "_filtered.qual"

    return fasta_out_fp, qual_out_fp


def write_trunc_fasta(trunc_fasta_seqs, fasta_out_fp, seq_order):
    """ Writes truncated fasta seqs in order specified with seq_order

    trunc_fasta_seqs: dict of fasta label: truncated sequence string
    fasta_out_fp: output filepath to write to
    seq_order: list of fasta labels in the order of the original input fasta
    """

    fasta_out = open(fasta_out_fp, "w")

    for label in seq_order:
        trunc_label = label.split()[0].strip()
        fasta_out.write(">%s\n%s\n" % (label, trunc_fasta_seqs[trunc_label]))


def write_trunc_qual(trunc_qual_scores, qual_out_fp, seq_order):
    """ Writes truncated quality score files out in proper format

    trunc_qual_scores: dict of seq label: numpy array of scores as ints
    qual_out_fp: output filepath to write truncated quality scores to
    seq_order: List of full fasta labels to write to output filepath and
     maintain the same order as input quality file.
    """

    qual_line_size = 60

    qual_out = open(qual_out_fp, "w")

    for label in seq_order:
        trunc_label = label.split()[0].strip()
        current_trunc_qual_scores = trunc_qual_scores[trunc_label]
        qual_out.write(">%s\n" % label)
        current_qual_scores_lines = []

        # Quality score format is a string of 60 base calls, followed by a
        # newline, until the last N bases are written
        for slice in range(0, len(trunc_qual_scores[trunc_label]),
                           qual_line_size):
            # current_segment = map(str,
            #                       current_trunc_qual_scores[slice:slice + qual_line_size])
            current_segment = current_trunc_qual_scores[
                slice:slice + qual_line_size]
            current_qual_scores_lines.append(" ".join(current_segment))

        qual_out.write('\n'.join(current_qual_scores_lines))
        qual_out.write('\n')


def truncate_fasta_qual(fasta_fp, qual_fp, output_dir, base_pos):
    """ Main program function for generating quality score histogram

    fasta_fp: fasta filepath
    qual_fp: quality score filepath
    output_dir: output directory
    base_pos: Nucleotide position to truncate the fasta and quality score at.
    """

    qual_lines = open(qual_fp, "U")
    fasta_lines = open(fasta_fp, "U")

    qual_scores = parse_qual_score(qual_lines, value_cast_f=str)

    # Get dict of fasta label:seq, and the sequence order (so output can
    # be in the same order as the input sequences).
    fasta_seqs, seq_order = parse_fasta_file(fasta_lines)

    # Make sure the quality scores and fasta sequences have corresponding
    # labels and base numbers
    verify_equivalency(fasta_seqs, qual_scores)

    # Truncate seqs to base_pos index
    trunc_fasta_seqs, trunc_qual_scores = truncate_seqs(fasta_seqs,
                                                        qual_scores, base_pos)

    # Get output filepaths
    fasta_out_fp, qual_out_fp = get_output_filepaths(output_dir, fasta_fp,
                                                     qual_fp)

    # Write truncated sequences out
    write_trunc_fasta(trunc_fasta_seqs, fasta_out_fp, seq_order)

    write_trunc_qual(trunc_qual_scores, qual_out_fp, seq_order)
ssorgatem/qiime
qiime/truncate_fasta_qual_files.py
Python
gpl-2.0
6,323
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import shutil
import tempfile

from telemetry import decorators
from telemetry.testing import options_for_unittests
from telemetry.testing import page_test_test_case

from measurements import skpicture_printer


class SkpicturePrinterUnitTest(page_test_test_case.PageTestTestCase):

  def setUp(self):
    self._options = options_for_unittests.GetCopy()
    self._skp_outdir = tempfile.mkdtemp('_skp_test')

  def tearDown(self):
    shutil.rmtree(self._skp_outdir)

  @decorators.Disabled('android')
  def testSkpicturePrinter(self):
    ps = self.CreateStorySetFromFileInUnittestDataDir('blank.html')
    measurement = skpicture_printer.SkpicturePrinter(self._skp_outdir)
    results = self.RunMeasurement(measurement, ps, options=self._options)
    # Picture printing is not supported on all platforms.
    if results.failures:
      assert 'not supported' in results.failures[0].exc_info[1].message
      return

    saved_picture_count = results.FindAllPageSpecificValuesNamed(
        'saved_picture_count')
    self.assertEquals(len(saved_picture_count), 1)
    self.assertGreater(saved_picture_count[0].GetRepresentativeNumber(), 0)
axinging/chromium-crosswalk
tools/perf/measurements/skpicture_printer_unittest.py
Python
bsd-3-clause
1,305
"""
Prints the name of each discovered player in the network.
"""

from __future__ import print_function

import soco

for zone in soco.discover():
    print(zone.player_name)
dundeemt/SoCo
examples/commandline/discover.py
Python
mit
175
# -*- coding: utf-8 -*- from __future__ import unicode_literals import datetime import itertools import os import re from importlib import import_module from django.apps import apps from django.conf import settings from django.contrib.admin.models import LogEntry from django.contrib.auth import REDIRECT_FIELD_NAME, SESSION_KEY from django.contrib.auth.forms import ( AuthenticationForm, PasswordChangeForm, SetPasswordForm, ) from django.contrib.auth.models import User from django.contrib.auth.tests.custom_user import CustomUser from django.contrib.auth.views import login as login_view, redirect_to_login from django.contrib.sessions.middleware import SessionMiddleware from django.contrib.sites.requests import RequestSite from django.core import mail from django.core.urlresolvers import NoReverseMatch, reverse, reverse_lazy from django.db import connection from django.http import HttpRequest, QueryDict from django.middleware.csrf import CsrfViewMiddleware, get_token from django.test import ( TestCase, ignore_warnings, modify_settings, override_settings, ) from django.test.utils import patch_logger from django.utils.deprecation import RemovedInDjango110Warning from django.utils.encoding import force_text from django.utils.http import urlquote from django.utils.six.moves.urllib.parse import ParseResult, urlparse from django.utils.translation import LANGUAGE_SESSION_KEY from .models import UUIDUser from .settings import AUTH_TEMPLATES @override_settings( LANGUAGES=[ ('en', 'English'), ], LANGUAGE_CODE='en', TEMPLATES=AUTH_TEMPLATES, USE_TZ=False, PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF='auth_tests.urls', ) class AuthViewsTestCase(TestCase): """ Helper base class for all the follow test cases. """ @classmethod def setUpTestData(cls): cls.u1 = User.objects.create( password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='testclient', first_name='Test', last_name='Client', email='[email protected]', is_staff=False, is_active=True, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31) ) cls.u2 = User.objects.create( password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='inactive', first_name='Inactive', last_name='User', email='[email protected]', is_staff=False, is_active=False, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31) ) cls.u3 = User.objects.create( password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='staff', first_name='Staff', last_name='Member', email='[email protected]', is_staff=True, is_active=True, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31) ) cls.u4 = User.objects.create( password='', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='empty_password', first_name='Empty', last_name='Password', email='[email protected]', is_staff=False, is_active=True, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31) ) cls.u5 = User.objects.create( password='$', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='unmanageable_password', first_name='Unmanageable', last_name='Password', email='[email protected]', is_staff=False, is_active=True, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31) ) cls.u6 = User.objects.create( password='foo$bar', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), 
is_superuser=False, username='unknown_password', first_name='Unknown', last_name='Password', email='[email protected]', is_staff=False, is_active=True, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31) ) def login(self, username='testclient', password='password'): response = self.client.post('/login/', { 'username': username, 'password': password, }) self.assertIn(SESSION_KEY, self.client.session) return response def logout(self): response = self.client.get('/admin/logout/') self.assertEqual(response.status_code, 200) self.assertNotIn(SESSION_KEY, self.client.session) def assertFormError(self, response, error): """Assert that error is found in response.context['form'] errors""" form_errors = list(itertools.chain(*response.context['form'].errors.values())) self.assertIn(force_text(error), form_errors) def assertURLEqual(self, url, expected, parse_qs=False): """ Given two URLs, make sure all their components (the ones given by urlparse) are equal, only comparing components that are present in both URLs. If `parse_qs` is True, then the querystrings are parsed with QueryDict. This is useful if you don't want the order of parameters to matter. Otherwise, the query strings are compared as-is. """ fields = ParseResult._fields for attr, x, y in zip(fields, urlparse(url), urlparse(expected)): if parse_qs and attr == 'query': x, y = QueryDict(x), QueryDict(y) if x and y and x != y: self.fail("%r != %r (%s doesn't match)" % (url, expected, attr)) @override_settings(ROOT_URLCONF='django.contrib.auth.urls') class AuthViewNamedURLTests(AuthViewsTestCase): def test_named_urls(self): "Named URLs should be reversible" expected_named_urls = [ ('login', [], {}), ('logout', [], {}), ('password_change', [], {}), ('password_change_done', [], {}), ('password_reset', [], {}), ('password_reset_done', [], {}), ('password_reset_confirm', [], { 'uidb64': 'aaaaaaa', 'token': '1111-aaaaa', }), ('password_reset_complete', [], {}), ] for name, args, kwargs in expected_named_urls: try: reverse(name, args=args, kwargs=kwargs) except NoReverseMatch: self.fail("Reversal of url named '%s' failed with NoReverseMatch" % name) class PasswordResetTest(AuthViewsTestCase): def test_email_not_found(self): """If the provided email is not registered, don't raise any error but also don't send any email.""" response = self.client.get('/password_reset/') self.assertEqual(response.status_code, 200) response = self.client.post('/password_reset/', {'email': '[email protected]'}) self.assertEqual(response.status_code, 302) self.assertEqual(len(mail.outbox), 0) def test_email_found(self): "Email is sent if a valid email address is provided for password reset" response = self.client.post('/password_reset/', {'email': '[email protected]'}) self.assertEqual(response.status_code, 302) self.assertEqual(len(mail.outbox), 1) self.assertIn("http://", mail.outbox[0].body) self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email) # optional multipart text/html email has been added. Make sure original, # default functionality is 100% the same self.assertFalse(mail.outbox[0].message().is_multipart()) def test_extra_email_context(self): """ extra_email_context should be available in the email template context. 
""" response = self.client.post( '/password_reset_extra_email_context/', {'email': '[email protected]'}, ) self.assertEqual(response.status_code, 302) self.assertEqual(len(mail.outbox), 1) self.assertIn('Email email context: "Hello!"', mail.outbox[0].body) def test_html_mail_template(self): """ A multipart email with text/plain and text/html is sent if the html_email_template parameter is passed to the view """ response = self.client.post('/password_reset/html_email_template/', {'email': '[email protected]'}) self.assertEqual(response.status_code, 302) self.assertEqual(len(mail.outbox), 1) message = mail.outbox[0].message() self.assertEqual(len(message.get_payload()), 2) self.assertTrue(message.is_multipart()) self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain') self.assertEqual(message.get_payload(1).get_content_type(), 'text/html') self.assertNotIn('<html>', message.get_payload(0).get_payload()) self.assertIn('<html>', message.get_payload(1).get_payload()) def test_email_found_custom_from(self): "Email is sent if a valid email address is provided for password reset when a custom from_email is provided." response = self.client.post('/password_reset_from_email/', {'email': '[email protected]'}) self.assertEqual(response.status_code, 302) self.assertEqual(len(mail.outbox), 1) self.assertEqual("[email protected]", mail.outbox[0].from_email) @ignore_warnings(category=RemovedInDjango110Warning) @override_settings(ALLOWED_HOSTS=['adminsite.com']) def test_admin_reset(self): "If the reset view is marked as being for admin, the HTTP_HOST header is used for a domain override." response = self.client.post('/admin_password_reset/', {'email': '[email protected]'}, HTTP_HOST='adminsite.com' ) self.assertEqual(response.status_code, 302) self.assertEqual(len(mail.outbox), 1) self.assertIn("http://adminsite.com", mail.outbox[0].body) self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email) # Skip any 500 handler action (like sending more mail...) @override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True) def test_poisoned_http_host(self): "Poisoned HTTP_HOST headers can't be used for reset emails" # This attack is based on the way browsers handle URLs. The colon # should be used to separate the port, but if the URL contains an @, # the colon is interpreted as part of a username for login purposes, # making 'evil.com' the request domain. Since HTTP_HOST is used to # produce a meaningful reset URL, we need to be certain that the # HTTP_HOST header isn't poisoned. This is done as a check when get_host() # is invoked, but we check here as a practical consequence. with patch_logger('django.security.DisallowedHost', 'error') as logger_calls: response = self.client.post( '/password_reset/', {'email': '[email protected]'}, HTTP_HOST='www.example:[email protected]' ) self.assertEqual(response.status_code, 400) self.assertEqual(len(mail.outbox), 0) self.assertEqual(len(logger_calls), 1) # Skip any 500 handler action (like sending more mail...) 
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True) def test_poisoned_http_host_admin_site(self): "Poisoned HTTP_HOST headers can't be used for reset emails on admin views" with patch_logger('django.security.DisallowedHost', 'error') as logger_calls: response = self.client.post( '/admin_password_reset/', {'email': '[email protected]'}, HTTP_HOST='www.example:[email protected]' ) self.assertEqual(response.status_code, 400) self.assertEqual(len(mail.outbox), 0) self.assertEqual(len(logger_calls), 1) def _test_confirm_start(self): # Start by creating the email self.client.post('/password_reset/', {'email': '[email protected]'}) self.assertEqual(len(mail.outbox), 1) return self._read_signup_email(mail.outbox[0]) def _read_signup_email(self, email): urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body) self.assertIsNotNone(urlmatch, "No URL found in sent email") return urlmatch.group(), urlmatch.groups()[0] def test_confirm_valid(self): url, path = self._test_confirm_start() response = self.client.get(path) # redirect to a 'complete' page: self.assertContains(response, "Please enter your new password") def test_confirm_invalid(self): url, path = self._test_confirm_start() # Let's munge the token in the path, but keep the same length, # in case the URLconf will reject a different length. path = path[:-5] + ("0" * 4) + path[-1] response = self.client.get(path) self.assertContains(response, "The password reset link was invalid") def test_confirm_invalid_user(self): # Ensure that we get a 200 response for a non-existent user, not a 404 response = self.client.get('/reset/123456/1-1/') self.assertContains(response, "The password reset link was invalid") def test_confirm_overflow_user(self): # Ensure that we get a 200 response for a base36 user id that overflows int response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/') self.assertContains(response, "The password reset link was invalid") def test_confirm_invalid_post(self): # Same as test_confirm_invalid, but trying # to do a POST instead. 
url, path = self._test_confirm_start() path = path[:-5] + ("0" * 4) + path[-1] self.client.post(path, { 'new_password1': 'anewpassword', 'new_password2': ' anewpassword', }) # Check the password has not been changed u = User.objects.get(email='[email protected]') self.assertTrue(not u.check_password("anewpassword")) def test_confirm_complete(self): url, path = self._test_confirm_start() response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'}) # Check the password has been changed u = User.objects.get(email='[email protected]') self.assertTrue(u.check_password("anewpassword")) # Check we can't use the link again response = self.client.get(path) self.assertContains(response, "The password reset link was invalid") def test_confirm_different_passwords(self): url, path = self._test_confirm_start() response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'x'}) self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch']) def test_reset_redirect_default(self): response = self.client.post('/password_reset/', {'email': '[email protected]'}) self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/password_reset/done/') def test_reset_custom_redirect(self): response = self.client.post('/password_reset/custom_redirect/', {'email': '[email protected]'}) self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/custom/') def test_reset_custom_redirect_named(self): response = self.client.post('/password_reset/custom_redirect/named/', {'email': '[email protected]'}) self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/password_reset/') def test_confirm_redirect_default(self): url, path = self._test_confirm_start() response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'}) self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/reset/done/') def test_confirm_redirect_custom(self): url, path = self._test_confirm_start() path = path.replace('/reset/', '/reset/custom/') response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'}) self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/custom/') def test_confirm_redirect_custom_named(self): url, path = self._test_confirm_start() path = path.replace('/reset/', '/reset/custom/named/') response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'}) self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/password_reset/') def test_confirm_display_user_from_form(self): url, path = self._test_confirm_start() response = self.client.get(path) # #16919 -- The ``password_reset_confirm`` view should pass the user # object to the ``SetPasswordForm``, even on GET requests. # For this test, we render ``{{ form.user }}`` in the template # ``registration/password_reset_confirm.html`` so that we can test this. username = User.objects.get(email='[email protected]').username self.assertContains(response, "Hello, %s." % username) # However, the view should NOT pass any user object on a form if the # password reset link was invalid. 
response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/') self.assertContains(response, "Hello, .") @override_settings(AUTH_USER_MODEL='auth.CustomUser') class CustomUserPasswordResetTest(AuthViewsTestCase): user_email = '[email protected]' @classmethod def setUpTestData(cls): cls.u1 = CustomUser.custom_objects.create( password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), email='[email protected]', is_active=True, is_admin=False, date_of_birth=datetime.date(1976, 11, 8) ) def _test_confirm_start(self): # Start by creating the email response = self.client.post('/password_reset/', {'email': self.user_email}) self.assertEqual(response.status_code, 302) self.assertEqual(len(mail.outbox), 1) return self._read_signup_email(mail.outbox[0]) def _read_signup_email(self, email): urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body) self.assertIsNotNone(urlmatch, "No URL found in sent email") return urlmatch.group(), urlmatch.groups()[0] def test_confirm_valid_custom_user(self): url, path = self._test_confirm_start() response = self.client.get(path) # redirect to a 'complete' page: self.assertContains(response, "Please enter your new password") # then submit a new password response = self.client.post(path, { 'new_password1': 'anewpassword', 'new_password2': 'anewpassword', }) self.assertRedirects(response, '/reset/done/') @override_settings(AUTH_USER_MODEL='auth.UUIDUser') class UUIDUserPasswordResetTest(CustomUserPasswordResetTest): def _test_confirm_start(self): # instead of fixture UUIDUser.objects.create_user( email=self.user_email, username='foo', password='foo', ) return super(UUIDUserPasswordResetTest, self)._test_confirm_start() class ChangePasswordTest(AuthViewsTestCase): def fail_login(self, password='password'): response = self.client.post('/login/', { 'username': 'testclient', 'password': password, }) self.assertFormError(response, AuthenticationForm.error_messages['invalid_login'] % { 'username': User._meta.get_field('username').verbose_name }) def logout(self): self.client.get('/logout/') def test_password_change_fails_with_invalid_old_password(self): self.login() response = self.client.post('/password_change/', { 'old_password': 'donuts', 'new_password1': 'password1', 'new_password2': 'password1', }) self.assertFormError(response, PasswordChangeForm.error_messages['password_incorrect']) def test_password_change_fails_with_mismatched_passwords(self): self.login() response = self.client.post('/password_change/', { 'old_password': 'password', 'new_password1': 'password1', 'new_password2': 'donuts', }) self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch']) def test_password_change_succeeds(self): self.login() self.client.post('/password_change/', { 'old_password': 'password', 'new_password1': 'password1', 'new_password2': 'password1', }) self.fail_login() self.login(password='password1') def test_password_change_done_succeeds(self): self.login() response = self.client.post('/password_change/', { 'old_password': 'password', 'new_password1': 'password1', 'new_password2': 'password1', }) self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/password_change/done/') @override_settings(LOGIN_URL='/login/') def test_password_change_done_fails(self): response = self.client.get('/password_change/done/') self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/login/?next=/password_change/done/') def test_password_change_redirect_default(self): 
self.login() response = self.client.post('/password_change/', { 'old_password': 'password', 'new_password1': 'password1', 'new_password2': 'password1', }) self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/password_change/done/') def test_password_change_redirect_custom(self): self.login() response = self.client.post('/password_change/custom/', { 'old_password': 'password', 'new_password1': 'password1', 'new_password2': 'password1', }) self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/custom/') def test_password_change_redirect_custom_named(self): self.login() response = self.client.post('/password_change/custom/named/', { 'old_password': 'password', 'new_password1': 'password1', 'new_password2': 'password1', }) self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/password_reset/') @modify_settings(MIDDLEWARE_CLASSES={ 'append': 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', }) class SessionAuthenticationTests(AuthViewsTestCase): def test_user_password_change_updates_session(self): """ #21649 - Ensure contrib.auth.views.password_change updates the user's session auth hash after a password change so the session isn't logged out. """ self.login() response = self.client.post('/password_change/', { 'old_password': 'password', 'new_password1': 'password1', 'new_password2': 'password1', }) # if the hash isn't updated, retrieving the redirection page will fail. self.assertRedirects(response, '/password_change/done/') class LoginTest(AuthViewsTestCase): def test_current_site_in_context_after_login(self): response = self.client.get(reverse('login')) self.assertEqual(response.status_code, 200) if apps.is_installed('django.contrib.sites'): Site = apps.get_model('sites.Site') site = Site.objects.get_current() self.assertEqual(response.context['site'], site) self.assertEqual(response.context['site_name'], site.name) else: self.assertIsInstance(response.context['site'], RequestSite) self.assertIsInstance(response.context['form'], AuthenticationForm) def test_security_check(self, password='password'): login_url = reverse('login') # Those URLs should not pass the security check for bad_url in ('http://example.com', 'http:///example.com', 'https://example.com', 'ftp://exampel.com', '///example.com', '//example.com', 'javascript:alert("XSS")'): nasty_url = '%(url)s?%(next)s=%(bad_url)s' % { 'url': login_url, 'next': REDIRECT_FIELD_NAME, 'bad_url': urlquote(bad_url), } response = self.client.post(nasty_url, { 'username': 'testclient', 'password': password, }) self.assertEqual(response.status_code, 302) self.assertNotIn(bad_url, response.url, "%s should be blocked" % bad_url) # These URLs *should* still pass the security check for good_url in ('/view/?param=http://example.com', '/view/?param=https://example.com', '/view?param=ftp://exampel.com', 'view/?param=//example.com', 'https://testserver/', 'HTTPS://testserver/', '//testserver/', '/url%20with%20spaces/'): # see ticket #12534 safe_url = '%(url)s?%(next)s=%(good_url)s' % { 'url': login_url, 'next': REDIRECT_FIELD_NAME, 'good_url': urlquote(good_url), } response = self.client.post(safe_url, { 'username': 'testclient', 'password': password, }) self.assertEqual(response.status_code, 302) self.assertIn(good_url, response.url, "%s should be allowed" % good_url) def test_login_form_contains_request(self): # 15198 self.client.post('/custom_requestauth_login/', { 'username': 'testclient', 'password': 'password', }, follow=True) # the custom authentication form 
used by this login asserts # that a request is passed to the form successfully. def test_login_csrf_rotate(self, password='password'): """ Makes sure that a login rotates the currently-used CSRF token. """ # Do a GET to establish a CSRF token # TestClient isn't used here as we're testing middleware, essentially. req = HttpRequest() CsrfViewMiddleware().process_view(req, login_view, (), {}) # get_token() triggers CSRF token inclusion in the response get_token(req) resp = login_view(req) resp2 = CsrfViewMiddleware().process_response(req, resp) csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None) token1 = csrf_cookie.coded_value # Prepare the POST request req = HttpRequest() req.COOKIES[settings.CSRF_COOKIE_NAME] = token1 req.method = "POST" req.POST = {'username': 'testclient', 'password': password, 'csrfmiddlewaretoken': token1} # Use POST request to log in SessionMiddleware().process_request(req) CsrfViewMiddleware().process_view(req, login_view, (), {}) req.META["SERVER_NAME"] = "testserver" # Required to have redirect work in login view req.META["SERVER_PORT"] = 80 resp = login_view(req) resp2 = CsrfViewMiddleware().process_response(req, resp) csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None) token2 = csrf_cookie.coded_value # Check the CSRF token switched self.assertNotEqual(token1, token2) def test_session_key_flushed_on_login(self): """ To avoid reusing another user's session, ensure a new, empty session is created if the existing session corresponds to a different authenticated user. """ self.login() original_session_key = self.client.session.session_key self.login(username='staff') self.assertNotEqual(original_session_key, self.client.session.session_key) def test_session_key_flushed_on_login_after_password_change(self): """ As above, but same user logging in after a password change. """ self.login() original_session_key = self.client.session.session_key # If no password change, session key should not be flushed. self.login() self.assertEqual(original_session_key, self.client.session.session_key) user = User.objects.get(username='testclient') user.set_password('foobar') user.save() self.login(password='foobar') self.assertNotEqual(original_session_key, self.client.session.session_key) def test_login_session_without_hash_session_key(self): """ Session without django.contrib.auth.HASH_SESSION_KEY should login without an exception. 
""" user = User.objects.get(username='testclient') engine = import_module(settings.SESSION_ENGINE) session = engine.SessionStore() session[SESSION_KEY] = user.id session.save() original_session_key = session.session_key self.client.cookies[settings.SESSION_COOKIE_NAME] = original_session_key self.login() self.assertNotEqual(original_session_key, self.client.session.session_key) class LoginURLSettings(AuthViewsTestCase): """Tests for settings.LOGIN_URL.""" def assertLoginURLEquals(self, url, parse_qs=False): response = self.client.get('/login_required/') self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, url, parse_qs=parse_qs) @override_settings(LOGIN_URL='/login/') def test_standard_login_url(self): self.assertLoginURLEquals('/login/?next=/login_required/') @override_settings(LOGIN_URL='login') def test_named_login_url(self): self.assertLoginURLEquals('/login/?next=/login_required/') @override_settings(LOGIN_URL='http://remote.example.com/login') def test_remote_login_url(self): quoted_next = urlquote('http://testserver/login_required/') expected = 'http://remote.example.com/login?next=%s' % quoted_next self.assertLoginURLEquals(expected) @override_settings(LOGIN_URL='https:///login/') def test_https_login_url(self): quoted_next = urlquote('http://testserver/login_required/') expected = 'https:///login/?next=%s' % quoted_next self.assertLoginURLEquals(expected) @override_settings(LOGIN_URL='/login/?pretty=1') def test_login_url_with_querystring(self): self.assertLoginURLEquals('/login/?pretty=1&next=/login_required/', parse_qs=True) @override_settings(LOGIN_URL='http://remote.example.com/login/?next=/default/') def test_remote_login_url_with_next_querystring(self): quoted_next = urlquote('http://testserver/login_required/') expected = 'http://remote.example.com/login/?next=%s' % quoted_next self.assertLoginURLEquals(expected) @override_settings(LOGIN_URL=reverse_lazy('login')) def test_lazy_login_url(self): self.assertLoginURLEquals('/login/?next=/login_required/') class LoginRedirectUrlTest(AuthViewsTestCase): """Tests for settings.LOGIN_REDIRECT_URL.""" def assertLoginRedirectURLEqual(self, url): response = self.login() self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, url) def test_default(self): self.assertLoginRedirectURLEqual('/accounts/profile/') @override_settings(LOGIN_REDIRECT_URL='/custom/') def test_custom(self): self.assertLoginRedirectURLEqual('/custom/') @override_settings(LOGIN_REDIRECT_URL='password_reset') def test_named(self): self.assertLoginRedirectURLEqual('/password_reset/') @override_settings(LOGIN_REDIRECT_URL='http://remote.example.com/welcome/') def test_remote(self): self.assertLoginRedirectURLEqual('http://remote.example.com/welcome/') class RedirectToLoginTests(AuthViewsTestCase): """Tests for the redirect_to_login view""" @override_settings(LOGIN_URL=reverse_lazy('login')) def test_redirect_to_login_with_lazy(self): login_redirect_response = redirect_to_login(next='/else/where/') expected = '/login/?next=/else/where/' self.assertEqual(expected, login_redirect_response.url) @override_settings(LOGIN_URL=reverse_lazy('login')) def test_redirect_to_login_with_lazy_and_unicode(self): login_redirect_response = redirect_to_login(next='/else/where/झ/') expected = '/login/?next=/else/where/%E0%A4%9D/' self.assertEqual(expected, login_redirect_response.url) class LogoutTest(AuthViewsTestCase): def confirm_logged_out(self): self.assertNotIn(SESSION_KEY, self.client.session) def test_logout_default(self): "Logout 
without next_page option renders the default template" self.login() response = self.client.get('/logout/') self.assertContains(response, 'Logged out') self.confirm_logged_out() def test_14377(self): # Bug 14377 self.login() response = self.client.get('/logout/') self.assertIn('site', response.context) def test_logout_with_overridden_redirect_url(self): # Bug 11223 self.login() response = self.client.get('/logout/next_page/') self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/somewhere/') response = self.client.get('/logout/next_page/?next=/login/') self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/login/') self.confirm_logged_out() def test_logout_with_next_page_specified(self): "Logout with next_page option given redirects to specified resource" self.login() response = self.client.get('/logout/next_page/') self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/somewhere/') self.confirm_logged_out() def test_logout_with_redirect_argument(self): "Logout with query string redirects to specified resource" self.login() response = self.client.get('/logout/?next=/login/') self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/login/') self.confirm_logged_out() def test_logout_with_custom_redirect_argument(self): "Logout with custom query string redirects to specified resource" self.login() response = self.client.get('/logout/custom_query/?follow=/somewhere/') self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/somewhere/') self.confirm_logged_out() def test_logout_with_named_redirect(self): "Logout resolves names or URLs passed as next_page." self.login() response = self.client.get('/logout/next_page/named/') self.assertEqual(response.status_code, 302) self.assertURLEqual(response.url, '/password_reset/') self.confirm_logged_out() def test_security_check(self, password='password'): logout_url = reverse('logout') # Those URLs should not pass the security check for bad_url in ('http://example.com', 'http:///example.com', 'https://example.com', 'ftp://exampel.com', '///example.com', '//example.com', 'javascript:alert("XSS")'): nasty_url = '%(url)s?%(next)s=%(bad_url)s' % { 'url': logout_url, 'next': REDIRECT_FIELD_NAME, 'bad_url': urlquote(bad_url), } self.login() response = self.client.get(nasty_url) self.assertEqual(response.status_code, 302) self.assertNotIn(bad_url, response.url, "%s should be blocked" % bad_url) self.confirm_logged_out() # These URLs *should* still pass the security check for good_url in ('/view/?param=http://example.com', '/view/?param=https://example.com', '/view?param=ftp://exampel.com', 'view/?param=//example.com', 'https://testserver/', 'HTTPS://testserver/', '//testserver/', '/url%20with%20spaces/'): # see ticket #12534 safe_url = '%(url)s?%(next)s=%(good_url)s' % { 'url': logout_url, 'next': REDIRECT_FIELD_NAME, 'good_url': urlquote(good_url), } self.login() response = self.client.get(safe_url) self.assertEqual(response.status_code, 302) self.assertIn(good_url, response.url, "%s should be allowed" % good_url) self.confirm_logged_out() def test_logout_preserve_language(self): """Check that language stored in session is preserved after logout""" # Create a new session with language engine = import_module(settings.SESSION_ENGINE) session = engine.SessionStore() session[LANGUAGE_SESSION_KEY] = 'pl' session.save() self.client.cookies[settings.SESSION_COOKIE_NAME] = session.session_key self.client.get('/logout/') 
self.assertEqual(self.client.session[LANGUAGE_SESSION_KEY], 'pl') # Redirect in test_user_change_password will fail if session auth hash # isn't updated after password change (#21649) @modify_settings(MIDDLEWARE_CLASSES={ 'append': 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', }) @override_settings( PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF='auth_tests.urls_admin', ) class ChangelistTests(AuthViewsTestCase): def setUp(self): # Make me a superuser before logging in. User.objects.filter(username='testclient').update(is_staff=True, is_superuser=True) self.login() self.admin = User.objects.get(pk=self.u1.pk) def get_user_data(self, user): return { 'username': user.username, 'password': user.password, 'email': user.email, 'is_active': user.is_active, 'is_staff': user.is_staff, 'is_superuser': user.is_superuser, 'last_login_0': user.last_login.strftime('%Y-%m-%d'), 'last_login_1': user.last_login.strftime('%H:%M:%S'), 'initial-last_login_0': user.last_login.strftime('%Y-%m-%d'), 'initial-last_login_1': user.last_login.strftime('%H:%M:%S'), 'date_joined_0': user.date_joined.strftime('%Y-%m-%d'), 'date_joined_1': user.date_joined.strftime('%H:%M:%S'), 'initial-date_joined_0': user.date_joined.strftime('%Y-%m-%d'), 'initial-date_joined_1': user.date_joined.strftime('%H:%M:%S'), 'first_name': user.first_name, 'last_name': user.last_name, } # #20078 - users shouldn't be allowed to guess password hashes via # repeated password__startswith queries. def test_changelist_disallows_password_lookups(self): # A lookup that tries to filter on password isn't OK with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as logger_calls: response = self.client.get(reverse('auth_test_admin:auth_user_changelist') + '?password__startswith=sha1$') self.assertEqual(response.status_code, 400) self.assertEqual(len(logger_calls), 1) def test_user_change_email(self): data = self.get_user_data(self.admin) data['email'] = 'new_' + data['email'] response = self.client.post( reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)), data ) self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist')) row = LogEntry.objects.latest('id') self.assertEqual(row.change_message, 'Changed email.') def test_user_not_change(self): response = self.client.post( reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)), self.get_user_data(self.admin) ) self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist')) row = LogEntry.objects.latest('id') self.assertEqual(row.change_message, 'No fields changed.') def test_user_change_password(self): user_change_url = reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)) password_change_url = reverse('auth_test_admin:auth_user_password_change', args=(self.admin.pk,)) response = self.client.get(user_change_url) # Test the link inside password field help_text. 
rel_link = re.search( r'you can change the password using <a href="([^"]*)">this form</a>', force_text(response.content) ).groups()[0] self.assertEqual( os.path.normpath(user_change_url + rel_link), os.path.normpath(password_change_url) ) response = self.client.post( password_change_url, { 'password1': 'password1', 'password2': 'password1', } ) self.assertRedirects(response, user_change_url) row = LogEntry.objects.latest('id') self.assertEqual(row.change_message, 'Changed password.') self.logout() self.login(password='password1') def test_user_change_different_user_password(self): u = User.objects.get(email='[email protected]') response = self.client.post( reverse('auth_test_admin:auth_user_password_change', args=(u.pk,)), { 'password1': 'password1', 'password2': 'password1', } ) self.assertRedirects(response, reverse('auth_test_admin:auth_user_change', args=(u.pk,))) row = LogEntry.objects.latest('id') self.assertEqual(row.user_id, self.admin.pk) self.assertEqual(row.object_id, str(u.pk)) self.assertEqual(row.change_message, 'Changed password.') def test_password_change_bad_url(self): response = self.client.get(reverse('auth_test_admin:auth_user_password_change', args=('foobar',))) self.assertEqual(response.status_code, 404) @override_settings( AUTH_USER_MODEL='auth.UUIDUser', ROOT_URLCONF='auth_tests.urls_custom_user_admin', ) class UUIDUserTests(TestCase): def test_admin_password_change(self): u = UUIDUser.objects.create_superuser(username='uuid', email='[email protected]', password='test') self.assertTrue(self.client.login(username='uuid', password='test')) user_change_url = reverse('custom_user_admin:auth_uuiduser_change', args=(u.pk,)) response = self.client.get(user_change_url) self.assertEqual(response.status_code, 200) password_change_url = reverse('custom_user_admin:auth_user_password_change', args=(u.pk,)) response = self.client.get(password_change_url) self.assertEqual(response.status_code, 200) # A LogEntry is created with pk=1 which breaks a FK constraint on MySQL with connection.constraint_checks_disabled(): response = self.client.post(password_change_url, { 'password1': 'password1', 'password2': 'password1', }) self.assertRedirects(response, user_change_url) row = LogEntry.objects.latest('id') self.assertEqual(row.user_id, 1) # harcoded in CustomUserAdmin.log_change() self.assertEqual(row.object_id, str(u.pk)) self.assertEqual(row.change_message, 'Changed password.')
bikong2/django
tests/auth_tests/test_views.py
Python
bsd-3-clause
45,028
# -*- coding: utf-8 -*-

import os
from operator import itemgetter

from gluon.storage import Storage
from gluon.dal import DAL, Field, Row

DBHOST = os.environ.get('DBHOST', 'localhost')
DATABASE_URI = 'mysql://benchmarkdbuser:benchmarkdbpass@%s:3306/hello_world' % DBHOST


class Dal(object):

    def __init__(self, table=None, pool_size=8):
        self.db = DAL(DATABASE_URI, migrate_enabled=False, pool_size=pool_size)
        if table == 'World':
            self.db.define_table('World', Field('randomNumber', 'integer'))
        elif table == 'Fortune':
            self.db.define_table('Fortune', Field('message'))

    def get_world(self, wid):
        # Setting `cacheable=True` improves performance by foregoing the creation
        # of some non-essential attributes. It does *not* actually cache the
        # database results (it simply produces a Rows object that *could be* cached).
        return self.db(self.db.World.id == wid).select(cacheable=True)[0].as_dict()

    def update_world(self, wid, randomNumber):
        self.db(self.db.World.id == wid).update(randomNumber=randomNumber)

    def get_fortunes(self, new_message):
        fortunes = self.db(self.db.Fortune).select(cacheable=True)
        fortunes.records.append(Row(new_message))
        return fortunes.sort(itemgetter('message'))


class RawDal(Dal):

    def __init__(self):
        super(RawDal, self).__init__()
        self.world_updates = []

    def get_world(self, wid):
        return self.db.executesql('SELECT * FROM World WHERE id = %s',
                                  placeholders=[wid], as_dict=True)[0]

    def update_world(self, wid, randomNumber):
        self.world_updates.extend([randomNumber, wid])

    def flush_world_updates(self):
        query = ';'.join('UPDATE World SET randomNumber=%s WHERE id=%s'
                         for _ in xrange(len(self.world_updates) / 2))
        self.db.executesql(query, placeholders=self.world_updates)

    def get_fortunes(self, new_message):
        fortunes = self.db.executesql('SELECT * FROM Fortune', as_dict=True)
        fortunes.append(new_message)
        return sorted(fortunes, key=itemgetter('message'))


def num_queries(queries):
    try:
        num = int(queries)
        return 1 if num < 1 else 500 if num > 500 else num
    except ValueError:
        return 1
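
# A brief sketch of how the classes above would be driven from a request
# handler. The driver itself and the id range are hypothetical, but the
# batching pattern (queue UPDATEs, then one flush_world_updates() call that
# sends them as a single ';'-joined query) is the one RawDal implements.

import random

dal = RawDal()
for _ in range(num_queries('5')):
    wid = random.randint(1, 10000)
    world = dal.get_world(wid)
    world['randomNumber'] = random.randint(1, 10000)
    dal.update_world(wid, world['randomNumber'])  # queued, not executed yet
dal.flush_world_updates()  # one round-trip for all queued UPDATEs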
ashawnbandy-te-tfb/FrameworkBenchmarks
frameworks/Python/web2py/app/standard/modules/database.py
Python
bsd-3-clause
2,330
# -*- coding: utf-8 -*-

from __future__ import (unicode_literals, division, absolute_import, print_function)

store_version = 1  # Needed for dynamic plugin loading

__license__ = 'GPL 3'
__copyright__ = '2011, John Schember <[email protected]>'
__docformat__ = 'restructuredtext en'

from calibre.gui2.store.basic_config import BasicStoreConfig
from calibre.gui2.store.opensearch_store import OpenSearchOPDSStore
from calibre.gui2.store.search_result import SearchResult


class ArchiveOrgStore(BasicStoreConfig, OpenSearchOPDSStore):

    open_search_url = 'http://bookserver.archive.org/catalog/opensearch.xml'
    web_url = 'http://www.archive.org/details/texts'

    # http://bookserver.archive.org/catalog/

    def search(self, query, max_results=10, timeout=60):
        for s in OpenSearchOPDSStore.search(self, query, max_results, timeout):
            s.detail_item = 'http://www.archive.org/details/' + s.detail_item.split(':')[-1]
            s.price = '$0.00'
            s.drm = SearchResult.DRM_UNLOCKED
            yield s

    def get_details(self, search_result, timeout):
        '''
        The opensearch feed only returns a subset of formats that are available.
        We want to get a list of all formats that the user can get.
        '''
        from calibre import browser
        from contextlib import closing
        from lxml import html

        br = browser()
        with closing(br.open(search_result.detail_item, timeout=timeout)) as nf:
            idata = html.fromstring(nf.read())
            formats = ', '.join(idata.xpath('//p[@id="dl" and @class="content"]//a/text()'))
            search_result.formats = formats.upper()

        return True
drxaero/calibre
src/calibre/gui2/store/stores/archive_org_plugin.py
Python
gpl-3.0
1,689
from fabric.api import *
import fabric.contrib.project as project
import os
import shutil
import sys
import SocketServer

from pelican.server import ComplexHTTPRequestHandler

# Local path configuration (can be absolute or relative to fabfile)
env.deploy_path = 'output'
DEPLOY_PATH = env.deploy_path

# Remote server configuration
production = 'root@localhost:22'
dest_path = '/var/www'

# Rackspace Cloud Files configuration settings
env.cloudfiles_username = 'my_rackspace_username'
env.cloudfiles_api_key = 'my_rackspace_api_key'
env.cloudfiles_container = 'my_cloudfiles_container'

# Github Pages configuration
env.github_pages_branch = "master"

# Port for `serve`
PORT = 8000


def clean():
    """Remove generated files"""
    if os.path.isdir(DEPLOY_PATH):
        shutil.rmtree(DEPLOY_PATH)
        os.makedirs(DEPLOY_PATH)


def build():
    """Build local version of site"""
    local('pelican -s pelicanconf.py')


def rebuild():
    """`build` with the delete switch"""
    local('pelican -d -s pelicanconf.py')


def regenerate():
    """Automatically regenerate site upon file modification"""
    local('pelican -r -s pelicanconf.py')


def serve():
    """Serve site at http://localhost:8000/"""
    os.chdir(env.deploy_path)

    class AddressReuseTCPServer(SocketServer.TCPServer):
        allow_reuse_address = True

    server = AddressReuseTCPServer(('', PORT), ComplexHTTPRequestHandler)

    sys.stderr.write('Serving on port {0} ...\n'.format(PORT))
    server.serve_forever()


def reserve():
    """`build`, then `serve`"""
    build()
    serve()


def preview():
    """Build production version of site"""
    local('pelican -s publishconf.py')


def cf_upload():
    """Publish to Rackspace Cloud Files"""
    rebuild()
    with lcd(DEPLOY_PATH):
        local('swift -v -A https://auth.api.rackspacecloud.com/v1.0 '
              '-U {cloudfiles_username} '
              '-K {cloudfiles_api_key} '
              'upload -c {cloudfiles_container} .'.format(**env))


@hosts(production)
def publish():
    """Publish to production via rsync"""
    local('pelican -s publishconf.py')
    project.rsync_project(
        remote_dir=dest_path,
        exclude=".DS_Store",
        local_dir=DEPLOY_PATH.rstrip('/') + '/',
        delete=True,
        extra_opts='-c',
    )


def gh_pages():
    """Publish to GitHub Pages"""
    rebuild()
    local("ghp-import -b {github_pages_branch} {deploy_path} -p".format(**env))
oscarvarto/oscarvarto.github.io
fabfile.py
Python
apache-2.0
2,437
import os.path
import sys
import re
import warnings

import cx_Oracle

from django.db import connection, models
from django.db.backends.util import truncate_name
from django.core.management.color import no_style
from django.db.models.fields import NOT_PROVIDED
from django.db.utils import DatabaseError

# In revision r16016 function get_sequence_name has been transformed into
# method of DatabaseOperations class. To make code backward-compatible we
# need to handle both situations.
try:
    from django.db.backends.oracle.base import get_sequence_name \
        as original_get_sequence_name
except ImportError:
    original_get_sequence_name = None

from south.db import generic

warnings.warn("! WARNING: South's Oracle support is still alpha. "
              "Be wary of possible bugs.")


class DatabaseOperations(generic.DatabaseOperations):
    """
    Oracle implementation of database operations.
    """
    backend_name = 'oracle'

    alter_string_set_type = 'ALTER TABLE %(table_name)s MODIFY %(column)s %(type)s %(nullity)s;'
    alter_string_set_default = 'ALTER TABLE %(table_name)s MODIFY %(column)s DEFAULT %(default)s;'
    add_column_string = 'ALTER TABLE %s ADD %s;'
    delete_column_string = 'ALTER TABLE %s DROP COLUMN %s;'
    add_constraint_string = 'ALTER TABLE %(table_name)s ADD CONSTRAINT %(constraint)s %(clause)s'

    allows_combined_alters = False
    has_booleans = False

    constraints_dict = {
        'P': 'PRIMARY KEY',
        'U': 'UNIQUE',
        'C': 'CHECK',
        'R': 'FOREIGN KEY'
    }

    def get_sequence_name(self, table_name):
        if original_get_sequence_name is None:
            return self._get_connection().ops._get_sequence_name(table_name)
        else:
            return original_get_sequence_name(table_name)

    # TODO: This will cause very obscure bugs if anyone uses a column name or string value
    #       that looks like a column definition (with 'CHECK', 'DEFAULT' and/or 'NULL' in it)
    #       e.g. "CHECK MATE" varchar(10) DEFAULT 'NULL'
    def adj_column_sql(self, col):
        # Syntax fixes -- Oracle is picky about clause order
        # syntax fix for boolean/integer field only
        col = re.sub('(?P<constr>CHECK \(.*\))(?P<any>.*)(?P<default>DEFAULT \d+)',
                     lambda mo: '%s %s%s' % (mo.group('default'), mo.group('constr'), mo.group('any')), col)
        # fix order of NULL/NOT NULL and DEFAULT
        col = re.sub('(?P<not_null>(NOT )?NULL) (?P<misc>(.* )?)(?P<default>DEFAULT.+)',
                     lambda mo: '%s %s %s' % (mo.group('default'), mo.group('not_null'), mo.group('misc') or ''), col)
        return col

    def check_meta(self, table_name):
        # caching provided by Django
        return table_name in [m._meta.db_table for m in models.get_models()]

    def normalize_name(self, name):
        """
        Get the properly shortened and uppercased identifier as returned by
        quote_name(), but without the actual quotes.
        """
        nn = self.quote_name(name)
        if nn[0] == '"' and nn[-1] == '"':
            nn = nn[1:-1]
        return nn

    @generic.invalidate_table_constraints
    def create_table(self, table_name, fields):
        qn = self.quote_name(table_name)
        columns = []
        autoinc_sql = ''

        for field_name, field in fields:
            col = self.column_sql(table_name, field_name, field)
            if not col:
                continue
            col = self.adj_column_sql(col)

            columns.append(col)
            if isinstance(field, models.AutoField):
                autoinc_sql = connection.ops.autoinc_sql(table_name, field_name)

        sql = 'CREATE TABLE %s (%s);' % (qn, ', '.join([col for col in columns]))
        self.execute(sql)
        if autoinc_sql:
            self.execute(autoinc_sql[0])
            self.execute(autoinc_sql[1])

    @generic.invalidate_table_constraints
    def delete_table(self, table_name, cascade=True):
        qn = self.quote_name(table_name)

        # Note: PURGE is not valid syntax for Oracle 9i (it was added in 10)
        if cascade:
            self.execute('DROP TABLE %s CASCADE CONSTRAINTS;' % qn)
        else:
            self.execute('DROP TABLE %s;' % qn)

        # If the table has an AutoField a sequence was created.
        sequence_sql = """
DECLARE
    i INTEGER;
BEGIN
    SELECT COUNT(*) INTO i FROM USER_CATALOG
        WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE';
    IF i = 1 THEN
        EXECUTE IMMEDIATE 'DROP SEQUENCE "%(sq_name)s"';
    END IF;
END;
/""" % {'sq_name': self.get_sequence_name(table_name)}
        self.execute(sequence_sql)

    @generic.invalidate_table_constraints
    def alter_column(self, table_name, name, field, explicit_name=True):

        if self.dry_run:
            if self.debug:
                print ' - no dry run output for alter_column() due to dynamic DDL, sorry'
            return

        qn = self.quote_name(table_name)

        # hook for the field to do any resolution prior to its attributes being queried
        if hasattr(field, 'south_init'):
            field.south_init()
        field = self._field_sanity(field)

        # Add _id or whatever if we need to
        field.set_attributes_from_name(name)
        if not explicit_name:
            name = field.column
        qn_col = self.quote_name(name)

        # First, change the type
        # This will actually also add any CHECK constraints needed,
        # since e.g. 'type' for a BooleanField is 'NUMBER(1) CHECK (%(qn_column)s IN (0,1))'
        params = {
            'table_name': qn,
            'column': qn_col,
            'type': self._db_type_for_alter_column(field),
            'nullity': 'NOT NULL',
            'default': 'NULL'
        }
        if field.null:
            params['nullity'] = 'NULL'

        if not field.null and field.has_default():
            params['default'] = self._default_value_workaround(field.get_default())

        sql_templates = [
            (self.alter_string_set_type, params),
            (self.alter_string_set_default, params.copy()),
        ]

        # drop CHECK constraints. Make sure this is executed before the ALTER TABLE statements
        # generated above, since those statements recreate the constraints we delete here.
        check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
        for constraint in check_constraints:
            self.execute(self.delete_check_sql % {
                'table': self.quote_name(table_name),
                'constraint': self.quote_name(constraint),
            })

        for sql_template, params in sql_templates:
            try:
                self.execute(sql_template % params)
            except DatabaseError, exc:
                description = str(exc)
                # Oracle complains if a column is already NULL/NOT NULL
                if 'ORA-01442' in description or 'ORA-01451' in description:
                    # so we just drop NULL/NOT NULL part from target sql and retry
                    params['nullity'] = ''
                    sql = sql_template % params
                    self.execute(sql)
                # Oracle also has issues if we try to change a regular column
                # to a LOB or vice versa (also REF, object, VARRAY or nested
                # table, but these don't come up much in Django apps)
                elif 'ORA-22858' in description or 'ORA-22859' in description:
                    self._alter_column_lob_workaround(table_name, name, field)
                else:
                    raise

    def _alter_column_lob_workaround(self, table_name, name, field):
        """
        Oracle refuses to change a column type from/to LOB to/from a regular
        column. In Django, this shows up when the field is changed from/to
        a TextField.
        What we need to do instead is:
        - Rename the original column
        - Add the desired field as new
        - Update the table to transfer values from old to new
        - Drop old column
        """
        renamed = self._generate_temp_name(name)
        self.rename_column(table_name, name, renamed)
        self.add_column(table_name, name, field, keep_default=False)
        self.execute("UPDATE %s set %s=%s" % (
            self.quote_name(table_name),
            self.quote_name(name),
            self.quote_name(renamed),
        ))
        self.delete_column(table_name, renamed)

    def _generate_temp_name(self, for_name):
        suffix = hex(hash(for_name)).upper()[1:]
        return self.normalize_name(for_name + "_" + suffix)

    @generic.copy_column_constraints  # TODO: Appears to be nulled by the delete decorator below...
    @generic.delete_column_constraints
    def rename_column(self, table_name, old, new):
        if old == new:
            # Short-circuit out
            return []
        self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % (
            self.quote_name(table_name),
            self.quote_name(old),
            self.quote_name(new),
        ))

    @generic.invalidate_table_constraints
    def add_column(self, table_name, name, field, keep_default=True):
        sql = self.column_sql(table_name, name, field)
        sql = self.adj_column_sql(sql)

        if sql:
            params = (
                self.quote_name(table_name),
                sql
            )
            sql = self.add_column_string % params
            self.execute(sql)

            # Now, drop the default if we need to
            if not keep_default and field.default is not None:
                field.default = NOT_PROVIDED
                self.alter_column(table_name, name, field, explicit_name=False)

    def delete_column(self, table_name, name):
        return super(DatabaseOperations, self).delete_column(self.quote_name(table_name), name)

    def lookup_constraint(self, db_name, table_name, column_name=None):
        if column_name:
            # Column names in the constraint cache come from the database,
            # make sure we use the properly shortened/uppercased version
            # for lookup.
            column_name = self.normalize_name(column_name)
        return super(DatabaseOperations, self).lookup_constraint(db_name, table_name, column_name)

    def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"):
        if columns:
            columns = [self.normalize_name(c) for c in columns]
        return super(DatabaseOperations, self)._constraints_affecting_columns(table_name, columns, type)

    def _field_sanity(self, field):
        """
        This particular override stops us sending DEFAULTs for BooleanField.
""" if isinstance(field, models.BooleanField) and field.has_default(): field.default = int(field.to_python(field.get_default())) return field def _default_value_workaround(self, value): from datetime import date,time,datetime if isinstance(value, (date,time,datetime)): return "'%s'" % value else: return super(DatabaseOperations, self)._default_value_workaround(value) def _fill_constraint_cache(self, db_name, table_name): self._constraint_cache.setdefault(db_name, {}) self._constraint_cache[db_name][table_name] = {} rows = self.execute(""" SELECT user_cons_columns.constraint_name, user_cons_columns.column_name, user_constraints.constraint_type FROM user_constraints JOIN user_cons_columns ON user_constraints.table_name = user_cons_columns.table_name AND user_constraints.constraint_name = user_cons_columns.constraint_name WHERE user_constraints.table_name = '%s' """ % self.normalize_name(table_name)) for constraint, column, kind in rows: self._constraint_cache[db_name][table_name].setdefault(column, set()) self._constraint_cache[db_name][table_name][column].add((self.constraints_dict[kind], constraint)) return
edisonlz/fruit
web_project/base/site-packages/south/db/oracle.py
Python
apache-2.0
12,431
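To make the clause reordering done by adj_column_sql() above concrete, here is a hypothetical standalone harness that applies the same two substitutions to an invented Oracle column definition; the column type and value are illustrative only:

import re

def reorder_oracle_clauses(col):
    # Move a trailing DEFAULT in front of the CHECK (...) clause.
    col = re.sub(r'(?P<constr>CHECK \(.*\))(?P<any>.*)(?P<default>DEFAULT \d+)',
                 lambda mo: '%s %s%s' % (mo.group('default'),
                                         mo.group('constr'),
                                         mo.group('any')), col)
    # Move a trailing DEFAULT in front of NULL/NOT NULL.
    col = re.sub(r'(?P<not_null>(NOT )?NULL) (?P<misc>(.* )?)(?P<default>DEFAULT.+)',
                 lambda mo: '%s %s %s' % (mo.group('default'),
                                          mo.group('not_null'),
                                          mo.group('misc') or ''), col)
    return col

# Oracle wants DEFAULT before the NULL/NOT NULL and CHECK clauses:
print(reorder_oracle_clauses('NUMBER(1) NOT NULL DEFAULT 0'))
# -> 'NUMBER(1) DEFAULT 0 NOT NULL' (modulo trailing whitespace)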
#!/usr/bin/env python import vtk from vtk.test import Testing from vtk.util.misc import vtkGetDataRoot VTK_DATA_ROOT = vtkGetDataRoot() ren1 = vtk.vtkRenderer() renWin = vtk.vtkRenderWindow() renWin.AddRenderer(ren1) iren = vtk.vtkRenderWindowInteractor() iren.SetRenderWindow(renWin) # create a scene with one of each cell type # Voxel voxelPoints = vtk.vtkPoints() voxelPoints.SetNumberOfPoints(8) voxelPoints.InsertPoint(0,0,0,0) voxelPoints.InsertPoint(1,1,0,0) voxelPoints.InsertPoint(2,0,1,0) voxelPoints.InsertPoint(3,1,1,0) voxelPoints.InsertPoint(4,0,0,1) voxelPoints.InsertPoint(5,1,0,1) voxelPoints.InsertPoint(6,0,1,1) voxelPoints.InsertPoint(7,1,1,1) aVoxel = vtk.vtkVoxel() aVoxel.GetPointIds().SetId(0,0) aVoxel.GetPointIds().SetId(1,1) aVoxel.GetPointIds().SetId(2,2) aVoxel.GetPointIds().SetId(3,3) aVoxel.GetPointIds().SetId(4,4) aVoxel.GetPointIds().SetId(5,5) aVoxel.GetPointIds().SetId(6,6) aVoxel.GetPointIds().SetId(7,7) aVoxelGrid = vtk.vtkUnstructuredGrid() aVoxelGrid.Allocate(1,1) aVoxelGrid.InsertNextCell(aVoxel.GetCellType(),aVoxel.GetPointIds()) aVoxelGrid.SetPoints(voxelPoints) aVoxelMapper = vtk.vtkDataSetMapper() aVoxelMapper.SetInputData(aVoxelGrid) aVoxelActor = vtk.vtkActor() aVoxelActor.SetMapper(aVoxelMapper) aVoxelActor.GetProperty().BackfaceCullingOn() # Hexahedron hexahedronPoints = vtk.vtkPoints() hexahedronPoints.SetNumberOfPoints(8) hexahedronPoints.InsertPoint(0,0,0,0) hexahedronPoints.InsertPoint(1,1,0,0) hexahedronPoints.InsertPoint(2,1,1,0) hexahedronPoints.InsertPoint(3,0,1,0) hexahedronPoints.InsertPoint(4,0,0,1) hexahedronPoints.InsertPoint(5,1,0,1) hexahedronPoints.InsertPoint(6,1,1,1) hexahedronPoints.InsertPoint(7,0,1,1) aHexahedron = vtk.vtkHexahedron() aHexahedron.GetPointIds().SetId(0,0) aHexahedron.GetPointIds().SetId(1,1) aHexahedron.GetPointIds().SetId(2,2) aHexahedron.GetPointIds().SetId(3,3) aHexahedron.GetPointIds().SetId(4,4) aHexahedron.GetPointIds().SetId(5,5) aHexahedron.GetPointIds().SetId(6,6) aHexahedron.GetPointIds().SetId(7,7) aHexahedronGrid = vtk.vtkUnstructuredGrid() aHexahedronGrid.Allocate(1,1) aHexahedronGrid.InsertNextCell(aHexahedron.GetCellType(),aHexahedron.GetPointIds()) aHexahedronGrid.SetPoints(hexahedronPoints) aHexahedronMapper = vtk.vtkDataSetMapper() aHexahedronMapper.SetInputData(aHexahedronGrid) aHexahedronActor = vtk.vtkActor() aHexahedronActor.SetMapper(aHexahedronMapper) aHexahedronActor.AddPosition(2,0,0) aHexahedronActor.GetProperty().BackfaceCullingOn() # Tetra tetraPoints = vtk.vtkPoints() tetraPoints.SetNumberOfPoints(4) tetraPoints.InsertPoint(0,0,0,0) tetraPoints.InsertPoint(1,1,0,0) tetraPoints.InsertPoint(2,.5,1,0) tetraPoints.InsertPoint(3,.5,.5,1) aTetra = vtk.vtkTetra() aTetra.GetPointIds().SetId(0,0) aTetra.GetPointIds().SetId(1,1) aTetra.GetPointIds().SetId(2,2) aTetra.GetPointIds().SetId(3,3) aTetraGrid = vtk.vtkUnstructuredGrid() aTetraGrid.Allocate(1,1) aTetraGrid.InsertNextCell(aTetra.GetCellType(),aTetra.GetPointIds()) aTetraGrid.SetPoints(tetraPoints) aTetraMapper = vtk.vtkDataSetMapper() aTetraMapper.SetInputData(aTetraGrid) aTetraActor = vtk.vtkActor() aTetraActor.SetMapper(aTetraMapper) aTetraActor.AddPosition(4,0,0) aTetraActor.GetProperty().BackfaceCullingOn() # Wedge wedgePoints = vtk.vtkPoints() wedgePoints.SetNumberOfPoints(6) wedgePoints.InsertPoint(0,0,1,0) wedgePoints.InsertPoint(1,0,0,0) wedgePoints.InsertPoint(2,0,.5,.5) wedgePoints.InsertPoint(3,1,1,0) wedgePoints.InsertPoint(4,1,0,0) wedgePoints.InsertPoint(5,1,.5,.5) aWedge = vtk.vtkWedge() aWedge.GetPointIds().SetId(0,0) 
aWedge.GetPointIds().SetId(1,1) aWedge.GetPointIds().SetId(2,2) aWedge.GetPointIds().SetId(3,3) aWedge.GetPointIds().SetId(4,4) aWedge.GetPointIds().SetId(5,5) aWedgeGrid = vtk.vtkUnstructuredGrid() aWedgeGrid.Allocate(1,1) aWedgeGrid.InsertNextCell(aWedge.GetCellType(),aWedge.GetPointIds()) aWedgeGrid.SetPoints(wedgePoints) aWedgeMapper = vtk.vtkDataSetMapper() aWedgeMapper.SetInputData(aWedgeGrid) aWedgeActor = vtk.vtkActor() aWedgeActor.SetMapper(aWedgeMapper) aWedgeActor.AddPosition(6,0,0) aWedgeActor.GetProperty().BackfaceCullingOn() # Pyramid pyramidPoints = vtk.vtkPoints() pyramidPoints.SetNumberOfPoints(5) pyramidPoints.InsertPoint(0,0,0,0) pyramidPoints.InsertPoint(1,1,0,0) pyramidPoints.InsertPoint(2,1,1,0) pyramidPoints.InsertPoint(3,0,1,0) pyramidPoints.InsertPoint(4,.5,.5,1) aPyramid = vtk.vtkPyramid() aPyramid.GetPointIds().SetId(0,0) aPyramid.GetPointIds().SetId(1,1) aPyramid.GetPointIds().SetId(2,2) aPyramid.GetPointIds().SetId(3,3) aPyramid.GetPointIds().SetId(4,4) aPyramidGrid = vtk.vtkUnstructuredGrid() aPyramidGrid.Allocate(1,1) aPyramidGrid.InsertNextCell(aPyramid.GetCellType(),aPyramid.GetPointIds()) aPyramidGrid.SetPoints(pyramidPoints) aPyramidMapper = vtk.vtkDataSetMapper() aPyramidMapper.SetInputData(aPyramidGrid) aPyramidActor = vtk.vtkActor() aPyramidActor.SetMapper(aPyramidMapper) aPyramidActor.AddPosition(8,0,0) aPyramidActor.GetProperty().BackfaceCullingOn() # Pixel pixelPoints = vtk.vtkPoints() pixelPoints.SetNumberOfPoints(4) pixelPoints.InsertPoint(0,0,0,0) pixelPoints.InsertPoint(1,1,0,0) pixelPoints.InsertPoint(2,0,1,0) pixelPoints.InsertPoint(3,1,1,0) aPixel = vtk.vtkPixel() aPixel.GetPointIds().SetId(0,0) aPixel.GetPointIds().SetId(1,1) aPixel.GetPointIds().SetId(2,2) aPixel.GetPointIds().SetId(3,3) aPixelGrid = vtk.vtkUnstructuredGrid() aPixelGrid.Allocate(1,1) aPixelGrid.InsertNextCell(aPixel.GetCellType(),aPixel.GetPointIds()) aPixelGrid.SetPoints(pixelPoints) aPixelMapper = vtk.vtkDataSetMapper() aPixelMapper.SetInputData(aPixelGrid) aPixelActor = vtk.vtkActor() aPixelActor.SetMapper(aPixelMapper) aPixelActor.AddPosition(0,0,2) aPixelActor.GetProperty().BackfaceCullingOn() # Quad quadPoints = vtk.vtkPoints() quadPoints.SetNumberOfPoints(4) quadPoints.InsertPoint(0,0,0,0) quadPoints.InsertPoint(1,1,0,0) quadPoints.InsertPoint(2,1,1,0) quadPoints.InsertPoint(3,0,1,0) aQuad = vtk.vtkQuad() aQuad.GetPointIds().SetId(0,0) aQuad.GetPointIds().SetId(1,1) aQuad.GetPointIds().SetId(2,2) aQuad.GetPointIds().SetId(3,3) aQuadGrid = vtk.vtkUnstructuredGrid() aQuadGrid.Allocate(1,1) aQuadGrid.InsertNextCell(aQuad.GetCellType(),aQuad.GetPointIds()) aQuadGrid.SetPoints(quadPoints) aQuadMapper = vtk.vtkDataSetMapper() aQuadMapper.SetInputData(aQuadGrid) aQuadActor = vtk.vtkActor() aQuadActor.SetMapper(aQuadMapper) aQuadActor.AddPosition(2,0,2) aQuadActor.GetProperty().BackfaceCullingOn() # Triangle trianglePoints = vtk.vtkPoints() trianglePoints.SetNumberOfPoints(3) trianglePoints.InsertPoint(0,0,0,0) trianglePoints.InsertPoint(1,1,0,0) trianglePoints.InsertPoint(2,.5,.5,0) aTriangle = vtk.vtkTriangle() aTriangle.GetPointIds().SetId(0,0) aTriangle.GetPointIds().SetId(1,1) aTriangle.GetPointIds().SetId(2,2) aTriangleGrid = vtk.vtkUnstructuredGrid() aTriangleGrid.Allocate(1,1) aTriangleGrid.InsertNextCell(aTriangle.GetCellType(),aTriangle.GetPointIds()) aTriangleGrid.SetPoints(trianglePoints) aTriangleMapper = vtk.vtkDataSetMapper() aTriangleMapper.SetInputData(aTriangleGrid) aTriangleActor = vtk.vtkActor() aTriangleActor.SetMapper(aTriangleMapper) 
aTriangleActor.AddPosition(4,0,2) aTriangleActor.GetProperty().BackfaceCullingOn() # Polygon polygonPoints = vtk.vtkPoints() polygonPoints.SetNumberOfPoints(4) polygonPoints.InsertPoint(0,0,0,0) polygonPoints.InsertPoint(1,1,0,0) polygonPoints.InsertPoint(2,1,1,0) polygonPoints.InsertPoint(3,0,1,0) aPolygon = vtk.vtkPolygon() aPolygon.GetPointIds().SetNumberOfIds(4) aPolygon.GetPointIds().SetId(0,0) aPolygon.GetPointIds().SetId(1,1) aPolygon.GetPointIds().SetId(2,2) aPolygon.GetPointIds().SetId(3,3) aPolygonGrid = vtk.vtkUnstructuredGrid() aPolygonGrid.Allocate(1,1) aPolygonGrid.InsertNextCell(aPolygon.GetCellType(),aPolygon.GetPointIds()) aPolygonGrid.SetPoints(polygonPoints) aPolygonMapper = vtk.vtkDataSetMapper() aPolygonMapper.SetInputData(aPolygonGrid) aPolygonActor = vtk.vtkActor() aPolygonActor.SetMapper(aPolygonMapper) aPolygonActor.AddPosition(6,0,2) aPolygonActor.GetProperty().BackfaceCullingOn() # Triangle Strip triangleStripPoints = vtk.vtkPoints() triangleStripPoints.SetNumberOfPoints(5) triangleStripPoints.InsertPoint(0,0,1,0) triangleStripPoints.InsertPoint(1,0,0,0) triangleStripPoints.InsertPoint(2,1,1,0) triangleStripPoints.InsertPoint(3,1,0,0) triangleStripPoints.InsertPoint(4,2,1,0) aTriangleStrip = vtk.vtkTriangleStrip() aTriangleStrip.GetPointIds().SetNumberOfIds(5) aTriangleStrip.GetPointIds().SetId(0,0) aTriangleStrip.GetPointIds().SetId(1,1) aTriangleStrip.GetPointIds().SetId(2,2) aTriangleStrip.GetPointIds().SetId(3,3) aTriangleStrip.GetPointIds().SetId(4,4) aTriangleStripGrid = vtk.vtkUnstructuredGrid() aTriangleStripGrid.Allocate(1,1) aTriangleStripGrid.InsertNextCell(aTriangleStrip.GetCellType(),aTriangleStrip.GetPointIds()) aTriangleStripGrid.SetPoints(triangleStripPoints) aTriangleStripMapper = vtk.vtkDataSetMapper() aTriangleStripMapper.SetInputData(aTriangleStripGrid) aTriangleStripActor = vtk.vtkActor() aTriangleStripActor.SetMapper(aTriangleStripMapper) aTriangleStripActor.AddPosition(8,0,2) aTriangleStripActor.GetProperty().BackfaceCullingOn() # Line linePoints = vtk.vtkPoints() linePoints.SetNumberOfPoints(2) linePoints.InsertPoint(0,0,0,0) linePoints.InsertPoint(1,1,1,0) aLine = vtk.vtkLine() aLine.GetPointIds().SetId(0,0) aLine.GetPointIds().SetId(1,1) aLineGrid = vtk.vtkUnstructuredGrid() aLineGrid.Allocate(1,1) aLineGrid.InsertNextCell(aLine.GetCellType(),aLine.GetPointIds()) aLineGrid.SetPoints(linePoints) aLineMapper = vtk.vtkDataSetMapper() aLineMapper.SetInputData(aLineGrid) aLineActor = vtk.vtkActor() aLineActor.SetMapper(aLineMapper) aLineActor.AddPosition(0,0,4) aLineActor.GetProperty().BackfaceCullingOn() # Poly line polyLinePoints = vtk.vtkPoints() polyLinePoints.SetNumberOfPoints(3) polyLinePoints.InsertPoint(0,0,0,0) polyLinePoints.InsertPoint(1,1,1,0) polyLinePoints.InsertPoint(2,1,0,0) aPolyLine = vtk.vtkPolyLine() aPolyLine.GetPointIds().SetNumberOfIds(3) aPolyLine.GetPointIds().SetId(0,0) aPolyLine.GetPointIds().SetId(1,1) aPolyLine.GetPointIds().SetId(2,2) aPolyLineGrid = vtk.vtkUnstructuredGrid() aPolyLineGrid.Allocate(1,1) aPolyLineGrid.InsertNextCell(aPolyLine.GetCellType(),aPolyLine.GetPointIds()) aPolyLineGrid.SetPoints(polyLinePoints) aPolyLineMapper = vtk.vtkDataSetMapper() aPolyLineMapper.SetInputData(aPolyLineGrid) aPolyLineActor = vtk.vtkActor() aPolyLineActor.SetMapper(aPolyLineMapper) aPolyLineActor.AddPosition(2,0,4) aPolyLineActor.GetProperty().BackfaceCullingOn() # Vertex vertexPoints = vtk.vtkPoints() vertexPoints.SetNumberOfPoints(1) vertexPoints.InsertPoint(0,0,0,0) aVertex = vtk.vtkVertex() 
aVertex.GetPointIds().SetId(0,0) aVertexGrid = vtk.vtkUnstructuredGrid() aVertexGrid.Allocate(1,1) aVertexGrid.InsertNextCell(aVertex.GetCellType(),aVertex.GetPointIds()) aVertexGrid.SetPoints(vertexPoints) aVertexMapper = vtk.vtkDataSetMapper() aVertexMapper.SetInputData(aVertexGrid) aVertexActor = vtk.vtkActor() aVertexActor.SetMapper(aVertexMapper) aVertexActor.AddPosition(0,0,6) aVertexActor.GetProperty().BackfaceCullingOn() # Poly Vertex polyVertexPoints = vtk.vtkPoints() polyVertexPoints.SetNumberOfPoints(3) polyVertexPoints.InsertPoint(0,0,0,0) polyVertexPoints.InsertPoint(1,1,0,0) polyVertexPoints.InsertPoint(2,1,1,0) aPolyVertex = vtk.vtkPolyVertex() aPolyVertex.GetPointIds().SetNumberOfIds(3) aPolyVertex.GetPointIds().SetId(0,0) aPolyVertex.GetPointIds().SetId(1,1) aPolyVertex.GetPointIds().SetId(2,2) aPolyVertexGrid = vtk.vtkUnstructuredGrid() aPolyVertexGrid.Allocate(1,1) aPolyVertexGrid.InsertNextCell(aPolyVertex.GetCellType(),aPolyVertex.GetPointIds()) aPolyVertexGrid.SetPoints(polyVertexPoints) aPolyVertexMapper = vtk.vtkDataSetMapper() aPolyVertexMapper.SetInputData(aPolyVertexGrid) aPolyVertexActor = vtk.vtkActor() aPolyVertexActor.SetMapper(aPolyVertexMapper) aPolyVertexActor.AddPosition(2,0,6) aPolyVertexActor.GetProperty().BackfaceCullingOn() # Pentagonal prism pentaPoints = vtk.vtkPoints() pentaPoints.SetNumberOfPoints(10) pentaPoints.InsertPoint(0,0.25,0.0,0.0) pentaPoints.InsertPoint(1,0.75,0.0,0.0) pentaPoints.InsertPoint(2,1.0,0.5,0.0) pentaPoints.InsertPoint(3,0.5,1.0,0.0) pentaPoints.InsertPoint(4,0.0,0.5,0.0) pentaPoints.InsertPoint(5,0.25,0.0,1.0) pentaPoints.InsertPoint(6,0.75,0.0,1.0) pentaPoints.InsertPoint(7,1.0,0.5,1.0) pentaPoints.InsertPoint(8,0.5,1.0,1.0) pentaPoints.InsertPoint(9,0.0,0.5,1.0) aPenta = vtk.vtkPentagonalPrism() aPenta.GetPointIds().SetId(0,0) aPenta.GetPointIds().SetId(1,1) aPenta.GetPointIds().SetId(2,2) aPenta.GetPointIds().SetId(3,3) aPenta.GetPointIds().SetId(4,4) aPenta.GetPointIds().SetId(5,5) aPenta.GetPointIds().SetId(6,6) aPenta.GetPointIds().SetId(7,7) aPenta.GetPointIds().SetId(8,8) aPenta.GetPointIds().SetId(9,9) aPentaGrid = vtk.vtkUnstructuredGrid() aPentaGrid.Allocate(1,1) aPentaGrid.InsertNextCell(aPenta.GetCellType(),aPenta.GetPointIds()) aPentaGrid.SetPoints(pentaPoints) aPentaMapper = vtk.vtkDataSetMapper() aPentaMapper.SetInputData(aPentaGrid) aPentaActor = vtk.vtkActor() aPentaActor.SetMapper(aPentaMapper) aPentaActor.AddPosition(10,0,0) aPentaActor.GetProperty().BackfaceCullingOn() # Hexagonal prism hexaPoints = vtk.vtkPoints() hexaPoints.SetNumberOfPoints(12) hexaPoints.InsertPoint(0,0.0,0.0,0.0) hexaPoints.InsertPoint(1,0.5,0.0,0.0) hexaPoints.InsertPoint(2,1.0,0.5,0.0) hexaPoints.InsertPoint(3,1.0,1.0,0.0) hexaPoints.InsertPoint(4,0.5,1.0,0.0) hexaPoints.InsertPoint(5,0.0,0.5,0.0) hexaPoints.InsertPoint(6,0.0,0.0,1.0) hexaPoints.InsertPoint(7,0.5,0.0,1.0) hexaPoints.InsertPoint(8,1.0,0.5,1.0) hexaPoints.InsertPoint(9,1.0,1.0,1.0) hexaPoints.InsertPoint(10,0.5,1.0,1.0) hexaPoints.InsertPoint(11,0.0,0.5,1.0) aHexa = vtk.vtkHexagonalPrism() aHexa.GetPointIds().SetId(0,0) aHexa.GetPointIds().SetId(1,1) aHexa.GetPointIds().SetId(2,2) aHexa.GetPointIds().SetId(3,3) aHexa.GetPointIds().SetId(4,4) aHexa.GetPointIds().SetId(5,5) aHexa.GetPointIds().SetId(6,6) aHexa.GetPointIds().SetId(7,7) aHexa.GetPointIds().SetId(8,8) aHexa.GetPointIds().SetId(9,9) aHexa.GetPointIds().SetId(10,10) aHexa.GetPointIds().SetId(11,11) aHexaGrid = vtk.vtkUnstructuredGrid() aHexaGrid.Allocate(1,1) 
aHexaGrid.InsertNextCell(aHexa.GetCellType(),aHexa.GetPointIds())
aHexaGrid.SetPoints(hexaPoints)
aHexaMapper = vtk.vtkDataSetMapper()
aHexaMapper.SetInputData(aHexaGrid)
aHexaActor = vtk.vtkActor()
aHexaActor.SetMapper(aHexaMapper)
aHexaActor.AddPosition(12,0,0)
aHexaActor.GetProperty().BackfaceCullingOn()
ren1.SetBackground(.1,.2,.4)
ren1.AddActor(aVoxelActor)
aVoxelActor.GetProperty().SetDiffuseColor(1,0,0)
ren1.AddActor(aHexahedronActor)
aHexahedronActor.GetProperty().SetDiffuseColor(1,1,0)
ren1.AddActor(aTetraActor)
aTetraActor.GetProperty().SetDiffuseColor(0,1,0)
ren1.AddActor(aWedgeActor)
aWedgeActor.GetProperty().SetDiffuseColor(0,1,1)
ren1.AddActor(aPyramidActor)
aPyramidActor.GetProperty().SetDiffuseColor(1,0,1)
ren1.AddActor(aPixelActor)
aPixelActor.GetProperty().SetDiffuseColor(0,1,1)
ren1.AddActor(aQuadActor)
aQuadActor.GetProperty().SetDiffuseColor(1,0,1)
ren1.AddActor(aTriangleActor)
aTriangleActor.GetProperty().SetDiffuseColor(.3,1,.5)
ren1.AddActor(aPolygonActor)
aPolygonActor.GetProperty().SetDiffuseColor(1,.4,.5)
ren1.AddActor(aTriangleStripActor)
aTriangleStripActor.GetProperty().SetDiffuseColor(.3,.7,1)
ren1.AddActor(aLineActor)
aLineActor.GetProperty().SetDiffuseColor(.2,1,1)
ren1.AddActor(aPolyLineActor)
aPolyLineActor.GetProperty().SetDiffuseColor(1,1,1)
ren1.AddActor(aVertexActor)
aVertexActor.GetProperty().SetDiffuseColor(1,1,1)
ren1.AddActor(aPolyVertexActor)
aPolyVertexActor.GetProperty().SetDiffuseColor(1,1,1)
ren1.AddActor(aPentaActor)
aPentaActor.GetProperty().SetDiffuseColor(.2,.4,.7)
ren1.AddActor(aHexaActor)
aHexaActor.GetProperty().SetDiffuseColor(.7,.5,1)
ren1.ResetCamera()
ren1.GetActiveCamera().Azimuth(30)
ren1.GetActiveCamera().Elevation(20)
ren1.GetActiveCamera().Dolly(1.25)
ren1.ResetCameraClippingRange()
renWin.Render()
cellPicker = vtk.vtkCellPicker()
pointPicker = vtk.vtkPointPicker()
worldPicker = vtk.vtkWorldPointPicker()
cellCount = 0
pointCount = 0
ren1.IsInViewport(0,0)
x = 0
while x <= 265:
    y = 100
    while y <= 200:
        cellPicker.Pick(x,y,0,ren1)
        pointPicker.Pick(x,y,0,ren1)
        worldPicker.Pick(x,y,0,ren1)
        # GetCellId()/GetPointId() return integer ids; -1 means nothing was picked.
        if cellPicker.GetCellId() != -1:
            cellCount = cellCount + 1
        if pointPicker.GetPointId() != -1:
            pointCount = pointCount + 1
        y = y + 6
    x = x + 6
# render the image
#
iren.Initialize()
# --- end of script --
HopeFOAM/HopeFOAM
ThirdParty-0.1/ParaView-5.0.1/VTK/Rendering/Core/Testing/Python/pickCells.py
Python
gpl-3.0
16,540
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # This module copyright (C) 2015 Therp BV (<http://therp.nl>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import copy from openerp import models from openerp.addons.account.report.account_financial_report import\ report_account_common class report_account_common_horizontal(report_account_common): def __init__(self, cr, uid, name, context=None): super(report_account_common_horizontal, self).__init__( cr, uid, name, context=context) self.localcontext.update({ 'get_left_lines': self.get_left_lines, 'get_right_lines': self.get_right_lines, }) def get_lines(self, data, side=None): data = copy.deepcopy(data) if data['form']['used_context'] is None: data['form']['used_context'] = {} data['form']['used_context'].update( account_financial_report_horizontal_side=side) return super(report_account_common_horizontal, self).get_lines( data) def get_left_lines(self, data): return self.get_lines(data, side='left') def get_right_lines(self, data): return self.get_lines(data, side='right') class ReportFinancial(models.AbstractModel): _inherit = 'report.account.report_financial' _wrapped_report_class = report_account_common_horizontal
Ehtaga/account-financial-reporting
account_financial_report_horizontal/report/report_financial.py
Python
agpl-3.0
2,192
#!/usr/bin/env python

# Copyright (C) 2017 Francisco Acosta <[email protected]>
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.

import os
import sys

sys.path.append(os.path.join(os.environ['RIOTBASE'], 'dist/tools/testrunner'))

import testrunner
from datetime import datetime


class InvalidTimeout(Exception):
    pass


def testfunc(child):
    exp_diff1 = 1000000
    exp_diff5 = 5000000
    exp_diff10 = 10000000
    child.expect(u"This test will print \"Slept for X sec...\" every 1, 5 and 10 seconds.\r\n")
    child.expect(u"\r\n")
    child.expect(u"<======== If using pyterm, this is the time when started.")
    child.expect(u"\r\n")
    m = 9
    while m:
        n = 3
        while n:
            if n == 3:
                exp_diff = exp_diff1
            elif n == 2:
                exp_diff = exp_diff5
            elif n == 1:
                exp_diff = exp_diff10
            start = datetime.now()
            child.expect(u"Slept for \\d+ sec...", timeout=11)
            stop = datetime.now()
            diff = (stop - start)
            diff = (diff.seconds * 1000000) + diff.microseconds
            # Allow a fixed tolerance of 50 ms (5% of the 1-second interval)
            # around each expected sleep duration.
            if diff > (exp_diff + (exp_diff1 * 0.05)) or \
               diff < (exp_diff - (exp_diff1 * 0.05)):
                raise InvalidTimeout("Invalid timeout %d (expected %d)" % (diff, exp_diff))
            else:
                print("Timed out correctly: %d (expected %d)" % (diff, exp_diff))
            n = n - 1
        m = m - 1
    child.expect(u"Test end.", timeout=15)

if __name__ == "__main__":
    sys.exit(testrunner.run(testfunc))
dailab/RIOT
tests/xtimer_usleep/tests/01-run.py
Python
lgpl-2.1
1,745
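The test above converts a timedelta to microseconds by hand (seconds * 1000000 + microseconds). The same value can be obtained with timedelta.total_seconds(), available since Python 2.7; a minimal standalone sketch, not part of the test itself:

from datetime import datetime
import time

start = datetime.now()
time.sleep(1)  # stand-in for waiting on the child's output
stop = datetime.now()

# Equal to (diff.seconds * 1000000) + diff.microseconds for spans under a day.
diff_us = int((stop - start).total_seconds() * 1000000)
print("elapsed: %d us" % diff_us)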
import unittest import pysal from pysal.core.IOHandlers.arcgis_swm import ArcGISSwmIO import tempfile import os class test_ArcGISSwmIO(unittest.TestCase): def setUp(self): self.test_file = test_file = pysal.examples.get_path('ohio.swm') self.obj = ArcGISSwmIO(test_file, 'r') def test_close(self): f = self.obj f.close() self.failUnlessRaises(ValueError, f.read) def test_read(self): w = self.obj.read() self.assertEqual(88, w.n) self.assertEqual(5.25, w.mean_neighbors) self.assertEqual([1.0, 1.0, 1.0, 1.0], w[1].values()) def test_seek(self): self.test_read() self.failUnlessRaises(StopIteration, self.obj.read) self.obj.seek(0) self.test_read() def test_write(self): w = self.obj.read() f = tempfile.NamedTemporaryFile( suffix='.swm', dir=pysal.examples.get_path('')) fname = f.name f.close() o = pysal.open(fname, 'w') o.write(w) o.close() wnew = pysal.open(fname, 'r').read() self.assertEqual(wnew.pct_nonzero, w.pct_nonzero) os.remove(fname) if __name__ == '__main__': unittest.main()
badjr/pysal
pysal/core/IOHandlers/tests/test_arcgis_swm.py
Python
bsd-3-clause
1,219
import os, sys; sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))

import codecs

from pattern.vector import Document, PORTER, LEMMA

# A Document is a "bag-of-words" that splits a string into words and counts them.
# A list of words or dictionary of (word, count)-items can also be given.

# Words (or more generally "features") and their word count ("feature weights")
# can be used to compare documents. The word count in a document is normalized
# between 0.0-1.0 so that shorter documents can be compared to longer documents.

# Words can be stemmed or lemmatized before counting them.
# The purpose of stemming is to bring variant forms of a word together.
# For example, "conspiracy" and "conspired" are both stemmed to "conspir".
# Nowadays, lemmatization is usually preferred over stemming,
# e.g., "conspiracies" => "conspiracy", "conspired" => "conspire".

s = """
The shuttle Discovery, already delayed three times by technical problems and bad weather,
was grounded again Friday, this time by a potentially dangerous gaseous hydrogen leak
in a vent line attached to the ship's external tank.
The Discovery was initially scheduled to make its 39th and final flight last Monday,
bearing fresh supplies and an intelligent robot for the International Space Station.
But complications delayed the flight from Monday to Friday,
when the hydrogen leak led NASA to conclude that the shuttle would not be ready to launch
before its flight window closed this Monday.
"""

# With threshold=1, only words that occur more than once are counted.
# With stopwords=False, words like "the", "and", "I", "is" are ignored.
document = Document(s, threshold=1, stopwords=False)
print document.words
print

# The /corpus folder contains texts mined from Wikipedia.
# Below is the mining script (we already executed it for you):

#import os, codecs
#from pattern.web import Wikipedia
#
#w = Wikipedia()
#for q in (
#  "badger", "bear", "dog", "dolphin", "lion", "parakeet",
#  "rabbit", "shark", "sparrow", "tiger", "wolf"):
#    s = w.search(q, cached=True)
#    s = s.plaintext()
#    print os.path.join("corpus2", q+".txt")
#    f = codecs.open(os.path.join("corpus2", q+".txt"), "w", encoding="utf-8")
#    f.write(s)
#    f.close()

# Loading a document from a text file:
f = os.path.join(os.path.dirname(__file__), "corpus", "wolf.txt")
s = codecs.open(f, encoding="utf-8").read()
document = Document(s, name="wolf", stemmer=PORTER)

print document
print document.keywords(top=10) # (weight, feature)-items.
print

# Same document, using lemmatization instead of stemming (slower):
document = Document(s, name="wolf", stemmer=LEMMA)

print document
print document.keywords(top=10)
print

# In summary, a document is a bag-of-words representation of a text.
# Bag-of-words means that the word order is discarded.
# The dictionary of words (features) and their normalized word count (weights)
# is also called the document vector:
document = Document("a black cat and a white cat", stopwords=True)
print document.words
print document.vector.features
for feature, weight in document.vector.items():
    print feature, weight

# Document vectors can be bundled into a Model (next example).
krishna11888/ai
third_party/pattern/examples/05-vector/01-document.py
Python
gpl-2.0
3,205
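The example above ends by exposing document.vector, a mapping of features to normalized weights. As a minimal sketch of how two such vectors can be compared, here is a cosine similarity computed directly from those mappings — assuming .vector behaves like a plain dict of feature -> weight, as the iteration in the example suggests (the pattern.vector Model mentioned in the closing comment provides this out of the box):

from math import sqrt

from pattern.vector import Document

def cosine(v1, v2):
    # Dot product over shared features, divided by the vector norms.
    dot = sum(w * v1.get(f, 0.0) for f, w in v2.items())
    n1 = sqrt(sum(w * w for w in v1.values()))
    n2 = sqrt(sum(w * w for w in v2.values()))
    return dot / (n1 * n2) if n1 and n2 else 0.0

d1 = Document("a black cat and a white cat", stopwords=True)
d2 = Document("a white dog and a black dog", stopwords=True)
print(cosine(d1.vector, d2.vector))  # > 0: the documents share "black" and "white"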
""" CMSIS-DAP Interface Firmware Copyright (c) 2009-2013 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Extract and patch the interface without bootloader """ from options import get_options from paths import get_interface_path, TMP_DIR from utils import gen_binary, is_lpc, split_path from os.path import join if __name__ == '__main__': options = get_options() in_path = get_interface_path(options.interface, options.target, bootloader=False) _, name, _ = split_path(in_path) out_path = join(TMP_DIR, name + '.bin') print '\nELF: %s' % in_path gen_binary(in_path, out_path, is_lpc(options.interface)) print "\nBINARY: %s" % out_path
flyhung/CMSIS-DAP
tools/get_binary.py
Python
apache-2.0
1,166
""" Read/Write AMQP frames over network transports. 2009-01-14 Barry Pederson <[email protected]> """ # Copyright (C) 2009 Barry Pederson <[email protected]> # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 import re import socket # # See if Python 2.6+ SSL support is available # try: import ssl HAVE_PY26_SSL = True except: HAVE_PY26_SSL = False try: bytes except: # Python 2.5 and lower bytes = str from struct import pack, unpack AMQP_PORT = 5672 # Yes, Advanced Message Queuing Protocol Protocol is redundant AMQP_PROTOCOL_HEADER = 'AMQP\x01\x01\x09\x01'.encode('latin_1') # Match things like: [fe80::1]:5432, from RFC 2732 IPV6_LITERAL = re.compile(r'\[([\.0-9a-f:]+)\](?::(\d+))?') class _AbstractTransport(object): """ Common superclass for TCP and SSL transports """ def __init__(self, host, connect_timeout): msg = 'socket.getaddrinfo() for %s returned an empty list' % host port = AMQP_PORT m = IPV6_LITERAL.match(host) if m: host = m.group(1) if m.group(2): port = int(m.group(2)) else: if ':' in host: host, port = host.rsplit(':', 1) port = int(port) self.sock = None for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM, socket.SOL_TCP): af, socktype, proto, canonname, sa = res try: self.sock = socket.socket(af, socktype, proto) self.sock.settimeout(connect_timeout) self.sock.connect(sa) except socket.error, msg: self.sock.close() self.sock = None continue break if not self.sock: # Didn't connect, return the most recent error message raise socket.error, msg self.sock.settimeout(None) self.sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1) self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) self._setup_transport() self._write(AMQP_PROTOCOL_HEADER) def __del__(self): self.close() def _read(self, n): """ Read exactly n bytes from the peer """ raise NotImplementedError('Must be overriden in subclass') def _setup_transport(self): """ Do any additional initialization of the class (used by the subclasses). """ pass def _shutdown_transport(self): """ Do any preliminary work in shutting down the connection. """ pass def _write(self, s): """ Completely write a string to the peer. """ raise NotImplementedError('Must be overriden in subclass') def close(self): if self.sock is not None: self._shutdown_transport() # Call shutdown first to make sure that pending messages # reach the AMQP broker if the program exits after # calling this method. self.sock.shutdown(socket.SHUT_RDWR) self.sock.close() self.sock = None def read_frame(self): """ Read an AMQP frame. """ frame_type, channel, size = unpack('>BHI', self._read(7)) payload = self._read(size) ch = ord(self._read(1)) if ch == 206: # '\xce' return frame_type, channel, payload else: raise Exception('Framing Error, received 0x%02x while expecting 0xce' % ch) def write_frame(self, frame_type, channel, payload): """ Write out an AMQP frame. 
""" size = len(payload) self._write(pack('>BHI%dsB' % size, frame_type, channel, size, payload, 0xce)) class SSLTransport(_AbstractTransport): """ Transport that works over SSL """ def __init__(self, host, connect_timeout, ssl): if isinstance(ssl, dict): self.sslopts = ssl self.sslobj = None super(SSLTransport, self).__init__(host, connect_timeout) def _setup_transport(self): """ Wrap the socket in an SSL object, either the new Python 2.6 version, or the older Python 2.5 and lower version. """ if HAVE_PY26_SSL: if hasattr(self, 'sslopts'): self.sslobj = ssl.wrap_socket(self.sock, **self.sslopts) else: self.sslobj = ssl.wrap_socket(self.sock) self.sslobj.do_handshake() else: self.sslobj = socket.ssl(self.sock) def _shutdown_transport(self): """ Unwrap a Python 2.6 SSL socket, so we can call shutdown() """ if HAVE_PY26_SSL and (self.sslobj is not None): self.sock = self.sslobj.unwrap() self.sslobj = None def _read(self, n): """ It seems that SSL Objects read() method may not supply as much as you're asking for, at least with extremely large messages. somewhere > 16K - found this in the test_channel.py test_large unittest. """ result = self.sslobj.read(n) while len(result) < n: s = self.sslobj.read(n - len(result)) if not s: raise IOError('Socket closed') result += s return result def _write(self, s): """ Write a string out to the SSL socket fully. """ while s: n = self.sslobj.write(s) if not n: raise IOError('Socket closed') s = s[n:] class TCPTransport(_AbstractTransport): """ Transport that deals directly with TCP socket. """ def _setup_transport(self): """ Setup to _write() directly to the socket, and do our own buffered reads. """ self._write = self.sock.sendall self._read_buffer = bytes() def _read(self, n): """ Read exactly n bytes from the socket """ while len(self._read_buffer) < n: s = self.sock.recv(65536) if not s: raise IOError('Socket closed') self._read_buffer += s result = self._read_buffer[:n] self._read_buffer = self._read_buffer[n:] return result def create_transport(host, connect_timeout, ssl=False): """ Given a few parameters from the Connection constructor, select and create a subclass of _AbstractTransport. """ if ssl: return SSLTransport(host, connect_timeout, ssl) else: return TCPTransport(host, connect_timeout)
mzdaniel/oh-mainline
vendor/packages/amqplib/amqplib/client_0_8/transport.py
Python
agpl-3.0
7,349
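read_frame() and write_frame() above fix the AMQP 0-8 wire layout: a '>BHI' header (frame type, channel, payload size), the payload itself, and a 0xCE end octet. A self-contained sketch that round-trips one frame through the same struct formats:

from struct import pack, unpack

frame_type, channel, payload = 1, 0, b'hello'

# Same format string as write_frame().
raw = pack('>BHI%dsB' % len(payload),
           frame_type, channel, len(payload), payload, 0xce)

# Same decoding steps as read_frame().
ftype, chan, size = unpack('>BHI', raw[:7])
body = raw[7:7 + size]
end = unpack('>B', raw[7 + size:8 + size])[0]

assert (ftype, chan, body, end) == (frame_type, channel, payload, 0xce)
print('frame round-trip ok (%d payload bytes)' % size)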
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # # Copyright (c) 2013-2015 Noviat nv/sa (www.noviat.com). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from . import account from . import res_partner from . import ir_actions
damdam-s/account-financial-tools
account_move_line_search_extension/__init__.py
Python
agpl-3.0
1,056
# -*- coding: utf-8 -*-
from odoo.tests.common import HttpCase
from odoo.exceptions import ValidationError


class AccountingTestCase(HttpCase):
    """ This class extends the base HttpCase, in order to test the
    accounting with localization setups. It is configured to run the tests after
    the installation of all modules, and will SKIP TESTS if it cannot find an already
    configured accounting (which means no localization module has been installed).
    """

    post_install = True
    at_install = False

    def setUp(self):
        super(AccountingTestCase, self).setUp()
        domain = [('company_id', '=', self.env.ref('base.main_company').id)]
        if not self.env['account.account'].search_count(domain):
            self.skipTest("No Chart of account found")

    def check_complete_move(self, move, theorical_lines):
        for aml in move.line_ids:
            line = (aml.name, round(aml.debit, 2), round(aml.credit, 2))
            if line in theorical_lines:
                theorical_lines.remove(line)
            else:
                raise ValidationError('Unexpected journal item. (label: %s, debit: %s, credit: %s)' % (aml.name, round(aml.debit, 2), round(aml.credit, 2)))
        if theorical_lines:
            raise ValidationError('Remaining theoretical line (not found): %s' % ([(aml[0], aml[1], aml[2]) for aml in theorical_lines]))
        return True

    def ensure_account_property(self, property_name):
        '''Ensure the ir.property targeting an account.account passed as parameter
        exists. In case it's not: create it with a random account. This is useful
        when testing with partially defined localization (missing stock properties
        for example)

        :param property_name: The name of the property.
        '''
        company_id = self.env.user.company_id
        field_id = self.env['ir.model.fields'].search(
            [('model', '=', 'product.template'), ('name', '=', property_name)], limit=1)
        property_id = self.env['ir.property'].search([
            ('company_id', '=', company_id.id),
            ('name', '=', property_name),
            ('res_id', '=', None),
            ('fields_id', '=', field_id.id)], limit=1)
        account_id = self.env['account.account'].search([('company_id', '=', company_id.id)], limit=1)
        value_reference = 'account.account,%d' % account_id.id
        if property_id and not property_id.value_reference:
            property_id.value_reference = value_reference
        else:
            self.env['ir.property'].create({
                'name': property_name,
                'company_id': company_id.id,
                'fields_id': field_id.id,
                'value_reference': value_reference,
            })
Aravinthu/odoo
addons/account/tests/account_test_classes.py
Python
agpl-3.0
2,749
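A hypothetical usage of the ensure_account_property() helper above in a concrete test; the class name is invented, and the property name 'property_account_expense_id' is illustrative, assumed to exist as a field on product.template in the installed modules:

class TestVendorBillPosting(AccountingTestCase):

    def test_expense_property_available(self):
        # Guarantee the ir.property exists even on a partially defined
        # localization, so account lookups further down don't fail.
        self.ensure_account_property('property_account_expense_id')
        # ... the actual assertions on journal entries would follow here ...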
from sentry.testutils.cases import RuleTestCase from sentry.rules.conditions.first_seen_event import FirstSeenEventCondition class FirstSeenEventConditionTest(RuleTestCase): rule_cls = FirstSeenEventCondition def test_applies_correctly(self): rule = self.get_rule() self.assertPasses(rule, self.event, is_new=True) self.assertDoesNotPass(rule, self.event, is_new=False)
felixbuenemann/sentry
tests/sentry/rules/conditions/test_first_seen_event.py
Python
bsd-3-clause
407
""" Contains CheesePreprocessor """ #----------------------------------------------------------------------------- # Copyright (c) 2013, the IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- from ...preprocessors.base import Preprocessor #----------------------------------------------------------------------------- # Classes #----------------------------------------------------------------------------- class CheesePreprocessor(Preprocessor): """ Adds a cheese tag to the resources object """ def __init__(self, **kw): """ Public constructor """ super(CheesePreprocessor, self).__init__(**kw) def preprocess(self, nb, resources): """ Sphinx preprocessing to apply on each notebook. Parameters ---------- nb : NotebookNode Notebook being converted resources : dictionary Additional resources used in the conversion process. Allows preprocessors to pass variables into the Jinja engine. """ resources['cheese'] = 'real' return nb, resources
unnikrishnankgs/va
venv/lib/python3.5/site-packages/nbconvert/exporters/tests/cheese.py
Python
bsd-2-clause
1,485
"""generated file, don't modify or your data will be lost""" try: __import__('pkg_resources').declare_namespace(__name__) except ImportError: pass
GbalsaC/bitnamiP
venv/lib/python2.7/site-packages/logilab/__init__.py
Python
agpl-3.0
155
# -*- coding: utf-8 -*- # # Copyright (C) 2013-2017 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. # from __future__ import absolute_import import os import re import sys try: import ssl except ImportError: # pragma: no cover ssl = None if sys.version_info[0] < 3: # pragma: no cover from StringIO import StringIO string_types = basestring, text_type = unicode from types import FileType as file_type import __builtin__ as builtins import ConfigParser as configparser from ._backport import shutil from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, pathname2url, ContentTooShortError, splittype) def quote(s): if isinstance(s, unicode): s = s.encode('utf-8') return _quote(s) import urllib2 from urllib2 import (Request, urlopen, URLError, HTTPError, HTTPBasicAuthHandler, HTTPPasswordMgr, HTTPHandler, HTTPRedirectHandler, build_opener) if ssl: from urllib2 import HTTPSHandler import httplib import xmlrpclib import Queue as queue from HTMLParser import HTMLParser import htmlentitydefs raw_input = raw_input from itertools import ifilter as filter from itertools import ifilterfalse as filterfalse _userprog = None def splituser(host): """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" global _userprog if _userprog is None: import re _userprog = re.compile('^(.*)@(.*)$') match = _userprog.match(host) if match: return match.group(1, 2) return None, host else: # pragma: no cover from io import StringIO string_types = str, text_type = str from io import TextIOWrapper as file_type import builtins import configparser import shutil from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote, unquote, urlsplit, urlunsplit, splittype) from urllib.request import (urlopen, urlretrieve, Request, url2pathname, pathname2url, HTTPBasicAuthHandler, HTTPPasswordMgr, HTTPHandler, HTTPRedirectHandler, build_opener) if ssl: from urllib.request import HTTPSHandler from urllib.error import HTTPError, URLError, ContentTooShortError import http.client as httplib import urllib.request as urllib2 import xmlrpc.client as xmlrpclib import queue from html.parser import HTMLParser import html.entities as htmlentitydefs raw_input = input from itertools import filterfalse filter = filter try: from ssl import match_hostname, CertificateError except ImportError: # pragma: no cover class CertificateError(ValueError): pass def _dnsname_match(dn, hostname, max_wildcards=1): """Matching according to RFC 6125, section 6.4.3 http://tools.ietf.org/html/rfc6125#section-6.4.3 """ pats = [] if not dn: return False parts = dn.split('.') leftmost, remainder = parts[0], parts[1:] wildcards = leftmost.count('*') if wildcards > max_wildcards: # Issue #17980: avoid denials of service by refusing more # than one wildcard per fragment. A survey of established # policy among SSL implementations showed it to be a # reasonable choice. raise CertificateError( "too many wildcards in certificate DNS name: " + repr(dn)) # speed up common case w/o wildcards if not wildcards: return dn.lower() == hostname.lower() # RFC 6125, section 6.4.3, subitem 1. # The client SHOULD NOT attempt to match a presented identifier in which # the wildcard character comprises a label other than the left-most label. if leftmost == '*': # When '*' is a fragment by itself, it matches a non-empty dotless # fragment. 
pats.append('[^.]+') elif leftmost.startswith('xn--') or hostname.startswith('xn--'): # RFC 6125, section 6.4.3, subitem 3. # The client SHOULD NOT attempt to match a presented identifier # where the wildcard character is embedded within an A-label or # U-label of an internationalized domain name. pats.append(re.escape(leftmost)) else: # Otherwise, '*' matches any dotless string, e.g. www* pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) # add the remaining fragments, ignore any wildcards for frag in remainder: pats.append(re.escape(frag)) pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) return pat.match(hostname) def match_hostname(cert, hostname): """Verify that *cert* (in decoded format as returned by SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 rules are followed, but IP addresses are not accepted for *hostname*. CertificateError is raised on failure. On success, the function returns nothing. """ if not cert: raise ValueError("empty or no certificate, match_hostname needs a " "SSL socket or SSL context with either " "CERT_OPTIONAL or CERT_REQUIRED") dnsnames = [] san = cert.get('subjectAltName', ()) for key, value in san: if key == 'DNS': if _dnsname_match(value, hostname): return dnsnames.append(value) if not dnsnames: # The subject is only checked when there is no dNSName entry # in subjectAltName for sub in cert.get('subject', ()): for key, value in sub: # XXX according to RFC 2818, the most specific Common Name # must be used. if key == 'commonName': if _dnsname_match(value, hostname): return dnsnames.append(value) if len(dnsnames) > 1: raise CertificateError("hostname %r " "doesn't match either of %s" % (hostname, ', '.join(map(repr, dnsnames)))) elif len(dnsnames) == 1: raise CertificateError("hostname %r " "doesn't match %r" % (hostname, dnsnames[0])) else: raise CertificateError("no appropriate commonName or " "subjectAltName fields were found") try: from types import SimpleNamespace as Container except ImportError: # pragma: no cover class Container(object): """ A generic container for when multiple values need to be returned """ def __init__(self, **kwargs): self.__dict__.update(kwargs) try: from shutil import which except ImportError: # pragma: no cover # Implementation from Python 3.3 def which(cmd, mode=os.F_OK | os.X_OK, path=None): """Given a command, mode, and a PATH string, return the path which conforms to the given mode on the PATH, or None if there is no such file. `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result of os.environ.get("PATH"), or can be overridden with a custom search path. """ # Check that a given file can be accessed with the correct mode. # Additionally check that `file` is not a directory, as on Windows # directories pass the os.access check. def _access_check(fn, mode): return (os.path.exists(fn) and os.access(fn, mode) and not os.path.isdir(fn)) # If we're given a path with a directory part, look it up directly rather # than referring to PATH directories. This includes checking relative to the # current directory, e.g. ./script if os.path.dirname(cmd): if _access_check(cmd, mode): return cmd return None if path is None: path = os.environ.get("PATH", os.defpath) if not path: return None path = path.split(os.pathsep) if sys.platform == "win32": # The current directory takes precedence on Windows. if not os.curdir in path: path.insert(0, os.curdir) # PATHEXT is necessary to check on Windows. 
pathext = os.environ.get("PATHEXT", "").split(os.pathsep) # See if the given file matches any of the expected path extensions. # This will allow us to short circuit when given "python.exe". # If it does match, only test that one, otherwise we have to try # others. if any(cmd.lower().endswith(ext.lower()) for ext in pathext): files = [cmd] else: files = [cmd + ext for ext in pathext] else: # On other platforms you don't have things like PATHEXT to tell you # what file suffixes are executable, so just pass on cmd as-is. files = [cmd] seen = set() for dir in path: normdir = os.path.normcase(dir) if not normdir in seen: seen.add(normdir) for thefile in files: name = os.path.join(dir, thefile) if _access_check(name, mode): return name return None # ZipFile is a context manager in 2.7, but not in 2.6 from zipfile import ZipFile as BaseZipFile if hasattr(BaseZipFile, '__enter__'): # pragma: no cover ZipFile = BaseZipFile else: # pragma: no cover from zipfile import ZipExtFile as BaseZipExtFile class ZipExtFile(BaseZipExtFile): def __init__(self, base): self.__dict__.update(base.__dict__) def __enter__(self): return self def __exit__(self, *exc_info): self.close() # return None, so if an exception occurred, it will propagate class ZipFile(BaseZipFile): def __enter__(self): return self def __exit__(self, *exc_info): self.close() # return None, so if an exception occurred, it will propagate def open(self, *args, **kwargs): base = BaseZipFile.open(self, *args, **kwargs) return ZipExtFile(base) try: from platform import python_implementation except ImportError: # pragma: no cover def python_implementation(): """Return a string identifying the Python implementation.""" if 'PyPy' in sys.version: return 'PyPy' if os.name == 'java': return 'Jython' if sys.version.startswith('IronPython'): return 'IronPython' return 'CPython' try: import sysconfig except ImportError: # pragma: no cover from ._backport import sysconfig try: callable = callable except NameError: # pragma: no cover from collections.abc import Callable def callable(obj): return isinstance(obj, Callable) try: fsencode = os.fsencode fsdecode = os.fsdecode except AttributeError: # pragma: no cover # Issue #99: on some systems (e.g. containerised), # sys.getfilesystemencoding() returns None, and we need a real value, # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and # sys.getfilesystemencoding(): the return value is "the user’s preference # according to the result of nl_langinfo(CODESET), or None if the # nl_langinfo(CODESET) failed." _fsencoding = sys.getfilesystemencoding() or 'utf-8' if _fsencoding == 'mbcs': _fserrors = 'strict' else: _fserrors = 'surrogateescape' def fsencode(filename): if isinstance(filename, bytes): return filename elif isinstance(filename, text_type): return filename.encode(_fsencoding, _fserrors) else: raise TypeError("expect bytes or str, not %s" % type(filename).__name__) def fsdecode(filename): if isinstance(filename, text_type): return filename elif isinstance(filename, bytes): return filename.decode(_fsencoding, _fserrors) else: raise TypeError("expect bytes or str, not %s" % type(filename).__name__) try: from tokenize import detect_encoding except ImportError: # pragma: no cover from codecs import BOM_UTF8, lookup import re cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)") def _get_normal_name(orig_enc): """Imitates get_normal_name in tokenizer.c.""" # Only care about the first 12 characters. 
enc = orig_enc[:12].lower().replace("_", "-") if enc == "utf-8" or enc.startswith("utf-8-"): return "utf-8" if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): return "iso-8859-1" return orig_enc def detect_encoding(readline): """ The detect_encoding() function is used to detect the encoding that should be used to decode a Python source file. It requires one argument, readline, in the same way as the tokenize() generator. It will call readline a maximum of twice, and return the encoding used (as a string) and a list of any lines (left as bytes) it has read in. It detects the encoding from the presence of a utf-8 bom or an encoding cookie as specified in pep-0263. If both a bom and a cookie are present, but disagree, a SyntaxError will be raised. If the encoding cookie is an invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, 'utf-8-sig' is returned. If no encoding is specified, then the default of 'utf-8' will be returned. """ try: filename = readline.__self__.name except AttributeError: filename = None bom_found = False encoding = None default = 'utf-8' def read_or_stop(): try: return readline() except StopIteration: return b'' def find_cookie(line): try: # Decode as UTF-8. Either the line is an encoding declaration, # in which case it should be pure ASCII, or it must be UTF-8 # per default encoding. line_string = line.decode('utf-8') except UnicodeDecodeError: msg = "invalid or missing encoding declaration" if filename is not None: msg = '{} for {!r}'.format(msg, filename) raise SyntaxError(msg) matches = cookie_re.findall(line_string) if not matches: return None encoding = _get_normal_name(matches[0]) try: codec = lookup(encoding) except LookupError: # This behaviour mimics the Python interpreter if filename is None: msg = "unknown encoding: " + encoding else: msg = "unknown encoding for {!r}: {}".format(filename, encoding) raise SyntaxError(msg) if bom_found: if codec.name != 'utf-8': # This behaviour mimics the Python interpreter if filename is None: msg = 'encoding problem: utf-8' else: msg = 'encoding problem for {!r}: utf-8'.format(filename) raise SyntaxError(msg) encoding += '-sig' return encoding first = read_or_stop() if first.startswith(BOM_UTF8): bom_found = True first = first[3:] default = 'utf-8-sig' if not first: return default, [] encoding = find_cookie(first) if encoding: return encoding, [first] second = read_or_stop() if not second: return default, [first] encoding = find_cookie(second) if encoding: return encoding, [first, second] return default, [first, second] # For converting & <-> &amp; etc. 
try: from html import escape except ImportError: from cgi import escape if sys.version_info[:2] < (3, 4): unescape = HTMLParser().unescape else: from html import unescape try: from collections import ChainMap except ImportError: # pragma: no cover from collections import MutableMapping try: from reprlib import recursive_repr as _recursive_repr except ImportError: def _recursive_repr(fillvalue='...'): ''' Decorator to make a repr function return fillvalue for a recursive call ''' def decorating_function(user_function): repr_running = set() def wrapper(self): key = id(self), get_ident() if key in repr_running: return fillvalue repr_running.add(key) try: result = user_function(self) finally: repr_running.discard(key) return result # Can't use functools.wraps() here because of bootstrap issues wrapper.__module__ = getattr(user_function, '__module__') wrapper.__doc__ = getattr(user_function, '__doc__') wrapper.__name__ = getattr(user_function, '__name__') wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) return wrapper return decorating_function class ChainMap(MutableMapping): ''' A ChainMap groups multiple dicts (or other mappings) together to create a single, updateable view. The underlying mappings are stored in a list. That list is public and can accessed or updated using the *maps* attribute. There is no other state. Lookups search the underlying mappings successively until a key is found. In contrast, writes, updates, and deletions only operate on the first mapping. ''' def __init__(self, *maps): '''Initialize a ChainMap by setting *maps* to the given mappings. If no mappings are provided, a single empty dictionary is used. ''' self.maps = list(maps) or [{}] # always at least one map def __missing__(self, key): raise KeyError(key) def __getitem__(self, key): for mapping in self.maps: try: return mapping[key] # can't use 'key in mapping' with defaultdict except KeyError: pass return self.__missing__(key) # support subclasses that define __missing__ def get(self, key, default=None): return self[key] if key in self else default def __len__(self): return len(set().union(*self.maps)) # reuses stored hash values if possible def __iter__(self): return iter(set().union(*self.maps)) def __contains__(self, key): return any(key in m for m in self.maps) def __bool__(self): return any(self.maps) @_recursive_repr() def __repr__(self): return '{0.__class__.__name__}({1})'.format( self, ', '.join(map(repr, self.maps))) @classmethod def fromkeys(cls, iterable, *args): 'Create a ChainMap with a single dict created from the iterable.' return cls(dict.fromkeys(iterable, *args)) def copy(self): 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' return self.__class__(self.maps[0].copy(), *self.maps[1:]) __copy__ = copy def new_child(self): # like Django's Context.push() 'New ChainMap with a new dict followed by all previous maps.' return self.__class__({}, *self.maps) @property def parents(self): # like Django's Context.pop() 'New ChainMap from maps[1:].' return self.__class__(*self.maps[1:]) def __setitem__(self, key, value): self.maps[0][key] = value def __delitem__(self, key): try: del self.maps[0][key] except KeyError: raise KeyError('Key not found in the first mapping: {!r}'.format(key)) def popitem(self): 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' 
try: return self.maps[0].popitem() except KeyError: raise KeyError('No keys found in the first mapping.') def pop(self, key, *args): 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' try: return self.maps[0].pop(key, *args) except KeyError: raise KeyError('Key not found in the first mapping: {!r}'.format(key)) def clear(self): 'Clear maps[0], leaving maps[1:] intact.' self.maps[0].clear() try: from importlib.util import cache_from_source # Python >= 3.4 except ImportError: # pragma: no cover try: from imp import cache_from_source except ImportError: # pragma: no cover def cache_from_source(path, debug_override=None): assert path.endswith('.py') if debug_override is None: debug_override = __debug__ if debug_override: suffix = 'c' else: suffix = 'o' return path + suffix try: from collections import OrderedDict except ImportError: # pragma: no cover ## {{{ http://code.activestate.com/recipes/576693/ (r9) # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. # Passes Python2.7's test suite and incorporates all the latest updates. try: from thread import get_ident as _get_ident except ImportError: from dummy_thread import get_ident as _get_ident try: from _abcoll import KeysView, ValuesView, ItemsView except ImportError: pass class OrderedDict(dict): 'Dictionary that remembers insertion order' # An inherited dict maps keys to values. # The inherited dict provides __getitem__, __len__, __contains__, and get. # The remaining methods are order-aware. # Big-O running times for all methods are the same as for regular dictionaries. # The internal self.__map dictionary maps keys to links in a doubly linked list. # The circular doubly linked list starts and ends with a sentinel element. # The sentinel element never gets deleted (this simplifies the algorithm). # Each link is stored as a list of length three: [PREV, NEXT, KEY]. def __init__(self, *args, **kwds): '''Initialize an ordered dictionary. Signature is the same as for regular dictionaries, but keyword arguments are not recommended because their insertion order is arbitrary. ''' if len(args) > 1: raise TypeError('expected at most 1 arguments, got %d' % len(args)) try: self.__root except AttributeError: self.__root = root = [] # sentinel node root[:] = [root, root, None] self.__map = {} self.__update(*args, **kwds) def __setitem__(self, key, value, dict_setitem=dict.__setitem__): 'od.__setitem__(i, y) <==> od[i]=y' # Setting a new item creates a new link which goes at the end of the linked # list, and the inherited dictionary is updated with the new key/value pair. if key not in self: root = self.__root last = root[0] last[1] = root[0] = self.__map[key] = [last, root, key] dict_setitem(self, key, value) def __delitem__(self, key, dict_delitem=dict.__delitem__): 'od.__delitem__(y) <==> del od[y]' # Deleting an existing item uses self.__map to find the link which is # then removed by updating the links in the predecessor and successor nodes. dict_delitem(self, key) link_prev, link_next, key = self.__map.pop(key) link_prev[1] = link_next link_next[0] = link_prev def __iter__(self): 'od.__iter__() <==> iter(od)' root = self.__root curr = root[1] while curr is not root: yield curr[2] curr = curr[1] def __reversed__(self): 'od.__reversed__() <==> reversed(od)' root = self.__root curr = root[0] while curr is not root: yield curr[2] curr = curr[0] def clear(self): 'od.clear() -> None. Remove all items from od.' 
try: for node in self.__map.itervalues(): del node[:] root = self.__root root[:] = [root, root, None] self.__map.clear() except AttributeError: pass dict.clear(self) def popitem(self, last=True): '''od.popitem() -> (k, v), return and remove a (key, value) pair. Pairs are returned in LIFO order if last is true or FIFO order if false. ''' if not self: raise KeyError('dictionary is empty') root = self.__root if last: link = root[0] link_prev = link[0] link_prev[1] = root root[0] = link_prev else: link = root[1] link_next = link[1] root[1] = link_next link_next[0] = root key = link[2] del self.__map[key] value = dict.pop(self, key) return key, value # -- the following methods do not depend on the internal structure -- def keys(self): 'od.keys() -> list of keys in od' return list(self) def values(self): 'od.values() -> list of values in od' return [self[key] for key in self] def items(self): 'od.items() -> list of (key, value) pairs in od' return [(key, self[key]) for key in self] def iterkeys(self): 'od.iterkeys() -> an iterator over the keys in od' return iter(self) def itervalues(self): 'od.itervalues -> an iterator over the values in od' for k in self: yield self[k] def iteritems(self): 'od.iteritems -> an iterator over the (key, value) items in od' for k in self: yield (k, self[k]) def update(*args, **kwds): '''od.update(E, **F) -> None. Update od from dict/iterable E and F. If E is a dict instance, does: for k in E: od[k] = E[k] If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] Or if E is an iterable of items, does: for k, v in E: od[k] = v In either case, this is followed by: for k, v in F.items(): od[k] = v ''' if len(args) > 2: raise TypeError('update() takes at most 2 positional ' 'arguments (%d given)' % (len(args),)) elif not args: raise TypeError('update() takes at least 1 argument (0 given)') self = args[0] # Make progressively weaker assumptions about "other" other = () if len(args) == 2: other = args[1] if isinstance(other, dict): for key in other: self[key] = other[key] elif hasattr(other, 'keys'): for key in other.keys(): self[key] = other[key] else: for key, value in other: self[key] = value for key, value in kwds.items(): self[key] = value __update = update # let subclasses override update without breaking __init__ __marker = object() def pop(self, key, default=__marker): '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. ''' if key in self: result = self[key] del self[key] return result if default is self.__marker: raise KeyError(key) return default def setdefault(self, key, default=None): 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' if key in self: return self[key] self[key] = default return default def __repr__(self, _repr_running=None): 'od.__repr__() <==> repr(od)' if not _repr_running: _repr_running = {} call_key = id(self), _get_ident() if call_key in _repr_running: return '...' 
_repr_running[call_key] = 1 try: if not self: return '%s()' % (self.__class__.__name__,) return '%s(%r)' % (self.__class__.__name__, self.items()) finally: del _repr_running[call_key] def __reduce__(self): 'Return state information for pickling' items = [[k, self[k]] for k in self] inst_dict = vars(self).copy() for k in vars(OrderedDict()): inst_dict.pop(k, None) if inst_dict: return (self.__class__, (items,), inst_dict) return self.__class__, (items,) def copy(self): 'od.copy() -> a shallow copy of od' return self.__class__(self) @classmethod def fromkeys(cls, iterable, value=None): '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S and values equal to v (which defaults to None). ''' d = cls() for key in iterable: d[key] = value return d def __eq__(self, other): '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive while comparison to a regular mapping is order-insensitive. ''' if isinstance(other, OrderedDict): return len(self)==len(other) and self.items() == other.items() return dict.__eq__(self, other) def __ne__(self, other): return not self == other # -- the following methods are only used in Python 2.7 -- def viewkeys(self): "od.viewkeys() -> a set-like object providing a view on od's keys" return KeysView(self) def viewvalues(self): "od.viewvalues() -> an object providing a view on od's values" return ValuesView(self) def viewitems(self): "od.viewitems() -> a set-like object providing a view on od's items" return ItemsView(self) try: from logging.config import BaseConfigurator, valid_ident except ImportError: # pragma: no cover IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) def valid_ident(s): m = IDENTIFIER.match(s) if not m: raise ValueError('Not a valid Python identifier: %r' % s) return True # The ConvertingXXX classes are wrappers around standard Python containers, # and they serve to convert any suitable values in the container. The # conversion converts base dicts, lists and tuples to their wrapped # equivalents, whereas strings which match a conversion format are converted # appropriately. # # Each wrapper should have a configurator attribute holding the actual # configurator to use for conversion. 
class ConvertingDict(dict): """A converting dictionary wrapper.""" def __getitem__(self, key): value = dict.__getitem__(self, key) result = self.configurator.convert(value) #If the converted value is different, save for next time if value is not result: self[key] = result if type(result) in (ConvertingDict, ConvertingList, ConvertingTuple): result.parent = self result.key = key return result def get(self, key, default=None): value = dict.get(self, key, default) result = self.configurator.convert(value) #If the converted value is different, save for next time if value is not result: self[key] = result if type(result) in (ConvertingDict, ConvertingList, ConvertingTuple): result.parent = self result.key = key return result def pop(self, key, default=None): value = dict.pop(self, key, default) result = self.configurator.convert(value) if value is not result: if type(result) in (ConvertingDict, ConvertingList, ConvertingTuple): result.parent = self result.key = key return result class ConvertingList(list): """A converting list wrapper.""" def __getitem__(self, key): value = list.__getitem__(self, key) result = self.configurator.convert(value) #If the converted value is different, save for next time if value is not result: self[key] = result if type(result) in (ConvertingDict, ConvertingList, ConvertingTuple): result.parent = self result.key = key return result def pop(self, idx=-1): value = list.pop(self, idx) result = self.configurator.convert(value) if value is not result: if type(result) in (ConvertingDict, ConvertingList, ConvertingTuple): result.parent = self return result class ConvertingTuple(tuple): """A converting tuple wrapper.""" def __getitem__(self, key): value = tuple.__getitem__(self, key) result = self.configurator.convert(value) if value is not result: if type(result) in (ConvertingDict, ConvertingList, ConvertingTuple): result.parent = self result.key = key return result class BaseConfigurator(object): """ The configurator base class which defines some useful defaults. """ CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$') WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*') DIGIT_PATTERN = re.compile(r'^\d+$') value_converters = { 'ext' : 'ext_convert', 'cfg' : 'cfg_convert', } # We might want to use a different one, e.g. importlib importer = staticmethod(__import__) def __init__(self, config): self.config = ConvertingDict(config) self.config.configurator = self def resolve(self, s): """ Resolve strings to objects using standard import and attribute syntax. """ name = s.split('.') used = name.pop(0) try: found = self.importer(used) for frag in name: used += '.' 
+ frag try: found = getattr(found, frag) except AttributeError: self.importer(used) found = getattr(found, frag) return found except ImportError: e, tb = sys.exc_info()[1:] v = ValueError('Cannot resolve %r: %s' % (s, e)) v.__cause__, v.__traceback__ = e, tb raise v def ext_convert(self, value): """Default converter for the ext:// protocol.""" return self.resolve(value) def cfg_convert(self, value): """Default converter for the cfg:// protocol.""" rest = value m = self.WORD_PATTERN.match(rest) if m is None: raise ValueError("Unable to convert %r" % value) else: rest = rest[m.end():] d = self.config[m.groups()[0]] #print d, rest while rest: m = self.DOT_PATTERN.match(rest) if m: d = d[m.groups()[0]] else: m = self.INDEX_PATTERN.match(rest) if m: idx = m.groups()[0] if not self.DIGIT_PATTERN.match(idx): d = d[idx] else: try: n = int(idx) # try as number first (most likely) d = d[n] except TypeError: d = d[idx] if m: rest = rest[m.end():] else: raise ValueError('Unable to convert ' '%r at %r' % (value, rest)) #rest should be empty return d def convert(self, value): """ Convert values to an appropriate type. dicts, lists and tuples are replaced by their converting alternatives. Strings are checked to see if they have a conversion format and are converted if they do. """ if not isinstance(value, ConvertingDict) and isinstance(value, dict): value = ConvertingDict(value) value.configurator = self elif not isinstance(value, ConvertingList) and isinstance(value, list): value = ConvertingList(value) value.configurator = self elif not isinstance(value, ConvertingTuple) and\ isinstance(value, tuple): value = ConvertingTuple(value) value.configurator = self elif isinstance(value, string_types): m = self.CONVERT_PATTERN.match(value) if m: d = m.groupdict() prefix = d['prefix'] converter = self.value_converters.get(prefix, None) if converter: suffix = d['suffix'] converter = getattr(self, converter) value = converter(suffix) return value def configure_custom(self, config): """Configure an object with a user-supplied factory.""" c = config.pop('()') if not callable(c): c = self.resolve(c) props = config.pop('.', None) # Check for valid identifiers kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) result = c(**kwargs) if props: for name, value in props.items(): setattr(result, name, value) return result def as_tuple(self, value): """Utility function which converts lists to tuples.""" if isinstance(value, list): value = tuple(value) return value
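

# --- Illustrative usage (editor's sketch, not part of distlib) ---
# A minimal check of BaseConfigurator's two string-conversion protocols,
# using only names defined or imported in this module; the config dict
# below is invented for the demonstration.
if __name__ == '__main__':  # pragma: no cover
    _demo = BaseConfigurator({'handlers': {'console': {'level': 'DEBUG'}}})
    # cfg:// walks dotted (and [indexed]) paths through the config dict.
    assert _demo.convert('cfg://handlers.console.level') == 'DEBUG'
    # ext:// resolves a dotted import path to the named object.
    assert _demo.convert('ext://sys.path') is sys.path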
RalfBarkow/Zettelkasten
venv/lib/python3.9/site-packages/pip/_vendor/distlib/compat.py
Python
gpl-3.0
41,408
name1_0_1_0_0_4_0 = None name1_0_1_0_0_4_1 = None name1_0_1_0_0_4_2 = None name1_0_1_0_0_4_3 = None name1_0_1_0_0_4_4 = None
asedunov/intellij-community
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_0/_pkg1_0_1/_pkg1_0_1_0/_pkg1_0_1_0_0/_mod1_0_1_0_0_4.py
Python
apache-2.0
128
import datetime from decimal import Decimal import types import six def is_protected_type(obj): """Determine if the object instance is of a protected type. Objects of protected types are preserved as-is when passed to force_unicode(strings_only=True). """ return isinstance(obj, ( six.integer_types + (types.NoneType, datetime.datetime, datetime.date, datetime.time, float, Decimal)) ) def force_unicode(s, encoding='utf-8', strings_only=False, errors='strict'): """ Similar to smart_text, except that lazy instances are resolved to strings, rather than kept as lazy objects. If strings_only is True, don't convert (some) non-string-like objects. """ # Handle the common case first, saves 30-40% when s is an instance of # six.text_type. This function gets called often in that setting. if isinstance(s, six.text_type): return s if strings_only and is_protected_type(s): return s try: if not isinstance(s, six.string_types): if hasattr(s, '__unicode__'): s = s.__unicode__() else: if six.PY3: if isinstance(s, bytes): s = six.text_type(s, encoding, errors) else: s = six.text_type(s) else: s = six.text_type(bytes(s), encoding, errors) else: # Note: We use .decode() here, instead of six.text_type(s, # encoding, errors), so that if s is a SafeBytes, it ends up being # a SafeText at the end. s = s.decode(encoding, errors) except UnicodeDecodeError as e: if not isinstance(s, Exception): raise UnicodeDecodeError(*e.args) else: # If we get to here, the caller has passed in an Exception # subclass populated with non-ASCII bytestring data without a # working unicode method. Try to handle this without raising a # further exception by individually forcing the exception args # to unicode. s = ' '.join([force_unicode(arg, encoding, strings_only, errors) for arg in s]) return s
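

# --- Illustrative usage (a sketch, not part of bleach) ---
# force_unicode decodes byte strings, while strings_only=True lets protected
# types (None, numbers, dates) pass through untouched. The module targets
# Python 2 (it references types.NoneType), so this demo assumes Python 2.
if __name__ == '__main__':
    assert force_unicode(b'caf\xc3\xa9') == u'caf\xe9'
    assert force_unicode(None, strings_only=True) is None
    assert force_unicode(3.14, strings_only=True) == 3.14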
unnikrishnankgs/va
venv/lib/python3.5/site-packages/external/org_mozilla_bleach/bleach/encoding.py
Python
bsd-2-clause
2,277
# (c) Copyright 2014 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """Fake HP client exceptions to use when mocking HP clients.""" class UnsupportedVersion(Exception): """Unsupported version of the client.""" pass class ClientException(Exception): """The base exception class for these fake exceptions.""" _error_code = None _error_desc = None _error_ref = None _debug1 = None _debug2 = None def __init__(self, error=None): if error: if 'code' in error: self._error_code = error['code'] if 'desc' in error: self._error_desc = error['desc'] if 'ref' in error: self._error_ref = error['ref'] if 'debug1' in error: self._debug1 = error['debug1'] if 'debug2' in error: self._debug2 = error['debug2'] def get_code(self): return self._error_code def get_description(self): return self._error_desc def get_ref(self): return self._error_ref def __str__(self): formatted_string = self.message if self.http_status: formatted_string += " (HTTP %s)" % self.http_status if self._error_code: formatted_string += " %s" % self._error_code if self._error_desc: formatted_string += " - %s" % self._error_desc if self._error_ref: formatted_string += " - %s" % self._error_ref if self._debug1: formatted_string += " (1: '%s')" % self._debug1 if self._debug2: formatted_string += " (2: '%s')" % self._debug2 return formatted_string class HTTPConflict(Exception): http_status = 409 message = "Conflict" def __init__(self, error=None): if error and 'message' in error: self._error_desc = error['message'] def get_description(self): return self._error_desc class HTTPNotFound(Exception): http_status = 404 message = "Not found" class HTTPForbidden(ClientException): http_status = 403 message = "Forbidden" class HTTPBadRequest(Exception): http_status = 400 message = "Bad request" class HTTPServerError(Exception): http_status = 500 message = "Error" def __init__(self, error=None): if error and 'message' in error: self._error_desc = error['message'] def get_description(self): return self._error_desc
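

# --- Illustrative usage (a sketch, not part of the fakes) ---
# The fakes are driven from an error dict shaped like the payload the real
# HP client parses; the values used here are invented for the demonstration.
if __name__ == '__main__':
    conflict = HTTPConflict({'message': 'volume already exists'})
    assert conflict.http_status == 409
    assert conflict.get_description() == 'volume already exists'

    # HTTPForbidden inherits the full ClientException formatting machinery.
    forbidden = HTTPForbidden({'code': 99, 'desc': 'fake failure', 'ref': 'n/a'})
    assert forbidden.get_code() == 99
    assert str(forbidden).startswith("Forbidden (HTTP 403)")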
nikesh-mahalka/cinder
cinder/tests/unit/fake_hp_client_exceptions.py
Python
apache-2.0
3,077
# -*- coding: utf-8 -*-
# (c) 2018 Matt Martz <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type

from ansible.module_utils.six import PY3
from ansible.utils.unsafe_proxy import AnsibleUnsafe, AnsibleUnsafeBytes, AnsibleUnsafeText, wrap_var


def test_wrap_var_text():
    assert isinstance(wrap_var(u'foo'), AnsibleUnsafeText)


def test_wrap_var_bytes():
    assert isinstance(wrap_var(b'foo'), AnsibleUnsafeBytes)


def test_wrap_var_string():
    if PY3:
        assert isinstance(wrap_var('foo'), AnsibleUnsafeText)
    else:
        assert isinstance(wrap_var('foo'), AnsibleUnsafeBytes)


def test_wrap_var_dict():
    assert isinstance(wrap_var(dict(foo='bar')), dict)
    assert not isinstance(wrap_var(dict(foo='bar')), AnsibleUnsafe)
    assert isinstance(wrap_var(dict(foo=u'bar'))['foo'], AnsibleUnsafeText)


def test_wrap_var_dict_None():
    assert wrap_var(dict(foo=None))['foo'] is None
    assert not isinstance(wrap_var(dict(foo=None))['foo'], AnsibleUnsafe)


def test_wrap_var_list():
    assert isinstance(wrap_var(['foo']), list)
    assert not isinstance(wrap_var(['foo']), AnsibleUnsafe)
    assert isinstance(wrap_var([u'foo'])[0], AnsibleUnsafeText)


def test_wrap_var_list_None():
    assert wrap_var([None])[0] is None
    assert not isinstance(wrap_var([None])[0], AnsibleUnsafe)


def test_wrap_var_set():
    assert isinstance(wrap_var(set(['foo'])), set)
    assert not isinstance(wrap_var(set(['foo'])), AnsibleUnsafe)
    for item in wrap_var(set([u'foo'])):
        assert isinstance(item, AnsibleUnsafeText)


def test_wrap_var_set_None():
    for item in wrap_var(set([None])):
        assert item is None
        assert not isinstance(item, AnsibleUnsafe)


def test_wrap_var_tuple():
    assert isinstance(wrap_var(('foo',)), tuple)
    assert not isinstance(wrap_var(('foo',)), AnsibleUnsafe)
    assert isinstance(wrap_var(('foo',))[0], AnsibleUnsafe)


def test_wrap_var_tuple_None():
    assert wrap_var((None,))[0] is None
    assert not isinstance(wrap_var((None,))[0], AnsibleUnsafe)


def test_wrap_var_None():
    assert wrap_var(None) is None
    assert not isinstance(wrap_var(None), AnsibleUnsafe)


def test_wrap_var_unsafe_text():
    assert isinstance(wrap_var(AnsibleUnsafeText(u'foo')), AnsibleUnsafeText)


def test_wrap_var_unsafe_bytes():
    assert isinstance(wrap_var(AnsibleUnsafeBytes(b'foo')), AnsibleUnsafeBytes)


def test_wrap_var_no_ref():
    thing = {
        'foo': {
            'bar': 'baz'
        },
        'bar': ['baz', 'qux'],
        'baz': ('qux',),
        'none': None,
        'text': 'text',
    }
    wrapped_thing = wrap_var(thing)
    # wrap_var must return new containers and newly wrapped strings, never
    # aliases of the input.
    assert thing is not wrapped_thing
    assert thing['foo'] is not wrapped_thing['foo']
    assert thing['bar'][0] is not wrapped_thing['bar'][0]
    assert thing['baz'][0] is not wrapped_thing['baz'][0]
    # None passes through unwrapped, so identity is preserved here.
    assert thing['none'] is wrapped_thing['none']
    assert thing['text'] is not wrapped_thing['text']


def test_AnsibleUnsafeText():
    assert isinstance(AnsibleUnsafeText(u'foo'), AnsibleUnsafe)


def test_AnsibleUnsafeBytes():
    assert isinstance(AnsibleUnsafeBytes(b'foo'), AnsibleUnsafe)
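

def test_wrap_var_nested_mixed():
    # A sketch added for illustration, mirroring the tests above: strings
    # anywhere in a structure are wrapped so templating treats them as
    # literals, while non-string values (here an int) pass through unwrapped.
    data = wrap_var({'cmd': u'{{ lookup("pipe", "id") }}', 'count': 3})
    assert isinstance(data['cmd'], AnsibleUnsafeText)
    assert data['count'] == 3
    assert not isinstance(data['count'], AnsibleUnsafe)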
azaghal/ansible
test/units/utils/test_unsafe_proxy.py
Python
gpl-3.0
3,234
#!/usr/bin/env python
"""Sample Input Reader for map job."""

import random
import string
import time

from mapreduce import context
from mapreduce import errors
from mapreduce import operation
from mapreduce.api import map_job

# pylint: disable=invalid-name

# Counter name for number of bytes read.
COUNTER_IO_READ_BYTES = "io-read-bytes"

# Counter name for milliseconds spent reading data.
COUNTER_IO_READ_MSEC = "io-read-msec"


class SampleInputReader(map_job.InputReader):
  """A sample InputReader that generates random strings as output.

  Primary usage is as an example InputReader that can be used for test
  purposes.
  """

  # Total number of entries this reader should generate.
  COUNT = "count"
  # Length of the generated strings.
  STRING_LENGTH = "string_length"
  # The default string length if one is not specified.
  _DEFAULT_STRING_LENGTH = 10

  def __init__(self, count, string_length):
    """Initialize input reader.

    Args:
      count: number of entries this shard should generate.
      string_length: the length of generated random strings.
    """
    self._count = count
    self._string_length = string_length

  def __iter__(self):
    ctx = context.get()

    while self._count:
      self._count -= 1
      start_time = time.time()
      content = "".join(random.choice(string.ascii_lowercase)
                        for _ in range(self._string_length))
      if ctx:
        operation.counters.Increment(
            COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx)
        operation.counters.Increment(COUNTER_IO_READ_BYTES, len(content))(ctx)
      yield content

  @classmethod
  def from_json(cls, state):
    """Inherit docs."""
    return cls(state[cls.COUNT], state[cls.STRING_LENGTH])

  def to_json(self):
    """Inherit docs."""
    return {self.COUNT: self._count, self.STRING_LENGTH: self._string_length}

  @classmethod
  def split_input(cls, job_config):
    """Inherit docs."""
    params = job_config.input_reader_params
    count = params[cls.COUNT]
    string_length = params.get(cls.STRING_LENGTH, cls._DEFAULT_STRING_LENGTH)

    shard_count = job_config.shard_count
    count_per_shard = count // shard_count

    mr_input_readers = [
        cls(count_per_shard, string_length) for _ in range(shard_count)]

    left = count - count_per_shard * shard_count
    if left > 0:
      mr_input_readers.append(cls(left, string_length))

    return mr_input_readers

  @classmethod
  def validate(cls, job_config):
    """Inherit docs."""
    super(SampleInputReader, cls).validate(job_config)

    params = job_config.input_reader_params
    # Validate count.
    if cls.COUNT not in params:
      raise errors.BadReaderParamsError("Must specify %s" % cls.COUNT)
    if not isinstance(params[cls.COUNT], int):
      raise errors.BadReaderParamsError("%s should be an int but is %s" %
                                        (cls.COUNT, type(params[cls.COUNT])))
    if params[cls.COUNT] <= 0:
      raise errors.BadReaderParamsError("%s should be a positive int" %
                                        cls.COUNT)
    # Validate string length.
    if cls.STRING_LENGTH in params and not (
        isinstance(params[cls.STRING_LENGTH], int) and
        params[cls.STRING_LENGTH] > 0):
      raise errors.BadReaderParamsError("%s should be a positive int "
                                        "but is %s" %
                                        (cls.STRING_LENGTH,
                                         params[cls.STRING_LENGTH]))
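

# --- Illustrative sharding arithmetic (a sketch, not part of the library) ---
# split_input hands each of shard_count readers count // shard_count entries
# and appends one extra reader for the remainder. _FakeJobConfig is a
# hypothetical stand-in for the real map_job job config object.
if __name__ == '__main__':
  class _FakeJobConfig(object):
    input_reader_params = {SampleInputReader.COUNT: 10,
                           SampleInputReader.STRING_LENGTH: 4}
    shard_count = 3

  readers = SampleInputReader.split_input(_FakeJobConfig())
  # 10 entries over 3 shards: three readers of 3, plus one for the 1 left over.
  assert [r._count for r in readers] == [3, 3, 3, 1]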
sahiljain/catapult
third_party/mapreduce/mapreduce/api/map_job/sample_input_reader.py
Python
bsd-3-clause
3,468
#!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#

from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, NetworkThread
from test_framework.blocktools import create_coinbase, create_block
from test_framework.comptool import TestInstance, TestManager
from test_framework.script import CScript, OP_1NEGATE, OP_NOP2, OP_DROP
from binascii import hexlify, unhexlify
import cStringIO
import time

def cltv_invalidate(tx):
    '''Modify the signature in vin 0 of the tx to fail CLTV

    Prepends -1 CLTV DROP in the scriptSig itself.
    '''
    tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_NOP2, OP_DROP] +
                                  list(CScript(tx.vin[0].scriptSig)))

'''
This test is meant to exercise BIP65 (CHECKLOCKTIMEVERIFY)
Connect to a single node.
Mine 2 (version 3) blocks (save the coinbases for later).
Generate 98 more version 3 blocks, verify the node accepts.
Mine 749 version 4 blocks, verify the node accepts.
Check that the new CLTV rules are not enforced on the 750th version 4 block.
Check that the new CLTV rules are enforced on the 751st version 4 block.
Mine 199 new version blocks.
Mine 1 old-version block.
Mine 1 new version block.
Mine 1 old version block, see that the node rejects.
'''

class BIP65Test(ComparisonTestFramework):

    def __init__(self):
        self.num_nodes = 1

    def setup_network(self):
        # Must set the blockversion for this test
        self.nodes = start_nodes(1, self.options.tmpdir,
                                 extra_args=[['-debug', '-whitelist=127.0.0.1', '-blockversion=3']],
                                 binary=[self.options.testbinary])

    def run_test(self):
        test = TestManager(self, self.options.tmpdir)
        test.add_all_connections(self.nodes)
        NetworkThread().start() # Start up network handling in another thread
        test.run()

    def create_transaction(self, node, coinbase, to_address, amount):
        from_txid = node.getblock(coinbase)['tx'][0]
        inputs = [{ "txid" : from_txid, "vout" : 0}]
        outputs = { to_address : amount }
        rawtx = node.createrawtransaction(inputs, outputs)
        signresult = node.signrawtransaction(rawtx)
        tx = CTransaction()
        f = cStringIO.StringIO(unhexlify(signresult['hex']))
        tx.deserialize(f)
        return tx

    def get_tests(self):
        self.coinbase_blocks = self.nodes[0].setgenerate(True, 2)
        self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
        self.nodeaddress = self.nodes[0].getnewaddress()
        self.last_block_time = time.time()

        ''' 98 more version 3 blocks '''
        test_blocks = []
        for i in xrange(98):
            block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
            block.nVersion = 3
            block.rehash()
            block.solve()
            test_blocks.append([block, True])
            self.last_block_time += 1
            self.tip = block.sha256
        yield TestInstance(test_blocks, sync_every_block=False)

        ''' Mine 749 version 4 blocks '''
        test_blocks = []
        for i in xrange(749):
            block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
            block.nVersion = 4
            block.rehash()
            block.solve()
            test_blocks.append([block, True])
            self.last_block_time += 1
            self.tip = block.sha256
        yield TestInstance(test_blocks, sync_every_block=False)

        '''
        Check that the new CLTV rules are not enforced in the 750th
        version 4 block.
''' spendtx = self.create_transaction(self.nodes[0], self.coinbase_blocks[0], self.nodeaddress, 1.0) cltv_invalidate(spendtx) spendtx.rehash() block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1) block.nVersion = 4 block.vtx.append(spendtx) block.hashMerkleRoot = block.calc_merkle_root() block.rehash() block.solve() self.last_block_time += 1 self.tip = block.sha256 yield TestInstance([[block, True]]) ''' Check that the new CLTV rules are enforced in the 751st version 4 block. ''' spendtx = self.create_transaction(self.nodes[0], self.coinbase_blocks[1], self.nodeaddress, 1.0) cltv_invalidate(spendtx) spendtx.rehash() block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1) block.nVersion = 4 block.vtx.append(spendtx) block.hashMerkleRoot = block.calc_merkle_root() block.rehash() block.solve() self.last_block_time += 1 yield TestInstance([[block, False]]) ''' Mine 199 new version blocks on last valid tip ''' test_blocks = [] for i in xrange(199): block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1) block.nVersion = 4 block.rehash() block.solve() test_blocks.append([block, True]) self.last_block_time += 1 self.tip = block.sha256 yield TestInstance(test_blocks, sync_every_block=False) ''' Mine 1 old version block ''' block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1) block.nVersion = 3 block.rehash() block.solve() self.last_block_time += 1 self.tip = block.sha256 yield TestInstance([[block, True]]) ''' Mine 1 new version block ''' block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1) block.nVersion = 4 block.rehash() block.solve() self.last_block_time += 1 self.tip = block.sha256 yield TestInstance([[block, True]]) ''' Mine 1 old version block, should be invalid ''' block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1) block.nVersion = 3 block.rehash() block.solve() self.last_block_time += 1 yield TestInstance([[block, False]]) if __name__ == '__main__': BIP65Test().main()
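
# Editor's note (hedged): the counts above line up with Bitcoin-style
# IsSuperMajority thresholds, assuming this fork keeps them at 750/950 of
# the last 1,000 blocks. 100 v3 + 749 v4 blocks are mined first, so the
# block carrying the invalid CLTV spend is the 750th v4 block (accepted,
# rules not yet enforced) and the next one is the 751st (rejected). After
# 749 + 1 + 199 + 1 = 950 v4 blocks, an old-version block is rejected.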
octocoin-project/octocoin
qa/rpc-tests/bip65-cltv-p2p.py
Python
mit
6,411
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright: (c) 2017, Kairo Araujo <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = r'''
---
module: installp
author:
- Kairo Araujo (@kairoaraujo)
short_description: Manage packages on AIX
description:
- Manage packages using 'installp' on AIX
version_added: '2.8'
options:
  accept_license:
    description:
    - Whether to accept the license for the package(s).
    type: bool
    default: no
  name:
    description:
    - One or more packages to install or remove.
    - Use C(all) to install all packages available on informed C(repository_path).
    type: list
    required: true
    aliases: [ pkg ]
  repository_path:
    description:
    - Path with AIX packages (required to install).
    type: path
  state:
    description:
    - Whether the package needs to be present on or absent from the system.
    type: str
    choices: [ absent, present ]
    default: present
notes:
- If the package is already installed, even if the package/fileset is new, the module will not install it.
'''

EXAMPLES = r'''
- name: Install package foo
  installp:
    name: foo
    repository_path: /repository/AIX71/installp/base
    accept_license: yes
    state: present

- name: Install bos.sysmgt that includes bos.sysmgt.nim.master, bos.sysmgt.nim.spot
  installp:
    name: bos.sysmgt
    repository_path: /repository/AIX71/installp/base
    accept_license: yes
    state: present

- name: Install bos.sysmgt.nim.master only
  installp:
    name: bos.sysmgt.nim.master
    repository_path: /repository/AIX71/installp/base
    accept_license: yes
    state: present

- name: Install bos.sysmgt.nim.master and bos.sysmgt.nim.spot
  installp:
    name: bos.sysmgt.nim.master, bos.sysmgt.nim.spot
    repository_path: /repository/AIX71/installp/base
    accept_license: yes
    state: present

- name: Remove packages bos.sysmgt.nim.master
  installp:
    name: bos.sysmgt.nim.master
    state: absent
'''

RETURN = r'''
# '''

import os
import re

from ansible.module_utils.basic import AnsibleModule


def _check_new_pkg(module, package, repository_path):
    """
    Check if the package or fileset name is correct and exists in the
    repository path.

    :param module: Ansible module arguments spec.
    :param package: Package/fileset name.
    :param repository_path: Repository package path.
    :return: Bool, package information.
    """

    if os.path.isdir(repository_path):
        installp_cmd = module.get_bin_path('installp', True)
        rc, package_result, err = module.run_command("%s -l -MR -d %s" % (installp_cmd, repository_path))
        if rc != 0:
            module.fail_json(msg="Failed to run installp.", rc=rc, err=err)

        if package == 'all':
            pkg_info = "All packages on dir"
            return True, pkg_info

        else:
            pkg_info = {}
            for line in package_result.splitlines():
                if re.findall(package, line):
                    pkg_name = line.split()[0].strip()
                    pkg_version = line.split()[1].strip()
                    pkg_info[pkg_name] = pkg_version

                    return True, pkg_info

            return False, None

    else:
        module.fail_json(msg="Repository path %s is not valid." % repository_path)


def _check_installed_pkg(module, package, repository_path):
    """
    Check the package on AIX.
    It verifies whether the package is installed and gathers its information.

    :param module: Ansible module parameters spec.
    :param package: Package/fileset name.
    :param repository_path: Repository package path.

    :return: Bool, package data.
""" lslpp_cmd = module.get_bin_path('lslpp', True) rc, lslpp_result, err = module.run_command("%s -lcq %s*" % (lslpp_cmd, package)) if rc == 1: package_state = ' '.join(err.split()[-2:]) if package_state == 'not installed.': return False, None else: module.fail_json(msg="Failed to run lslpp.", rc=rc, err=err) if rc != 0: module.fail_json(msg="Failed to run lslpp.", rc=rc, err=err) pkg_data = {} full_pkg_data = lslpp_result.splitlines() for line in full_pkg_data: pkg_name, fileset, level = line.split(':')[0:3] pkg_data[pkg_name] = fileset, level return True, pkg_data def remove(module, installp_cmd, packages): repository_path = None remove_count = 0 removed_pkgs = [] not_found_pkg = [] for package in packages: pkg_check, dummy = _check_installed_pkg(module, package, repository_path) if pkg_check: if not module.check_mode: rc, remove_out, err = module.run_command("%s -u %s" % (installp_cmd, package)) if rc != 0: module.fail_json(msg="Failed to run installp.", rc=rc, err=err) remove_count += 1 removed_pkgs.append(package) else: not_found_pkg.append(package) if remove_count > 0: if len(not_found_pkg) > 1: not_found_pkg.insert(0, "Package(s) not found: ") changed = True msg = "Packages removed: %s. %s " % (' '.join(removed_pkgs), ' '.join(not_found_pkg)) else: changed = False msg = ("No packages removed, all packages not found: %s" % ' '.join(not_found_pkg)) return changed, msg def install(module, installp_cmd, packages, repository_path, accept_license): installed_pkgs = [] not_found_pkgs = [] already_installed_pkgs = {} accept_license_param = { True: '-Y', False: '', } # Validate if package exists on repository path. for package in packages: pkg_check, pkg_data = _check_new_pkg(module, package, repository_path) # If package exists on repository path, check if package is installed. if pkg_check: pkg_check_current, pkg_info = _check_installed_pkg(module, package, repository_path) # If package is already installed. if pkg_check_current: # Check if package is a package and not a fileset, get version # and add the package into already installed list if package in pkg_info.keys(): already_installed_pkgs[package] = pkg_info[package][1] else: # If the package is not a package but a fileset, confirm # and add the fileset/package into already installed list for key in pkg_info.keys(): if package in pkg_info[key]: already_installed_pkgs[package] = pkg_info[key][1] else: if not module.check_mode: rc, out, err = module.run_command("%s -a %s -X -d %s %s" % (installp_cmd, accept_license_param[accept_license], repository_path, package)) if rc != 0: module.fail_json(msg="Failed to run installp", rc=rc, err=err) installed_pkgs.append(package) else: not_found_pkgs.append(package) if len(installed_pkgs) > 0: installed_msg = (" Installed: %s." % ' '.join(installed_pkgs)) else: installed_msg = '' if len(not_found_pkgs) > 0: not_found_msg = (" Not found: %s." % ' '.join(not_found_pkgs)) else: not_found_msg = '' if len(already_installed_pkgs) > 0: already_installed_msg = (" Already installed: %s." 
% already_installed_pkgs) else: already_installed_msg = '' if len(installed_pkgs) > 0: changed = True msg = ("%s%s%s" % (installed_msg, not_found_msg, already_installed_msg)) else: changed = False msg = ("No packages installed.%s%s%s" % (installed_msg, not_found_msg, already_installed_msg)) return changed, msg def main(): module = AnsibleModule( argument_spec=dict( name=dict(type='list', required=True, aliases=['pkg']), repository_path=dict(type='path'), accept_license=dict(type='bool', default=False), state=dict(type='str', default='present', choices=['absent', 'present']), ), supports_check_mode=True, ) name = module.params['name'] repository_path = module.params['repository_path'] accept_license = module.params['accept_license'] state = module.params['state'] installp_cmd = module.get_bin_path('installp', True) if state == 'present': if repository_path is None: module.fail_json(msg="repository_path is required to install package") changed, msg = install(module, installp_cmd, name, repository_path, accept_license) elif state == 'absent': changed, msg = remove(module, installp_cmd, name) else: module.fail_json(changed=False, msg="Unexpected state.") module.exit_json(changed=changed, msg=msg) if __name__ == '__main__': main()
andmos/ansible
lib/ansible/modules/packaging/os/installp.py
Python
gpl-3.0
9,242
"""Local settings and globals.""" import sys from os.path import normpath, join from .base import * # Import secrets -- not needed #sys.path.append( # abspath(join(PROJECT_ROOT, '../secrets/TimelineJS/stg')) #) #from secrets import * # Set static URL STATIC_URL = '/static'
deenjohn/TimelineJS
website/core/settings/loc.py
Python
mpl-2.0
279
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Presubmit script for Chromium browser resources. See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details about the presubmit API built into depot_tools, and see http://www.chromium.org/developers/web-development-style-guide for the rules we're checking against here. """ import os import struct class InvalidPNGException(Exception): pass class ResourceScaleFactors(object): """Verifier of image dimensions for Chromium resources. This class verifies the image dimensions of resources in the various resource subdirectories. Attributes: paths: An array of tuples giving the folders to check and their relevant scale factors. For example: [(100, 'default_100_percent'), (200, 'default_200_percent')] """ def __init__(self, input_api, output_api, paths): """ Initializes ResourceScaleFactors with paths.""" self.input_api = input_api self.output_api = output_api self.paths = paths def RunChecks(self): """Verifies the scale factors of resources being added or modified. Returns: An array of presubmit errors if any images were detected not having the correct dimensions. """ def ImageSize(filename): with open(filename, 'rb', buffering=0) as f: data = f.read(24) if data[:8] != '\x89PNG\r\n\x1A\n' or data[12:16] != 'IHDR': raise InvalidPNGException return struct.unpack('>ii', data[16:24]) # Returns a list of valid scaled image sizes. The valid sizes are the # floor and ceiling of (base_size * scale_percent / 100). This is equivalent # to requiring that the actual scaled size is less than one pixel away from # the exact scaled size. def ValidSizes(base_size, scale_percent): return sorted(set([(base_size * scale_percent) / 100, (base_size * scale_percent + 99) / 100])) repository_path = self.input_api.os_path.relpath( self.input_api.PresubmitLocalPath(), self.input_api.change.RepositoryRoot()) results = [] # Check for affected files in any of the paths specified. affected_files = self.input_api.AffectedFiles(include_deletes=False) files = [] for f in affected_files: for path_spec in self.paths: path_root = self.input_api.os_path.join( repository_path, path_spec[1]) if (f.LocalPath().endswith('.png') and f.LocalPath().startswith(path_root)): # Only save the relative path from the resource directory. relative_path = self.input_api.os_path.relpath(f.LocalPath(), path_root) if relative_path not in files: files.append(relative_path) corrupt_png_error = ('Corrupt PNG in file %s. Note that binaries are not ' 'correctly uploaded to the code review tool and must be directly ' 'submitted using the dcommit command.') for f in files: base_image = self.input_api.os_path.join(self.paths[0][1], f) if not os.path.exists(base_image): results.append(self.output_api.PresubmitError( 'Base image %s does not exist' % self.input_api.os_path.join( repository_path, base_image))) continue try: base_dimensions = ImageSize(base_image) except InvalidPNGException: results.append(self.output_api.PresubmitError(corrupt_png_error % self.input_api.os_path.join(repository_path, base_image))) continue # Find all scaled versions of the base image and verify their sizes. for i in range(1, len(self.paths)): image_path = self.input_api.os_path.join(self.paths[i][1], f) if not os.path.exists(image_path): continue # Ensure that each image for a particular scale factor is the # correct scale of the base image. 
try: scaled_dimensions = ImageSize(image_path) except InvalidPNGException: results.append(self.output_api.PresubmitError(corrupt_png_error % self.input_api.os_path.join(repository_path, image_path))) continue for dimension_name, base_size, scaled_size in zip( ('width', 'height'), base_dimensions, scaled_dimensions): valid_sizes = ValidSizes(base_size, self.paths[i][0]) if scaled_size not in valid_sizes: results.append(self.output_api.PresubmitError( 'Image %s has %s %d, expected to be %s' % ( self.input_api.os_path.join(repository_path, image_path), dimension_name, scaled_size, ' or '.join(map(str, valid_sizes))))) return results
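

# --- Illustrative check of the scaling rule (a sketch, not presubmit API) ---
# ValidSizes (nested in RunChecks above) accepts the floor and ceiling of
# base_size * scale_percent / 100; the helper below restates that arithmetic
# so it can be exercised directly. Integer division is intentional here
# (Python 2 semantics, as in the rest of this file).
def _valid_sizes_demo(base_size, scale_percent):
  return sorted(set([(base_size * scale_percent) / 100,
                     (base_size * scale_percent + 99) / 100]))


if __name__ == '__main__':
  assert _valid_sizes_demo(15, 125) == [18, 19]  # 18.75: both 18 and 19 pass
  assert _valid_sizes_demo(10, 200) == [20]      # exact scale: only 20 passes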
guorendong/iridium-browser-ubuntu
ui/resources/resource_check/resource_scale_factors.py
Python
bsd-3-clause
4,859
############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import osv from openerp import netsvc from openerp.tools.translate import _ class procurement_order(osv.osv): _inherit = 'procurement.order' def check_buy(self, cr, uid, ids, context=None): for procurement in self.browse(cr, uid, ids, context=context): for line in procurement.product_id.flow_pull_ids: if line.location_id==procurement.location_id: return line.type_proc=='buy' return super(procurement_order, self).check_buy(cr, uid, ids) def check_produce(self, cr, uid, ids, context=None): for procurement in self.browse(cr, uid, ids, context=context): for line in procurement.product_id.flow_pull_ids: if line.location_id==procurement.location_id: return line.type_proc=='produce' return super(procurement_order, self).check_produce(cr, uid, ids) def check_move(self, cr, uid, ids, context=None): for procurement in self.browse(cr, uid, ids, context=context): for line in procurement.product_id.flow_pull_ids: if line.location_id==procurement.location_id: return (line.type_proc=='move') and (line.location_src_id) return False def action_move_create(self, cr, uid, ids, context=None): proc_obj = self.pool.get('procurement.order') move_obj = self.pool.get('stock.move') picking_obj=self.pool.get('stock.picking') wf_service = netsvc.LocalService("workflow") for proc in proc_obj.browse(cr, uid, ids, context=context): line = None for line in proc.product_id.flow_pull_ids: if line.location_id == proc.location_id: break assert line, 'Line cannot be False if we are on this state of the workflow' origin = (proc.origin or proc.name or '').split(':')[0] +':'+line.name picking_id = picking_obj.create(cr, uid, { 'origin': origin, 'company_id': line.company_id and line.company_id.id or False, 'type': line.picking_type, 'stock_journal_id': line.journal_id and line.journal_id.id or False, 'move_type': 'one', 'partner_id': line.partner_address_id.id, 'note': _('Picking for pulled procurement coming from original location %s, pull rule %s, via original Procurement %s (#%d)') % (proc.location_id.name, line.name, proc.name, proc.id), 'invoice_state': line.invoice_state, }) move_id = move_obj.create(cr, uid, { 'name': line.name, 'picking_id': picking_id, 'company_id': line.company_id and line.company_id.id or False, 'product_id': proc.product_id.id, 'date': proc.date_planned, 'product_qty': proc.product_qty, 'product_uom': proc.product_uom.id, 'product_uos_qty': (proc.product_uos and proc.product_uos_qty)\ or proc.product_qty, 'product_uos': (proc.product_uos and proc.product_uos.id)\ or proc.product_uom.id, 'partner_id': line.partner_address_id.id, 'location_id': line.location_src_id.id, 
                'location_dest_id': line.location_id.id,
                'move_dest_id': proc.move_id and proc.move_id.id or False, # to verify, about history?
                'tracking_id': False,
                'cancel_cascade': line.cancel_cascade,
                'state': 'confirmed',
                'note': _('Move for pulled procurement coming from original location %s, pull rule %s, via original Procurement %s (#%d)') % (proc.location_id.name, line.name, proc.name, proc.id),
            })
            # ('confirmed',) is a one-element tuple; a bare ('confirmed') would
            # be a plain string and turn this into a substring test.
            if proc.move_id and proc.move_id.state in ('confirmed',):
                move_obj.write(cr, uid, [proc.move_id.id], {
                    'state': 'waiting'
                }, context=context)
            proc_id = proc_obj.create(cr, uid, {
                'name': line.name,
                'origin': origin,
                'note': _('Pulled procurement coming from original location %s, pull rule %s, via original Procurement %s (#%d)') % (proc.location_id.name, line.name, proc.name, proc.id),
                'company_id': line.company_id and line.company_id.id or False,
                'date_planned': proc.date_planned,
                'product_id': proc.product_id.id,
                'product_qty': proc.product_qty,
                'product_uom': proc.product_uom.id,
                'product_uos_qty': (proc.product_uos and proc.product_uos_qty)\
                        or proc.product_qty,
                'product_uos': (proc.product_uos and proc.product_uos.id)\
                        or proc.product_uom.id,
                'location_id': line.location_src_id.id,
                'procure_method': line.procure_method,
                'move_id': move_id,
            })
            wf_service = netsvc.LocalService("workflow")
            wf_service.trg_validate(uid, 'stock.picking', picking_id, 'button_confirm', cr)
            wf_service.trg_validate(uid, 'procurement.order', proc_id, 'button_confirm', cr)
            if proc.move_id:
                move_obj.write(cr, uid, [proc.move_id.id],
                    {'location_id': proc.location_id.id})
            msg = _('Pulled from another location.')
            self.write(cr, uid, [proc.id], {'state': 'running', 'message': msg})
            self.message_post(cr, uid, [proc.id], body=msg, context=context)
            # trigger direct processing (the new procurement shares the same planned date as the original one, which is already being processed)
            wf_service.trg_validate(uid, 'procurement.order', proc_id, 'button_check', cr)
        return False

procurement_order()

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
jeffery9/mixprint_addons
stock_location/procurement_pull.py
Python
agpl-3.0
6,951
def f(x): """ Returns ------- object """ return 42
smmribeiro/intellij-community
python/testData/intentions/returnTypeInNewNumpyDocString_after.py
Python
apache-2.0
75
# cs.???? = currentstate, any variable on the status tab in the planner can be used.
# Script = options are
# Script.Sleep(ms)
# Script.ChangeParam(name,value)
# Script.GetParam(name)
# Script.ChangeMode(mode) - same as displayed in mode setup screen 'AUTO'
# Script.WaitFor(string,timeout)
# Script.SendRC(channel,pwm,sendnow)
#

print 'Start Script'

for chan in range(1,9):
    Script.SendRC(chan,1500,False)
Script.SendRC(3,Script.GetParam('RC3_MIN'),True)

Script.Sleep(5000)
while cs.lat == 0:
    print 'Waiting for GPS'
    Script.Sleep(1000)
print 'Got GPS'

jo = 10 * 13
print jo

Script.SendRC(3,1000,False)
Script.SendRC(4,2000,True)

cs.messages.Clear()
Script.WaitFor('ARMING MOTORS',30000)
Script.SendRC(4,1500,True)

print 'Motors Armed!'

Script.SendRC(3,1700,True)
while cs.alt < 50:
    Script.Sleep(50)

Script.SendRC(5,2000,True) # acro

Script.SendRC(1,2000,False) # roll
Script.SendRC(3,1370,True) # throttle
while cs.roll > -45: # top half 0 - 180
    Script.Sleep(5)
while cs.roll < -45: # -180 - -45
    Script.Sleep(5)

Script.SendRC(5,1500,False) # stabilise
Script.SendRC(1,1500,True) # level roll

Script.Sleep(2000) # 2 sec to stabilise

Script.SendRC(3,1300,True) # throttle back to land

thro = 1350 # will descend

while cs.alt > 0.1:
    Script.Sleep(300)

Script.SendRC(3,1000,False)
Script.SendRC(4,1000,True)

Script.WaitFor('DISARMING MOTORS',30000)
Script.SendRC(4,1500,True)

print 'Roll complete'
vizual54/MissionPlanner
Scripts/example1.py
Python
gpl-3.0
1,491
# Copyright (c) 2007, Robert Coup <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of Distance nor the names of its contributors may be used # to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # """ Distance and Area objects to allow for sensible and convenient calculation and conversions. Authors: Robert Coup, Justin Bronn, Riccardo Di Virgilio Inspired by GeoPy (http://exogen.case.edu/projects/geopy/) and Geoff Biggs' PhD work on dimensioned units for robotics. 
""" __all__ = ['A', 'Area', 'D', 'Distance'] from decimal import Decimal from functools import total_ordering from django.utils import six NUMERIC_TYPES = six.integer_types + (float, Decimal) AREA_PREFIX = "sq_" def pretty_name(obj): return obj.__name__ if obj.__class__ == type else obj.__class__.__name__ @total_ordering class MeasureBase(object): STANDARD_UNIT = None ALIAS = {} UNITS = {} LALIAS = {} def __init__(self, default_unit=None, **kwargs): value, self._default_unit = self.default_units(kwargs) setattr(self, self.STANDARD_UNIT, value) if default_unit and isinstance(default_unit, six.string_types): self._default_unit = default_unit def _get_standard(self): return getattr(self, self.STANDARD_UNIT) def _set_standard(self, value): setattr(self, self.STANDARD_UNIT, value) standard = property(_get_standard, _set_standard) def __getattr__(self, name): if name in self.UNITS: return self.standard / self.UNITS[name] else: raise AttributeError('Unknown unit type: %s' % name) def __repr__(self): return '%s(%s=%s)' % (pretty_name(self), self._default_unit, getattr(self, self._default_unit)) def __str__(self): return '%s %s' % (getattr(self, self._default_unit), self._default_unit) # **** Comparison methods **** def __eq__(self, other): if isinstance(other, self.__class__): return self.standard == other.standard else: return NotImplemented def __lt__(self, other): if isinstance(other, self.__class__): return self.standard < other.standard else: return NotImplemented # **** Operators methods **** def __add__(self, other): if isinstance(other, self.__class__): return self.__class__(default_unit=self._default_unit, **{self.STANDARD_UNIT: (self.standard + other.standard)}) else: raise TypeError('%(class)s must be added with %(class)s' % {"class": pretty_name(self)}) def __iadd__(self, other): if isinstance(other, self.__class__): self.standard += other.standard return self else: raise TypeError('%(class)s must be added with %(class)s' % {"class": pretty_name(self)}) def __sub__(self, other): if isinstance(other, self.__class__): return self.__class__(default_unit=self._default_unit, **{self.STANDARD_UNIT: (self.standard - other.standard)}) else: raise TypeError('%(class)s must be subtracted from %(class)s' % {"class": pretty_name(self)}) def __isub__(self, other): if isinstance(other, self.__class__): self.standard -= other.standard return self else: raise TypeError('%(class)s must be subtracted from %(class)s' % {"class": pretty_name(self)}) def __mul__(self, other): if isinstance(other, NUMERIC_TYPES): return self.__class__(default_unit=self._default_unit, **{self.STANDARD_UNIT: (self.standard * other)}) else: raise TypeError('%(class)s must be multiplied with number' % {"class": pretty_name(self)}) def __imul__(self, other): if isinstance(other, NUMERIC_TYPES): self.standard *= float(other) return self else: raise TypeError('%(class)s must be multiplied with number' % {"class": pretty_name(self)}) def __rmul__(self, other): return self * other def __truediv__(self, other): if isinstance(other, self.__class__): return self.standard / other.standard if isinstance(other, NUMERIC_TYPES): return self.__class__(default_unit=self._default_unit, **{self.STANDARD_UNIT: (self.standard / other)}) else: raise TypeError('%(class)s must be divided with number or %(class)s' % {"class": pretty_name(self)}) def __div__(self, other): # Python 2 compatibility return type(self).__truediv__(self, other) def __itruediv__(self, other): if isinstance(other, NUMERIC_TYPES): self.standard /= float(other) return self 
else: raise TypeError('%(class)s must be divided with number' % {"class": pretty_name(self)}) def __idiv__(self, other): # Python 2 compatibility return type(self).__itruediv__(self, other) def __bool__(self): return bool(self.standard) def __nonzero__(self): # Python 2 compatibility return type(self).__bool__(self) def default_units(self, kwargs): """ Return the unit value and the default units specified from the given keyword arguments dictionary. """ val = 0.0 default_unit = self.STANDARD_UNIT for unit, value in six.iteritems(kwargs): if not isinstance(value, float): value = float(value) if unit in self.UNITS: val += self.UNITS[unit] * value default_unit = unit elif unit in self.ALIAS: u = self.ALIAS[unit] val += self.UNITS[u] * value default_unit = u else: lower = unit.lower() if lower in self.UNITS: val += self.UNITS[lower] * value default_unit = lower elif lower in self.LALIAS: u = self.LALIAS[lower] val += self.UNITS[u] * value default_unit = u else: raise AttributeError('Unknown unit type: %s' % unit) return val, default_unit @classmethod def unit_attname(cls, unit_str): """ Retrieves the unit attribute name for the given unit string. For example, if the given unit string is 'metre', 'm' would be returned. An exception is raised if an attribute cannot be found. """ lower = unit_str.lower() if unit_str in cls.UNITS: return unit_str elif lower in cls.UNITS: return lower elif lower in cls.LALIAS: return cls.LALIAS[lower] else: raise Exception('Could not find a unit keyword associated with "%s"' % unit_str) class Distance(MeasureBase): STANDARD_UNIT = "m" UNITS = { 'chain': 20.1168, 'chain_benoit': 20.116782, 'chain_sears': 20.1167645, 'british_chain_benoit': 20.1167824944, 'british_chain_sears': 20.1167651216, 'british_chain_sears_truncated': 20.116756, 'cm': 0.01, 'british_ft': 0.304799471539, 'british_yd': 0.914398414616, 'clarke_ft': 0.3047972654, 'clarke_link': 0.201166195164, 'fathom': 1.8288, 'ft': 0.3048, 'german_m': 1.0000135965, 'gold_coast_ft': 0.304799710181508, 'indian_yd': 0.914398530744, 'inch': 0.0254, 'km': 1000.0, 'link': 0.201168, 'link_benoit': 0.20116782, 'link_sears': 0.20116765, 'm': 1.0, 'mi': 1609.344, 'mm': 0.001, 'nm': 1852.0, 'nm_uk': 1853.184, 'rod': 5.0292, 'sears_yd': 0.91439841, 'survey_ft': 0.304800609601, 'um': 0.000001, 'yd': 0.9144, } # Unit aliases for `UNIT` terms encountered in Spatial Reference WKT. ALIAS = { 'centimeter': 'cm', 'foot': 'ft', 'inches': 'inch', 'kilometer': 'km', 'kilometre': 'km', 'meter': 'm', 'metre': 'm', 'micrometer': 'um', 'micrometre': 'um', 'millimeter': 'mm', 'millimetre': 'mm', 'mile': 'mi', 'yard': 'yd', 'British chain (Benoit 1895 B)': 'british_chain_benoit', 'British chain (Sears 1922)': 'british_chain_sears', 'British chain (Sears 1922 truncated)': 'british_chain_sears_truncated', 'British foot (Sears 1922)': 'british_ft', 'British foot': 'british_ft', 'British yard (Sears 1922)': 'british_yd', 'British yard': 'british_yd', "Clarke's Foot": 'clarke_ft', "Clarke's link": 'clarke_link', 'Chain (Benoit)': 'chain_benoit', 'Chain (Sears)': 'chain_sears', 'Foot (International)': 'ft', 'German legal metre': 'german_m', 'Gold Coast foot': 'gold_coast_ft', 'Indian yard': 'indian_yd', 'Link (Benoit)': 'link_benoit', 'Link (Sears)': 'link_sears', 'Nautical Mile': 'nm', 'Nautical Mile (UK)': 'nm_uk', 'US survey foot': 'survey_ft', 'U.S. 
Foot': 'survey_ft', 'Yard (Indian)': 'indian_yd', 'Yard (Sears)': 'sears_yd' } LALIAS = {k.lower(): v for k, v in ALIAS.items()} def __mul__(self, other): if isinstance(other, self.__class__): return Area(default_unit=AREA_PREFIX + self._default_unit, **{AREA_PREFIX + self.STANDARD_UNIT: (self.standard * other.standard)}) elif isinstance(other, NUMERIC_TYPES): return self.__class__(default_unit=self._default_unit, **{self.STANDARD_UNIT: (self.standard * other)}) else: raise TypeError('%(distance)s must be multiplied with number or %(distance)s' % { "distance": pretty_name(self.__class__), }) class Area(MeasureBase): STANDARD_UNIT = AREA_PREFIX + Distance.STANDARD_UNIT # Getting the square units values and the alias dictionary. UNITS = {'%s%s' % (AREA_PREFIX, k): v ** 2 for k, v in Distance.UNITS.items()} ALIAS = {k: '%s%s' % (AREA_PREFIX, v) for k, v in Distance.ALIAS.items()} LALIAS = {k.lower(): v for k, v in ALIAS.items()} def __truediv__(self, other): if isinstance(other, NUMERIC_TYPES): return self.__class__(default_unit=self._default_unit, **{self.STANDARD_UNIT: (self.standard / other)}) else: raise TypeError('%(class)s must be divided by a number' % {"class": pretty_name(self)}) def __div__(self, other): # Python 2 compatibility return type(self).__truediv__(self, other) # Shortcuts D = Distance A = Area
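# Hedged usage sketch (not part of the original module): it only exercises the
# public API defined above -- D/Distance, A/Area and Distance.unit_attname().
if __name__ == '__main__':
    # Mixed-unit arithmetic: values are normalized to the standard unit (metres).
    d = D(mi=1) + D(km=1)
    print(d.km)                                    # ~2.609344
    # Multiplying two Distances yields an Area in the squared unit.
    a = D(m=100) * D(m=50)
    print(a.sq_m)                                  # 5000.0
    # WKT unit names resolve through the alias tables.
    print(Distance.unit_attname('Nautical Mile'))  # 'nm'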
Vvucinic/Wander
venv_2_7/lib/python2.7/site-packages/Django-1.9-py2.7.egg/django/contrib/gis/measure.py
Python
artistic-2.0
12,272
#!/usr/bin/env python # -*- coding: utf-8 -*- # # GuessIt - A library for guessing information from filenames # Copyright (c) 2012 Nicolas Wack <[email protected]> # # GuessIt is free software; you can redistribute it and/or modify it under # the terms of the Lesser GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # GuessIt is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # Lesser GNU General Public License for more details. # # You should have received a copy of the Lesser GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # from __future__ import unicode_literals from guessit import Guess from guessit.transfo import SingleNodeGuesser from guessit.patterns import video_rexps, sep import re import logging log = logging.getLogger(__name__) def guess_video_rexps(string): string = '-' + string + '-' for rexp, confidence, span_adjust in video_rexps: match = re.search(sep + rexp + sep, string, re.IGNORECASE) if match: metadata = match.groupdict() # is this the better place to put it? (maybe, as it is at least # the soonest that we can catch it) if metadata.get('cdNumberTotal', -1) is None: del metadata['cdNumberTotal'] span = (match.start() + span_adjust[0], match.end() + span_adjust[1] - 2) return (Guess(metadata, confidence=confidence, raw=string[span[0]:span[1]]), span) return None, None def process(mtree): SingleNodeGuesser(guess_video_rexps, None, log).process(mtree)
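# Hedged usage sketch (not part of the original module). guess_video_rexps()
# pads the string with separators and returns (Guess, span) for the first
# matching pattern from video_rexps, or (None, None); the exact patterns depend
# on the guessit version, so the filename below is only illustrative.
if __name__ == '__main__':
    guess, span = guess_video_rexps('Movie.Title.cd1.avi')
    print(guess)
    print(span)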
loulich/Couchpotato
libs/guessit/transfo/guess_video_rexps.py
Python
gpl-3.0
1,837
#!/usr/bin/env python # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # # ansible-vault is a script that encrypts/decrypts YAML files. See # https://docs.ansible.com/playbooks_vault.html for more details. from __future__ import (absolute_import, division, print_function) __metaclass__ = type import sys import time import os def main(args): path = os.path.abspath(args[1]) fo = open(path, 'r+') content = fo.readlines() content.append('faux editor added at %s\n' % time.time()) fo.seek(0) fo.write(''.join(content)) fo.close() return 0 if __name__ == '__main__': sys.exit(main(sys.argv[:]))
EvanK/ansible
test/integration/targets/vault/faux-editor.py
Python
gpl-3.0
1,212
# $Id: admonitions.py 7681 2013-07-12 07:52:27Z milde $ # Author: David Goodger <[email protected]> # Copyright: This module has been placed in the public domain. """ Admonition directives. """ __docformat__ = 'reStructuredText' from docutils.parsers.rst import Directive from docutils.parsers.rst import states, directives from docutils.parsers.rst.roles import set_classes from docutils import nodes class BaseAdmonition(Directive): final_argument_whitespace = True option_spec = {'class': directives.class_option, 'name': directives.unchanged} has_content = True node_class = None """Subclasses must set this to the appropriate admonition node class.""" def run(self): set_classes(self.options) self.assert_has_content() text = '\n'.join(self.content) admonition_node = self.node_class(text, **self.options) self.add_name(admonition_node) if self.node_class is nodes.admonition: title_text = self.arguments[0] textnodes, messages = self.state.inline_text(title_text, self.lineno) title = nodes.title(title_text, '', *textnodes) title.source, title.line = ( self.state_machine.get_source_and_line(self.lineno)) admonition_node += title admonition_node += messages if not 'classes' in self.options: admonition_node['classes'] += ['admonition-' + nodes.make_id(title_text)] self.state.nested_parse(self.content, self.content_offset, admonition_node) return [admonition_node] class Admonition(BaseAdmonition): required_arguments = 1 node_class = nodes.admonition class Attention(BaseAdmonition): node_class = nodes.attention class Caution(BaseAdmonition): node_class = nodes.caution class Danger(BaseAdmonition): node_class = nodes.danger class Error(BaseAdmonition): node_class = nodes.error class Hint(BaseAdmonition): node_class = nodes.hint class Important(BaseAdmonition): node_class = nodes.important class Note(BaseAdmonition): node_class = nodes.note class Tip(BaseAdmonition): node_class = nodes.tip class Warning(BaseAdmonition): node_class = nodes.warning
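# Hedged usage sketch (not part of the original module): running one of the
# directives registered above through the standard docutils publisher.
# 'pseudoxml' is the default writer and prints the resulting node tree, so the
# generated admonition node is visible directly.
if __name__ == '__main__':
    from docutils.core import publish_string
    source = ".. note:: This module is in the public domain.\n"
    print(publish_string(source, writer_name='pseudoxml'))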
JulienMcJay/eclock
windows/Python27/Lib/site-packages/docutils/parsers/rst/directives/admonitions.py
Python
gpl-2.0
2,413
#!/usr/bin/env python

import os
import re
import subprocess
import sys
import tempfile

CC = "gcc"
CFLAGS = "-fmax-errors=4 -std=c99 -pipe -D_POSIX_C_SOURCE=200809L -W -Wall -Wno-unused-variable -Wno-unused-parameter -Wno-unused-label -Wno-unused-value -Wno-unused-but-set-variable -Wno-unused-function -Wno-main".split(" ")

class Table():
    pass

class TestMode():
    pass_ = 0
    fail_compile_parse = 1
    fail_compile_sem = 2
    fail_compile_ice = 3
    fail_c = 4
    fail_run = 5
    fail_output = 6
    fail_other = 7
    disable = 8

test_modes = [TestMode.pass_, TestMode.fail_compile_parse,
              TestMode.fail_compile_sem, TestMode.fail_compile_ice,
              TestMode.fail_c, TestMode.fail_run, TestMode.fail_output,
              TestMode.fail_other, TestMode.disable]

test_mode_names = {
    TestMode.pass_: ("pass", "Passed"),
    TestMode.fail_compile_parse: ("fail_compile_parse", "Compilation failed (parsing)"),
    TestMode.fail_compile_sem: ("fail_compile_sem", "Compilation failed (semantics)"),
    TestMode.fail_compile_ice: ("fail_compile_ice", "Compilation failed (ICE)"),
    TestMode.fail_c: ("fail_c", "C compilation/linking failed"),
    TestMode.fail_run: ("fail_run", "Run failed"),
    TestMode.fail_output: ("fail_output", "Output mismatched"),
    TestMode.fail_other: ("fail_other", "Expected failure didn't happen"),
    TestMode.disable: ("disable", "Disabled"),
}

test_stats = dict([(m, 0) for m in test_modes])

test_mode_values = {}
for m, (s, _) in test_mode_names.iteritems():
    test_mode_values[s] = m

def pick(v, m):
    if v not in m:
        raise Exception("Unknown value '%s'" % v)
    return m[v]

def run_test(filename):
    testname = os.path.basename(filename)
    print("Test '%s'..." % testname)
    workdir = tempfile.mkdtemp(prefix="boringtest")
    tempfiles = []
    src = open(filename)
    headers = Table()
    headers.mode = TestMode.pass_
    headers.is_expr = False
    headers.stdout = None
    while True:
        hline = src.readline()
        if not hline:
            break
        m = re.match(r"(?://|/\*) ([A-Z]+):(.*)", hline)
        if not m:
            break
        name, value = m.group(1), m.group(2)
        value = value.strip()
        if name == "TEST":
            headers.mode = pick(value, test_mode_values)
        elif name == "TYPE":
            headers.is_expr = pick(value, {"normal": False, "expr": True})
        elif name == "STDOUT":
            term = value + "*/"
            stdout = ""
            while True:
                line = src.readline()
                if not line:
                    raise Exception("unterminated STDOUT header")
                if line.strip() == term:
                    break
                stdout += line
            headers.stdout = stdout
        else:
            raise Exception("Unknown header '%s'" % name)
    src.close()
    def do_run():
        if headers.mode == TestMode.disable:
            return TestMode.disable
        # build the C file directly, without a makefile
        tc = os.path.join(workdir, "t.c")
        tcf = open(tc, "w")
        tempfiles.append(tc)
        res = subprocess.call(["./main", "cg_c", filename], stdout=tcf)
        tcf.close()
        if res != 0:
            if res == 1:
                return TestMode.fail_compile_parse
            if res == 2:
                return TestMode.fail_compile_sem
            return TestMode.fail_compile_ice
        t = os.path.join(workdir, "t")
        tempfiles.append(t)
        res = subprocess.call([CC] + CFLAGS + [tc, "-o", t])
        if res != 0:
            return TestMode.fail_c
        p = subprocess.Popen([t], stdout=subprocess.PIPE)
        output, _ = p.communicate()
        res = p.wait()
        if res != 0:
            return TestMode.fail_run
        if headers.stdout is not None and headers.stdout != output:
            print("Program output: >\n%s<\nExpected: >\n%s<" % (output, headers.stdout))
            return TestMode.fail_output
        return TestMode.pass_
    actual_res = do_run()
    for f in tempfiles:
        try:
            os.unlink(f)
        except OSError:
            pass
    os.rmdir(workdir)
    res = actual_res
    if res == TestMode.disable:
        pass
    elif res == headers.mode:
        res = TestMode.pass_
    else:
        if headers.mode != TestMode.pass_:
            res = TestMode.fail_other
    test_stats[res] += 1
    print("Test '%s': %s (expected %s, got %s)" % (testname, test_mode_names[res][0], test_mode_names[headers.mode][0], test_mode_names[actual_res][0]))

def run_tests(list_file_name):
    base = os.path.dirname(list_file_name)
    for f in [x.strip() for x in open(list_file_name)]:
        run_test(os.path.join(base, f))
    print("SUMMARY:")
    test_sum = 0
    for m in test_modes:
        print(" %s: %d" % (test_mode_names[m][1], test_stats[m]))
        test_sum += test_stats[m]
    passed_tests = test_stats[TestMode.pass_]
    failed_tests = test_sum - passed_tests - test_stats[TestMode.disable]
    print("Passed/failed: %s/%d" % (passed_tests, failed_tests))
    if failed_tests:
        print("OMG OMG OMG ------- Some tests have failed ------- OMG OMG OMG")
        sys.exit(1)

if __name__ == "__main__":
    argv = sys.argv
    if len(argv) != 2:
        print("Usage: %s tests.lst" % argv[0])
        sys.exit(1)
    #subprocess.check_call(["make", "main"])
    run_tests(argv[1])
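# Illustrative note (not part of the original script): run_test() expects each
# test file to begin with header comments matching the regex above, e.g.
#
#   // TEST: pass
#   /* STDOUT:
#   hello world
#   */
#
# 'TEST' values are the short names from test_mode_names (pass, fail_run, ...),
# and the STDOUT block is compared byte-for-byte against the program output.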
wm4/boringlang
run_tests.py
Python
isc
5,430
# ignore-together.py - a distributed ignore list engine for IRC. from __future__ import print_function import os import sys import yaml weechat_is_fake = False try: import weechat except: class FakeWeechat: def command(self, cmd): print(cmd) weechat = FakeWeechat() weechat_is_fake = True class IgnoreRule: """An ignore rule. This provides instrumentation for converting ignore rules into weechat filters. It handles both types of ignore-together ignore rules. """ def __init__(self, ignorelist, rulename, rationale, typename='ignore', hostmasks=[], accountnames=[], patterns=[]): self.ignorelist = ignorelist self.rulename = rulename.replace(' ', '_') self.rationale = rationale self.typename = typename self.hostmasks = hostmasks self.accountnames = accountnames self.patterns = patterns def install(self): "Install an ignore rule." subrule_ctr = 0 if self.typename == 'ignore': for pattern in self.hostmasks: weechat.command('/filter add ignore-together.{ignorelist}.{rulename}.{ctr} irc.* * {pattern}'.format( ignorelist=self.ignorelist.name, rulename=self.rulename, ctr=subrule_ctr, pattern=pattern)) subrule_ctr += 1 # XXX - accountnames elif self.typename == 'message': for pattern in self.patterns: weechat.command('/filter add ignore-together.{ignorelist}.{rulename}.{ctr} irc.* * {pattern}'.format( ignorelist=self.ignorelist.name, rulename=self.rulename, ctr=subrule_ctr, pattern=pattern)) subrule_ctr += 1 def uninstall(self): "Uninstall an ignore rule." subrule_ctr = 0 if self.typename == 'ignore': for pattern in self.hostmasks: weechat.command('/filter del ignore-together.{ignorelist}.{rulename}.{ctr}'.format( ignorelist=self.ignorelist.name, rulename=self.rulename, ctr=subrule_ctr)) subrule_ctr += 1 elif self.typename == 'message': for pattern in self.patterns: weechat.command('/filter del ignore-together.{ignorelist}.{rulename}.{ctr}'.format( ignorelist=self.ignorelist.name, rulename=self.rulename, ctr=subrule_ctr)) subrule_ctr += 1 class IgnoreRuleSet: """A downloaded collection of rules. Handles merging updates vs current state, and so on.""" def __init__(self, name, uri): self.name = name self.uri = uri self.rules = [] def load(self): def build_rules(s): for k, v in s.items(): self.rules.append(IgnoreRule(self, k, v.get('rationale', '???'), v.get('type', 'ignore'), v.get('hostmasks', []), v.get('accountnames', []), v.get('patterns', []))) def test_load_cb(payload): build_rules(yaml.load(payload)) if weechat_is_fake: d = open(self.uri, 'r') return test_load_cb(d.read()) def install(self): [r.install() for r in self.rules] def uninstall(self): [r.uninstall() for r in self.rules] rules = {}
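# Hedged usage sketch (not part of the original module; 'rules.yml' is a
# hypothetical local path). Outside weechat the module falls back to
# FakeWeechat, so load() reads the YAML from disk and install() prints the
# /filter commands instead of executing them.
if __name__ == '__main__' and weechat_is_fake:
    # rules.yml might look like:
    #   spambots:
    #     rationale: known spam hosts
    #     type: ignore
    #     hostmasks: ['*!*@spam.example.org']
    demo = IgnoreRuleSet('demo', 'rules.yml')
    demo.load()
    demo.install()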
kaniini/ignore-together
ignoretogether.py
Python
isc
3,250
import re

from exchange.constants import (
    CURRENCIES, CURRENCY_NAMES, DEFAULT_CURRENCY, CURRENCY_EUR, CURRENCY_UAH,
    CURRENCY_USD, CURRENCY_SESSION_KEY)


def round_number(value, decimal_places=2, down=False):
    assert decimal_places > 0
    # Scale by a power of ten so truncation/rounding happens at the
    # requested number of decimal places.
    factor = 10.0 ** decimal_places
    sign = -1 if value < 0 else 1
    return int(value * factor + sign * (0 if down else 0.5)) / factor


def format_number(value):
    append_comma = lambda match_object: "%s," % match_object.group(0)
    value = "%.2f" % float(value)
    value = re.sub(r"(\d)(?=(\d{3})+\.)", append_comma, value)
    return value


def format_price(price, round_price=False):
    price = float(price)
    return format_number(round_number(price) if round_price else price)


def format_printable_price(price, currency=DEFAULT_CURRENCY):
    return '%s %s' % (format_price(price), dict(CURRENCIES)[currency])


def get_currency_from_session(session):
    currency = session.get(CURRENCY_SESSION_KEY) or DEFAULT_CURRENCY
    return int(currency)


def get_price_factory(rates, src, dst):
    if src == dst:
        return lambda p: p

    name = lambda c: CURRENCY_NAMES[c]

    if src == CURRENCY_UAH:
        return lambda p: p / getattr(rates, name(dst))

    if dst == CURRENCY_UAH:
        return lambda p: p * getattr(rates, name(src))

    if src == CURRENCY_USD and dst == CURRENCY_EUR:
        return lambda p: p * rates.usd_eur

    if src == CURRENCY_EUR and dst == CURRENCY_USD:
        return lambda p: p / rates.usd_eur

    raise ValueError('Unknown currencies')
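# Illustrative values (not part of the original module), assuming the helpers
# above with a power-of-ten scaling factor:
#   round_number(1234.567)                  -> 1234.57
#   round_number(1234.567, down=True)       -> 1234.56
#   format_number(1234567.8)                -> '1,234,567.80'
#   format_price(1234.5, round_price=True)  -> '1,234.50'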
pmaigutyak/mp-shop
exchange/utils.py
Python
isc
1,571
""" Util classes ------------ Classes which represent data types useful for the package pySpatialTools. """ ## Spatial elements collectors from spatialelements import SpatialElementsCollection, Locations ## Membership relations from Membership import Membership
tgquintela/pySpatialTools
pySpatialTools/utils/util_classes/__init__.py
Python
mit
266
""" This module is to support *bbox_inches* option in savefig command. """ from __future__ import (absolute_import, division, print_function, unicode_literals) import six import warnings from matplotlib.transforms import Bbox, TransformedBbox, Affine2D def adjust_bbox(fig, bbox_inches, fixed_dpi=None): """ Temporarily adjust the figure so that only the specified area (bbox_inches) is saved. It modifies fig.bbox, fig.bbox_inches, fig.transFigure._boxout, and fig.patch. While the figure size changes, the scale of the original figure is conserved. A function which restores the original values are returned. """ origBbox = fig.bbox origBboxInches = fig.bbox_inches _boxout = fig.transFigure._boxout asp_list = [] locator_list = [] for ax in fig.axes: pos = ax.get_position(original=False).frozen() locator_list.append(ax.get_axes_locator()) asp_list.append(ax.get_aspect()) def _l(a, r, pos=pos): return pos ax.set_axes_locator(_l) ax.set_aspect("auto") def restore_bbox(): for ax, asp, loc in zip(fig.axes, asp_list, locator_list): ax.set_aspect(asp) ax.set_axes_locator(loc) fig.bbox = origBbox fig.bbox_inches = origBboxInches fig.transFigure._boxout = _boxout fig.transFigure.invalidate() fig.patch.set_bounds(0, 0, 1, 1) if fixed_dpi is not None: tr = Affine2D().scale(fixed_dpi) dpi_scale = fixed_dpi / fig.dpi else: tr = Affine2D().scale(fig.dpi) dpi_scale = 1. _bbox = TransformedBbox(bbox_inches, tr) fig.bbox_inches = Bbox.from_bounds(0, 0, bbox_inches.width, bbox_inches.height) x0, y0 = _bbox.x0, _bbox.y0 w1, h1 = fig.bbox.width * dpi_scale, fig.bbox.height * dpi_scale fig.transFigure._boxout = Bbox.from_bounds(-x0, -y0, w1, h1) fig.transFigure.invalidate() fig.bbox = TransformedBbox(fig.bbox_inches, tr) fig.patch.set_bounds(x0 / w1, y0 / h1, fig.bbox.width / w1, fig.bbox.height / h1) return restore_bbox def process_figure_for_rasterizing(fig, bbox_inches_restore, fixed_dpi=None): """ This need to be called when figure dpi changes during the drawing (e.g., rasterizing). It recovers the bbox and re-adjust it with the new dpi. """ bbox_inches, restore_bbox = bbox_inches_restore restore_bbox() r = adjust_bbox(figure, bbox_inches, fixed_dpi) return bbox_inches, r
yavalvas/yav_com
build/matplotlib/lib/matplotlib/tight_bbox.py
Python
mit
2,604
# -*- coding: utf-8 -*- r""" Bending of collimating mirror ----------------------------- Uses :mod:`shadow` backend. File: `\\examples\\withShadow\\03\\03_DCM_energy.py` Influence onto energy resolution ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Pictures after monochromator, :ref:`type 2 of global normalization<globalNorm>`. The nominal radius is 7.4 km. Watch the energy distribution when the bending radius is smaller or greater than the nominal one. +---------+---------+---------+---------+ | |VCMR1| | |VCMR2| | |VCMR3| | | +---------+---------+---------+ |VCMR4| | | |VCMR7| | |VCMR6| | |VCMR5| | | +---------+---------+---------+---------+ .. |VCMR1| image:: _images/03VCM_R0496453_norm2.* :scale: 35 % .. |VCMR2| image:: _images/03VCM_R0568297_norm2.* :scale: 35 % .. |VCMR3| image:: _images/03VCM_R0650537_norm2.* :scale: 35 % .. |VCMR4| image:: _images/03VCM_R0744680_norm2.* :scale: 35 % :align: middle .. |VCMR5| image:: _images/03VCM_R0852445_norm2.* :scale: 35 % .. |VCMR6| image:: _images/03VCM_R0975806_norm2.* :scale: 35 % .. |VCMR7| image:: _images/03VCM_R1117020_norm2.* :scale: 35 % Influence onto focusing ~~~~~~~~~~~~~~~~~~~~~~~ Pictures at the sample position, :ref:`type 1 of global normalization<globalNorm>` +----------+----------+----------+----------+ | |VCMRF1| | |VCMRF2| | |VCMRF3| | | +----------+----------+----------+ |VCMRF4| | | |VCMRF7| | |VCMRF6| | |VCMRF5| | | +----------+----------+----------+----------+ .. |VCMRF1| image:: _images/04VCM_R0496453_norm1.* :scale: 35 % .. |VCMRF2| image:: _images/04VCM_R0568297_norm1.* :scale: 35 % .. |VCMRF3| image:: _images/04VCM_R0650537_norm1.* :scale: 35 % .. |VCMRF4| image:: _images/04VCM_R0744680_norm1.* :scale: 35 % :align: middle .. |VCMRF5| image:: _images/04VCM_R0852445_norm1.* :scale: 35 % .. |VCMRF6| image:: _images/04VCM_R0975806_norm1.* :scale: 35 % .. |VCMRF7| image:: _images/04VCM_R1117020_norm1.* :scale: 35 % """ __author__ = "Konstantin Klementiev" __date__ = "1 Mar 2012" import sys sys.path.append(r"c:\Alba\Ray-tracing\with Python") import numpy as np import xrt.plotter as xrtp import xrt.runner as xrtr import xrt.backends.shadow as shadow def main(): plot1 = xrtp.XYCPlot('star.03') plot1.caxis.offset = 6000 plot2 = xrtp.XYCPlot('star.04') plot2.caxis.offset = 6000 plot1.xaxis.limits = [-15, 15] plot1.yaxis.limits = [-15, 15] plot1.yaxis.factor *= -1 plot2.xaxis.limits = [-1, 1] plot2.yaxis.limits = [-1, 1] plot2.yaxis.factor *= -1 textPanel1 = plot1.fig.text( 0.89, 0.82, '', transform=plot1.fig.transFigure, size=14, color='r', ha='center') textPanel2 = plot2.fig.text( 0.89, 0.82, '', transform=plot2.fig.transFigure, size=14, color='r', ha='center') #========================================================================== threads = 4 #========================================================================== start01 = shadow.files_in_tmp_subdirs('start.01', threads) start04 = shadow.files_in_tmp_subdirs('start.04', threads) rmaj0 = 476597.0 shadow.modify_input(start04, ('R_MAJ', str(rmaj0))) angle = 4.7e-3 tIncidence = 90 - angle * 180 / np.pi shadow.modify_input( start01, ('T_INCIDENCE', str(tIncidence)), ('T_REFLECTION', str(tIncidence))) shadow.modify_input( start04, ('T_INCIDENCE', str(tIncidence)), ('T_REFLECTION', str(tIncidence))) rmirr0 = 744680. 
def plot_generator(): for rmirr in np.logspace(-1., 1., 7, base=1.5) * rmirr0: shadow.modify_input(start01, ('RMIRR', str(rmirr))) filename = 'VCM_R%07i' % rmirr filename03 = '03' + filename filename04 = '04' + filename plot1.title = filename03 plot2.title = filename04 plot1.saveName = [filename03 + '.pdf', filename03 + '.png'] plot2.saveName = [filename04 + '.pdf', filename04 + '.png'] # plot1.persistentName = filename03 + '.pickle' # plot2.persistentName = filename04 + '.pickle' textToSet = 'collimating\nmirror\n$R =$ %.1f km' % (rmirr * 1e-5) textPanel1.set_text(textToSet) textPanel2.set_text(textToSet) yield def after(): # import subprocess # subprocess.call(["python", "05-VFM-bending.py"], # cwd='/home/kklementiev/Alba/Ray-tracing/with Python/05-VFM-bending') pass xrtr.run_ray_tracing( [plot1, plot2], repeats=640, updateEvery=2, energyRange=[5998, 6002], generator=plot_generator, threads=threads, globalNorm=True, afterScript=after, backend='shadow') #this is necessary to use multiprocessing in Windows, otherwise the new Python #contexts cannot be initialized: if __name__ == '__main__': main()
kklmn/xrt
examples/withShadow/04_06/04_dE_VCM_bending.py
Python
mit
4,894
from django.db import models import warnings from django.utils import timezone import requests from image_cropping import ImageRatioField class CompMember(models.Model): """A member of compsoc""" class Meta: verbose_name = 'CompSoc Member' verbose_name_plural = 'CompSoc Members' index = models.IntegerField(blank=False, help_text="This field is present just for ordering members based on their posts. President = 2, VPs = 1, Gen. Sec. = 0, Everyone else = -1", default=-1) name = models.CharField(max_length=50, help_text='Enter your full name') image = models.ImageField(blank=False, upload_to='member_images/', help_text='Please select a display image for yourself. This is necessary.') cropping = ImageRatioField('image', '500x500') alumni = models.BooleanField(default=False, help_text='Are you an alumni?') role = models.CharField(max_length=100, help_text="Enter your post if you hold one. If not, enter 'Member'") batch_of = models.CharField(max_length=4, default='2015', help_text='Enter the year you will graduate') social_link = models.CharField(blank=True, max_length=256, help_text='Enter a link to your Facebook, Twitter, GitHub or any other social network profile. You can leave this blank if you wish!') def get_social_link(self): ''' Returns the social_link if present. Otherwise, sends javascript:void(0) ''' if self.social_link == '': return 'javascript:void(0)' else: return self.social_link def __str__(self): return self.name class Variable(models.Model): ##NOTE: This should not be used anymore def __str__(self): warnings.warn('''You are using a "General Variable". Stop doing that. This is bad design on Arjoonn's part so don't fall into the same trap. If you are using this for Orfik, that has already been fixed. If you are using this for logos, same thing. Over a few cycles this entire table will be removed. ''') return self.name name = models.CharField(max_length=100) time = models.DateTimeField() # Receive the pre_delete signal and delete the image associated with the model instance. from django.db.models.signals import pre_delete from django.dispatch.dispatcher import receiver @receiver(pre_delete, sender=CompMember) def compsoc_member_delete(sender, instance, **kwargs): # Pass false so ImageField doesn't save the model. instance.image.delete(False)
compsoc-ssc/compsocssc
general/models.py
Python
mit
2,517
import complexism as cx import complexism.agentbased.statespace as ss import epidag as dag dbp = cx.read_dbp_script(cx.load_txt('../scripts/SIR_BN.txt')) pc = dag.quick_build_parameter_core(cx.load_txt('../scripts/pSIR.txt')) dc = dbp.generate_model('M1', **pc.get_samplers()) ag = ss.StSpAgent('Helen', dc['Sus'], pc) model = cx.SingleIndividualABM('M1', ag) model.add_observing_attribute('State') print(cx.simulate(model, None, 0, 10, 1))
TimeWz667/Kamanian
example/OOP/O2.4 SS, Single agent model.py
Python
mit
446
from __future__ import division, print_function from abc import ABCMeta, abstractmethod import matplotlib as mpl mpl.use('TkAgg') from matplotlib.ticker import MaxNLocator, Formatter, Locator from matplotlib.widgets import Slider, Button import matplotlib.patches as patches import matplotlib.pyplot as plt from matplotlib.colors import LinearSegmentedColormap from tadtool.tad import GenomicRegion, sub_matrix_regions, sub_data_regions, \ data_array, insulation_index, sub_vector_regions, sub_regions, \ call_tads_insulation_index, directionality_index, call_tads_directionality_index, normalised_insulation_index import math import copy import numpy as np from bisect import bisect_left from future.utils import string_types try: import Tkinter as tk import tkFileDialog as filedialog except ImportError: import tkinter as tk from tkinter import filedialog class BasePlotter(object): __metaclass__ = ABCMeta def __init__(self, title): self._ax = None self.cax = None self.title = title @abstractmethod def _plot(self, region=None, **kwargs): raise NotImplementedError("Subclasses need to override _plot function") @abstractmethod def plot(self, region=None, **kwargs): raise NotImplementedError("Subclasses need to override plot function") @property def fig(self): return self._ax.figure @property def ax(self): if not self._ax: _, self._ax = plt.subplots() return self._ax @ax.setter def ax(self, value): self._ax = value class GenomeCoordFormatter(Formatter): """ Process axis tick labels to give nice representations of genomic coordinates """ def __init__(self, chromosome, display_scale=True): """ :param chromosome: :class:`~kaic.data.genomic.GenomicRegion` or string :param display_scale: Boolean Display distance scale at bottom right """ if isinstance(chromosome, GenomicRegion): self.chromosome = chromosome.chromosome else: self.chromosome = chromosome self.display_scale = display_scale def _format_val(self, x, prec_offset=0): if x == 0: oom_loc = 0 else: oom_loc = int(math.floor(math.log10(abs(x)))) view_range = self.axis.axes.get_xlim() oom_range = int(math.floor(math.log10(abs(view_range[1] - view_range[0])))) if oom_loc >= 3: return "{:.{prec}f}kb".format(x/1000, prec=max(0, 3 + prec_offset - oom_range)) return "{:.0f}b".format(x) def __call__(self, x, pos=None): """ Return label for tick at coordinate x. Relative position of ticks can be specified with pos. First tick gets chromosome name. """ s = self._format_val(x, prec_offset=1) if pos == 0 or x == 0: return "{}:{}".format(self.chromosome, s) return s def get_offset(self): """ Return information about the distances between tick bars and the size of the view window. Is called by matplotlib and displayed in lower right corner of plots. """ if not self.display_scale: return "" view_range = self.axis.axes.get_xlim() view_dist = abs(view_range[1] - view_range[0]) tick_dist = self.locs[2] - self.locs[1] minor_tick_dist = tick_dist/5 minor_tick_dist_str = self._format_val(minor_tick_dist, prec_offset=2) tick_dist_str = self._format_val(tick_dist, prec_offset=1) view_dist_str = self._format_val(view_dist) return "{}|{}|{}".format(minor_tick_dist_str, tick_dist_str, view_dist_str) class GenomeCoordLocator(MaxNLocator): """ Choose locations of genomic coordinate ticks on the plot axis. Behaves like default Matplotlib locator, except that it always places a tick at the start and the end of the window. 
""" def __call__(self): vmin, vmax = self.axis.get_view_interval() ticks = self.tick_values(vmin, vmax) # Make sure that first and last tick are the start # and the end of the genomic range plotted. If next # ticks are too close, remove them. ticks[0] = vmin ticks[-1] = vmax if ticks[1] - vmin < (vmax - vmin)/(self._nbins*3): ticks = np.delete(ticks, 1) if vmax - ticks[-2] < (vmax - vmin)/(self._nbins*3): ticks = np.delete(ticks, -2) return self.raise_if_exceeds(np.array(ticks)) class MinorGenomeCoordLocator(Locator): """ Choose locations of minor tick marks between major tick labels. Modification of the Matplotlib AutoMinorLocator, except that it uses the distance between 2nd and 3rd major mark as reference, instead of 2nd and 3rd. """ def __init__(self, n): self.ndivs = n def __call__(self): majorlocs = self.axis.get_majorticklocs() try: majorstep = majorlocs[2] - majorlocs[1] except IndexError: # Need at least two major ticks to find minor tick locations # TODO: Figure out a way to still be able to display minor # ticks without two major ticks visible. For now, just display # no ticks at all. majorstep = 0 if self.ndivs is None: if majorstep == 0: # TODO: Need a better way to figure out ndivs ndivs = 1 else: x = int(np.round(10 ** (np.log10(majorstep) % 1))) if x in [1, 5, 10]: ndivs = 5 else: ndivs = 4 else: ndivs = self.ndivs minorstep = majorstep / ndivs vmin, vmax = self.axis.get_view_interval() if vmin > vmax: vmin, vmax = vmax, vmin if len(majorlocs) > 0: t0 = majorlocs[1] tmin = ((vmin - t0) // minorstep + 1) * minorstep tmax = ((vmax - t0) // minorstep + 1) * minorstep locs = np.arange(tmin, tmax, minorstep) + t0 cond = np.abs((locs - t0) % majorstep) > minorstep / 10.0 locs = locs.compress(cond) else: locs = [] return self.raise_if_exceeds(np.array(locs)) class BasePlotter1D(BasePlotter): __metaclass__ = ABCMeta def __init__(self, title): BasePlotter.__init__(self, title=title) def plot(self, region=None, ax=None, **kwargs): if isinstance(region, string_types): region = GenomicRegion.from_string(region) if ax: self.ax = ax # set genome tick formatter self.ax.xaxis.set_major_formatter(GenomeCoordFormatter(region)) self.ax.xaxis.set_major_locator(GenomeCoordLocator(nbins=5)) self.ax.xaxis.set_minor_locator(MinorGenomeCoordLocator(n=5)) self.ax.set_title(self.title) self._plot(region, **kwargs) self.ax.set_xlim(region.start, region.end) return self.fig, self.ax def prepare_normalization(norm="lin", vmin=None, vmax=None): if isinstance(norm, mpl.colors.Normalize): norm.vmin = vmin norm.vmax = vmax return norm if norm == "log": return mpl.colors.LogNorm(vmin=vmin, vmax=vmax) elif norm == "lin": return mpl.colors.Normalize(vmin=vmin, vmax=vmax) else: raise ValueError("'{}'' not a valid normalization method.".format(norm)) class BasePlotterHic(object): __metaclass__ = ABCMeta def __init__(self, hic_matrix, regions=None, colormap='RdBu', norm="log", vmin=None, vmax=None, show_colorbar=True, blend_masked=False): if regions is None: for i in range(hic_matrix.shape[0]): regions.append(GenomicRegion(chromosome='', start=i, end=i)) self.regions = regions self.hic_matrix = hic_matrix self.colormap = copy.copy(mpl.cm.get_cmap(colormap)) if blend_masked: self.colormap.set_bad(self.colormap(0)) self._vmin = vmin self._vmax = vmax self.norm = prepare_normalization(norm=norm, vmin=vmin, vmax=vmax) self.colorbar = None self.slider = None self.show_colorbar = show_colorbar def add_colorbar(self, ax=None): ax = self.cax if ax is None else ax cmap_data = mpl.cm.ScalarMappable(norm=self.norm, 
cmap=self.colormap) cmap_data.set_array([self.vmin, self.vmax]) self.colorbar = plt.colorbar(cmap_data, cax=ax, orientation="vertical") @property def vmin(self): return self._vmin if self._vmin else np.nanmin(self.hic_matrix) @property def vmax(self): return self._vmax if self._vmax else np.nanmax(self.hic_matrix) class HicPlot(BasePlotter1D, BasePlotterHic): def __init__(self, hic_matrix, regions=None, title='', colormap='viridis', max_dist=None, norm="log", vmin=None, vmax=None, show_colorbar=True, blend_masked=False): BasePlotter1D.__init__(self, title=title) BasePlotterHic.__init__(self, hic_matrix, regions=regions, colormap=colormap, vmin=vmin, vmax=vmax, show_colorbar=show_colorbar, norm=norm, blend_masked=blend_masked) self.max_dist = max_dist self.hicmesh = None def _plot(self, region=None, cax=None): if region is None: raise ValueError("Cannot plot triangle plot for whole genome.") hm, sr = sub_matrix_regions(self.hic_matrix, self.regions, region) hm[np.tril_indices(hm.shape[0])] = np.nan # Remove part of matrix further away than max_dist if self.max_dist is not None: for i in range(hm.shape[0]): i_region = sr[i] for j in range(hm.shape[1]): j_region = sr[j] if j_region.start-i_region.end > self.max_dist: hm[i, j] = np.nan hm_masked = np.ma.MaskedArray(hm, mask=np.isnan(hm)) # prepare an array of the corner coordinates of the Hic-matrix # Distances have to be scaled by sqrt(2), because the diagonals of the bins # are sqrt(2)*len(bin_size) sqrt2 = math.sqrt(2) bin_coords = np.r_[[(x.start - 1) for x in sr], sr[-1].end]/sqrt2 X, Y = np.meshgrid(bin_coords, bin_coords) # rotatate coordinate matrix 45 degrees sin45 = math.sin(math.radians(45)) X_, Y_ = X*sin45 + Y*sin45, X*sin45 - Y*sin45 # shift x coords to correct start coordinate and center the diagonal directly on the # x-axis X_ -= X_[1, 0] - (sr[0].start - 1) Y_ -= .5*np.min(Y_) + .5*np.max(Y_) # create plot self.hicmesh = self.ax.pcolormesh(X_, Y_, hm_masked, cmap=self.colormap, norm=self.norm) # set limits and aspect ratio #self.ax.set_aspect(aspect="equal") ylim_max = 0.5*(region.end-region.start) if self.max_dist is not None and self.max_dist/2 < ylim_max: ylim_max = self.max_dist/2 self.ax.set_ylim(0, ylim_max) # remove y ticks self.ax.set_yticks([]) # hide background patch self.ax.patch.set_visible(False) if self.show_colorbar: self.add_colorbar(cax) def set_clim(self, vmin, vmax): self.hicmesh.set_clim(vmin=vmin, vmax=vmax) if self.colorbar is not None: self.colorbar.vmin = vmin self.colorbar.vmax = vmax self.colorbar.draw_all() class DataArrayPlot(BasePlotter1D): def __init__(self, data, window_sizes=None, regions=None, title='', midpoint=None, colormap='coolwarm_r', vmax=None, current_window_size=0, log_y=True): if regions is None: regions = [] for i in range(data.shape[1]): regions.append(GenomicRegion(chromosome='', start=i, end=i)) self.regions = regions BasePlotter1D.__init__(self, title=title) self.da = data if window_sizes is None: window_sizes = [] try: l = len(data) except TypeError: l = data.shape[0] for i in range(l): window_sizes.append(i) self.window_sizes = window_sizes self.colormap = colormap self.midpoint = midpoint self.mesh = None self.vmax = vmax self.window_size_line = None self.current_window_size = current_window_size self.log_y = log_y def _plot(self, region=None, cax=None): da_sub, regions_sub = sub_data_regions(self.da, self.regions, region) da_sub_masked = np.ma.MaskedArray(da_sub, mask=np.isnan(da_sub)) bin_coords = np.r_[[(x.start - 1) for x in regions_sub], regions_sub[-1].end] x, y = 
np.meshgrid(bin_coords, self.window_sizes) self.mesh = self.ax.pcolormesh(x, y, da_sub_masked, cmap=self.colormap, vmax=self.vmax) self.colorbar = plt.colorbar(self.mesh, cax=cax, orientation="vertical") self.window_size_line = self.ax.axhline(self.current_window_size, color='red') if self.log_y: self.ax.set_yscale("log") self.ax.set_ylim((np.nanmin(self.window_sizes), np.nanmax(self.window_sizes))) def set_clim(self, vmin, vmax): self.mesh.set_clim(vmin=vmin, vmax=vmax) if self.colorbar is not None: self.colorbar.vmin = vmin self.colorbar.vmax = vmax self.colorbar.draw_all() def update(self, window_size): self.window_size_line.set_ydata(window_size) class TADPlot(BasePlotter1D): def __init__(self, regions, title='', color='black'): BasePlotter1D.__init__(self, title=title) self.regions = regions self.color = color self.current_region = None def _plot(self, region=None, cax=None): self.current_region = region try: sr, start_ix, end_ix = sub_regions(self.regions, region) trans = self.ax.get_xaxis_transform() for r in sr: region_patch = patches.Rectangle( (r.start, .2), width=abs(r.end - r.start), height=.6, transform=trans, facecolor=self.color, edgecolor='white', linewidth=2. ) self.ax.add_patch(region_patch) except ValueError: pass self.ax.axis('off') def update(self, regions): self.regions = regions self.ax.cla() self.plot(region=self.current_region, ax=self.ax) class DataLinePlot(BasePlotter1D): def __init__(self, data, regions=None, title='', init_row=0, is_symmetric=False): BasePlotter1D.__init__(self, title=title) if regions is None: regions = [] for i in range(len(data)): regions.append(GenomicRegion(chromosome='', start=i, end=i)) self.init_row = init_row self.data = data self.sr = None self.da_sub = None self.regions = regions self.current_region = None self.line = None self.current_ix = init_row self.current_cutoff = None self.cutoff_line = None self.cutoff_line_mirror = None self.is_symmetric = is_symmetric def _new_region(self, region): self.current_region = region self.da_sub, self.sr = sub_data_regions(self.data, self.regions, region) def _plot(self, region=None, cax=None): self._new_region(region) bin_coords = [(x.start - 1) for x in self.sr] ds = self.da_sub[self.init_row] self.line, = self.ax.plot(bin_coords, ds) if not self.is_symmetric: self.current_cutoff = (self.ax.get_ylim()[1] - self.ax.get_ylim()[0]) / 2 + self.ax.get_ylim()[0] else: self.current_cutoff = self.ax.get_ylim()[1]/ 2 self.ax.axhline(0.0, linestyle='dashed', color='grey') self.cutoff_line = self.ax.axhline(self.current_cutoff, color='r') if self.is_symmetric: self.cutoff_line_mirror = self.ax.axhline(-1*self.current_cutoff, color='r') self.ax.set_ylim((np.nanmin(ds), np.nanmax(ds))) def update(self, ix=None, cutoff=None, region=None, update_canvas=True): if region is not None: self._new_region(region) if ix is not None and ix != self.current_ix: ds = self.da_sub[ix] self.current_ix = ix self.line.set_ydata(ds) self.ax.set_ylim((np.nanmin(ds), np.nanmax(ds))) if cutoff is None: if not self.is_symmetric: self.update(cutoff=(self.ax.get_ylim()[1]-self.ax.get_ylim()[0])/2 + self.ax.get_ylim()[0], update_canvas=False) else: self.update(cutoff=self.ax.get_ylim()[1] / 2, update_canvas=False) if update_canvas: self.fig.canvas.draw() if cutoff is not None and cutoff != self.current_cutoff: if self.is_symmetric: self.current_cutoff = abs(cutoff) else: self.current_cutoff = cutoff self.cutoff_line.set_ydata(self.current_cutoff) if self.is_symmetric: self.cutoff_line_mirror.set_ydata(-1*self.current_cutoff) if 
update_canvas: self.fig.canvas.draw() class TADtoolPlot(object): def __init__(self, hic_matrix, regions=None, data=None, window_sizes=None, norm='lin', max_dist=3000000, max_percentile=99.99, algorithm='insulation', matrix_colormap=None, data_colormap=None, log_data=True): self.hic_matrix = hic_matrix if regions is None: regions = [] for i in range(hic_matrix.shape[0]): regions.append(GenomicRegion(chromosome='', start=i, end=i)) self.regions = regions self.norm = norm self.fig = None self.max_dist = max_dist self.algorithm = algorithm self.svmax = None self.min_value = np.nanmin(self.hic_matrix[np.nonzero(self.hic_matrix)]) self.min_value_data = None self.hic_plot = None self.tad_plot = None self.data_plot = None self.line_plot = None self.sdata = None self.data_ax = None self.line_ax = None self.da = None self.ws = None self.current_window_size = None self.window_size_text = None self.tad_cutoff_text = None self.max_percentile = max_percentile self.tad_regions = None self.current_da_ix = None self.button_save_tads = None self.button_save_vector = None self.button_save_matrix = None self.log_data = log_data if algorithm == 'insulation': self.tad_algorithm = insulation_index self.tad_calling_algorithm = call_tads_insulation_index self.is_symmetric = False if matrix_colormap is None: self.matrix_colormap = LinearSegmentedColormap.from_list('myreds', ['white', 'red']) if data_colormap is None: self.data_plot_color = 'plasma' elif algorithm == 'ninsulation': self.tad_algorithm = normalised_insulation_index self.tad_calling_algorithm = call_tads_insulation_index self.is_symmetric = True if matrix_colormap is None: self.matrix_colormap = LinearSegmentedColormap.from_list('myreds', ['white', 'red']) if data_colormap is None: self.data_plot_color = LinearSegmentedColormap.from_list('myreds', ['blue', 'white', 'red']) elif algorithm == 'directionality': self.tad_algorithm = directionality_index self.tad_calling_algorithm = call_tads_directionality_index self.is_symmetric = True if matrix_colormap is None: self.matrix_colormap = LinearSegmentedColormap.from_list('myreds', ['white', 'red']) if data_colormap is None: self.data_plot_color = LinearSegmentedColormap.from_list('myreds', ['blue', 'white', 'red']) if data is None: self.da, self.ws = data_array(hic_matrix=self.hic_matrix, regions=self.regions, tad_method=self.tad_algorithm, window_sizes=window_sizes) else: self.da = data if window_sizes is None: raise ValueError("window_sizes parameter cannot be None when providing data!") self.ws = window_sizes def vmax_slider_update(self, val): self.hic_plot.set_clim(self.min_value, val) def data_slider_update(self, val): if self.is_symmetric: self.data_plot.set_clim(-1*val, val) else: self.data_plot.set_clim(self.min_value_data, val) def on_click_save_tads(self, event): tk.Tk().withdraw() # Close the root window save_path = filedialog.asksaveasfilename() if save_path is not None: with open(save_path, 'w') as o: for region in self.tad_regions: o.write("%s\t%d\t%d\n" % (region.chromosome, region.start-1, region.end)) def on_click_save_vector(self, event): tk.Tk().withdraw() # Close the root window save_path = filedialog.asksaveasfilename() if save_path is not None: da_sub = self.da[self.current_da_ix] with open(save_path, 'w') as o: for i, region in enumerate(self.regions): o.write("%s\t%d\t%d\t.\t%e\n" % (region.chromosome, region.start-1, region.end, da_sub[i])) def on_click_save_matrix(self, event): tk.Tk().withdraw() # Close the root window save_path = filedialog.asksaveasfilename() if save_path is not 
None: with open(save_path, 'w') as o: # write regions for i, region in enumerate(self.regions): o.write("%s:%d-%d" % (region.chromosome, region.start-1, region.end)) if i < len(self.regions)-1: o.write("\t") else: o.write("\n") # write matrix n_rows = self.da.shape[0] n_cols = self.da.shape[1] for i in range(n_rows): window_size = self.ws[i] o.write("%d\t" % window_size) for j in range(n_cols): o.write("%e" % self.da[i, j]) if j < n_cols-1: o.write("\t") else: o.write("\n") def plot(self, region=None): # set up plotting grid self.fig = plt.figure(figsize=(10, 10)) # main plots grid_size = (32, 15) hic_vmax_slider_ax = plt.subplot2grid(grid_size, (0, 0), colspan=13) hic_ax = plt.subplot2grid(grid_size, (1, 0), rowspan=9, colspan=13) hp_cax = plt.subplot2grid(grid_size, (1, 14), rowspan=9, colspan=1) tad_ax = plt.subplot2grid(grid_size, (10, 0), rowspan=1, colspan=13, sharex=hic_ax) line_ax = plt.subplot2grid(grid_size, (12, 0), rowspan=6, colspan=13, sharex=hic_ax) line_cax = plt.subplot2grid(grid_size, (12, 13), rowspan=6, colspan=2) data_vmax_slider_ax = plt.subplot2grid(grid_size, (19, 0), colspan=13) data_ax = plt.subplot2grid(grid_size, (20, 0), rowspan=9, colspan=13, sharex=hic_ax) da_cax = plt.subplot2grid(grid_size, (20, 14), rowspan=9, colspan=1) # buttons save_tads_ax = plt.subplot2grid(grid_size, (31, 0), rowspan=1, colspan=4) self.button_save_tads = Button(save_tads_ax, 'Save TADs') self.button_save_tads.on_clicked(self.on_click_save_tads) save_vector_ax = plt.subplot2grid(grid_size, (31, 5), rowspan=1, colspan=4) self.button_save_vector = Button(save_vector_ax, 'Save current values') self.button_save_vector.on_clicked(self.on_click_save_vector) save_matrix_ax = plt.subplot2grid(grid_size, (31, 10), rowspan=1, colspan=4) self.button_save_matrix = Button(save_matrix_ax, 'Save matrix') self.button_save_matrix.on_clicked(self.on_click_save_matrix) # add subplot content max_value = np.nanpercentile(self.hic_matrix, self.max_percentile) init_value = .2*max_value # HI-C VMAX SLIDER self.svmax = Slider(hic_vmax_slider_ax, 'vmax', self.min_value, max_value, valinit=init_value, color='grey') self.svmax.on_changed(self.vmax_slider_update) # HI-C self.hic_plot = HicPlot(self.hic_matrix, self.regions, max_dist=self.max_dist, norm=self.norm, vmax=init_value, vmin=self.min_value, colormap=self.matrix_colormap) self.hic_plot.plot(region, ax=hic_ax, cax=hp_cax) # generate data array self.min_value_data = np.nanmin(self.da[np.nonzero(self.da)]) max_value_data = np.nanpercentile(self.da, self.max_percentile) init_value_data = .5*max_value_data # LINE PLOT da_ix = int(self.da.shape[0]/2) self.current_da_ix = da_ix self.line_plot = DataLinePlot(self.da, regions=self.regions, init_row=da_ix, is_symmetric=self.is_symmetric) self.line_plot.plot(region, ax=line_ax) self.line_ax = line_ax # line info self.current_window_size = self.ws[da_ix] line_cax.text(.1, .8, 'Window size', fontweight='bold') self.window_size_text = line_cax.text(.3, .6, str(self.current_window_size)) line_cax.text(.1, .4, 'TAD cutoff', fontweight='bold') self.tad_cutoff_text = line_cax.text(.3, .2, "%.5f" % self.line_plot.current_cutoff) line_cax.axis('off') # TAD PLOT self.tad_regions = self.tad_calling_algorithm(self.da[da_ix], self.line_plot.current_cutoff, self.regions) self.tad_plot = TADPlot(self.tad_regions) self.tad_plot.plot(region=region, ax=tad_ax) # DATA ARRAY self.data_plot = DataArrayPlot(self.da, self.ws, self.regions, vmax=init_value_data, colormap=self.data_plot_color, current_window_size=self.ws[da_ix], 
log_y=self.log_data) self.data_plot.plot(region, ax=data_ax, cax=da_cax) # DATA ARRAY SLIDER if self.is_symmetric: self.sdata = Slider(data_vmax_slider_ax, 'vmax', 0.0, max_value_data, valinit=init_value_data, color='grey') else: self.sdata = Slider(data_vmax_slider_ax, 'vmax', self.min_value_data, max_value_data, valinit=init_value_data, color='grey') self.sdata.on_changed(self.data_slider_update) self.data_slider_update(init_value_data) # clean up hic_ax.xaxis.set_visible(False) line_ax.xaxis.set_visible(False) # enable hover self.data_ax = data_ax cid = self.fig.canvas.mpl_connect('button_press_event', self.on_click) return self.fig, (hic_vmax_slider_ax, hic_ax, line_ax, data_ax, hp_cax, da_cax) def on_click(self, event): if event.inaxes == self.data_ax or event.inaxes == self.line_ax: if event.inaxes == self.data_ax: ws_ix = bisect_left(self.ws, event.ydata) self.current_window_size = self.ws[ws_ix] self.current_da_ix = ws_ix self.data_plot.update(window_size=self.ws[ws_ix]) self.line_plot.update(ix=ws_ix, update_canvas=False) self.tad_cutoff_text.set_text("%.5f" % self.line_plot.current_cutoff) self.window_size_text.set_text(str(self.current_window_size)) elif event.inaxes == self.line_ax: if self.is_symmetric: self.line_plot.update(cutoff=abs(event.ydata), update_canvas=False) else: self.line_plot.update(cutoff=abs(event.ydata), update_canvas=False) self.tad_cutoff_text.set_text("%.5f" % self.line_plot.current_cutoff) # update TADs self.tad_regions = self.tad_calling_algorithm(self.da[self.current_da_ix], self.line_plot.current_cutoff, self.regions) self.tad_plot.update(self.tad_regions) self.fig.canvas.draw()
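# Hedged usage sketch (not part of the original module; the matrix file, bin
# size, window sizes and region string are assumptions, not tadtool
# documentation). TADtoolPlot wires together the Hi-C triangle plot, the
# parameter-space heatmap and the interactive widgets defined above.
if __name__ == '__main__':
    hic = np.loadtxt('hic_matrix.txt')  # hypothetical square Hi-C matrix
    bins = [GenomicRegion(chromosome='chr1', start=i * 10000 + 1,
                          end=(i + 1) * 10000)
            for i in range(hic.shape[0])]
    tadtool_plot = TADtoolPlot(hic, regions=bins, norm='lin',
                               max_dist=1000000,
                               window_sizes=[30000, 50000, 100000, 200000])
    fig, axes = tadtool_plot.plot('chr1:1-2000000')
    plt.show()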
vaquerizaslab/tadtool
tadtool/plot.py
Python
mit
28,848
import cProfile from pathlib import Path def main(args, results_dir: Path, scenario_dir: Path): try: scenario_dir.mkdir(parents=True) except FileExistsError: pass cProfile.runctx( 'from dmprsim.scenarios.python_profile import main;' 'main(args, results_dir, scenario_dir)', globals=globals(), locals=locals(), filename=str(results_dir / 'profile.pstats'), )
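# Illustrative follow-up (not part of the original module): the .pstats file
# written by cProfile.runctx above can be inspected with the standard-library
# pstats module:
#
#   import pstats
#   stats = pstats.Stats(str(results_dir / 'profile.pstats'))
#   stats.sort_stats('cumulative').print_stats(20)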
reisub-de/dmpr-simulator
dmprsim/analyze/profile_core.py
Python
mit
432
from django.http import HttpResponse from django.core.servers.basehttp import FileWrapper from django.contrib.auth.models import User from django.shortcuts import render_to_response, redirect, get_object_or_404 from requests import get from urllib import urlretrieve from common.models import Repository from common.util import get_context def cgit_url(user_name, repo_name, method, path, query=None): url = 'http://localhost:8080/view' if method == 'summary': base = '%s/%s/%s' %(url, user_name, repo_name) else: base = '%s/%s/%s/%s' %(url, user_name, repo_name, method) if path is not None: base = '%s/%s' %(base, path) if query is not None and len(query)>1: base = "%s?%s" % (base, query) print base return base def cumulative_path(path): if path is None or len(path) == 0: return path c = [path[0]] for part in path[1:]: c.append('%s/%s'%(c[-1], part)) return c def view_index(request): return redirect('index') def user_index(request, user_name): return redirect('repo_list', user_name) def repo_plain(request, user_name, repo_name, path, prefix='plain'): user = request.user owner = get_object_or_404(User, username=user_name) repo = get_object_or_404(Repository, owner=owner, name=repo_name) collaborators = repo.collaborators.all() access = repo.user_access(user) if access is None: return HttpResponse('Not authorized', status=401) query = request.GET.urlencode() print query url = cgit_url(user_name, repo_name, prefix, path, query) (fname, info) = urlretrieve(url) response = HttpResponse(FileWrapper(open(fname)), content_type='text/plain') return response def repo_snapshot(request, user_name, repo_name, path): user = request.user owner = get_object_or_404(User, username=user_name) repo = get_object_or_404(Repository, owner=owner, name=repo_name) collaborators = repo.collaborators.all() access = repo.user_access(user) if access is None: return HttpResponse('Not authorized', status=401) query = request.GET.urlencode() filename = path.split('/')[-1] url = cgit_url(user_name, repo_name, 'snapshot', path, query) (fname, info) = urlretrieve(url) response = HttpResponse(FileWrapper(open(fname)), content_type='application/force-download') response['Content-Disposition'] = 'attachment; filename="%s"' % filename return response def repo_browse(request, user_name, repo_name, method='summary', path=None): user = request.user owner = get_object_or_404(User, username=user_name) repo = get_object_or_404(Repository, owner=owner, name=repo_name) collaborators = repo.collaborators.all() access = repo.user_access(user) if access is None: return HttpResponse('Not authorized', status=401) commit_id = request.GET.get('id') q = request.GET.get('q', '') qtype = request.GET.get('qt', 'grep') messages = { 'grep' : 'Log Message', 'author': 'Author', 'committer' : 'Committer', 'range' : 'Range' } search_text = messages.get(qtype, messages['grep']) if method == 'tree': file_path = path.split('/') path_parts = cumulative_path(file_path) file_path = zip(file_path, path_parts) else: file_path = None query = request.GET.urlencode() url = cgit_url(user_name, repo_name, method, path, query) text = get(url) context = get_context(request, {'owner': owner, 'repo_html':text.text, 'repo':repo, 'access':access, 'id':commit_id, 'method':method, 'q':q, 'qtype':qtype, 'search_text':search_text, 'file_path':file_path}) return render_to_response('viewer/repo_view.html', context)
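# Illustrative note (not part of the original module): cumulative_path() turns
# a split URL path into its running prefixes, which repo_browse() zips with the
# parts to build breadcrumb links:
#
#   cumulative_path(['a', 'b', 'c'])  ->  ['a', 'a/b', 'a/b/c']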
vault/bugit
viewer/views.py
Python
mit
3,804
""" The MIT License (MIT) Copyright (c) 2016 Louis-Philippe Querel [email protected] Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ from utility.abstract_override import AbstractOverride class JdkOverride(AbstractOverride): def _get_override_format(self): return 'JAVA_HOME="%s"' def _get_default_format(self): return '' def __init__(self, *args): AbstractOverride.__init__(self, *args) self.name = "JDK"
louisq/staticguru
utility/jdk_override.py
Python
mit
1,445
# -*- coding: utf-8 -*- """ Created on Thu Sep 21 16:29:34 2017 @author: ishort """ import math """ procedure to generate Gaussian of unit area when passed a FWHM""" #IDL: PRO GAUSS2,FWHM,LENGTH,NGAUS def gauss2(fwhm, length): #length=length*1l & FWHM=FWHM*1l #NGAUS=FLTARR(LENGTH) ngaus = [0.0 for i in range(length)] #This expression for CHAR comes from requiring f(x=0.5*FWHM)=0.5*f(x=0): #CHAR=-1d0*ALOG(0.5d0)/(0.5d0*0.5d0*FWHM*FWHM) char = -1.0 * math.log(0.5) / (0.5*0.5*fwhm*fwhm) #This expression for AMP (amplitude) comes from requiring that the #area under the gaussian is unity: #AMP=SQRT(CHAR/PI) amp = math.sqrt(char/math.pi) #FOR CNT=0l,(LENGTH-1) DO BEGIN # X=(CNT-LENGTH/2)*1.d0 # NGAUS(CNT)=AMP*EXP(-CHAR*X^2) #ENDFOR for cnt in range(length): x = 1.0 * (cnt - length/2) ngaus[cnt] = amp * math.exp(-1.0*char*x*x) return ngaus
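# Illustrative check (not part of the original module): by construction the
# kernel has unit area, and it falls to half its peak height at +/- FWHM/2
# from the centre.
if __name__ == "__main__":
    g = gauss2(10.0, 100)     # FWHM of 10 samples, 100-point kernel
    print(sum(g))             # ~1.0 (unit area)
    print(g[55] / g[50])      # 0.5: half height at FWHM/2 from the centre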
sevenian3/ChromaStarPy
Gauss2.py
Python
mit
996
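# A brief usage sketch for gauss2() above, checking the unit-area property the
# comments promise: with unit sample spacing the discrete sum approximates the
# continuous integral, so it should come out close to 1.0. The FWHM and window
# length are illustrative values.
ngaus = gauss2(5.0, 101)
print(sum(ngaus))   # ~1.0 (unit area by construction of AMP)
print(max(ngaus))   # peak amplitude, AMP = sqrt(CHAR/pi)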
from . import config from django.shortcuts import render from mwoauth import ConsumerToken, Handshaker, tokens def requests_handshaker(): consumer_key = config.OAUTH_CONSUMER_KEY consumer_secret = config.OAUTH_CONSUMER_SECRET consumer_token = ConsumerToken(consumer_key, consumer_secret) return Handshaker("https://meta.wikimedia.org/w/index.php", consumer_token) def get_username(request): handshaker = requests_handshaker() if 'access_token_key' in request.session: access_key = request.session['access_token_key'].encode('utf-8') access_secret = request.session['access_token_secret'].encode('utf-8') access_token = tokens.AccessToken(key=access_key, secret=access_secret) return handshaker.identify(access_token)['username'] else: return None
harej/requestoid
authentication.py
Python
mit
819
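# A hedged sketch of the OAuth steps that would populate the session keys read
# by get_username() above, using mwoauth's initiate()/complete() handshake API.
# The view names, redirect targets, and session layout here are assumptions,
# not part of the original module.
from django.shortcuts import redirect

def login(request):
    handshaker = requests_handshaker()
    redirect_url, request_token = handshaker.initiate()
    request.session['request_token_key'] = request_token.key
    request.session['request_token_secret'] = request_token.secret
    return redirect(redirect_url)

def callback(request):
    handshaker = requests_handshaker()
    request_token = tokens.RequestToken(
        key=request.session['request_token_key'],
        secret=request.session['request_token_secret'])
    access_token = handshaker.complete(request_token,
                                       request.META['QUERY_STRING'])
    request.session['access_token_key'] = access_token.key
    request.session['access_token_secret'] = access_token.secret
    return redirect('/')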
# -*- coding: utf-8 -*- """ Request Management System - Controllers """ prefix = request.controller resourcename = request.function if prefix not in deployment_settings.modules: session.error = T("Module disabled!") redirect(URL(r=request, c="default", f="index")) # Options Menu (available in all Functions' Views) menu = [ [T("Home"), False, URL(r=request, f="index")], [T("Requests"), False, URL(r=request, f="req"), [ [T("List"), False, URL(r=request, f="req")], [T("Add"), False, URL(r=request, f="req", args="create")], # @ToDo Search by priority, status, location #[T("Search"), False, URL(r=request, f="req", args="search")], ]], [T("All Requested Items"), False, URL(r=request, f="ritem")], ] if session.rcvars: if "hms_hospital" in session.rcvars: hospital = db.hms_hospital query = (hospital.id == session.rcvars["hms_hospital"]) selection = db(query).select(hospital.id, hospital.name, limitby=(0, 1)).first() if selection: menu_hospital = [ [selection.name, False, URL(r=request, c="hms", f="hospital", args=[selection.id])] ] menu.extend(menu_hospital) if "cr_shelter" in session.rcvars: shelter = db.cr_shelter query = (shelter.id == session.rcvars["cr_shelter"]) selection = db(query).select(shelter.id, shelter.name, limitby=(0, 1)).first() if selection: menu_shelter = [ [selection.name, False, URL(r=request, c="cr", f="shelter", args=[selection.id])] ] menu.extend(menu_shelter) response.menu_options = menu def index(): """ Module's Home Page Default to the rms_req list view. """ request.function = "req" request.args = [] return req() #module_name = deployment_settings.modules[prefix].name_nice #response.title = module_name #return dict(module_name=module_name, a=1) def req(): """ RESTful CRUD controller """ resourcename = request.function # check again in case we're coming from index() tablename = "%s_%s" % (prefix, resourcename) table = db[tablename] # Pre-processor def prep(r): response.s3.cancel = r.here() if r.representation in shn_interactive_view_formats and r.method != "delete": # Don't send the locations list to client (pulled by AJAX instead) r.table.location_id.requires = IS_NULL_OR(IS_ONE_OF_EMPTY(db, "gis_location.id")) #if r.method == "create" and not r.component: # listadd arrives here as method=None if not r.component: table.datetime.default = request.utcnow table.person_id.default = s3_logged_in_person() # @ToDo Default the Organisation too return True response.s3.prep = prep # Post-processor def postp(r, output): if r.representation in shn_interactive_view_formats: #if r.method == "create" and not r.component: # listadd arrives here as method=None if r.method != "delete" and not r.component: # Redirect to the Assessments tabs after creation r.next = r.other(method="ritem", record_id=s3xrc.get_session(prefix, resourcename)) # Custom Action Buttons if not r.component: response.s3.actions = [ dict(label=str(T("Open")), _class="action-btn", url=str(URL(r=request, args=["[id]", "update"]))), dict(label=str(T("Items")), _class="action-btn", url=str(URL(r=request, args=["[id]", "ritem"]))), ] return output response.s3.postp = postp s3xrc.model.configure(table, #listadd=False, #@todo: List add is causing errors with JS - FIX editable=True) return s3_rest_controller(prefix, resourcename, rheader=shn_rms_req_rheader) def shn_rms_req_rheader(r): """ Resource Header for Requests """ if r.representation == "html": if r.name == "req": req_record = r.record if req_record: _next = r.here() _same = r.same() try: location = db(db.gis_location.id == req_record.location_id).select(limitby=(0, 
1)).first() location_represent = shn_gis_location_represent(location.id) except: location_represent = None rheader_tabs = shn_rheader_tabs( r, [(T("Edit Details"), None), (T("Items"), "ritem"), ] ) rheader = DIV( TABLE( TR( TH( T("Message") + ": "), TD(req_record.message, _colspan=3) ), TR( TH( T("Time of Request") + ": "), req_record.datetime, TH( T( "Location") + ": "), location_represent, ), TR( TH( T("Priority") + ": "), req_record.priority, TH( T("Document") + ": "), document_represent(req_record.document_id) ), ), rheader_tabs ) return rheader return None def ritem(): """ RESTful CRUD controller """ tablename = "%s_%s" % (prefix, resourcename) table = db[tablename] s3xrc.model.configure(table, insertable=False) return s3_rest_controller(prefix, resourcename) def store_for_req(): store_table = None return dict(store_table = store_table)
ptressel/sahana-eden-madpub
controllers/rms.py
Python
mit
6,228
import array import numbers real_types = [numbers.Real] int_types = [numbers.Integral] iterable_types = [set, list, tuple, array.array] try: import numpy except ImportError: pass else: real_types.extend([numpy.float32, numpy.float64]) int_types.extend([numpy.int32, numpy.int64]) iterable_types.append(numpy.ndarray) # use these with isinstance to test for various types that include builtins # and numpy types (if numpy is available) real_types = tuple(real_types) int_types = tuple(int_types) iterable_types = tuple(iterable_types)
DailyActie/Surrogate-Model
01-codes/OpenMDAO-Framework-dev/openmdao.util/src/openmdao/util/typegroups.py
Python
mit
558
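# A minimal usage sketch for the type groups above. isinstance() accepts a
# tuple of types, so one check covers builtins and (when available) numpy
# scalars. Integral is tested first because numbers.Integral is itself a
# subclass of numbers.Real.
def describe(value):
    if isinstance(value, int_types):
        return 'integral'
    if isinstance(value, real_types):
        return 'real'
    if isinstance(value, iterable_types):
        return 'iterable'
    return 'other'

print(describe(3))       # integral
print(describe(3.5))     # real
print(describe((1, 2)))  # iterable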
#Attribute set in both superclass and subclass class C(object): def __init__(self): self.var = 0 class D(C): def __init__(self): self.var = 1 # self.var will be overwritten C.__init__(self) #Attribute set in both superclass and subclass class E(object): def __init__(self): self.var = 0 # self.var will be overwritten class F(E): def __init__(self): E.__init__(self) self.var = 1
github/codeql
python/ql/test/query-tests/Classes/overwriting-attribute/overwriting_attribute.py
Python
mit
451
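# A minimal sketch of the fix the comments above point at: call the superclass
# initializer first, then set the subclass attribute, so neither assignment is
# silently overwritten. Class names here are hypothetical.
class Base(object):
    def __init__(self):
        self.var = 0

class Fixed(Base):
    def __init__(self):
        Base.__init__(self)  # superclass default first...
        self.var = 1         # ...then the subclass value, which survives

assert Fixed().var == 1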
import pymongo

def connect():
    '''
    Create the connection to the local MongoDB instance and return a handle
    to the 'GtownTwitter_PROD' database (False on failure); no collections
    are created here.
    '''
    try:
        # Create the connection to the local host
        conn = pymongo.MongoClient()
        print 'MongoDB Connection Successful'
    except pymongo.errors.ConnectionFailure, err:
        print 'MongoDB Connection Unsuccessful'
        return False

    # This is the name of the database - 'GtownTwitter'
    db = conn['GtownTwitter_PROD']
    return db
Macemann/Georgetown-Capstone
app/db/connect.py
Python
mit
493
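# A brief usage sketch for connect() above, in the same Python 2 / PyMongo 2.x
# style as the module. The collection name and document are hypothetical.
db = connect()
if db:
    tweets = db['tweets']              # collections are created lazily
    tweets.insert({'text': 'hello'})   # PyMongo 2.x-style insert
    print tweets.count()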
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2016, Jianfeng Chen <[email protected]>
# vim: set ts=4 sts=4 sw=4 expandtab smartindent:
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from __future__ import division


def dist(a, b):
    # Squared Euclidean distance between points a and b.
    return sum((i-j)**2 for i, j in zip(a, b))


def GD(PF0, PFc):
    # Generational Distance of the obtained front PFc against the reference
    # front PF0: the root of the summed minimum squared distances, averaged
    # over the number of points in PFc.
    up = 0
    for i in PFc:
        up += min([dist(i, j) for j in PF0])
    return up**0.5 / (len(PFc))
Ginfung/FSSE
Metrics/gd.py
Python
mit
1,445
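# A tiny worked example for GD() above. dist() returns the *squared* Euclidean
# distance, so with reference front PF0 and a one-point approximation PFc the
# result is sqrt(sum of minimum squared distances) / len(PFc). Values are
# illustrative.
PF0 = [(0, 0), (1, 1)]
PFc = [(0, 1)]
# dist((0, 1), (0, 0)) == 1 and dist((0, 1), (1, 1)) == 1, so the minimum is 1
print(GD(PF0, PFc))  # sqrt(1) / 1 == 1.0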
import unittest from unittest import skip from decimal import Decimal from cnab240 import errors from cnab240.bancos import itau from tests.data import get_itau_data_from_file class TestRegistro(unittest.TestCase): def setUp(self): itau_data = get_itau_data_from_file() self.header_arquivo = itau_data['header_arquivo'] self.seg_p = itau_data['seg_p1'] self.seg_p_str = itau_data['seg_p1_str'] self.seg_q = itau_data['seg_q1'] self.seg_q_str = itau_data['seg_q1_str'] def test_leitura_campo_num_decimal(self): self.assertEqual(self.seg_p.valor_titulo, Decimal('100.00')) def test_escrita_campo_num_decimal(self): # aceitar somente tipo Decimal with self.assertRaises(errors.TipoError): self.seg_p.valor_titulo = 10.0 with self.assertRaises(errors.TipoError): self.seg_p.valor_titulo = '' # Testa se as casas decimais estao sendo verificadas with self.assertRaises(errors.NumDecimaisError): self.seg_p.valor_titulo = Decimal('100.2') with self.assertRaises(errors.NumDecimaisError): self.seg_p.valor_titulo = Decimal('1001') with self.assertRaises(errors.NumDecimaisError): self.seg_p.valor_titulo = Decimal('1.000') # verifica se o numero de digitos esta sendo verificado with self.assertRaises(errors.NumDigitosExcedidoError): self.seg_p.valor_titulo = Decimal('10000000008100.21') # armazemamento correto de um decimal self.seg_p.valor_titulo = Decimal('2.13') self.assertEqual(self.seg_p.valor_titulo, Decimal('2.13')) def test_leitura_campo_num_int(self): self.assertEqual(self.header_arquivo.controle_banco, 341) def test_escrita_campo_num_int(self): # aceitar somente inteiros (int e long) with self.assertRaises(errors.TipoError): self.header_arquivo.controle_banco = 10.0 with self.assertRaises(errors.TipoError): self.header_arquivo.controle_banco = '' # verifica se o numero de digitos esta sendo verificado with self.assertRaises(errors.NumDigitosExcedidoError): self.header_arquivo.controle_banco = 12345678234567890234567890 with self.assertRaises(errors.NumDigitosExcedidoError): self.header_arquivo.controle_banco = 1234 # verifica valor armazenado self.header_arquivo.controle_banco = 5 self.assertEqual(self.header_arquivo.controle_banco, 5) def test_leitura_campo_alfa(self): self.assertEqual(self.header_arquivo.cedente_nome, 'TRACY TECNOLOGIA LTDA ME') @skip def test_escrita_campo_alfa(self): # Testa que serao aceitos apenas unicode objects with self.assertRaises(errors.TipoError): self.header_arquivo.cedente_nome = 'tracy' # Testa que strings mais longas que obj.digitos nao serao aceitas with self.assertRaises(errors.NumDigitosExcedidoError): self.header_arquivo.cedente_convenio = '123456789012345678901' # Testa que o valor atribuido foi guardado no objeto self.header_arquivo.cedente_nome = 'tracy' self.assertEqual(self.header_arquivo.cedente_nome, 'tracy') def test_fromdict(self): header_dict = self.header_arquivo.todict() header_arquivo = itau.registros.HeaderArquivo(**header_dict) self.assertEqual(header_arquivo.cedente_nome, 'TRACY TECNOLOGIA LTDA ME') self.assertEqual(header_arquivo.nome_do_banco, 'BANCO ITAU SA') def test_necessario(self): self.assertTrue(self.seg_p) seg_p2 = itau.registros.SegmentoP() self.assertFalse(seg_p2.necessario()) seg_p2.controle_banco = 33 self.assertFalse(seg_p2.necessario()) seg_p2.vencimento_titulo = 10102012 self.assertTrue(seg_p2.necessario()) def test_unicode(self): def unicode_test(seg_instance, seg_str): seg_gen_str = str(seg_instance) self.assertEqual(len(seg_gen_str), 240) self.assertEqual(len(seg_str), 240) self.assertEqual(seg_gen_str, seg_str) 
unicode_test(self.seg_p, self.seg_p_str) unicode_test(self.seg_q, self.seg_q_str) if __name__ == '__main__': unittest.main()
Trust-Code/python-cnab
tests/test_registro.py
Python
mit
4,409
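# A hedged sketch of the validation behavior the tests above exercise: CNAB240
# numeric fields accept only Decimal values with the exact declared precision,
# raising TipoError / NumDecimaisError otherwise. The field value is
# illustrative.
from decimal import Decimal
from cnab240.bancos import itau

seg_p = itau.registros.SegmentoP()
seg_p.valor_titulo = Decimal('100.00')    # two decimal places: accepted
# seg_p.valor_titulo = 100.0              # not a Decimal: raises TipoError
# seg_p.valor_titulo = Decimal('100.2')   # wrong precision: NumDecimaisError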
#!/usr/bin/env python3
#
# Copyright (c) 2017 Nick Douma <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.

from argparse import ArgumentParser, ArgumentTypeError
import datetime
import json
import re
import urllib.error
import urllib.parse
import urllib.request
import sys

ISO8601 = r"^(\d{4})-?(\d{2})-?(\d{2})?[T ]?(\d{2}):?(\d{2}):?(\d{2})"


def iso8601_to_unix_timestamp(value):
    try:
        return int(value)
    except ValueError:
        pass

    matches = re.match(ISO8601, value)
    if not matches:
        raise ArgumentTypeError("Argument is not a valid UNIX or ISO8601 "
                                "timestamp.")
    # Build the datetime first, then truncate its UNIX timestamp to an int
    # (calling int() on the datetime object itself would raise a TypeError).
    return int(datetime.datetime(
        *[int(m) for m in matches.groups()]).timestamp())


def hex_value(value):
    value = value.replace("#", "")
    if not re.match(r"^[a-f0-9]{6}$", value):
        raise ArgumentTypeError("Argument is not a valid hex value.")
    return value


parser = ArgumentParser(description="Send notifications using Slack")
parser.add_argument("--webhook-url", help="Webhook URL.", required=True)
parser.add_argument("--channel", help="Channel to post to (prefixed with #), "
                    "or a specific user (prefixed with @).")
parser.add_argument("--username", help="Username to post as")
parser.add_argument("--title", help="Notification title.")
parser.add_argument("--title_link", help="Notification title link.")
parser.add_argument("--color", help="Sidebar color (as a hex value).",
                    type=hex_value)
parser.add_argument("--ts", help="Unix timestamp or ISO8601 timestamp "
                    "(will be converted to Unix timestamp).",
                    type=iso8601_to_unix_timestamp)
parser.add_argument("message", help="Notification message.")

args = parser.parse_args()

message = {}
for param in ["channel", "username"]:
    value = getattr(args, param)
    if value:
        message[param] = value

attachment = {}
for param in ["title", "title_link", "color", "ts", "message"]:
    value = getattr(args, param)
    if value:
        attachment[param] = value

attachment['fallback'] = attachment['message']
attachment['text'] = attachment['message']
del attachment['message']

message['attachments'] = [attachment]

payload = {"payload": json.dumps(message)}

try:
    parameters = urllib.parse.urlencode(payload).encode('UTF-8')
    url = urllib.request.Request(args.webhook_url, parameters)
    responseData = urllib.request.urlopen(url).read()
except urllib.error.HTTPError as he:
    print("Sending message to Slack failed: {}".format(he))
    sys.exit(1)
LordGaav/notification-scripts
slack.py
Python
mit
3,578
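# A small usage sketch for the timestamp helper above: plain integers pass
# through unchanged, and ISO 8601 strings are parsed and converted to UNIX
# seconds (via datetime.timestamp(), i.e. interpreted as local time). The
# dates are illustrative.
print(iso8601_to_unix_timestamp('1500000000'))           # 1500000000
print(iso8601_to_unix_timestamp('2017-07-14T02:40:00'))  # UNIX seconds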
# -*- coding: utf-8 -*- """The initialization file for the Pywikibot framework.""" # # (C) Pywikibot team, 2008-2015 # # Distributed under the terms of the MIT license. # from __future__ import absolute_import, unicode_literals __release__ = '2.0b3' __version__ = '$Id$' __url__ = 'https://www.mediawiki.org/wiki/Special:MyLanguage/Manual:Pywikibot' import datetime import math import re import sys import threading import json if sys.version_info[0] > 2: from queue import Queue long = int else: from Queue import Queue from warnings import warn # Use pywikibot. prefix for all in-package imports; this is to prevent # confusion with similarly-named modules in version 1 framework, for users # who want to continue using both from pywikibot import config2 as config from pywikibot.bot import ( output, warning, error, critical, debug, stdout, exception, input, input_choice, input_yn, inputChoice, handle_args, showHelp, ui, log, calledModuleName, Bot, CurrentPageBot, WikidataBot, # the following are flagged as deprecated on usage handleArgs, ) from pywikibot.exceptions import ( Error, InvalidTitle, BadTitle, NoPage, NoMoveTarget, SectionError, SiteDefinitionError, NoSuchSite, UnknownSite, UnknownFamily, UnknownExtension, NoUsername, UserBlocked, PageRelatedError, IsRedirectPage, IsNotRedirectPage, PageSaveRelatedError, PageNotSaved, OtherPageSaveError, LockedPage, CascadeLockedPage, LockedNoPage, NoCreateError, EditConflict, PageDeletedConflict, PageCreatedConflict, ServerError, FatalServerError, Server504Error, CaptchaError, SpamfilterError, CircularRedirect, InterwikiRedirectPage, WikiBaseError, CoordinateGlobeUnknownException, ) from pywikibot.tools import PY2, UnicodeMixin, redirect_func from pywikibot.i18n import translate from pywikibot.data.api import UploadWarning from pywikibot.diff import PatchManager import pywikibot.textlib as textlib import pywikibot.tools textlib_methods = ( 'unescape', 'replaceExcept', 'removeDisabledParts', 'removeHTMLParts', 'isDisabled', 'interwikiFormat', 'interwikiSort', 'getLanguageLinks', 'replaceLanguageLinks', 'removeLanguageLinks', 'removeLanguageLinksAndSeparator', 'getCategoryLinks', 'categoryFormat', 'replaceCategoryLinks', 'removeCategoryLinks', 'removeCategoryLinksAndSeparator', 'replaceCategoryInPlace', 'compileLinkR', 'extract_templates_and_params', 'TimeStripper', ) __all__ = ( 'config', 'ui', 'UnicodeMixin', 'translate', 'Page', 'FilePage', 'Category', 'Link', 'User', 'ItemPage', 'PropertyPage', 'Claim', 'html2unicode', 'url2unicode', 'unicode2html', 'stdout', 'output', 'warning', 'error', 'critical', 'debug', 'exception', 'input_choice', 'input', 'input_yn', 'inputChoice', 'handle_args', 'handleArgs', 'showHelp', 'ui', 'log', 'calledModuleName', 'Bot', 'CurrentPageBot', 'WikidataBot', 'Error', 'InvalidTitle', 'BadTitle', 'NoPage', 'NoMoveTarget', 'SectionError', 'SiteDefinitionError', 'NoSuchSite', 'UnknownSite', 'UnknownFamily', 'UnknownExtension', 'NoUsername', 'UserBlocked', 'UserActionRefuse', 'PageRelatedError', 'IsRedirectPage', 'IsNotRedirectPage', 'PageSaveRelatedError', 'PageNotSaved', 'OtherPageSaveError', 'LockedPage', 'CascadeLockedPage', 'LockedNoPage', 'NoCreateError', 'EditConflict', 'PageDeletedConflict', 'PageCreatedConflict', 'UploadWarning', 'ServerError', 'FatalServerError', 'Server504Error', 'CaptchaError', 'SpamfilterError', 'CircularRedirect', 'InterwikiRedirectPage', 'WikiBaseError', 'CoordinateGlobeUnknownException', 'QuitKeyboardInterrupt', ) __all__ += textlib_methods if PY2: # T111615: Python 2 requires __all__ is bytes 
globals()['__all__'] = tuple(bytes(item) for item in __all__)

for _name in textlib_methods:
    target = getattr(textlib, _name)
    wrapped_func = redirect_func(target)
    globals()[_name] = wrapped_func


deprecated = redirect_func(pywikibot.tools.deprecated)
deprecate_arg = redirect_func(pywikibot.tools.deprecate_arg)


class Timestamp(datetime.datetime):

    """Class for handling MediaWiki timestamps.

    This inherits from datetime.datetime, so it can use all of the methods
    and operations of a datetime object. To ensure that the results of any
    operation are also a Timestamp object, be sure to use only Timestamp
    objects (and datetime.timedeltas) in any operation.

    Use Timestamp.fromISOformat() and Timestamp.fromtimestampformat() to
    create Timestamp objects from MediaWiki string formats.
    As these constructors are typically used to create objects using data
    provided by site and page methods, some of which return a Timestamp
    when previously they returned a MediaWiki string representation, these
    methods also accept a Timestamp object, in which case they return a clone.

    Use Site.getcurrenttime() for the current time; this is more reliable
    than using Timestamp.utcnow().
    """

    mediawikiTSFormat = "%Y%m%d%H%M%S"
    ISO8601Format = "%Y-%m-%dT%H:%M:%SZ"

    def clone(self):
        """Clone this instance."""
        return self.replace(microsecond=self.microsecond)

    @classmethod
    def fromISOformat(cls, ts):
        """Convert an ISO 8601 timestamp to a Timestamp object."""
        # If inadvertently passed a Timestamp object, use replace()
        # to create a clone.
        if isinstance(ts, cls):
            return ts.clone()
        return cls.strptime(ts, cls.ISO8601Format)

    @classmethod
    def fromtimestampformat(cls, ts):
        """Convert a MediaWiki internal timestamp to a Timestamp object."""
        # If inadvertently passed a Timestamp object, use replace()
        # to create a clone.
        if isinstance(ts, cls):
            return ts.clone()
        return cls.strptime(ts, cls.mediawikiTSFormat)

    def isoformat(self):
        """
        Convert object to an ISO 8601 timestamp accepted by MediaWiki.

        datetime.datetime.isoformat does not postfix the ISO formatted date
        with a 'Z' unless a timezone is included, which causes MediaWiki
        ~1.19 and earlier to fail.
        """
        return self.strftime(self.ISO8601Format)

    toISOformat = redirect_func(isoformat, old_name='toISOformat',
                                class_name='Timestamp')

    def totimestampformat(self):
        """Convert object to a MediaWiki internal timestamp."""
        return self.strftime(self.mediawikiTSFormat)

    def __str__(self):
        """Return a string format recognized by the API."""
        return self.isoformat()

    def __add__(self, other):
        """Perform addition, returning a Timestamp instead of datetime."""
        newdt = super(Timestamp, self).__add__(other)
        if isinstance(newdt, datetime.datetime):
            return Timestamp(newdt.year, newdt.month, newdt.day, newdt.hour,
                             newdt.minute, newdt.second, newdt.microsecond,
                             newdt.tzinfo)
        else:
            return newdt

    def __sub__(self, other):
        """Perform subtraction, returning a Timestamp instead of datetime."""
        newdt = super(Timestamp, self).__sub__(other)
        if isinstance(newdt, datetime.datetime):
            return Timestamp(newdt.year, newdt.month, newdt.day, newdt.hour,
                             newdt.minute, newdt.second, newdt.microsecond,
                             newdt.tzinfo)
        else:
            return newdt


class Coordinate(object):

    """
    Class for handling and storing Coordinates.

    For now it's just being used for DataSite, but
    in the future we can use it for the GeoData extension.
    """

    def __init__(self, lat, lon, alt=None, precision=None, globe='earth',
                 typ="", name="", dim=None, site=None, entity=''):
        """
        Represent a geo coordinate.
@param lat: Latitude @type lat: float @param lon: Longitude @type lon: float @param alt: Altitute? TODO FIXME @param precision: precision @type precision: float @param globe: Which globe the point is on @type globe: str @param typ: The type of coordinate point @type typ: str @param name: The name @type name: str @param dim: Dimension (in meters) @type dim: int @param entity: The URL entity of a Wikibase item @type entity: str """ self.lat = lat self.lon = lon self.alt = alt self._precision = precision if globe: globe = globe.lower() self.globe = globe self._entity = entity self.type = typ self.name = name self._dim = dim if not site: self.site = Site().data_repository() else: self.site = site def __repr__(self): string = 'Coordinate(%s, %s' % (self.lat, self.lon) if self.globe != 'earth': string += ', globe="%s"' % self.globe string += ')' return string @property def entity(self): if self._entity: return self._entity return self.site.globes()[self.globe] def toWikibase(self): """ Export the data to a JSON object for the Wikibase API. FIXME: Should this be in the DataSite object? """ if self.globe not in self.site.globes(): raise CoordinateGlobeUnknownException( u"%s is not supported in Wikibase yet." % self.globe) return {'latitude': self.lat, 'longitude': self.lon, 'altitude': self.alt, 'globe': self.entity, 'precision': self.precision, } @classmethod def fromWikibase(cls, data, site): """Constructor to create an object from Wikibase's JSON output.""" globes = {} for k in site.globes(): globes[site.globes()[k]] = k globekey = data['globe'] if globekey: globe = globes.get(data['globe']) else: # Default to earth or should we use None here? globe = 'earth' return cls(data['latitude'], data['longitude'], data['altitude'], data['precision'], globe, site=site, entity=data['globe']) @property def precision(self): u""" Return the precision of the geo coordinate. The biggest error (in degrees) will be given by the longitudinal error; the same error in meters becomes larger (in degrees) further up north. We can thus ignore the latitudinal error. The longitudinal can be derived as follows: In small angle approximation (and thus in radians): M{Δλ ≈ Δpos / r_φ}, where r_φ is the radius of earth at the given latitude. Δλ is the error in longitude. M{r_φ = r cos φ}, where r is the radius of earth, φ the latitude Therefore:: precision = math.degrees(self._dim/(radius*math.cos(math.radians(self.lat)))) """ if not self._precision: radius = 6378137 # TODO: Support other globes self._precision = math.degrees( self._dim / (radius * math.cos(math.radians(self.lat)))) return self._precision def precisionToDim(self): """Convert precision from Wikibase to GeoData's dim.""" raise NotImplementedError class WbTime(object): """A Wikibase time representation.""" PRECISION = {'1000000000': 0, '100000000': 1, '10000000': 2, '1000000': 3, '100000': 4, '10000': 5, 'millenia': 6, 'century': 7, 'decade': 8, 'year': 9, 'month': 10, 'day': 11, 'hour': 12, 'minute': 13, 'second': 14 } FORMATSTR = '{0:+012d}-{1:02d}-{2:02d}T{3:02d}:{4:02d}:{5:02d}Z' def __init__(self, year=None, month=None, day=None, hour=None, minute=None, second=None, precision=None, before=0, after=0, timezone=0, calendarmodel=None, site=None): """ Create a new WbTime object. The precision can be set by the Wikibase int value (0-14) or by a human readable string, e.g., 'hour'. If no precision is given, it is set according to the given time units. 
""" if year is None: raise ValueError('no year given') self.precision = self.PRECISION['second'] if second is None: self.precision = self.PRECISION['minute'] second = 0 if minute is None: self.precision = self.PRECISION['hour'] minute = 0 if hour is None: self.precision = self.PRECISION['day'] hour = 0 if day is None: self.precision = self.PRECISION['month'] day = 1 if month is None: self.precision = self.PRECISION['year'] month = 1 self.year = long(year) self.month = month self.day = day self.hour = hour self.minute = minute self.second = second self.after = after self.before = before self.timezone = timezone if calendarmodel is None: if site is None: site = Site().data_repository() calendarmodel = site.calendarmodel() self.calendarmodel = calendarmodel # if precision is given it overwrites the autodetection above if precision is not None: if (isinstance(precision, int) and precision in self.PRECISION.values()): self.precision = precision elif precision in self.PRECISION: self.precision = self.PRECISION[precision] else: raise ValueError('Invalid precision: "%s"' % precision) @classmethod def fromTimestr(cls, datetimestr, precision=14, before=0, after=0, timezone=0, calendarmodel=None, site=None): match = re.match(r'([-+]?\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+)Z', datetimestr) if not match: raise ValueError(u"Invalid format: '%s'" % datetimestr) t = match.groups() return cls(long(t[0]), int(t[1]), int(t[2]), int(t[3]), int(t[4]), int(t[5]), precision, before, after, timezone, calendarmodel, site) def toTimestr(self): """ Convert the data to a UTC date/time string. @return: str """ return self.FORMATSTR.format(self.year, self.month, self.day, self.hour, self.minute, self.second) def toWikibase(self): """ Convert the data to a JSON object for the Wikibase API. @return: dict """ json = {'time': self.toTimestr(), 'precision': self.precision, 'after': self.after, 'before': self.before, 'timezone': self.timezone, 'calendarmodel': self.calendarmodel } return json @classmethod def fromWikibase(cls, ts): return cls.fromTimestr(ts[u'time'], ts[u'precision'], ts[u'before'], ts[u'after'], ts[u'timezone'], ts[u'calendarmodel']) def __str__(self): return json.dumps(self.toWikibase(), indent=4, sort_keys=True, separators=(',', ': ')) def __eq__(self, other): return self.__dict__ == other.__dict__ def __repr__(self): return u"WbTime(year=%(year)d, month=%(month)d, day=%(day)d, " \ u"hour=%(hour)d, minute=%(minute)d, second=%(second)d, " \ u"precision=%(precision)d, before=%(before)d, after=%(after)d, " \ u"timezone=%(timezone)d, calendarmodel='%(calendarmodel)s')" \ % self.__dict__ class WbQuantity(object): """A Wikibase quantity representation.""" def __init__(self, amount, unit=None, error=None): u""" Create a new WbQuantity object. @param amount: number representing this quantity @type amount: float @param unit: not used (only unit-less quantities are supported) @param error: the uncertainty of the amount (e.g. ±1) @type error: float, or tuple of two floats, where the first value is the upper error and the second is the lower error value. 
""" if amount is None: raise ValueError('no amount given') if unit is None: unit = '1' self.amount = amount self.unit = unit upperError = lowerError = 0 if isinstance(error, tuple): upperError, lowerError = error elif error is not None: upperError = lowerError = error self.upperBound = self.amount + upperError self.lowerBound = self.amount - lowerError def toWikibase(self): """Convert the data to a JSON object for the Wikibase API.""" json = {'amount': self.amount, 'upperBound': self.upperBound, 'lowerBound': self.lowerBound, 'unit': self.unit } return json @classmethod def fromWikibase(cls, wb): """ Create a WbQuanity from the JSON data given by the Wikibase API. @param wb: Wikibase JSON """ amount = eval(wb['amount']) upperBound = eval(wb['upperBound']) lowerBound = eval(wb['lowerBound']) error = (upperBound - amount, amount - lowerBound) return cls(amount, wb['unit'], error) def __str__(self): return json.dumps(self.toWikibase(), indent=4, sort_keys=True, separators=(',', ': ')) def __eq__(self, other): return self.__dict__ == other.__dict__ def __repr__(self): return (u"WbQuantity(amount=%(amount)s, upperBound=%(upperBound)s, " u"lowerBound=%(lowerBound)s, unit=%(unit)s)" % self.__dict__) _sites = {} _url_cache = {} # The code/fam pair for each URL def Site(code=None, fam=None, user=None, sysop=None, interface=None, url=None): """A factory method to obtain a Site object. Site objects are cached and reused by this method. By default rely on config settings. These defaults may all be overridden using the method parameters. @param code: language code (override config.mylang) @type code: string @param fam: family name or object (override config.family) @type fam: string or Family @param user: bot user name to use on this site (override config.usernames) @type user: unicode @param sysop: sysop user to use on this site (override config.sysopnames) @type sysop: unicode @param interface: site class or name of class in pywikibot.site (override config.site_interface) @type interface: subclass of L{pywikibot.site.BaseSite} or string @param url: Instead of code and fam, does try to get a Site based on the URL. Still requires that the family supporting that URL exists. 
@type url: string """ # Either code and fam or only url if url and (code or fam): raise ValueError('URL to the wiki OR a pair of code and family name ' 'should be provided') _logger = "wiki" if url: if url not in _url_cache: matched_sites = [] # Iterate through all families and look, which does apply to # the given URL for fam in config.family_files: family = pywikibot.family.Family.load(fam) code = family.from_url(url) if code is not None: matched_sites += [(code, fam)] if matched_sites: if len(matched_sites) > 1: pywikibot.warning( 'Found multiple matches for URL "{0}": {1} (use first)' .format(url, ', '.join(str(s) for s in matched_sites))) _url_cache[url] = matched_sites[0] else: # TODO: As soon as AutoFamily is ready, try and use an # AutoFamily _url_cache[url] = None cached = _url_cache[url] if cached: code = cached[0] fam = cached[1] else: raise SiteDefinitionError("Unknown URL '{0}'.".format(url)) else: # Fallback to config defaults code = code or config.mylang fam = fam or config.family interface = interface or config.site_interface # config.usernames is initialised with a dict for each family name family_name = str(fam) if family_name in config.usernames: user = user or config.usernames[family_name].get(code) \ or config.usernames[family_name].get('*') sysop = sysop or config.sysopnames[family_name].get(code) \ or config.sysopnames[family_name].get('*') if not isinstance(interface, type): # If it isnt a class, assume it is a string try: tmp = __import__('pywikibot.site', fromlist=[interface]) interface = getattr(tmp, interface) except ImportError: raise ValueError("Invalid interface name '%(interface)s'" % locals()) if not issubclass(interface, pywikibot.site.BaseSite): warning('Site called with interface=%s' % interface.__name__) user = pywikibot.tools.normalize_username(user) key = '%s:%s:%s:%s' % (interface.__name__, fam, code, user) if key not in _sites or not isinstance(_sites[key], interface): _sites[key] = interface(code=code, fam=fam, user=user, sysop=sysop) debug(u"Instantiated %s object '%s'" % (interface.__name__, _sites[key]), _logger) if _sites[key].code != code: warn('Site %s instantiated using different code "%s"' % (_sites[key], code), UserWarning, 2) return _sites[key] # alias for backwards-compability getSite = pywikibot.tools.redirect_func(Site, old_name='getSite') from pywikibot.page import ( Page, FilePage, Category, Link, User, ItemPage, PropertyPage, Claim, ) from pywikibot.page import html2unicode, url2unicode, unicode2html link_regex = re.compile(r'\[\[(?P<title>[^\]|[<>{}]*)(\|.*?)?\]\]') @pywikibot.tools.deprecated("comment parameter for page saving method") def setAction(s): """Set a summary to use for changed page submissions.""" config.default_edit_summary = s def showDiff(oldtext, newtext, context=0): """ Output a string showing the differences between oldtext and newtext. The differences are highlighted (only on compatible systems) to show which changes were made. """ PatchManager(oldtext, newtext, context=context).print_hunks() # Throttle and thread handling stopped = False def stopme(): """Drop this process from the throttle log, after pending threads finish. Can be called manually if desired, but if not, will be called automatically at Python exit. 
""" global stopped _logger = "wiki" if not stopped: debug(u"stopme() called", _logger) def remaining(): remainingPages = page_put_queue.qsize() - 1 # -1 because we added a None element to stop the queue remainingSeconds = datetime.timedelta( seconds=(remainingPages * config.put_throttle)) return (remainingPages, remainingSeconds) page_put_queue.put((None, [], {})) stopped = True if page_put_queue.qsize() > 1: num, sec = remaining() format_values = dict(num=num, sec=sec) output(u'\03{lightblue}' u'Waiting for %(num)i pages to be put. ' u'Estimated time remaining: %(sec)s' u'\03{default}' % format_values) while(_putthread.isAlive()): try: _putthread.join(1) except KeyboardInterrupt: if input_yn('There are %i pages remaining in the queue. ' 'Estimated time remaining: %s\nReally exit?' % remaining(), default=False, automatic_quit=False): return # only need one drop() call because all throttles use the same global pid try: list(_sites.values())[0].throttle.drop() log(u"Dropped throttle(s).") except IndexError: pass import atexit atexit.register(stopme) # Create a separate thread for asynchronous page saves (and other requests) def async_manager(): """Daemon; take requests from the queue and execute them in background.""" while True: (request, args, kwargs) = page_put_queue.get() if request is None: break request(*args, **kwargs) page_put_queue.task_done() def async_request(request, *args, **kwargs): """Put a request on the queue, and start the daemon if necessary.""" if not _putthread.isAlive(): try: page_put_queue.mutex.acquire() try: _putthread.start() except (AssertionError, RuntimeError): pass finally: page_put_queue.mutex.release() page_put_queue.put((request, args, kwargs)) # queue to hold pending requests page_put_queue = Queue(config.max_queue_size) # set up the background thread _putthread = threading.Thread(target=async_manager) # identification for debugging purposes _putthread.setName('Put-Thread') _putthread.setDaemon(True) wrapper = pywikibot.tools.ModuleDeprecationWrapper(__name__) wrapper._add_deprecated_attr('ImagePage', FilePage) wrapper._add_deprecated_attr( 'PageNotFound', pywikibot.exceptions.DeprecatedPageNotFoundError, warning_message=('{0}.{1} is deprecated, and no longer ' 'used by pywikibot; use http.fetch() instead.')) wrapper._add_deprecated_attr( 'UserActionRefuse', pywikibot.exceptions._EmailUserError, warning_message='UserActionRefuse is deprecated; ' 'use UserRightsError and/or NotEmailableError') wrapper._add_deprecated_attr( 'QuitKeyboardInterrupt', pywikibot.bot.QuitKeyboardInterrupt, warning_message='pywikibot.QuitKeyboardInterrupt is deprecated; ' 'use pywikibot.bot.QuitKeyboardInterrupt instead')
icyflame/batman
pywikibot/__init__.py
Python
mit
26,823
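# A brief usage sketch for the Timestamp class above, following its docstring:
# MediaWiki-format constructors, and arithmetic that stays a Timestamp. The
# example date is illustrative.
import datetime

ts = Timestamp.fromISOformat('2015-06-01T12:00:00Z')
later = ts + datetime.timedelta(hours=1)   # __add__ returns a Timestamp
print(later.totimestampformat())           # '20150601130000'
print(str(later))                          # '2015-06-01T13:00:00Z'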
# test_eng.py # Copyright (c) 2013-2016 Pablo Acosta-Serafini # See LICENSE for details # pylint: disable=C0103,C0111,C0302,E0611,R0913,R0915,W0108,W0212 # Standard library imports import functools import sys import pytest from numpy import array, ndarray # Putil imports import putil.eng from putil.test import AE, AI, CS ### # Global variables ### DFLT = 'def' PY2 = bool(sys.hexversion < 0x03000000) ### # Helper functions ### isdflt = lambda obj: bool(obj == DFLT) h = lambda num: '100.'+('0'*num) o = lambda num: '1.'+('0'*num) pv = lambda py2arg, py3arg: py2arg if PY2 else py3arg sarg = lambda msg: 'Argument `{0}` is not valid'.format(msg) t = lambda num: '10.'+('0'*num) def to_sci_string(number): """ Returns a string with the number formatted in scientific notation. This function does not have all the configurability of the public function to_scientific_string, it is a convenience function to test _to_eng_tuple """ mant, exp = putil.eng._to_eng_tuple(number) return '{mant}E{exp_sign}{exp}'.format( mant=mant, exp_sign='-' if exp < 0 else '+', exp=abs(exp) ) ### # Test functions ### @pytest.mark.parametrize( 'text, sep, num, lstrip, rstrip, ref', [ ('a, b, c, d', ',', 1, DFLT, DFLT, ('a', ' b', ' c', ' d')), ('a , b , c , d ', ',', 1, DFLT, DFLT, ('a ', ' b ', ' c ', ' d ')), ('a , b , c , d ', ',', 1, True, DFLT, ('a ', 'b ', 'c ', 'd ')), ('a , b , c , d ', ',', 1, DFLT, True, ('a', ' b', ' c', ' d')), ('a , b , c , d ', ',', 1, True, True, ('a', 'b', 'c', 'd')), ('a, b, c, d', ',', 2, DFLT, DFLT, ('a, b', ' c, d')), ('a, b, c, d', ',', 3, DFLT, DFLT, ('a, b, c', ' d')), ('a, b, c, d', ',', 4, DFLT, DFLT, ('a, b, c, d',)), ('a, b, c, d', ',', 5, DFLT, DFLT, ('a, b, c, d',)), ] ) def test_split_every(text, sep, num, lstrip, rstrip, ref): """ Test _split_every function behavior """ # DFLT in lstrip or rstrip means default argument values should be used obj = putil.eng._split_every obj = obj if isdflt(lstrip) else functools.partial(obj, lstrip=lstrip) obj = obj if isdflt(rstrip) else functools.partial(obj, rstrip=rstrip) assert obj(text, sep, num) == ref @pytest.mark.parametrize( 'num, ref', [ (0.000000000000000000000001001234567890, '1.00123456789E-24'), (0.000000000000000000000001, '1E-24'), (0.00000000000000000000001001234567890, '10.0123456789E-24'), (0.00000000000000000000001, '10E-24'), (0.0000000000000000000001001234567890, '100.123456789E-24'), (0.0000000000000000000001, '100E-24'), (0.000000000000000000001001234567890, '1.00123456789E-21'), (0.000000000000000000001, '1E-21'), (0.00000000000000000001001234567890, '10.0123456789E-21'), (0.00000000000000000001, '10E-21'), (0.0000000000000000001001234567890, '100.123456789E-21'), (0.0000000000000000001, '100E-21'), (0.000000000000000001001234567890, '1.00123456789E-18'), (0.000000000000000001, '1E-18'), (0.00000000000000001001234567890, '10.0123456789E-18'), (0.00000000000000001, '10E-18'), (0.0000000000000001001234567890, '100.123456789E-18'), (0.0000000000000001, '100E-18'), (0.000000000000001001234567890, '1.00123456789E-15'), (0.000000000000001, '1E-15'), (0.00000000000001001234567890, '10.0123456789E-15'), (0.00000000000001, '10E-15'), (0.0000000000001001234567890, '100.123456789E-15'), (0.0000000000001, '100E-15'), (0.000000000001001234567890, '1.00123456789E-12'), (0.000000000001, '1E-12'), (0.00000000001001234567890, '10.0123456789E-12'), (0.00000000001, '10E-12'), (0.0000000001001234567890, '100.123456789E-12'), (0.0000000001, '100E-12'), (0.000000001001234567890, '1.00123456789E-9'), (0.000000001, '1E-9'), 
(0.00000001001234567890, '10.0123456789E-9'), (0.00000001, '10E-9'), (0.0000001001234567890, '100.123456789E-9'), (0.0000001, '100E-9'), (0.000001001234567890, '1.00123456789E-6'), (0.000001, '1E-6'), (0.00001001234567890, '10.0123456789E-6'), (0.00001, '10E-6'), (0.0001001234567890, '100.123456789E-6'), (0.0001, '100E-6'), (0.001001234567890, '1.00123456789E-3'), (0.001, '1E-3'), (0.01001234567890, '10.0123456789E-3'), (0.01, '10E-3'), (0.1001234567890, '100.123456789E-3'), (0.1, '100E-3'), (0, '0E+0'), (1, '1E+0'), (1.1234567890, '1.123456789E+0'), (10, '10E+0'), (10.1234567890, '10.123456789E+0'), (100, '100E+0'), (100.1234567890, '100.123456789E+0'), (1000, '1E+3'), (1000.1234567890, pv('1.00012345679E+3', '1.000123456789E+3')), (10000, '10E+3'), (10000.1234567890, pv('10.0001234568E+3', '10.000123456789E+3')), (100000, '100E+3'), (100000.1234567890, pv('100.000123457E+3', '100.000123456789E+3')), (1000000, '1E+6'), (1000000.1234567890, pv('1.00000012346E+6', '1.000000123456789E+6')), (10000000, '10E+6'), (10000000.1234567890, pv('10.0000001235E+6', '10.00000012345679E+6')), (100000000, '100E+6'), (100000000.1234567890, pv('100.000000123E+6', '100.00000012345679E+6')), (1000000000, '1E+9'), (1000000000.1234567890, pv('1.00000000012E+9', '1.0000000001234568E+9')), (10000000000, '10E+9'), (10000000000.1234567890, pv(t(9)+'1E+9', '10.000000000123457E+9')), (100000000000, '100E+9'), (100000000000.1234567890, pv('100E+9', '100.00000000012346E+9')), (1000000000000, '1E+12'), (1000000000000.1234567890, pv('1E+12', '1.0000000000001234E+12')), (10000000000000, '10E+12'), (10000000000000.1234567890, pv('10E+12', '10.000000000000123E+12')), (100000000000000, '100E+12'), (100000000000000.1234567890, pv('100E+12', '100.00000000000012E+12')), (1000000000000000, '1E+15'), (1000000000000000.1234567890, pv('1E+15', '1.0000000000000001E+15')), (10000000000000000, '10E+15'), (10000000000000000.1234567890, '10E+15'), (100000000000000000, '100E+15'), (100000000000000000.1234567890, '100E+15'), (1000000000000000000, '1E+18'), (1000000000000000000.1234567890, '1E+18'), (10000000000000000000, '10E+18'), (10000000000000000000.1234567890, '10E+18'), (100000000000000000000, '100E+18'), (100000000000000000000.1234567890, '100E+18'), (1000000000000000000000, '1E+21'), (1000000000000000000000.1234567890, '1E+21'), (10000000000000000000000, '10E+21'), (10000000000000000000000.1234567890, '10E+21'), (100000000000000000000000, '100E+21'), (100000000000000000000000.1234567890, pv('100E+21', h(13)+'1E+21')), (1000000000000000000000000, '1E+24'), (1000000000000000000000000.1234567890, '1E+24'), (10000000000000000000000000, '10E+24'), (10000000000000000000000000.1234567890, '10E+24'), (100000000000000000000000000, '100E+24'), (100000000000000000000000000.1234567890, '100E+24'), (-0.000000000000000000000001001234567890, '-1.00123456789E-24'), (-0.000000000000000000000001, '-1E-24'), (-0.00000000000000000000001001234567890, '-10.0123456789E-24'), (-0.00000000000000000000001, '-10E-24'), (-0.0000000000000000000001001234567890, '-100.123456789E-24'), (-0.0000000000000000000001, '-100E-24'), (-0.000000000000000000001001234567890, '-1.00123456789E-21'), (-0.000000000000000000001, '-1E-21'), (-0.00000000000000000001001234567890, '-10.0123456789E-21'), (-0.00000000000000000001, '-10E-21'), (-0.0000000000000000001001234567890, '-100.123456789E-21'), (-0.0000000000000000001, '-100E-21'), (-0.000000000000000001001234567890, '-1.00123456789E-18'), (-0.000000000000000001, '-1E-18'), (-0.00000000000000001001234567890, 
'-10.0123456789E-18'), (-0.00000000000000001, '-10E-18'), (-0.0000000000000001001234567890, '-100.123456789E-18'), (-0.0000000000000001, '-100E-18'), (-0.000000000000001001234567890, '-1.00123456789E-15'), (-0.000000000000001, '-1E-15'), (-0.00000000000001001234567890, '-10.0123456789E-15'), (-0.00000000000001, '-10E-15'), (-0.0000000000001001234567890, '-100.123456789E-15'), (-0.0000000000001, '-100E-15'), (-0.000000000001001234567890, '-1.00123456789E-12'), (-0.000000000001, '-1E-12'), (-0.00000000001001234567890, '-10.0123456789E-12'), (-0.00000000001, '-10E-12'), (-0.0000000001001234567890, '-100.123456789E-12'), (-0.0000000001, '-100E-12'), (-0.000000001001234567890, '-1.00123456789E-9'), (-0.000000001, '-1E-9'), (-0.00000001001234567890, '-10.0123456789E-9'), (-0.00000001, '-10E-9'), (-0.0000001001234567890, '-100.123456789E-9'), (-0.0000001, '-100E-9'), (-0.000001001234567890, '-1.00123456789E-6'), (-0.000001, '-1E-6'), (-0.00001001234567890, '-10.0123456789E-6'), (-0.00001, '-10E-6'), (-0.0001001234567890, '-100.123456789E-6'), (-0.0001, '-100E-6'), (-0.001001234567890, '-1.00123456789E-3'), (-0.001, '-1E-3'), (-0.01001234567890, '-10.0123456789E-3'), (-0.01, '-10E-3'), (-0.1001234567890, '-100.123456789E-3'), (-0.1, '-100E-3'), (-1, '-1E+0'), (-1.1234567890, '-1.123456789E+0'), (-10, '-10E+0'), (-10.1234567890, '-10.123456789E+0'), (-100, '-100E+0'), (-100.1234567890, '-100.123456789E+0'), (-1000, '-1E+3'), (-1000.1234567890, pv('-1.00012345679E+3', '-1.000123456789E+3')), (-10000, '-10E+3'), (-10000.1234567890, pv('-10.0001234568E+3', '-10.000123456789E+3')), (-100000, '-100E+3'), (-100000.1234567890, pv('-100.000123457E+3', '-100.000123456789E+3')), (-1000000, '-1E+6'), (-1000000.1234567890, pv('-1.00000012346E+6', '-1.000000123456789E+6')), (-10000000, '-10E+6'), (-10000000.1234567890, pv('-10.0000001235E+6', '-10.00000012345679E+6')), (-100000000, '-100E+6'), (-100000000.1234567890, pv('-'+h(6)+'123E+6', '-100.00000012345679E+6')), (-1000000000, '-1E+9'), (-1000000000.1234567890, pv('-'+o(9)+'12E+9', '-1.0000000001234568E+9')), (-10000000000, '-10E+9'), (-10000000000.1234567890, pv('-'+t(9)+'1E+9', '-'+t(9)+'123457E+9')), (-100000000000, '-100E+9'), (-100000000000.1234567890, pv('-100E+9', '-100.00000000012346E+9')), (-1000000000000, '-1E+12'), (-1000000000000.1234567890, pv('-1E+12', '-1.0000000000001234E+12')), (-10000000000000, '-10E+12'), (-10000000000000.1234567890, pv('-10E+12', '-10.000000000000123E+12')), (-100000000000000, '-100E+12'), (-100000000000000.1234567890, pv('-100E+12', '-100.00000000000012E+12')), (-1000000000000000, '-1E+15'), (-1000000000000000.1234567890, pv('-1E+15', '-1.0000000000000001E+15')), (-10000000000000000, '-10E+15'), (-10000000000000000.1234567890, '-10E+15'), (-100000000000000000, '-100E+15'), (-100000000000000000.1234567890, '-100E+15'), (-1000000000000000000, '-1E+18'), (-1000000000000000000.1234567890, '-1E+18'), (-10000000000000000000, '-10E+18'), (-10000000000000000000.1234567890, '-10E+18'), (-100000000000000000000, '-100E+18'), (-100000000000000000000.1234567890, '-100E+18'), (-1000000000000000000000, '-1E+21'), (-1000000000000000000000.1234567890, '-1E+21'), (-10000000000000000000000, '-10E+21'), (-10000000000000000000000.1234567890, '-10E+21'), (-100000000000000000000000, '-100E+21'), (-100000000000000000000000.1234567890, pv('-100E+21', '-'+h(13)+'1E+21')), (-1000000000000000000000000, '-1E+24'), (-1000000000000000000000000.1234567890, '-1E+24'), (-10000000000000000000000000, '-10E+24'), (-10000000000000000000000000.1234567890, 
'-10E+24'), (-100000000000000000000000000, '-100E+24'), (-100000000000000000000000000.1234567890, '-100E+24'), ('100000.1234567890', '100.000123456789E+3'), ('-100000.1234567890', '-100.000123456789E+3'), ] ) def test_to_sci_string(num, ref): """ Test _to_eng_string function behavior """ assert to_sci_string(num) == ref @pytest.mark.parametrize( 'num, ref', [ (0, '0'), (0.0, '0.0'), (4, '4'), (4.0, '4.0'), (45, '45'), (450, '450'), (1234567, '1234567'), (4.5, '4.5'), (4.1234, '4.1234'), (4123.4E4, '41234000'), (0.1, '0.1'), (1.43E-2, '0.0143'), (100000000.0, '100000000.0'), (1000000, '1000000'), (1e3, '1000.0'), ] ) def test_no_exp(num, ref): """ Test no_exp function behavior """ assert putil.eng.no_exp(num) == ref @pytest.mark.eng def test_no_ex_exceptions(): """ Test no_exp function exceptions """ AI(putil.eng.no_exp, 'number', number='a') @pytest.mark.eng @pytest.mark.parametrize( 'args, name', [ (dict(number=['5'], frac_length=3, rjust=True), 'number'), (dict(number=5, frac_length=3.5, rjust=True), 'frac_length'), (dict(number=5, frac_length=-2, rjust=True), 'frac_length'), (dict(number=5, frac_length=3, rjust='a'), 'rjust') ] ) def test_peng_exceptions(args, name): """ Test peng function exceptions """ AI(putil.eng.peng, name, **args) @pytest.mark.parametrize( 'num, mant, rjust, ref', [ (3.0333333333, 1, False, '3.0'), (0, 3, True, ' 0.000 '), (0, 3, False, '0.000'), (125.5, 0, False, '126'), (1e-25, 3, True, ' 1.000y'), (1e-24, 3, True, ' 1.000y'), (1e-23, 3, True, ' 10.000y'), (1e-22, 3, True, ' 100.000y'), (1e-21, 3, True, ' 1.000z'), (1e-20, 3, True, ' 10.000z'), (1e-19, 3, True, ' 100.000z'), (1e-18, 3, True, ' 1.000a'), (1e-17, 3, True, ' 10.000a'), (1e-16, 3, True, ' 100.000a'), (1e-15, 3, True, ' 1.000f'), (1e-14, 3, True, ' 10.000f'), (1e-13, 3, True, ' 100.000f'), (1e-12, 3, True, ' 1.000p'), (1e-11, 3, True, ' 10.000p'), (1e-10, 3, True, ' 100.000p'), (1e-9, 3, True, ' 1.000n'), (1e-8, 3, True, ' 10.000n'), (1e-7, 3, True, ' 100.000n'), (1e-6, 3, True, ' 1.000u'), (1e-5, 3, True, ' 10.000u'), (1e-4, 3, True, ' 100.000u'), (1e-3, 3, True, ' 1.000m'), (1e-2, 3, True, ' 10.000m'), (1e-1, 3, True, ' 100.000m'), (1e-0, 3, True, ' 1.000 '), (1e+1, 3, True, ' 10.000 '), (1e+2, 3, True, ' 100.000 '), (1e+3, 3, True, ' 1.000k'), (1e+4, 3, True, ' 10.000k'), (1e+5, 3, True, ' 100.000k'), (1e+6, 3, True, ' 1.000M'), (1e+7, 3, True, ' 10.000M'), (1e+8, 3, True, ' 100.000M'), (1e+9, 3, True, ' 1.000G'), (1e+10, 3, True, ' 10.000G'), (1e+11, 3, True, ' 100.000G'), (1e+12, 3, True, ' 1.000T'), (1e+13, 3, True, ' 10.000T'), (1e+14, 3, True, ' 100.000T'), (1e+15, 3, True, ' 1.000P'), (1e+16, 3, True, ' 10.000P'), (1e+17, 3, True, ' 100.000P'), (1e+18, 3, True, ' 1.000E'), (1e+19, 3, True, ' 10.000E'), (1e+20, 3, True, ' 100.000E'), (1e+21, 3, True, ' 1.000Z'), (1e+22, 3, True, ' 10.000Z'), (1e+23, 3, True, ' 100.000Z'), (1e+24, 3, True, ' 1.000Y'), (1e+25, 3, True, ' 10.000Y'), (1e+26, 3, True, ' 100.000Y'), (1e+27, 3, True, ' 999.999Y'), (12.45, 1, True, ' 12.5 '), (998.999e3, 1, True, ' 999.0k'), (998.999e3, 1, False, '999.0k'), (999.999e3, 1, True, ' 1.0M'), (999.999e3, 1, DFLT, ' 1.0M'), (999.999e3, 1, False, '1.0M'), (0.995, 0, False, '995m'), (0.9999, 0, False, '1'), (1.9999, 0, False, '2'), (999.99, 0, False, '1k'), (9.99, 1, False, '10.0'), (5.25e3, 1, True, ' 5.3k'), (1.05e3, 0, True, ' 1k'), (-1e-25, 3, True, ' -1.000y'), (-1e-24, 3, True, ' -1.000y'), (-1e-23, 3, True, ' -10.000y'), (-1e-22, 3, True, '-100.000y'), (-1e-21, 3, True, ' -1.000z'), (-1e-20, 3, True, ' -10.000z'), 
(-1e-19, 3, True, '-100.000z'), (-1e-18, 3, True, ' -1.000a'), (-1e-17, 3, True, ' -10.000a'), (-1e-16, 3, True, '-100.000a'), (-1e-15, 3, True, ' -1.000f'), (-1e-14, 3, True, ' -10.000f'), (-1e-13, 3, True, '-100.000f'), (-1e-12, 3, True, ' -1.000p'), (-1e-11, 3, True, ' -10.000p'), (-1e-10, 3, True, '-100.000p'), (-1e-9, 3, True, ' -1.000n'), (-1e-8, 3, True, ' -10.000n'), (-1e-7, 3, True, '-100.000n'), (-1e-6, 3, True, ' -1.000u'), (-1e-5, 3, True, ' -10.000u'), (-1e-4, 3, True, '-100.000u'), (-1e-3, 3, True, ' -1.000m'), (-1e-2, 3, True, ' -10.000m'), (-1e-1, 3, True, '-100.000m'), (-1e-0, 3, True, ' -1.000 '), (-1e+1, 3, True, ' -10.000 '), (-1e+2, 3, True, '-100.000 '), (-1e+3, 3, True, ' -1.000k'), (-1e+4, 3, True, ' -10.000k'), (-1e+5, 3, True, '-100.000k'), (-1e+6, 3, True, ' -1.000M'), (-1e+7, 3, True, ' -10.000M'), (-1e+8, 3, True, '-100.000M'), (-1e+9, 3, True, ' -1.000G'), (-1e+10, 3, True, ' -10.000G'), (-1e+11, 3, True, '-100.000G'), (-1e+12, 3, True, ' -1.000T'), (-1e+13, 3, True, ' -10.000T'), (-1e+14, 3, True, '-100.000T'), (-1e+15, 3, True, ' -1.000P'), (-1e+16, 3, True, ' -10.000P'), (-1e+17, 3, True, '-100.000P'), (-1e+18, 3, True, ' -1.000E'), (-1e+19, 3, True, ' -10.000E'), (-1e+20, 3, True, '-100.000E'), (-1e+21, 3, True, ' -1.000Z'), (-1e+22, 3, True, ' -10.000Z'), (-1e+23, 3, True, '-100.000Z'), (-1e+24, 3, True, ' -1.000Y'), (-1e+25, 3, True, ' -10.000Y'), (-1e+26, 3, True, '-100.000Y'), (-1e+27, 3, True, '-999.999Y'), (-12.45, 1, True, ' -12.5 '), (-998.999e3, 1, True, '-999.0k'), (-998.999e3, 1, False, '-999.0k'), (-999.999e3, 1, True, ' -1.0M'), (-999.999e3, 1, DFLT, ' -1.0M'), (-999.999e3, 1, False, '-1.0M'), (-0.995, 0, False, '-995m'), (-0.9999, 0, False, '-1'), (-1.9999, 0, False, '-2'), (-999.99, 0, False, '-1k'), (-9.99, 1, False, '-10.0'), (-5.25e3, 1, True, ' -5.3k'), (-1.05e3, 0, True, ' -1k') ] ) def test_peng(num, mant, rjust, ref): """ Test peng function behavior """ obj = putil.eng.peng obj = obj if isdflt(rjust) else functools.partial(obj, rjust=rjust) assert obj(num, mant) == ref @pytest.mark.eng @pytest.mark.parametrize('arg', [None, 5, '', ' 5x', 'a5M', '- - a5M']) @pytest.mark.parametrize( 'func', [ putil.eng.peng_float, putil.eng.peng_frac, putil.eng.peng_int, putil.eng.peng_mant, putil.eng.peng_power, putil.eng.peng_suffix, ] ) def test_peng_snum_exceptions(func, arg): """ Test exceptions of functions that receive a string representing a number in engineering notation """ AI(func, 'snum', **dict(snum=arg)) @pytest.mark.parametrize( 'arg, ref', [ (putil.eng.peng(5234.567, 3, True), 5.235e3), (' 5.235k ', 5.235e3), (' -5.235k ', -5.235e3), ] ) def test_peng_float(arg, ref): """ Test peng_float function behavior """ assert putil.eng.peng_float(arg) == ref @pytest.mark.parametrize( 'arg, ref', [ (putil.eng.peng(5234.567, 6, True), 234567), (putil.eng.peng(5234, 0, True), 0) ] ) def test_peng_frac(arg, ref): """ Test peng_frac function behavior """ assert putil.eng.peng_frac(arg) == ref def test_peng_int(): """ Test peng_int function behavior """ assert putil.eng.peng_int(putil.eng.peng(5234.567, 6, True)) == 5 def test_peng_mant(): """ Test peng_mant function behavior """ assert putil.eng.peng_mant(putil.eng.peng(5234.567, 3, True)) == 5.235 def test_peng_power(): """ Test peng_power function behavior """ tup = putil.eng.peng_power(putil.eng.peng(1234.567, 3, True)) assert tup == ('k', 1000.0) assert isinstance(tup[1], float) @pytest.mark.parametrize( 'arg, ref', [ (putil.eng.peng(1, 3, True), ' '), (putil.eng.peng(-10.5e-6, 3, False), 'u') ] ) 
def test_peng_suffix(arg, ref): """ Test peng_suffix function behavior """ assert putil.eng.peng_suffix(arg) == ref @pytest.mark.eng @pytest.mark.parametrize( 'args, extype, name', [ (dict(suffix='X', offset=-1), RuntimeError, 'suffix'), (dict(suffix='M', offset='a'), RuntimeError, 'offset'), (dict(suffix='M', offset=20), ValueError, 'offset'), ] ) @pytest.mark.eng def test_peng_suffix_math_exceptions(args, extype, name): """ Test peng_suffix_math function exceptions """ AE(putil.eng.peng_suffix_math, extype, sarg(name), **args) @pytest.mark.parametrize('args, ref', [((' ', 3), 'G'), (('u', -2), 'p')]) def test_peng_suffix_math(args, ref): """ Test peng_suffix_math function behavior """ assert putil.eng.peng_suffix_math(*args) == ref @pytest.mark.parametrize( 'num, frac_length, exp_length, sign_always, ref', [ ('5.35E+3', DFLT, DFLT, DFLT, '5.35E+3'), (0, DFLT, DFLT, DFLT, '0E+0'), (0.1, DFLT, DFLT, DFLT, '1E-1'), (0.01, DFLT, DFLT, DFLT, '1E-2'), (0.001, DFLT, DFLT, DFLT, '1E-3'), (0.00101, DFLT, DFLT, DFLT, '1.01E-3'), (0.123456789012, DFLT, DFLT, DFLT, '1.23456789012E-1'), (1234567.89012, DFLT, DFLT, DFLT, '1.23456789012E+6'), (1, DFLT, DFLT, DFLT, '1E+0'), (20, DFLT, DFLT, DFLT, '2E+1'), (100, DFLT, DFLT, DFLT, '1E+2'), (200, DFLT, DFLT, DFLT, '2E+2'), (333, DFLT, DFLT, DFLT, '3.33E+2'), (4567, DFLT, DFLT, DFLT, '4.567E+3'), (4567.890, DFLT, DFLT, DFLT, '4.56789E+3'), (500, 3, DFLT, DFLT, '5.000E+2'), (4567.890, 8, DFLT, DFLT, '4.56789000E+3'), (99.999, 1, DFLT, DFLT, '1.0E+2'), (4567.890, DFLT, DFLT, True, '+4.56789E+3'), (500, 3, DFLT, True, '+5.000E+2'), (4567.890, 8, DFLT, True, '+4.56789000E+3'), (99.999, 1, DFLT, True, '+1.0E+2'), (500, 3, 2, True, '+5.000E+02'), (4567.890, 8, 3, True, '+4.56789000E+003'), (9999999999.999, 1, 1, True, '+1.0E+10'), (-0.1, DFLT, DFLT, DFLT, '-1E-1'), (-0.01, DFLT, DFLT, DFLT, '-1E-2'), (-0.001, DFLT, DFLT, DFLT, '-1E-3'), (-0.00101, DFLT, DFLT, DFLT, '-1.01E-3'), (-0.123456789012, DFLT, DFLT, DFLT, '-1.23456789012E-1'), (-1234567.89012, DFLT, DFLT, DFLT, '-1.23456789012E+6'), (-1, DFLT, DFLT, DFLT, '-1E+0'), (-20, DFLT, DFLT, DFLT, '-2E+1'), (-100, DFLT, DFLT, DFLT, '-1E+2'), (-200, DFLT, DFLT, DFLT, '-2E+2'), (-333, DFLT, DFLT, DFLT, '-3.33E+2'), (-4567, DFLT, DFLT, DFLT, '-4.567E+3'), (-4567.890, DFLT, DFLT, DFLT, '-4.56789E+3'), (-500, 3, DFLT, DFLT, '-5.000E+2'), (-4567.890, 8, DFLT, DFLT, '-4.56789000E+3'), (-99.999, 1, DFLT, DFLT, '-1.0E+2'), (-4567.890, DFLT, DFLT, True, '-4.56789E+3'), (-500, 3, DFLT, True, '-5.000E+2'), (-4567.890, 8, DFLT, True, '-4.56789000E+3'), (-99.999, 1, DFLT, True, '-1.0E+2'), (-500, 3, 2, True, '-5.000E+02'), (-4567.890, 8, 3, True, '-4.56789000E+003'), (-9999999999.999, 1, 1, True, '-1.0E+10'), ] ) def test_to_scientific_string(num, frac_length, exp_length, sign_always, ref): """ Test _to_scientific function behavior """ fp = functools.partial obj = putil.eng.to_scientific_string obj = obj if isdflt(frac_length) else fp(obj, frac_length=frac_length) obj = obj if isdflt(exp_length) else fp(obj, exp_length=exp_length) obj = obj if isdflt(sign_always) else fp(obj, sign_always=sign_always) assert obj(num) == ref CVECTOR = [-1+2j, 3+4j, 5+6j, 7+8j, 9-10j, 11+12j, -13+14j, 15678-16j] @pytest.mark.parametrize( 'vector, args, ref, header', [ ( None, DFLT, 'None', '' ), ( [1, 2, 3, 4, 5, 6, 7, 8], DFLT, '[ 1, 2, 3, 4, 5, 6, 7, 8 ]', '' ), ( [1, 2, 3, 4, 5, 6, 7, 8], dict(indent=20), '[ 1, 2, 3, 4, 5, 6, 7, 8 ]', '' ), ( [1, 2, 3, 4, 5, 6, 7, 8], dict(indent=20), '[ 1, 2, 3, 4, 5, 6, 7, 8 ]', '' ), ( [1, 2, 3, 4, 5, 6, 7, 
8], dict(limit=True), '[ 1, 2, 3, ..., 6, 7, 8 ]', '' ), ( [1, 2, 3, 4, 5, 6, 7, 8], dict(limit=True, indent=20), '[ 1, 2, 3, ..., 6, 7, 8 ]', '' ), # Float and integer item #ref = ( ( [1e-3, 20e-6, 300e+6, 4e-12, 5.25e3, -6e-9, 700, 0.8], dict(eng=True), '[ 1.000m, 20.000u, 300.000M, 4.000p,' ' 5.250k, -6.000n, 700.000 , 800.000m ]', '' ), ( [1e-3, 20e-6, 300e+6, 4e-12, 5.25e3, -6e-9, 700, 0.8], dict(eng=True, indent=20), '[ 1.000m, 20.000u, 300.000M, 4.000p,' ' 5.250k, -6.000n, 700.000 , 800.000m ]', '' ), ( [1e-3, 20e-6, 300e+6, 4e-12, 5.25e3, -6e-9, 700, 0.8], dict(limit=True, eng=True), '[ 1.000m, 20.000u, 300.000M,' ' ...,' ' -6.000n, 700.000 , 800.000m ]', '' ), ( [1e-3, 20e-6, 300e+6, 4e-12, 5.25e3, -6e-9, 700, 0.8], dict(limit=True, eng=True, indent=20), '[ 1.000m, 20.000u, 300.000M,' ' ...,' ' -6.000n, 700.000 , 800.000m ]', '' ), ( [1e-3, 20e-6, 300e+6, 4e-12, 5.25e3, -6e-9, 700, 0.8], dict(eng=True, frac_length=1), '[ 1.0m, 20.0u, 300.0M, 4.0p,' ' 5.3k, -6.0n, 700.0 , 800.0m ]', '' ), ( [1e-3, 20e-6, 300e+6, 4e-12, 5.25e3, -6e-9, 700, 0.8], dict(eng=True, frac_length=1, indent=20), '[ 1.0m, 20.0u, 300.0M, 4.0p,' ' 5.3k, -6.0n, 700.0 , 800.0m ]', '' ), ( [1e-3, 20e-6, 300e+6, 4e-12, 5.25e3, -6e-9, 700, 0.8], dict(limit=True, eng=True, frac_length=1), '[ 1.0m, 20.0u, 300.0M, ..., -6.0n, 700.0 , 800.0m ]', '' ), ( [1e-3, 20e-6, 300e+6, 4e-12, 5.25e3, -6e-9, 700, 0.8], dict(limit=True, indent=20, eng=True, frac_length=1), '[ 1.0m, 20.0u, 300.0M, ..., -6.0n, 700.0 , 800.0m ]', '' ), ( [1, 2, 3, 4, 5, 6, 7, 8], dict(width=8), #12345678 '[ 1, 2,\n' ' 3, 4,\n' ' 5, 6,\n' ' 7, 8 ]', '' ), ( [1, 2, 3, 4, 5, 6, 7, 8], dict(width=10), '[ 1, 2, 3,\n' ' 4, 5, 6,\n' ' 7, 8 ]', '' ), ( [1e-3, 20e-6, 300e+6, 4e-12, 5.25e3, -6e-9, 700, 8, 9], dict(width=20, eng=True, frac_length=0), '[ 1m, 20u,\n' ' 300M, 4p,\n' ' 5k, -6n,\n' ' 700 , 8 ,\n' ' 9 ]', '' ), ( [1e-3, 20e-6, 300e+6, 4e-12, 5.25e3, -6e-9, 700, 0.8], dict(width=30, eng=True, frac_length=1), '[ 1.0m, 20.0u, 300.0M,\n' ' 4.0p, 5.3k, -6.0n,\n' ' 700.0 , 800.0m ]', '' ), ( [1e-3, 20e-6, 300e+6, 4e-12, 5.25e3, -6e-9, 700, 8, 9], dict(width=20, eng=True, frac_length=0, limit=True), '[ 1m,\n' ' 20u,\n' ' 300M,\n' ' ...\n' ' 700 ,\n' ' 8 ,\n' ' 9 ]', '' ), ( [1e-3, 20e-6, 300e+6, 4e-12, 5.25e3, -6e-9, 700, 8, 9], dict(width=30, eng=True, frac_length=1, limit=True), '[ 1.0m, 20.0u, 300.0M,\n' ' ...\n' ' 700.0 , 8.0 , 9.0 ]', '' ), ( [1e-3, 20e-6, 300e+6, 4e-12, 5.25e3, -6e-9, 700, 8, 9], dict(width=30, eng=True, frac_length=1, limit=True, indent=8), 'Vector: [ 1.0m, 20.0u, 300.0M,\n' ' ...\n' ' 700.0 , 8.0 , 9.0 ]', 'Vector: ' ), ( [1e-3, 20e-6, 300e+6, 4e-12, 5.25e3, -6e-9, 700, 0.8], dict(width=30, eng=True, frac_length=1, indent=8), 'Vector: [ 1.0m, 20.0u, 300.0M,\n' ' 4.0p, 5.3k, -6.0n,\n' ' 700.0 , 800.0m ]', 'Vector: ' ), ( [ 1.23456789, 2.45678901, 3.45678901, 4.56789012, 5.67890123, 6.78901234, 7.89012345 ], dict(limit=True, width=80-22, indent=22), 'Independent variable: [ 1.23456789, 2.45678901, 3.45678901,\n' ' ...\n' ' 5.67890123, 6.78901234, 7.89012345 ]', 'Independent variable: ' ), ( [ 1.23456789, 2.45678901, 3.45678901, 4.56789012, 5.67890123, 6.78901234, 7.89012345 ], dict(width=49, indent=17), 'Independent var: [ 1.23456789, 2.45678901, 3.45678901, ' '4.56789012,\n' ' 5.67890123, 6.78901234, 7.89012345 ]', 'Independent var: ' ), # Complex items ( CVECTOR, DFLT, '[ -1+2j, 3+4j, 5+6j, 7+8j, 9-10j, 11+12j, -13+14j, 15678-16j ]', '' ), ( CVECTOR, dict(indent=20), '[ -1+2j, 3+4j, 5+6j, 7+8j, 9-10j, 11+12j, -13+14j, 15678-16j ]', '' 
), ( CVECTOR, dict(limit=True), '[ -1+2j, 3+4j, 5+6j, ..., 11+12j, -13+14j, 15678-16j ]', '' ), ( CVECTOR, dict(limit=True, indent=20), '[ -1+2j, 3+4j, 5+6j, ..., 11+12j, -13+14j, 15678-16j ]', '' ), ( CVECTOR, dict(eng=True), '[ -1.000 + 2.000 j, 3.000 + 4.000 j,' ' 5.000 + 6.000 j,' ' 7.000 + 8.000 j, 9.000 - 10.000 j,' ' 11.000 + 12.000 j,' ' -13.000 + 14.000 j, 15.678k- 16.000 j ]', '' ), ( CVECTOR, dict(eng=True, indent=20), '[ -1.000 + 2.000 j, 3.000 + 4.000 j,' ' 5.000 + 6.000 j,' ' 7.000 + 8.000 j, 9.000 - 10.000 j,' ' 11.000 + 12.000 j,' ' -13.000 + 14.000 j, 15.678k- 16.000 j ]', '' ), ( CVECTOR, dict(limit=True, eng=True), '[ -1.000 + 2.000 j, 3.000 + 4.000 j,' ' 5.000 + 6.000 j,' ' ..., 11.000 + 12.000 j, -13.000 + 14.000 j,' ' 15.678k- 16.000 j ]', '' ), ( CVECTOR, dict(limit=True, eng=True, indent=20), '[ -1.000 + 2.000 j, 3.000 + 4.000 j,' ' 5.000 + 6.000 j,' ' ..., 11.000 + 12.000 j, -13.000 + 14.000 j,' ' 15.678k- 16.000 j ]', '' ), ( CVECTOR, dict(eng=True, frac_length=1), '[ -1.0 + 2.0 j, 3.0 + 4.0 j, 5.0 + 6.0 j,' ' 7.0 + 8.0 j, 9.0 - 10.0 j, 11.0 + 12.0 j,' ' -13.0 + 14.0 j, 15.7k- 16.0 j ]', '' ), ( CVECTOR, dict(eng=True, frac_length=1, indent=20), '[ -1.0 + 2.0 j, 3.0 + 4.0 j, 5.0 + 6.0 j,' ' 7.0 + 8.0 j, 9.0 - 10.0 j, 11.0 + 12.0 j,' ' -13.0 + 14.0 j, 15.7k- 16.0 j ]', '' ), ( CVECTOR, dict(limit=True, eng=True, frac_length=1), '[ -1.0 + 2.0 j, 3.0 + 4.0 j, 5.0 + 6.0 j,' ' ..., 11.0 + 12.0 j, -13.0 + 14.0 j, 15.7k- 16.0 j ]', '' ), ( CVECTOR, dict(limit=True, eng=True, frac_length=1, indent=20), '[ -1.0 + 2.0 j, 3.0 + 4.0 j, 5.0 + 6.0 j,' ' ..., 11.0 + 12.0 j, -13.0 + 14.0 j, 15.7k- 16.0 j ]', '' ), ( CVECTOR, dict(width=22), '[ -1+2j, 3+4j, 5+6j,\n' ' 7+8j, 9-10j, 11+12j,\n' ' -13+14j, 15678-16j ]', '' ), ( CVECTOR, dict(width=20), '[ -1+2j, 3+4j, 5+6j,\n' ' 7+8j, 9-10j,\n' ' 11+12j, -13+14j,\n' ' 15678-16j ]', '' ), ( CVECTOR, dict(width=29, eng=True, frac_length=0), '[ -1 + 2 j, 3 + 4 j,\n' ' 5 + 6 j, 7 + 8 j,\n' ' 9 - 10 j, 11 + 12 j,\n' ' -13 + 14 j, 16k- 16 j ]', '' ), ( CVECTOR, dict(width=37, eng=True, frac_length=1), '[ -1.0 + 2.0 j, 3.0 + 4.0 j,\n' ' 5.0 + 6.0 j, 7.0 + 8.0 j,\n' ' 9.0 - 10.0 j, 11.0 + 12.0 j,\n' ' -13.0 + 14.0 j, 15.7k- 16.0 j ]', '' ), ( CVECTOR, dict(width=16, eng=True, frac_length=0), '[ -1 + 2 j,\n' ' 3 + 4 j,\n' ' 5 + 6 j,\n' ' 7 + 8 j,\n' ' 9 - 10 j,\n' ' 11 + 12 j,\n' ' -13 + 14 j,\n' ' 16k- 16 j ]', '' ), ( CVECTOR, dict(width=16, eng=True, frac_length=0, limit=True), '[ -1 + 2 j,\n' ' 3 + 4 j,\n' ' 5 + 6 j,\n' ' ...\n' ' 11 + 12 j,\n' ' -13 + 14 j,\n' ' 16k- 16 j ]', '' ), ( CVECTOR, dict(width=56, eng=True, frac_length=1, limit=True), '[ -1.0 + 2.0 j, 3.0 + 4.0 j, 5.0 + 6.0 j,\n' ' ...\n' ' 11.0 + 12.0 j, -13.0 + 14.0 j, 15.7k- 16.0 j ]', '' ), ( CVECTOR, dict(width=64, eng=True, frac_length=1, limit=True, indent=8), 'Vector: [ -1.0 + 2.0 j, 3.0 + 4.0 j, 5.0 + 6.0 j,\n' ' ...\n' ' 11.0 + 12.0 j, -13.0 + 14.0 j, 15.7k- 16.0 j ]', 'Vector: ' ), ( CVECTOR, dict(width=20, indent=8), 'Vector: [ -1+2j, 3+4j, 5+6j,\n' ' 7+8j, 9-10j,\n' ' 11+12j, -13+14j,\n' ' 15678-16j ]', 'Vector: ' ), ( CVECTOR, dict(width=30, indent=8, limit=True), 'Vector: [ -1+2j, 3+4j, 5+6j,\n' ' ...\n' ' 11+12j, -13+14j, 15678-16j ]', 'Vector: ' ), ( CVECTOR, dict(width=20, indent=8, limit=True), 'Vector: [ -1+2j,\n' ' 3+4j,\n' ' 5+6j,\n' ' ...\n' ' 11+12j,\n' ' -13+14j,\n' ' 15678-16j ]', 'Vector: ' ), ( array( [ -0.10081675027325637-0.06910517142735251j, 0.018754229185649937+0.017142783560861786j, 0+18j ] ), DFLT, '[ -0.100816750273-0.0691051714274j, ' 
'0.0187542291856+0.0171427835609j, 18j ]', '' ), ( array( [ -0.10081675027325637-0.06910517142735251j, 0.018754229185649937+0.017142783560861786j, 0+18j ] ), dict(width=60, limit=True, indent=20), 'Dependent variable: [ -0.100816750273-0.0691051714274j,\n' ' 0.0187542291856+0.0171427835609j, 18j ]', 'Dependent variable: ' ), ( array( [ -0.10081675027325637-0.06910517142735251j, 0.018754229185649937+0.017142783560861786j, 0+18j, 0.118754229185649937+0.117142783560861786j, 0.218754229185649937+0.217142783560861786j, 0+28j, 10+2j, ] ), dict(width=60), '[ -0.100816750273-0.0691051714274j,\n' ' 0.0187542291856+0.0171427835609j, 18j,\n' ' 0.118754229186+0.117142783561j,\n' ' 0.218754229186+0.217142783561j, 28j, 10+2j ]', '' ), ( array( [ -0.10081675027325637-0.06910517142735251j, 0.018754229185649937+0.017142783560861786j, 0+18j, 0.118754229185649937+0.117142783560861786j, 0.218754229185649937+0.217142783560861786j, 0+28j, 10+2j, ] ), dict(width=60, limit=True), '[ -0.100816750273-0.0691051714274j,\n' ' 0.0187542291856+0.0171427835609j,\n' ' 18j,\n' ' ...\n' ' 0.218754229186+0.217142783561j,\n' ' 28j,\n' ' 10+2j ]', '' ), ] ) def test_pprint_vector(vector, args, ref, header): """ Test pprint_vector function behavior """ obj = putil.eng.pprint_vector obj = obj if isdflt(args) else functools.partial(obj, **args) CS(header+obj(vector), ref) @pytest.mark.parametrize( 'args', [ dict( vector=[1e-3, 20e-6, 300e+6, 4e-12, 5.25e3, -6e-9, 700, 8, 9], width=5, eng=True, frac_length=1, limit=True ), dict( vector=[-1+2j, 3, 5+6j, 7+8j, 9-10j, 11+12j, -13+14j, 15678-16j], width=8, limit=True ) ] ) @pytest.mark.eng def test_pprint_vector_exceptions(args): """ Test pprint_vector function exceptions """ msg = 'Argument `width` is too small' AE(putil.eng.pprint_vector, ValueError, msg, **args) @pytest.mark.parametrize( 'num, dec, ref', [ (None, DFLT, None), (1.3333, 2, 1.33), (1.5555E-12, 2, 1.56E-12), (3, 2, 3), (array([1.3333, 2.666666]), 2, array([1.33, 2.67])), (array([1.3333E-12, 2.666666E-12]), 2, array([1.33E-12, 2.67E-12])), (array([1, 3]), 2, array([1, 3])), ] ) def test_round_mantissa(num, dec, ref): """ Test round_mantissa function behavior """ obj = putil.eng.round_mantissa obj = obj if isdflt(dec) else functools.partial(obj, decimals=dec) test = obj(num) == ref assert test.all() if isinstance(num, ndarray) else test
pmacosta/putil
tests/test_eng.py
Python
mit
40,156
import logging
from abc import ABCMeta, abstractmethod
from collections import deque
from typing import List, Union, Iterable, Sequence

log = logging.getLogger(__name__)


class NoSensorsFoundException(RuntimeError):
    pass


class Controller(metaclass=ABCMeta):
    @abstractmethod
    def run(self):
        raise NotImplementedError

    @abstractmethod
    def enable(self):
        raise NotImplementedError

    @abstractmethod
    def disable(self):
        raise NotImplementedError

    @abstractmethod
    def valid(self) -> bool:
        raise NotImplementedError


class InputDevice(metaclass=ABCMeta):
    """
    Abstract class for input devices.
    """

    def __init__(self, name):
        self.name = name
        self.values = ValueBuffer(name, 128)

    @abstractmethod
    def get_value(self) -> float:
        raise NotImplementedError


class OutputDevice(metaclass=ABCMeta):
    """
    Abstract class for output devices.
    """

    def __init__(self, name):
        self.name = name
        self.values = ValueBuffer(name, 128)

    def set_value(self, value: Union[int, float]):
        self.values.update(value)

    @abstractmethod
    def apply(self):
        raise NotImplementedError

    @abstractmethod
    def enable(self):
        raise NotImplementedError

    @abstractmethod
    def disable(self):
        raise NotImplementedError


class PassthroughController(Controller):
    # Note: the original signature used `inputs=Sequence[InputDevice]`,
    # which assigns the typing object as a default value; these are meant
    # to be annotations.
    def __init__(self, inputs: Sequence[InputDevice], outputs: Sequence[OutputDevice], speeds=None):
        self.inputs = list(inputs)
        self.outputs = list(outputs)

    def run(self):
        for idx, input_reader in enumerate(self.inputs):
            output = self.outputs[idx]
            output.name = input_reader.name
            output.values.name = input_reader.name
            output.set_value(input_reader.get_value())
            output.apply()
        log.debug('ran loop')

    def apply_candidates(self):
        return self.outputs

    def enable(self):
        for output_dev in self.outputs:
            output_dev.enable()

    def disable(self):
        for output_dev in self.outputs:
            output_dev.disable()

    def valid(self) -> bool:
        return bool(self.inputs and self.outputs) and len(self.inputs) == len(self.outputs)


class DummyInput(InputDevice):
    def __init__(self):
        super().__init__('dummy')
        self.temp = 0

    def get_value(self):
        return self.temp

    def set_value(self, value):
        self.temp = value


class DummyOutput(OutputDevice):
    def __init__(self):
        super().__init__('dummy')
        self.speed = None
        self.enabled = False

    def apply(self):
        if self.enabled:
            self.speed = round(self.values.mean())

    def enable(self):
        self.enabled = True

    def disable(self):
        self.enabled = False


def mean(seq: Iterable) -> float:
    if not isinstance(seq, Iterable):
        raise ValueError('provided sequence MUST be iterable')
    if not isinstance(seq, Sequence):
        seq = list(seq)
    if len(seq) == 1:
        return float(seq[0])
    if len(seq) == 0:
        raise ValueError('sequence must have at least one value.')
    return sum(seq) / len(seq)


def lerp(value: Union[float, int], input_min: Union[float, int], input_max: Union[float, int],
         output_min: Union[float, int], output_max: Union[float, int]) -> float:
    if value <= input_min:
        return float(output_min)
    if value >= input_max:
        return float(output_max)
    return (output_min * (input_max - value) + output_max * (value - input_min)) / (input_max - input_min)


def lerp_range(seq: Iterable[Union[float, int]], input_min, input_max, output_min, output_max) -> List[float]:
    return [lerp(val, input_min, input_max, output_min, output_max) for val in seq]


class ValueBuffer:
    def __init__(self, name, default_value=0.0):
        self.name = name
        self.buffer = deque(maxlen=32)
        self._default_value = default_value

    def update(self, value: float):
        self.buffer.append(value)

    def mean(self) -> float:
        try:
            return mean(self.buffer)
        except (ValueError, ZeroDivisionError):
            return self._default_value
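# Usage sketch (added for illustration; not part of the original module). It
# wires a DummyInput to a DummyOutput through PassthroughController using only
# the classes defined above; the sample reading of 42 is arbitrary.
if __name__ == '__main__':
    sensor = DummyInput()
    fan = DummyOutput()
    controller = PassthroughController(inputs=[sensor], outputs=[fan])
    controller.enable()        # enables every output device
    sensor.set_value(42)
    controller.run()           # copies the reading into the output buffer
    assert fan.speed == 42     # DummyOutput.apply() rounds the buffered mean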
vrga/pyFanController
pyfc/common.py
Python
mit
4,243
from core.himesis import Himesis, HimesisPreConditionPatternLHS import uuid class HUnitDaughter2Woman_ConnectedLHS(HimesisPreConditionPatternLHS): def __init__(self): """ Creates the himesis graph representing the AToM3 model HUnitDaughter2Woman_ConnectedLHS """ # Flag this instance as compiled now self.is_compiled = True super(HUnitDaughter2Woman_ConnectedLHS, self).__init__(name='HUnitDaughter2Woman_ConnectedLHS', num_nodes=0, edges=[]) # Add the edges self.add_edges([]) # Set the graph attributes self["mm__"] = ['MT_pre__FamiliesToPersonsMM', 'MoTifRule'] self["MT_constraint__"] = """return True""" self["name"] = """""" self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'HUnitDaughter2Woman_ConnectedLHS') self["equations"] = [] # Set the node attributes # match class Family(Fam) node self.add_node() self.vs[0]["MT_pre__attr1"] = """return True""" self.vs[0]["MT_label__"] = """1""" self.vs[0]["mm__"] = """MT_pre__Family""" self.vs[0]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Fam') # match class Child(Child) node self.add_node() self.vs[1]["MT_pre__attr1"] = """return True""" self.vs[1]["MT_label__"] = """2""" self.vs[1]["mm__"] = """MT_pre__Child""" self.vs[1]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Child') # match association null--daughters-->nullnode self.add_node() self.vs[2]["MT_pre__attr1"] = """return attr_value == "daughters" """ self.vs[2]["MT_label__"] = """3""" self.vs[2]["mm__"] = """MT_pre__directLink_S""" self.vs[2]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Famassoc2Child') # Add the edges self.add_edges([ (0,2), # match class null(Fam) -> association daughters (2,1), # association null -> match class null(Child) ]) # define evaluation methods for each match class. def eval_attr11(self, attr_value, this): return True def eval_attr12(self, attr_value, this): return True # define evaluation methods for each match association. def eval_attr13(self, attr_value, this): return attr_value == "daughters" def constraint(self, PreNode, graph): return True
levilucio/SyVOLT
ExFamToPerson/contracts/unit/HUnitDaughter2Woman_ConnectedLHS.py
Python
mit
2,107
#David Hickox
#Feb 15 17
#Squares (counter update)
#prints a table of numbers and their squares, up to a user-given limit

#variables
# limit, where it stops
# num, sentry variable

#creates array if need be
#array = [[0 for x in range(h)] for y in range(w)]

#imports locale for currency handling because i hate string formatting (this takes the place of "$%.2f" %)
import locale
locale.setlocale( locale.LC_ALL, '' )
#use locale.currency() for currency formatting

print("Welcome to the Squares Program\n")

limit = float(input("How many squares do you want? "))
num = 1

print("number\tnumber^2")

while num <= limit:
    #prints the number and its square, separated by a tab
    print(num, num**2, sep='\t')
    #increments the counter
    num += 1

input("\nPress Enter to Exit")
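# Side note (illustration only; the script above imports locale but never
# calls it): with the locale configured as above, locale.currency() replaces
# manual "$%.2f" % value formatting, e.g.
#
#   locale.currency(1234.5)                 # '$1234.50' under an en_US locale
#   locale.currency(1234.5, grouping=True)  # '$1,234.50'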
dwhickox/NCHS-Programming-1-Python-Programs
Chap 3/Squares.py
Python
mit
778
from datetime import datetime from zipfile import ZipFile from io import BytesIO from lxml import etree from docxgen import * from docxgen import nsmap from . import check_tag def test_init(): doc = Document() check_tag(doc.doc, ['document', 'body']) check_tag(doc.body, ['body']) def test_save(): doc = Document() tmp = BytesIO() doc.save(tmp) with ZipFile(tmp) as zippy: assert(zippy.testzip() is None) assert(set(zippy.namelist()) == set([ '[Content_Types].xml', '_rels/.rels', 'docProps/app.xml', 'word/fontTable.xml', 'word/numbering.xml', 'word/settings.xml', 'word/styles.xml', 'word/stylesWithEffects.xml', 'word/webSettings.xml', 'word/theme/theme1.xml', 'word/_rels/document.xml.rels', 'word/document.xml', 'docProps/core.xml' ])) zxf = zippy.open('word/document.xml') root = etree.parse(zxf) check_tag(root, 'document body'.split()) def test_load(): # assume save works d = Document() tmp = BytesIO() d.save(tmp) doc = Document.load(tmp) check_tag(doc.doc, 'document body'.split()) check_tag(doc.body, 'body'.split()) def test_dumps(): doc = Document() assert doc.dumps() def test_core_props(): doc = Document() attrs = dict(lastModifiedBy='Joe Smith', keywords=['egg', 'spam'], title='Testing Doc', subject='A boilerplate document', creator='Jill Smith', description='Core properties test.', created=datetime.now() ) doc.update(attrs) core = doc.get_core_props() for key in ('title', 'subject', 'creator', 'description'): attr = core.find('.//dc:%s' % key, namespaces=nsmap) assert attr is not None assert attr.text == attrs[key] attr = core.find('.//cp:keywords', namespaces=nsmap) assert attr is not None assert attr.text == ','.join(attrs['keywords']) attr = core.find('.//dcterms:created', namespaces=nsmap) assert attr is not None assert datetime.strptime(attr.text, '%Y-%m-%dT%H:%M:%SZ') == datetime(*attrs['created'].timetuple()[:6])
kunxi/docxgen
tests/test_docx.py
Python
mit
2,167
# -*- coding: utf-8 -*-
"""
Store the data from a JSON file into the database.
"""
import requests
import json
import os

from word.models import (Word, EnDefinition, CnDefinition,
                         Audio, Pronunciation, Example, Note)

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
print(BASE_DIR)


def process_data(data):
    data = data['data']['reviews']
    print('len', len(data))
    for item in data:
        content = item['content']
        print('uk_audio', item['uk_audio'])
        print('us_audio', item['us_audio'])
        obj = Word.objects.create(content=content)
        # download each pronunciation file, keeping None when a URL is
        # missing or the download fails
        uk_audio_filepath = None
        us_audio_filepath = None
        if item['uk_audio']:
            uk_audio_filepath = save_files('uk', item['content'], item['uk_audio'])
        if item['us_audio']:
            us_audio_filepath = save_files('us', item['content'], item['us_audio'])
        Audio.objects.create(word=obj, us_audio=us_audio_filepath,
                             uk_audio=uk_audio_filepath)


def save_files(tp, word, url):
    filepath = '%s/media/audio/%s/%s.mp3' % (BASE_DIR, tp, word)
    with open(filepath, 'wb') as handle:
        response = requests.get(url, stream=True)
        if response.ok:
            # stream the audio file to disk in 1 KiB chunks
            for block in response.iter_content(1024):
                handle.write(block)
            return filepath
    return None


def serialize_data(file_name):
    """Read the JSON file and hand its contents to process_data()."""
    with open(file_name, 'r') as f:
        data = json.loads(f.read())
        process_data(data)


if __name__ == "__main__":
    serialize_data("data1.json")
HideMode/ShanBay
data/script.py
Python
mit
1,642
from django.conf import settings
from django.contrib.sites.shortcuts import get_current_site
from django.core.management import call_command
from django.db import models, connections, transaction
from django.urls import reverse
from django_tenants.clone import CloneSchema

from .postgresql_backend.base import _check_schema_name
from .signals import post_schema_sync, schema_needs_to_be_sync
from .utils import get_creation_fakes_migrations, get_tenant_base_schema
from .utils import schema_exists, get_tenant_domain_model, get_public_schema_name, get_tenant_database_alias


class TenantMixin(models.Model):
    """
    All tenant models must inherit this class.
    """

    auto_drop_schema = False
    """
    USE THIS WITH CAUTION!
    Set this flag to true on a parent class if you want the schema to be
    automatically deleted if the tenant row gets deleted.
    """

    auto_create_schema = True
    """
    Set this flag to false on a parent class if you don't want the schema
    to be automatically created upon save.
    """

    schema_name = models.CharField(max_length=63, unique=True, db_index=True,
                                   validators=[_check_schema_name])

    domain_url = None
    """
    Leave this as None. Stores the current domain url so it can be used in the logs
    """

    domain_subfolder = None
    """
    Leave this as None. Stores the subfolder if subfolder routing was used
    """

    _previous_tenant = []

    class Meta:
        abstract = True

    def __enter__(self):
        """
        Syntax sugar which helps in celery tasks, cron jobs, and other scripts

        Usage:
            with Tenant.objects.get(schema_name='test') as tenant:
                # run some code in tenant test
            # run some code in previous tenant (public probably)
        """
        connection = connections[get_tenant_database_alias()]
        self._previous_tenant.append(connection.tenant)
        self.activate()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        connection = connections[get_tenant_database_alias()]
        connection.set_tenant(self._previous_tenant.pop())

    def activate(self):
        """
        Syntax sugar that helps at django shell with fast tenant changing

        Usage:
            Tenant.objects.get(schema_name='test').activate()
        """
        connection = connections[get_tenant_database_alias()]
        connection.set_tenant(self)

    @classmethod
    def deactivate(cls):
        """
        Syntax sugar, return to public schema

        Usage:
            test_tenant.deactivate()
            # or simpler
            Tenant.deactivate()
        """
        connection = connections[get_tenant_database_alias()]
        connection.set_schema_to_public()

    def save(self, verbosity=1, *args, **kwargs):
        connection = connections[get_tenant_database_alias()]
        is_new = self.pk is None
        has_schema = hasattr(connection, 'schema_name')
        if has_schema and is_new and connection.schema_name != get_public_schema_name():
            raise Exception("Can't create tenant outside the public schema. "
                            "Current schema is %s." % connection.schema_name)
        elif has_schema and not is_new and connection.schema_name not in (self.schema_name, get_public_schema_name()):
            raise Exception("Can't update tenant outside its own schema or "
                            "the public schema. Current schema is %s."
                            % connection.schema_name)

        super().save(*args, **kwargs)

        if has_schema and is_new and self.auto_create_schema:
            try:
                self.create_schema(check_if_exists=True, verbosity=verbosity)
                post_schema_sync.send(sender=TenantMixin, tenant=self.serializable_fields())
            except Exception:
                # We failed creating the tenant, delete what we created and
                # re-raise the exception
                self.delete(force_drop=True)
                raise
        elif is_new:
            # although we are not using the schema functions directly, the signal might be registered by a listener
            schema_needs_to_be_sync.send(sender=TenantMixin, tenant=self.serializable_fields())
        elif not is_new and self.auto_create_schema and not schema_exists(self.schema_name):
            # Create schemas for existing models, deleting only the schema on failure
            try:
                self.create_schema(check_if_exists=True, verbosity=verbosity)
                post_schema_sync.send(sender=TenantMixin, tenant=self.serializable_fields())
            except Exception:
                # We failed creating the schema, delete what we created and
                # re-raise the exception
                self._drop_schema()
                raise

    def serializable_fields(self):
        """ in certain cases the user model isn't serializable so you may want to only send the id """
        return self

    def _drop_schema(self, force_drop=False):
        """ Drops the schema"""
        connection = connections[get_tenant_database_alias()]
        has_schema = hasattr(connection, 'schema_name')
        if has_schema and connection.schema_name not in (self.schema_name, get_public_schema_name()):
            raise Exception("Can't delete tenant outside its own schema or "
                            "the public schema. Current schema is %s."
                            % connection.schema_name)

        if has_schema and schema_exists(self.schema_name) and (self.auto_drop_schema or force_drop):
            self.pre_drop()
            cursor = connection.cursor()
            cursor.execute('DROP SCHEMA "%s" CASCADE' % self.schema_name)

    def pre_drop(self):
        """
        This is a routine which you could override to backup the tenant schema before dropping.
        :return:
        """

    def delete(self, force_drop=False, *args, **kwargs):
        """
        Deletes this row. Drops the tenant's schema if the attribute
        auto_drop_schema is set to True.
        """
        self._drop_schema(force_drop)
        super().delete(*args, **kwargs)

    def create_schema(self, check_if_exists=False, sync_schema=True, verbosity=1):
        """
        Creates the schema 'schema_name' for this tenant. Optionally checks if
        the schema already exists before creating it. Returns true if the
        schema was created, false otherwise.
        """
        # safety check
        connection = connections[get_tenant_database_alias()]
        _check_schema_name(self.schema_name)
        cursor = connection.cursor()

        if check_if_exists and schema_exists(self.schema_name):
            return False

        fake_migrations = get_creation_fakes_migrations()

        if sync_schema:
            if fake_migrations:
                # copy tables and data from provided model schema
                base_schema = get_tenant_base_schema()
                clone_schema = CloneSchema()
                clone_schema.clone_schema(base_schema, self.schema_name)
                call_command('migrate_schemas',
                             tenant=True,
                             fake=True,
                             schema_name=self.schema_name,
                             interactive=False,
                             verbosity=verbosity)
            else:
                # create the schema
                cursor.execute('CREATE SCHEMA "%s"' % self.schema_name)
                call_command('migrate_schemas',
                             tenant=True,
                             schema_name=self.schema_name,
                             interactive=False,
                             verbosity=verbosity)

        connection.set_schema_to_public()

    def get_primary_domain(self):
        """
        Returns the primary domain of the tenant
        """
        try:
            domain = self.domains.get(is_primary=True)
            return domain
        except get_tenant_domain_model().DoesNotExist:
            return None

    def reverse(self, request, view_name):
        """
        Returns the URL of this tenant.
        """
        http_type = 'https://' if request.is_secure() else 'http://'

        domain = get_current_site(request).domain

        url = ''.join((http_type, self.schema_name, '.', domain, reverse(view_name)))

        return url

    def get_tenant_type(self):
        """
        Get the type of tenant. Will only work for multi type tenants
        :return: str
        """
        return getattr(self, settings.MULTI_TYPE_DATABASE_FIELD)


class DomainMixin(models.Model):
    """
    All models that store the domains must inherit this class
    """

    domain = models.CharField(max_length=253, unique=True, db_index=True)
    tenant = models.ForeignKey(settings.TENANT_MODEL, db_index=True,
                               related_name='domains', on_delete=models.CASCADE)

    # Set this to true if this is the primary domain
    is_primary = models.BooleanField(default=True, db_index=True)

    @transaction.atomic
    def save(self, *args, **kwargs):
        # Get all other primary domains with the same tenant
        domain_list = self.__class__.objects.filter(tenant=self.tenant, is_primary=True).exclude(pk=self.pk)
        # If we have no primary domain yet, set as primary domain by default
        self.is_primary = self.is_primary or (not domain_list.exists())
        if self.is_primary:
            # Remove primary status of existing domains for tenant
            domain_list.update(is_primary=False)
        super().save(*args, **kwargs)

    class Meta:
        abstract = True
tomturner/django-tenants
django_tenants/models.py
Python
mit
9,732
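# Usage sketch for the django_tenants/models.py mixins above (assumed client
# code, not part of django-tenants itself): a concrete tenant/domain pair plus
# the context-manager switching documented in TenantMixin.__enter__. The extra
# `name` field is illustrative.
#
#   class Client(TenantMixin):
#       name = models.CharField(max_length=100)
#
#   class Domain(DomainMixin):
#       pass
#
#   with Client.objects.get(schema_name='test') as tenant:
#       ...  # ORM queries in this block run against the 'test' schema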
from nintendo import dauth, switch from anynet import http import pytest CHALLENGE_REQUEST = \ "POST /v6/challenge HTTP/1.1\r\n" \ "Host: 127.0.0.1:12345\r\n" \ "User-Agent: libcurl (nnDauth; 16f4553f-9eee-4e39-9b61-59bc7c99b7c8; SDK 12.3.0.0)\r\n" \ "Accept: */*\r\n" \ "X-Nintendo-PowerState: FA\r\n" \ "Content-Length: 17\r\n" \ "Content-Type: application/x-www-form-urlencoded\r\n\r\n" \ "key_generation=11" TOKEN_REQUEST = \ "POST /v6/device_auth_token HTTP/1.1\r\n" \ "Host: 127.0.0.1:12345\r\n" \ "User-Agent: libcurl (nnDauth; 16f4553f-9eee-4e39-9b61-59bc7c99b7c8; SDK 12.3.0.0)\r\n" \ "Accept: */*\r\n" \ "X-Nintendo-PowerState: FA\r\n" \ "Content-Length: 211\r\n" \ "Content-Type: application/x-www-form-urlencoded\r\n\r\n" \ "challenge=vaNgVZZH7gUse0y3t8Cksuln-TAVtvBmcD-ow59qp0E=&" \ "client_id=8f849b5d34778d8e&ist=false&key_generation=11&" \ "system_version=CusHY#000c0000#C-BynYNPXdQJNBZjx02Hizi8lRUSIKLwPGa5p8EY1uo=&" \ "mac=xRB_6mgnNqrnF9DRsEpYMg" @pytest.mark.anyio async def test_dauth(): async def handler(client, request): if request.path == "/v6/challenge": assert request.encode().decode() == CHALLENGE_REQUEST response = http.HTTPResponse(200) response.json = { "challenge": "vaNgVZZH7gUse0y3t8Cksuln-TAVtvBmcD-ow59qp0E=", "data": "dlL7ZBNSLmYo1hUlKYZiUA==" } return response else: assert request.encode().decode() == TOKEN_REQUEST response = http.HTTPResponse(200) response.json = { "device_auth_token": "device token" } return response async with http.serve(handler, "127.0.0.1", 12345): keys = switch.KeySet() keys["aes_kek_generation_source"] = bytes.fromhex("485d45ad27c07c7e538c0183f90ee845") keys["master_key_0a"] = bytes.fromhex("37eed242e0f2ce6f8371e783c1a6a0ae") client = dauth.DAuthClient(keys) client.set_url("127.0.0.1:12345") client.set_system_version(1200) client.set_context(None) response = await client.device_token(client.BAAS) token = response["device_auth_token"] assert token == "device token"
Kinnay/NintendoClients
tests/test_dauth.py
Python
mit
2,095
#!/usr/bin/env python3 # Copyright (c) 2014-2015 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # # Test addressindex generation and fetching # import binascii from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut from test_framework.test_framework import BitcoinTestFramework from test_framework.test_node import ErrorMatch from test_framework.script import CScript, OP_CHECKSIG, OP_DUP, OP_EQUAL, OP_EQUALVERIFY, OP_HASH160 from test_framework.util import assert_equal, connect_nodes class AddressIndexTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 4 def skip_test_if_missing_module(self): self.skip_if_no_wallet() def setup_network(self): self.add_nodes(self.num_nodes) # Nodes 0/1 are "wallet" nodes self.start_node(0, []) self.start_node(1, ["-addressindex"]) # Nodes 2/3 are used for testing self.start_node(2, ["-addressindex"]) self.start_node(3, ["-addressindex"]) connect_nodes(self.nodes[0], 1) connect_nodes(self.nodes[0], 2) connect_nodes(self.nodes[0], 3) self.sync_all() def run_test(self): self.log.info("Test that settings can't be changed without -reindex...") self.stop_node(1) self.nodes[1].assert_start_raises_init_error(["-addressindex=0"], "You need to rebuild the database using -reindex to change -addressindex", match=ErrorMatch.PARTIAL_REGEX) self.start_node(1, ["-addressindex=0", "-reindex"]) connect_nodes(self.nodes[0], 1) self.sync_all() self.stop_node(1) self.nodes[1].assert_start_raises_init_error(["-addressindex"], "You need to rebuild the database using -reindex to change -addressindex", match=ErrorMatch.PARTIAL_REGEX) self.start_node(1, ["-addressindex", "-reindex"]) connect_nodes(self.nodes[0], 1) self.sync_all() self.log.info("Mining blocks...") mining_address = self.nodes[0].getnewaddress() self.nodes[0].generatetoaddress(105, mining_address) self.sync_all() chain_height = self.nodes[1].getblockcount() assert_equal(chain_height, 105) assert_equal(self.nodes[1].getbalance(), 0) assert_equal(self.nodes[2].getbalance(), 0) # Check that balances are correct balance0 = self.nodes[1].getaddressbalance("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB") balance_mining = self.nodes[1].getaddressbalance(mining_address) assert_equal(balance0["balance"], 0) assert_equal(balance_mining["balance"], 105 * 500 * COIN) assert_equal(balance_mining["balance_immature"], 100 * 500 * COIN) assert_equal(balance_mining["balance_spendable"], 5 * 500 * COIN) # Check p2pkh and p2sh address indexes self.log.info("Testing p2pkh and p2sh address index...") txid0 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 10) self.nodes[0].generate(1) txidb0 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 10) self.nodes[0].generate(1) txid1 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 15) self.nodes[0].generate(1) txidb1 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 15) self.nodes[0].generate(1) txid2 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 20) self.nodes[0].generate(1) txidb2 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 20) self.nodes[0].generate(1) self.sync_all() txids = self.nodes[1].getaddresstxids("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4") assert_equal(len(txids), 3) assert_equal(txids[0], txid0) assert_equal(txids[1], txid1) assert_equal(txids[2], txid2) txidsb = 
self.nodes[1].getaddresstxids("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB") assert_equal(len(txidsb), 3) assert_equal(txidsb[0], txidb0) assert_equal(txidsb[1], txidb1) assert_equal(txidsb[2], txidb2) # Check that limiting by height works self.log.info("Testing querying txids by range of block heights..") height_txids = self.nodes[1].getaddresstxids({ "addresses": ["93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB"], "start": 105, "end": 110 }) assert_equal(len(height_txids), 2) assert_equal(height_txids[0], txidb0) assert_equal(height_txids[1], txidb1) # Check that multiple addresses works multitxids = self.nodes[1].getaddresstxids({"addresses": ["93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", "yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4"]}) assert_equal(len(multitxids), 6) assert_equal(multitxids[0], txid0) assert_equal(multitxids[1], txidb0) assert_equal(multitxids[2], txid1) assert_equal(multitxids[3], txidb1) assert_equal(multitxids[4], txid2) assert_equal(multitxids[5], txidb2) # Check that balances are correct balance0 = self.nodes[1].getaddressbalance("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB") assert_equal(balance0["balance"], 45 * 100000000) # Check that outputs with the same address will only return one txid self.log.info("Testing for txid uniqueness...") addressHash = binascii.unhexlify("FE30B718DCF0BF8A2A686BF1820C073F8B2C3B37") scriptPubKey = CScript([OP_HASH160, addressHash, OP_EQUAL]) unspent = self.nodes[0].listunspent() tx = CTransaction() tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))] tx.vout = [CTxOut(10 * COIN, scriptPubKey), CTxOut(11 * COIN, scriptPubKey)] tx.rehash() signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex()) sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0) self.nodes[0].generate(1) self.sync_all() txidsmany = self.nodes[1].getaddresstxids("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB") assert_equal(len(txidsmany), 4) assert_equal(txidsmany[3], sent_txid) # Check that balances are correct self.log.info("Testing balances...") balance0 = self.nodes[1].getaddressbalance("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB") assert_equal(balance0["balance"], (45 + 21) * 100000000) # Check that balances are correct after spending self.log.info("Testing balances after spending...") privkey2 = "cU4zhap7nPJAWeMFu4j6jLrfPmqakDAzy8zn8Fhb3oEevdm4e5Lc" address2 = "yeMpGzMj3rhtnz48XsfpB8itPHhHtgxLc3" addressHash2 = binascii.unhexlify("C5E4FB9171C22409809A3E8047A29C83886E325D") scriptPubKey2 = CScript([OP_DUP, OP_HASH160, addressHash2, OP_EQUALVERIFY, OP_CHECKSIG]) self.nodes[0].importprivkey(privkey2) unspent = self.nodes[0].listunspent() tx = CTransaction() tx_fee_sat = 1000 tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))] amount = int(unspent[0]["amount"] * 100000000) - tx_fee_sat tx.vout = [CTxOut(amount, scriptPubKey2)] tx.rehash() signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex()) spending_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0) self.nodes[0].generate(1) self.sync_all() balance1 = self.nodes[1].getaddressbalance(address2) assert_equal(balance1["balance"], amount) tx = CTransaction() tx.vin = [CTxIn(COutPoint(int(spending_txid, 16), 0))] send_amount = 1 * 100000000 + 12840 change_amount = amount - send_amount - 10000 tx.vout = [CTxOut(change_amount, scriptPubKey2), CTxOut(send_amount, scriptPubKey)] tx.rehash() signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex()) sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0) self.nodes[0].generate(1) self.sync_all() 
balance2 = self.nodes[1].getaddressbalance(address2) assert_equal(balance2["balance"], change_amount) # Check that deltas are returned correctly deltas = self.nodes[1].getaddressdeltas({"addresses": [address2], "start": 0, "end": 200}) balance3 = 0 for delta in deltas: balance3 += delta["satoshis"] assert_equal(balance3, change_amount) assert_equal(deltas[0]["address"], address2) assert_equal(deltas[0]["blockindex"], 1) # Check that entire range will be queried deltasAll = self.nodes[1].getaddressdeltas({"addresses": [address2]}) assert_equal(len(deltasAll), len(deltas)) # Check that deltas can be returned from range of block heights deltas = self.nodes[1].getaddressdeltas({"addresses": [address2], "start": 113, "end": 113}) assert_equal(len(deltas), 1) # Check that unspent outputs can be queried self.log.info("Testing utxos...") utxos = self.nodes[1].getaddressutxos({"addresses": [address2]}) assert_equal(len(utxos), 1) assert_equal(utxos[0]["satoshis"], change_amount) # Check that indexes will be updated with a reorg self.log.info("Testing reorg...") best_hash = self.nodes[0].getbestblockhash() self.nodes[0].invalidateblock(best_hash) self.nodes[1].invalidateblock(best_hash) self.nodes[2].invalidateblock(best_hash) self.nodes[3].invalidateblock(best_hash) # Allow some time for the reorg to start self.bump_mocktime(2) self.sync_all() balance4 = self.nodes[1].getaddressbalance(address2) assert_equal(balance4, balance1) utxos2 = self.nodes[1].getaddressutxos({"addresses": [address2]}) assert_equal(len(utxos2), 1) assert_equal(utxos2[0]["satoshis"], amount) # Check sorting of utxos self.nodes[2].generate(150) self.nodes[2].sendtoaddress(address2, 50) self.nodes[2].generate(1) self.nodes[2].sendtoaddress(address2, 50) self.nodes[2].generate(1) self.sync_all() utxos3 = self.nodes[1].getaddressutxos({"addresses": [address2]}) assert_equal(len(utxos3), 3) assert_equal(utxos3[0]["height"], 114) assert_equal(utxos3[1]["height"], 264) assert_equal(utxos3[2]["height"], 265) # Check mempool indexing self.log.info("Testing mempool indexing...") privKey3 = "cRyrMvvqi1dmpiCmjmmATqjAwo6Wu7QTjKu1ABMYW5aFG4VXW99K" address3 = "yWB15aAdpeKuSaQHFVJpBDPbNSLZJSnDLA" addressHash3 = binascii.unhexlify("6C186B3A308A77C779A9BB71C3B5A7EC28232A13") scriptPubKey3 = CScript([OP_DUP, OP_HASH160, addressHash3, OP_EQUALVERIFY, OP_CHECKSIG]) # address4 = "2N8oFVB2vThAKury4vnLquW2zVjsYjjAkYQ" scriptPubKey4 = CScript([OP_HASH160, addressHash3, OP_EQUAL]) unspent = self.nodes[2].listunspent() tx = CTransaction() tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))] amount = int(unspent[0]["amount"] * 100000000) - tx_fee_sat tx.vout = [CTxOut(amount, scriptPubKey3)] tx.rehash() signed_tx = self.nodes[2].signrawtransactionwithwallet(tx.serialize().hex()) memtxid1 = self.nodes[2].sendrawtransaction(signed_tx["hex"], 0) self.bump_mocktime(2) tx2 = CTransaction() tx2.vin = [CTxIn(COutPoint(int(unspent[1]["txid"], 16), unspent[1]["vout"]))] amount = int(unspent[1]["amount"] * 100000000) - tx_fee_sat tx2.vout = [ CTxOut(int(amount / 4), scriptPubKey3), CTxOut(int(amount / 4), scriptPubKey3), CTxOut(int(amount / 4), scriptPubKey4), CTxOut(int(amount / 4), scriptPubKey4) ] tx2.rehash() signed_tx2 = self.nodes[2].signrawtransactionwithwallet(tx2.serialize().hex()) memtxid2 = self.nodes[2].sendrawtransaction(signed_tx2["hex"], 0) self.bump_mocktime(2) mempool = self.nodes[2].getaddressmempool({"addresses": [address3]}) assert_equal(len(mempool), 3) assert_equal(mempool[0]["txid"], memtxid1) 
assert_equal(mempool[0]["address"], address3) assert_equal(mempool[0]["index"], 0) assert_equal(mempool[1]["txid"], memtxid2) assert_equal(mempool[1]["index"], 0) assert_equal(mempool[2]["txid"], memtxid2) assert_equal(mempool[2]["index"], 1) self.nodes[2].generate(1) self.sync_all() mempool2 = self.nodes[2].getaddressmempool({"addresses": [address3]}) assert_equal(len(mempool2), 0) tx = CTransaction() tx.vin = [ CTxIn(COutPoint(int(memtxid2, 16), 0)), CTxIn(COutPoint(int(memtxid2, 16), 1)) ] tx.vout = [CTxOut(int(amount / 2 - 10000), scriptPubKey2)] tx.rehash() self.nodes[2].importprivkey(privKey3) signed_tx3 = self.nodes[2].signrawtransactionwithwallet(tx.serialize().hex()) self.nodes[2].sendrawtransaction(signed_tx3["hex"], 0) self.bump_mocktime(2) mempool3 = self.nodes[2].getaddressmempool({"addresses": [address3]}) assert_equal(len(mempool3), 2) assert_equal(mempool3[0]["prevtxid"], memtxid2) assert_equal(mempool3[0]["prevout"], 0) assert_equal(mempool3[1]["prevtxid"], memtxid2) assert_equal(mempool3[1]["prevout"], 1) # sending and receiving to the same address privkey1 = "cMvZn1pVWntTEcsK36ZteGQXRAcZ8CoTbMXF1QasxBLdnTwyVQCc" address1 = "yM9Eed1bxjy7tYxD3yZDHxjcVT48WdRoB1" address1hash = binascii.unhexlify("0909C84A817651502E020AAD0FBCAE5F656E7D8A") address1script = CScript([OP_DUP, OP_HASH160, address1hash, OP_EQUALVERIFY, OP_CHECKSIG]) self.nodes[0].sendtoaddress(address1, 10) self.nodes[0].generate(1) self.sync_all() utxos = self.nodes[1].getaddressutxos({"addresses": [address1]}) assert_equal(len(utxos), 1) tx = CTransaction() tx.vin = [ CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["outputIndex"])) ] amount = int(utxos[0]["satoshis"] - 10000) tx.vout = [CTxOut(amount, address1script)] tx.rehash() self.nodes[0].importprivkey(privkey1) signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex()) self.nodes[0].sendrawtransaction(signed_tx["hex"], 0) self.sync_all() mempool_deltas = self.nodes[2].getaddressmempool({"addresses": [address1]}) assert_equal(len(mempool_deltas), 2) self.log.info("Passed") if __name__ == '__main__': AddressIndexTest().main()
thelazier/dash
test/functional/feature_addressindex.py
Python
mit
15,001
import xml.etree.ElementTree as ET import numpy as np import openmc import pytest from tests.unit_tests import assert_unbounded def test_basic(): c1 = openmc.Cell() c2 = openmc.Cell() c3 = openmc.Cell() u = openmc.Universe(name='cool', cells=(c1, c2, c3)) assert u.name == 'cool' cells = set(u.cells.values()) assert not (cells ^ {c1, c2, c3}) # Test __repr__ repr(u) with pytest.raises(TypeError): u.add_cell(openmc.Material()) with pytest.raises(TypeError): u.add_cells(c1) u.remove_cell(c3) cells = set(u.cells.values()) assert not (cells ^ {c1, c2}) u.clear_cells() assert not set(u.cells) def test_bounding_box(): cyl1 = openmc.ZCylinder(r=1.0) cyl2 = openmc.ZCylinder(r=2.0) c1 = openmc.Cell(region=-cyl1) c2 = openmc.Cell(region=+cyl1 & -cyl2) u = openmc.Universe(cells=[c1, c2]) ll, ur = u.bounding_box assert ll == pytest.approx((-2., -2., -np.inf)) assert ur == pytest.approx((2., 2., np.inf)) u = openmc.Universe() assert_unbounded(u) def test_plot(run_in_tmpdir, sphere_model): m = sphere_model.materials[0] univ = sphere_model.geometry.root_universe colors = {m: 'limegreen'} for basis in ('xy', 'yz', 'xz'): univ.plot( basis=basis, pixels=(10, 10), color_by='material', colors=colors, ) def test_get_nuclides(uo2): c = openmc.Cell(fill=uo2) univ = openmc.Universe(cells=[c]) nucs = univ.get_nuclides() assert nucs == ['U235', 'O16'] def test_cells(): cells = [openmc.Cell() for i in range(5)] cells2 = [openmc.Cell() for i in range(3)] cells[0].fill = openmc.Universe(cells=cells2) u = openmc.Universe(cells=cells) assert not (set(u.cells.values()) ^ set(cells)) all_cells = set(u.get_all_cells().values()) assert not (all_cells ^ set(cells + cells2)) def test_get_all_materials(cell_with_lattice): cells, mats, univ, lattice = cell_with_lattice test_mats = set(univ.get_all_materials().values()) assert not (test_mats ^ set(mats)) def test_get_all_universes(): c1 = openmc.Cell() u1 = openmc.Universe(cells=[c1]) c2 = openmc.Cell() u2 = openmc.Universe(cells=[c2]) c3 = openmc.Cell(fill=u1) c4 = openmc.Cell(fill=u2) u3 = openmc.Universe(cells=[c3, c4]) univs = set(u3.get_all_universes().values()) assert not (univs ^ {u1, u2}) def test_create_xml(cell_with_lattice): cells = [openmc.Cell() for i in range(5)] u = openmc.Universe(cells=cells) geom = ET.Element('geom') u.create_xml_subelement(geom) cell_elems = geom.findall('cell') assert len(cell_elems) == len(cells) assert all(c.get('universe') == str(u.id) for c in cell_elems) assert not (set(c.get('id') for c in cell_elems) ^ set(str(c.id) for c in cells))
wbinventor/openmc
tests/unit_tests/test_universe.py
Python
mit
2,900
from datetime import datetime

from flask import Blueprint, render_template
from flask_cas import login_required

from timecard.api import current_period_start
from timecard.models import config, admin_required

admin_views = Blueprint('admin', __name__, url_prefix='/admin', template_folder='templates')


@admin_views.route('/')
@admin_views.route('/users', methods=['GET'])
@login_required
@admin_required
def admin_users_page():
    return render_template(
        'admin_users.html',
        initial_date=datetime.now().isoformat(),  # now, in server's time zone
        initial_period_start=current_period_start().isoformat(),
        period_duration=config['period_duration'],
        lock_date=config['lock_date'],
    )


@admin_views.route('/settings')
@login_required
@admin_required
def admin_settings_page():
    return render_template(
        'admin_settings.html'
    )
justinbot/timecard
timecard/admin/views.py
Python
mit
885
""" Django settings for keyman project. Generated by 'django-admin startproject' using Django 1.11.7. For more information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '$-2ijwgs8-3i*r#j@1ian5xrp+17)fz)%cdjjhwa#4x&%lk7v@' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'keys', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'keyman.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'keyman.wsgi.application' # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/'
htlcnn/pyrevitscripts
HTL.tab/Test.panel/Test.pushbutton/keyman/keyman/keyman/settings.py
Python
mit
3,109
from cryptography.hazmat import backends
from cryptography.hazmat.primitives.asymmetric import ec, dsa, rsa

# Crypto and Cryptodome have same API
if random():
    from Crypto.PublicKey import DSA
    from Crypto.PublicKey import RSA
else:
    from Cryptodome.PublicKey import DSA
    from Cryptodome.PublicKey import RSA

RSA_WEAK = 1024
RSA_OK = 2048
RSA_STRONG = 3076
DSA_WEAK = 1024
DSA_OK = 2048
DSA_STRONG = 3076
BIG = 10000

EC_WEAK = ec.SECT163K1()  # has key size of 163
EC_OK = ec.SECP224R1()
EC_STRONG = ec.SECP384R1()
EC_BIG = ec.SECT571R1()

dsa_gen_key = dsa.generate_private_key
ec_gen_key = ec.generate_private_key
rsa_gen_key = rsa.generate_private_key

# Strong and OK keys.
dsa_gen_key(key_size=DSA_OK)
dsa_gen_key(key_size=DSA_STRONG)
dsa_gen_key(key_size=BIG)
ec_gen_key(curve=EC_OK)
ec_gen_key(curve=EC_STRONG)
ec_gen_key(curve=EC_BIG)
rsa_gen_key(public_exponent=65537, key_size=RSA_OK)
rsa_gen_key(public_exponent=65537, key_size=RSA_STRONG)
rsa_gen_key(public_exponent=65537, key_size=BIG)

DSA.generate(bits=RSA_OK)
DSA.generate(bits=RSA_STRONG)
RSA.generate(bits=RSA_OK)
RSA.generate(bits=RSA_STRONG)

dsa_gen_key(DSA_OK)
dsa_gen_key(DSA_STRONG)
dsa_gen_key(BIG)
ec_gen_key(EC_OK)
ec_gen_key(EC_STRONG)
ec_gen_key(EC_BIG)
rsa_gen_key(65537, RSA_OK)
rsa_gen_key(65537, RSA_STRONG)
rsa_gen_key(65537, BIG)

DSA.generate(DSA_OK)
DSA.generate(DSA_STRONG)
RSA.generate(RSA_OK)
RSA.generate(RSA_STRONG)

# Weak keys
dsa_gen_key(DSA_WEAK)
ec_gen_key(EC_WEAK)
rsa_gen_key(65537, RSA_WEAK)
dsa_gen_key(key_size=DSA_WEAK)
ec_gen_key(curve=EC_WEAK)
rsa_gen_key(65537, key_size=RSA_WEAK)
DSA.generate(DSA_WEAK)
RSA.generate(RSA_WEAK)

# ------------------------------------------------------------------------------
# Through function calls

def make_new_rsa_key_weak(bits):
    return RSA.generate(bits)  # NOT OK

make_new_rsa_key_weak(RSA_WEAK)

def make_new_rsa_key_strong(bits):
    return RSA.generate(bits)  # OK

make_new_rsa_key_strong(RSA_STRONG)

def only_used_by_test(bits):
    # Although this call will technically not be ok, since it's only used in a test, we don't want to alert on it.
    return RSA.generate(bits)
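# Context note (added; not part of the CodeQL test data): the WEAK/OK/STRONG
# thresholds above follow common guidance such as NIST SP 800-57, which rates
# RSA/DSA moduli below 2048 bits and elliptic curves below 224 bits as
# providing less than 112 bits of security.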
github/codeql
python/ql/test/query-tests/Security/CWE-326-WeakCryptoKey/weak_crypto.py
Python
mit
2,155
import re
import urllib
import hashlib

from django import template
from django.conf import settings

URL_RE = re.compile(r'^https?://([-\w\.]+)+(:\d+)?(/([\w/_\.]*(\?\S+)?)?)?', re.IGNORECASE)
EMAIL_RE = re.compile(r'^[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,4}$', re.IGNORECASE)

GRAVATAR_URL_PREFIX = 'http://www.gravatar.com/avatar/'
DEFAULT_PARAMS = \
{
    # api_key: (gravatar_key, value),
    'size': ('s', 80),     # value is in [1,512]
    'rating': ('r', 'g'),  # 'pg', 'r', or 'x'
    'default': ('d', ''),  # 'identicon', 'monsterid', 'wavatar', '404', or escaped URI
}

register = template.Library()


def _build_gravatar_url(email, params):
    """Generate a Gravatar URL.
    """
    # step 1: get a hex hash of the email address
    email = email.strip().lower().encode('utf-8')
    if not EMAIL_RE.match(email):
        return ''
    email_hash = hashlib.md5(email).hexdigest()

    # step 2a: build a canonized parameters dictionary
    if not type(params).__name__ == 'dict':
        params = params.__dict__

    actual_params = {}
    default_keys = DEFAULT_PARAMS.keys()
    for key, value in params.items():
        if key in default_keys:
            k, default_value = DEFAULT_PARAMS[key]
            # skip parameters whose values are defaults,
            # assume these values are mirroring Gravatar's defaults
            if value != default_value:
                actual_params[k] = value

    # step 2b: validate the canonized parameters dictionary
    # silently drop parameter when the value is not valid
    for key, value in actual_params.items():
        if key == 's':
            if value < 1 or value > 512:
                del actual_params[key]
        elif key == 'r':
            if value.lower() not in ('g', 'pg', 'r', 'x'):
                del actual_params[key]
        # except when the parameter key is 'd': replace with 'identicon'
        elif key == 'd':
            if value.lower() not in ('identicon', 'monsterid', 'wavatar', '404'):
                if not URL_RE.match(value):  # if not a valid URI
                    del actual_params[key]
                else:  # valid URI, encode it
                    actual_params[key] = value  # urlencode will encode it later

    # step 3: encode params
    params_encode = urllib.urlencode(actual_params)

    # step 4: form the gravatar url
    gravatar_url = GRAVATAR_URL_PREFIX + email_hash
    if params_encode:
        gravatar_url += '?' + params_encode
    return gravatar_url


class GravatarURLNode(template.Node):
    def __init__(self, email, params):
        self.email = email
        self.params = params

    def render(self, context):
        try:
            if self.params:
                params = template.Variable(self.params).resolve(context)
            else:
                params = {}

            # try matching an address string literal
            email_literal = self.email.strip().lower()
            if EMAIL_RE.match(email_literal):
                email = email_literal
            # treat as a variable
            else:
                email = template.Variable(self.email).resolve(context)
        except template.VariableDoesNotExist:
            return ''

        # now, we generate the gravatar url
        return _build_gravatar_url(email, params)


@register.tag(name="gravatar_url")
def get_gravatar_url(parser, token):
    """For template tag: {% gravatar_url <email> <params> %}

    Where <params> is an object or a dictionary (variable), and <email> is a
    string object (variable) or a string (literal).
    """
    try:
        tag_name, email, params = token.split_contents()
    except ValueError:
        try:
            tag_name, email = token.split_contents()
            params = None
        except ValueError:
            raise template.TemplateSyntaxError('%r tag requires one or two arguments.' % token.contents.split()[0])

    # if email is quoted, parse as a literal string
    if email[0] in ('"', "'") or email[-1] in ('"', "'"):
        if email[0] == email[-1]:
            email = email[1:-1]
        else:
            raise template.TemplateSyntaxError(
                "%r tag's first argument is in unbalanced quotes." % tag_name)

    return GravatarURLNode(email, params)
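# Usage sketch (assumed template code, based on the tag's docstring above; the
# load name comes from this module's filename, gravatar_tags):
#
#   {% load gravatar_tags %}
#   <img src="{% gravatar_url user.email %}" alt="avatar">
#   <img src="{% gravatar_url 'jane@example.com' params %}" alt="avatar">
#
# where `params` is a context variable such as {'size': 64, 'rating': 'pg'}.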
santa4nt/django-gravatar
django_gravatar/templatetags/gravatar_tags.py
Python
mit
4,346
"""Test.""" import pytest TM_TABLE = [ ([0, 1, 1, 0, 1], True), ([0], True), ([1], False), ([0, 1, 0, 0], False), ] @pytest.mark.parametrize("n, result", TM_TABLE) def test_is_thue_morse(n, result): """Test.""" from is_thue_morse import is_thue_morse assert is_thue_morse(n) == result
rrustia/code-katas
src/test_is_thue_morse.py
Python
mit
318
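# A minimal candidate implementation for the test file above (an assumption --
# the module under test, is_thue_morse.py, is not included here). The n-th
# Thue-Morse term is the parity of the set bits of n, so a list qualifies iff
# it is a prefix of that sequence; this satisfies every row of TM_TABLE.
def is_thue_morse(seq):
    return all(bin(i).count("1") % 2 == term for i, term in enumerate(seq))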
from fastapi.testclient import TestClient from docs_src.request_files.tutorial001 import app client = TestClient(app) openapi_schema = { "openapi": "3.0.2", "info": {"title": "FastAPI", "version": "0.1.0"}, "paths": { "/files/": { "post": { "responses": { "200": { "description": "Successful Response", "content": {"application/json": {"schema": {}}}, }, "422": { "description": "Validation Error", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } }, }, }, "summary": "Create File", "operationId": "create_file_files__post", "requestBody": { "content": { "multipart/form-data": { "schema": { "$ref": "#/components/schemas/Body_create_file_files__post" } } }, "required": True, }, } }, "/uploadfile/": { "post": { "responses": { "200": { "description": "Successful Response", "content": {"application/json": {"schema": {}}}, }, "422": { "description": "Validation Error", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } }, }, }, "summary": "Create Upload File", "operationId": "create_upload_file_uploadfile__post", "requestBody": { "content": { "multipart/form-data": { "schema": { "$ref": "#/components/schemas/Body_create_upload_file_uploadfile__post" } } }, "required": True, }, } }, }, "components": { "schemas": { "Body_create_upload_file_uploadfile__post": { "title": "Body_create_upload_file_uploadfile__post", "required": ["file"], "type": "object", "properties": { "file": {"title": "File", "type": "string", "format": "binary"} }, }, "Body_create_file_files__post": { "title": "Body_create_file_files__post", "required": ["file"], "type": "object", "properties": { "file": {"title": "File", "type": "string", "format": "binary"} }, }, "ValidationError": { "title": "ValidationError", "required": ["loc", "msg", "type"], "type": "object", "properties": { "loc": { "title": "Location", "type": "array", "items": {"type": "string"}, }, "msg": {"title": "Message", "type": "string"}, "type": {"title": "Error Type", "type": "string"}, }, }, "HTTPValidationError": { "title": "HTTPValidationError", "type": "object", "properties": { "detail": { "title": "Detail", "type": "array", "items": {"$ref": "#/components/schemas/ValidationError"}, } }, }, } }, } def test_openapi_schema(): response = client.get("/openapi.json") assert response.status_code == 200, response.text assert response.json() == openapi_schema file_required = { "detail": [ { "loc": ["body", "file"], "msg": "field required", "type": "value_error.missing", } ] } def test_post_form_no_body(): response = client.post("/files/") assert response.status_code == 422, response.text assert response.json() == file_required def test_post_body_json(): response = client.post("/files/", json={"file": "Foo"}) assert response.status_code == 422, response.text assert response.json() == file_required def test_post_file(tmp_path): path = tmp_path / "test.txt" path.write_bytes(b"<file content>") client = TestClient(app) with path.open("rb") as file: response = client.post("/files/", files={"file": file}) assert response.status_code == 200, response.text assert response.json() == {"file_size": 14} def test_post_large_file(tmp_path): default_pydantic_max_size = 2 ** 16 path = tmp_path / "test.txt" path.write_bytes(b"x" * (default_pydantic_max_size + 1)) client = TestClient(app) with path.open("rb") as file: response = client.post("/files/", files={"file": file}) assert response.status_code == 200, response.text assert response.json() == {"file_size": default_pydantic_max_size 
+ 1} def test_post_upload_file(tmp_path): path = tmp_path / "test.txt" path.write_bytes(b"<file content>") client = TestClient(app) with path.open("rb") as file: response = client.post("/uploadfile/", files={"file": file}) assert response.status_code == 200, response.text assert response.json() == {"filename": "test.txt"}
tiangolo/fastapi
tests/test_tutorial/test_request_files/test_tutorial001.py
Python
mit
6,215
import matplotlib.pyplot as plt
import numpy as np


def logHist(X, N=30, fig=None, noclear=False, pdf=False, **kywds):
    ''' Plot logarithmic histogram or probability density function
    from sampled data.

    Args:
        X (numpy.ndarray): 1-D array of sampled values
        N (Optional[int]): Number of bins (default 30)
        fig (Optional[int]): Figure number (default None)
        noclear (Optional[bool]): If True, do not clear the figure first
            (default False)
        pdf (Optional[bool]): If True, normalize by bin width and display
            as a curve instead of a bar chart (default False).
            Note: results are always normalized by number of samples
        **kywds: Arbitrary keyword arguments passed to matplotlib.pyplot.bar
            (or matplotlib.pyplot.semilogx if pdf is True)

    Returns:
        x (ndarray): abscissa values (bin edges, or bin centers if pdf is True)
        n (ndarray): (normalized) frequency values
    '''
    # Logarithmically spaced bin edges spanning the sampled data range.
    x = np.logspace(np.log10(np.min(X)), np.log10(np.max(X)), N + 1)
    n, x = np.histogram(X, bins=x)
    n = n / float(X.size)

    plt.figure(fig)
    if not noclear:
        plt.clf()

    if pdf:
        # Divide by bin width and plot the density at the bin centers.
        n /= np.diff(x)
        x = x[:-1] + np.diff(x) / 2
        plt.semilogx(x, n, **kywds)
    else:
        plt.bar(x[:len(x) - 1], n, width=np.diff(x), **kywds)
        a = plt.gca()
        a.set_xlim(10.**np.floor(np.log10(np.min(X))), 10.**np.ceil(np.log10(np.max(X))))
        a.set_xscale('log')

    return x, n
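if __name__ == "__main__":
    # Illustrative usage sketch (not part of the original module): draw
    # log-normally distributed samples and show both display modes.
    samples = 10.0 ** np.random.normal(2.0, 0.5, size=10000)
    logHist(samples, N=40, fig=1)
    logHist(samples, N=40, fig=2, pdf=True)
    plt.show()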
dsavransky/miscpy
miscpy/PlotFun/logHist.py
Python
mit
1,435
from __future__ import unicode_literals

from django.utils import six
import pytest

from nicedjango.utils.compact_csv import CsvReader


@pytest.fixture
def stream():
    # One CSV row that exercises every unescaping layer: a C1 control byte
    # (\xc2\x96), an escaped quote, escaped \r and \n, a comma inside quotes,
    # a doubled backslash, a digit, a NULL token and an empty quoted field.
    csv = b'''"a\xc2\x96b\\"c'd\\re\\nf,g\\\\",1,NULL,""\n'''.decode('utf-8')
    return six.StringIO(csv)


def test_reader_raw(stream):
    r = CsvReader(stream, replacements=(), preserve_quotes=True, symbols=(), replace_digits=False)
    assert list(r) == [['''"a\x96b\\"c'd\\re\\nf,g\\\\"''', '1', 'NULL', '""']]


def test_reader_none(stream):
    r = CsvReader(stream, replacements=(), preserve_quotes=True, replace_digits=False)
    assert list(r) == [['''"a\x96b\\"c'd\\re\\nf,g\\\\"''', '1', None, '""']]


def test_reader_quotes(stream):
    r = CsvReader(stream, replacements=(), replace_digits=False)
    assert list(r) == [['''a\x96b\\"c'd\\re\\nf,g\\\\''', '1', None, '']]


def test_reader_replace(stream):
    r = CsvReader(stream, replace_digits=False)
    assert list(r) == [['''a\x96b"c'd\re\nf,g\\''', '1', None, '']]


def test_reader_replace_digit(stream):
    r = CsvReader(stream)
    assert list(r) == [['''a\x96b"c'd\re\nf,g\\''', 1, None, '']]
katakumpo/nicedjango
tests/test_compact_csv_reader.py
Python
mit
1,175
""" WSGI config for readbacks project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/ """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "readbacks.settings") from django.core.wsgi import get_wsgi_application application = get_wsgi_application()
argybarg/readbacks
readbacks/wsgi.py
Python
mit
393
from pxl_object import PxlObject
from pxl_vector import PxlVector


class PxlSprite(PxlObject):
    def __init__(self, size, position):
        # Minimal stub completion (original body was `pass`):
        # keep the constructor arguments around for later use.
        self.size = size
        self.position = position
desk467/pyxel
src/pxl_sprite.py
Python
mit
150
""" Test suite for the embedded <script> extraction """ from BeautifulSoup import BeautifulSoup from nose.tools import raises, eq_ from csxj.datasources.parser_tools import media_utils from csxj.datasources.parser_tools import twitter_utils from tests.datasources.parser_tools import test_twitter_utils def make_soup(html_data): return BeautifulSoup(html_data) class TestMediaUtils(object): def setUp(self): self.netloc = 'foo.com' self.internal_sites = {} def test_embedded_script(self): """ The embedded <script> extraction works on a simple embedded script with <noscript> fallback """ html_data = """ <div> <script src='http://bar.com/some_widget.js'> </script> * <noscript> <a href='http://bar.com/some_resource'>Disabled JS, go here</a> </noscript> </div> """ soup = make_soup(html_data) tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites) eq_(tagged_URL.URL, "http://bar.com/some_resource") @raises(ValueError) def test_embedded_script_without_noscript_fallback(self): """ The embedded <script> extraction raises a ValueError exception when encountering a script without <noscript> fallback """ html_data = """ <div> <script src='http://bar.com/some_widget.js'> </script> </div> """ soup = make_soup(html_data) media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites) def test_embeded_tweet_widget(self): """ The embedded <script> extraction returns a link to a twitter resource when the script is a twitter widget """ html_data = """ <div> <script src={0}> {1} </script> </div> """.format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE) soup = make_soup(html_data) tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites) expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded']) eq_(tagged_URL.tags, expected_tags) @raises(ValueError) def test_embedded_javascript_code(self): """ The embedded <script> extraction raises a ValueError when processing a <script> tag with arbitrary Javascript code inside """ js_content = """<script type='text/javascript'>var pokey='penguin'; </script>""" soup = make_soup(js_content) media_utils.extract_tagged_url_from_embedded_script(soup, self.netloc, self.internal_sites) def test_embedded_tweet_widget_splitted(self): """ The embedded <script> extraction should work when an embedded tweet is split between the widget.js inclusion and the actual javascript code to instantiate it.""" html_data = """ <div> <script src={0}></script> <script> {1} </script> </div> """.format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE) soup = make_soup(html_data) tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites) expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded']) eq_(tagged_URL.tags, expected_tags) class TestDewPlayer(object): def test_simple_url_extraction(self): """ media_utils.extract_source_url_from_dewplayer() can extract he url to an mp3 file from an embedded dewplayer object. 
""" dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?mp3=http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3" expected_mp3_url = "http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3" extracted_url = media_utils.extract_source_url_from_dewplayer(dewplayer_url) eq_(expected_mp3_url, extracted_url) @raises(ValueError) def test_empty_url(self): """ media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an empty string """ media_utils.extract_source_url_from_dewplayer("") @raises(ValueError) def test_bad_query_url(self): """ media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an unknown dewplayer query """ wrong_dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?foo=bar" media_utils.extract_source_url_from_dewplayer(wrong_dewplayer_url)
sevas/csxj-crawler
tests/datasources/parser_tools/test_media_utils.py
Python
mit
4,676
import random
from urllib import urlopen
import sys

WORD_URL = "http://learncodethehardway.org/words.txt"
WORDS = []

PHRASES = {
    "class %%%(%%%):":
        "Make a class named %%% that is-a %%%.",
    "class %%%(object):\n\tdef __init__(self, ***)":
        "class %%% has-a __init__ that takes self and *** parameter.",
    "class %%%(object):\n\tdef ***(self, @@@)":
        "class %%% has-a function named *** that takes self and @@@ parameter.",
    "*** = %%%()":
        "Set *** to an instance of class %%%.",
    "***.***(@@@)":
        "From *** get the *** function, and call it with parameters self, @@@.",
    "***.*** = '***'":
        "From *** get the *** attribute and set it to '***'."
}

# do they want to drill phrases first
if len(sys.argv) == 2 and sys.argv[1] == "english":
    PHRASE_FIRST = True
else:
    PHRASE_FIRST = False

# load up the words from the website
for word in urlopen(WORD_URL).readlines():
    WORDS.append(word.strip())


def convert(snippet, phrase):
    class_names = [w.capitalize() for w in
                   random.sample(WORDS, snippet.count("%%%"))]
    other_names = random.sample(WORDS, snippet.count("***"))
    results = []
    param_names = []

    for i in range(0, snippet.count("@@@")):
        param_count = random.randint(1, 3)
        param_names.append(', '.join(random.sample(WORDS, param_count)))

    for sentence in snippet, phrase:
        result = sentence[:]

        # fake class names
        for word in class_names:
            result = result.replace("%%%", word, 1)

        # fake other names
        for word in other_names:
            result = result.replace("***", word, 1)

        # fake parameter lists
        for word in param_names:
            result = result.replace("@@@", word, 1)

        results.append(result)

    return results


# keep going until they hit CTRL-D
try:
    while True:
        snippets = PHRASES.keys()
        random.shuffle(snippets)

        for snippet in snippets:
            phrase = PHRASES[snippet]
            question, answer = convert(snippet, phrase)

            if PHRASE_FIRST:
                question, answer = answer, question

            print question
            raw_input("> ")
            print "ANSWER: %s\n\n" % answer
except EOFError:
    print "\nBye"
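# Illustrative sample round (word choices are random; these names are made up):
#   question: class Corn(object):
#                 def make(self, cheese)
#   answer:   class Corn has-a function named make that takes self and cheese parameter.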
peterhogan/python
oop_test.py
Python
mit
2,018
# vim: set fileencoding=utf-8 : """A setuptools based setup module. See: https://packaging.python.org/en/latest/distributing.html https://github.com/pypa/sampleproject """ # Always prefer setuptools over distutils from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='endymion', # Versions should comply with PEP440. For a discussion on single-sourcing # the version across setup.py and the project code, see # https://packaging.python.org/en/latest/single_source_version.html version='1.4.1', description='A small tool to check the link validity of external Vagrant boxes on Atlas', long_description=long_description, # The project's main homepage. url='https://github.com/lpancescu/endymion', # Author details author='Laurențiu Păncescu', author_email='[email protected]', # Choose your license license='MIT', # See https://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ # How mature is this project? Common values are # 3 - Alpha # 4 - Beta # 5 - Production/Stable 'Development Status :: 4 - Beta', # Indicate who your project is intended for 'Intended Audience :: Developers', 'Topic :: Utilities', # Pick your license as you wish (should match "license" above) 'License :: OSI Approved :: MIT License', # Specify the Python versions you support here. In particular, ensure # that you indicate whether you support Python 2, Python 3 or both. 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ], # What does your project relate to? keywords='vagrant atlas', # You can just specify the packages manually here if your project is # simple. Or you can use find_packages(). packages=find_packages(exclude=[]), # Alternatively, if you want to distribute just a my_module.py, uncomment # this: # py_modules=["my_module"], # List run-time dependencies here. These will be installed by pip when # your project is installed. For an analysis of "install_requires" vs pip's # requirements files see: # https://packaging.python.org/en/latest/requirements.html install_requires=[], # List additional groups of dependencies here (e.g. development # dependencies). You can install these using the following syntax, # for example: # $ pip install -e .[dev,test] extras_require={}, # If there are data files included in your packages that need to be # installed, specify them here. If using Python 2.6 or less, then these # have to be included in MANIFEST.in as well. package_data={}, # Although 'package_data' is the preferred approach, in some case you may # need to place data files outside of your packages. See: # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa # In this case, 'data_file' will be installed into '<sys.prefix>/my_data' data_files=[], # To provide executable scripts, use entry points in preference to the # "scripts" keyword. Entry points provide cross-platform support and allow # pip to create the appropriate form of executable for the target platform. entry_points={ 'console_scripts': [ 'endymion=endymion:main', ], }, )
lpancescu/atlas-lint
setup.py
Python
mit
3,767
from .game import Board

# Rough benchmark script: exercise Board.all() repeatedly, printing progress.
for i in range(10):
    Board.all()
    print(i)
jorgebg/tictactoe
time.py
Python
mit
74
# coding: utf-8 # ##Test out UGRID-0.9 compliant unstructured grid model datasets with PYUGRID # In[1]: name_list=['sea_surface_elevation', 'sea_surface_height_above_geoid', 'sea_surface_height','water level', 'sea_surface_height_above_sea_level', 'water_surface_height_above_reference_datum', 'sea_surface_height_above_reference_ellipsoid'] models = dict(ADCIRC=('http://comt.sura.org/thredds/dodsC/data/comt_1_archive/inundation_tropical/' 'UND_ADCIRC/Hurricane_Ike_2D_final_run_with_waves'), FVCOM=('http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/' 'Forecasts/NECOFS_GOM3_FORECAST.nc'), SELFE=('http://comt.sura.org/thredds/dodsC/data/comt_1_archive/inundation_tropical/' 'VIMS_SELFE/Hurricane_Ike_2D_final_run_with_waves'), WW3=('http://comt.sura.org/thredds/dodsC/data/comt_2/pr_inundation_tropical/EMC_ADCIRC-WW3/' 'Dec2013Storm_2D_preliminary_run_1_waves_only')) # In[2]: import iris iris.FUTURE.netcdf_promote = True def cube_func(cube): return (cube.standard_name in name_list) and (not any(m.method == 'maximum' for m in cube.cell_methods)) constraint = iris.Constraint(cube_func=cube_func) cubes = dict() for model, url in models.items(): cube = iris.load_cube(url, constraint) cubes.update({model: cube}) # In[3]: cubes # In[4]: import pyugrid import matplotlib.tri as tri def get_mesh(cube, url): ug = pyugrid.UGrid.from_ncfile(url) cube.mesh = ug cube.mesh_dimension = 1 return cube def get_triang(cube): lon = cube.mesh.nodes[:, 0] lat = cube.mesh.nodes[:, 1] nv = cube.mesh.faces return tri.Triangulation(lon, lat, triangles=nv) # In[5]: tris = dict() for model, cube in cubes.items(): url = models[model] cube = get_mesh(cube, url) cubes.update({model: cube}) tris.update({model: get_triang(cube)}) # In[6]: get_ipython().magic('matplotlib inline') import numpy as np import cartopy.crs as ccrs import matplotlib.pyplot as plt def plot_model(model): cube = cubes[model] lon = cube.mesh.nodes[:, 0] lat = cube.mesh.nodes[:, 1] ind = -1 # just take the last time index for now zcube = cube[ind] triang = tris[model] fig, ax = plt.subplots(figsize=(7, 7), subplot_kw=dict(projection=ccrs.PlateCarree())) ax.set_extent([lon.min(), lon.max(), lat.min(), lat.max()]) ax.coastlines() levs = np.arange(-1, 5, 0.2) cs = ax.tricontourf(triang, zcube.data, levels=levs) fig.colorbar(cs) ax.tricontour(triang, zcube.data, colors='k',levels=levs) tvar = cube.coord('time') tstr = tvar.units.num2date(tvar.points[ind]) gl = ax.gridlines(draw_labels=True) gl.xlabels_top = gl.ylabels_right = False title = ax.set_title('%s: Elevation (m): %s' % (zcube.attributes['title'], tstr)) return fig, ax # In[7]: fig, ax = plot_model('ADCIRC') # In[8]: fig, ax = plot_model('FVCOM') # In[9]: fig, ax = plot_model('WW3') # In[10]: fig, ax = plot_model('SELFE')
ioos/comt
python/pyugrid_test.py
Python
mit
3,158
#!/usr/bin/python
""" fanhaorename.py
"""
import os
import os.path
import logging
import fileorganizer
from fileorganizer import _helper
from fileorganizer.replacename import _replacename

__author__ = "Jack Chang <[email protected]>"


def _tagHelper(tag):
    """ Build a case-insensitive regex fragment for ``tag``: each letter
    becomes a character class matching both cases, e.g. "ab1" -> "[aA][bB]1"
    """
    result = ""
    for c in tag:
        if c.isalpha():
            result += "[{0}{1}]".format(c.lower(), c.upper())
        else:
            result += c
    return result


def fanhaorename(work_dir,
                 tag,
                 exclude=None,
                 mode=0,
                 wetrun=False,
                 this_name=os.path.basename(__file__)):
    """ Batch Rename Fanhao

    \b
    Args:
        work_dir (str): Working Directory
        tag (str): Fanhao tag
        exclude (str, optional): Regex string to exclude in matches
        mode (int, optional): 0=FILE ONLY, 1=FOLDER ONLY, 2=BOTH
        wetrun (bool, optional): Test Run or not
    """
    _find_dir = r"(.*)({0})(-|_| )*(\d\d\d)(.*)".format(_tagHelper(tag))
    _replace_dir = r"{0}-\4".format(tag)
    _find_file = _find_dir + r"(\.(.*))"
    _replace_file = _replace_dir + r"\6"

    _helper.init_loger()
    this_run = "WET" if wetrun else "DRY"
    loger = logging.getLogger(this_name)
    loger.info("[START] === %s [%s RUN] ===", this_name, this_run)
    loger.info("[DO] Rename \"%s\" fanhao in \"%s\"; Mode %s", tag, work_dir, mode)

    if mode in (0, 2):  # mode 0 and 2
        for item in _replacename(_find_file, _replace_file, work_dir, 0, exclude):
            item.commit() if wetrun else loger.info("%s", item)

    if mode in (1, 2):  # mode 1 and 2
        for item in _replacename(_find_dir, _replace_dir, work_dir, 1, exclude):
            item.commit() if wetrun else loger.info("%s", item)

    loger.info("[END] === %s [%s RUN] ===", this_name, this_run)


if __name__ == "__main__":
    fileorganizer.cli.cli_fanhaorename()
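# Illustrative example (hypothetical filenames): with tag "ABC", a file named
# "xyz ABC_012 foo.mp4" matches _find_file and a WET run renames it to
# "ABC-012.mp4"; matching is case-insensitive via _tagHelper, so "abc_012.mp4"
# is renamed the same way.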
wei0831/fileorganizer
fileorganizer/fanhaorename.py
Python
mit
2,156
import unittest

from app.commands.file_command import FileCommand


class TestFileCommand(unittest.TestCase):
    def setUp(self):
        self.window = WindowSpy()
        self.settings = PluginSettingsStub()
        self.sublime = SublimeSpy()
        self.os_path = OsPathSpy()

        # SUT
        self.command = FileCommand(self.settings, self.os_path, self.sublime)

    def test_open_source_file(self):
        self.settings.tests_folder = 'tests/unit'

        self.command.open_source_file('C:/path/to/root/tests/unit/path/to/fileTest.php', self.window)

        self.assertEqual('C:/path/to/root/path/to/file.php', self.window.file_to_open)

    def test_open_source_file_works_with_backslashes(self):
        self.settings.tests_folder = 'tests/unit'

        self.command.open_source_file('C:\\path\\to\\root\\tests\\unit\\path\\to\\fileTest.php', self.window)

        self.assertEqual('C:/path/to/root/path/to/file.php', self.window.file_to_open)

    def test_open_source_file_works_for_network_paths(self):
        self.settings.tests_folder = 'tests'

        self.command.open_source_file('\\\\server\\dev\\root\\tests\\unit\\Service\\SearchParametersMapperTest.php',
                                      self.window)

        self.assertEqual('\\\\server\\dev\\root\\Service\\SearchParametersMapper.php', self.window.file_to_open)

    def test_open_source_file_works_for_network_paths_and_complex_tests_folder(self):
        self.settings.tests_folder = 'tests/unit'

        self.command.open_source_file('\\\\server\\dev\\root\\tests\\unit\\Service\\SearchParametersMapperTest.php',
                                      self.window)

        self.assertEqual('\\\\server\\dev\\root\\Service\\SearchParametersMapper.php', self.window.file_to_open)

    def test_open_source_file_when_tests_folder_is_not_unit_test_folder(self):
        self.settings.root = 'C:/path/to/root'
        self.settings.tests_folder = 'tests_folder'

        self.command.open_source_file('C:/path/to/root/tests_folder/unit/path/to/fileTest.php', self.window)

        self.assertEqual('C:/path/to/root/path/to/file.php', self.window.file_to_open)

    def test_open_source_file_remove_only_first_appearance_of_tests_folder_in_path(self):
        self.settings.root = 'C:/path/to/root'
        self.settings.tests_folder = 'tests'

        self.command.open_source_file('C:/path/to/root/tests/unit/path/to/tests/fileTest.php', self.window)

        self.assertEqual('C:/path/to/root/path/to/tests/file.php', self.window.file_to_open)

    def test_open_source_file_when_tests_folder_is_not_unit_test_folder_remove_only_unit_folder_after_test_path(self):
        self.settings.root = 'C:/path/to/root'
        self.settings.tests_folder = 'tests_folder'

        self.command.open_source_file('C:/path/to/root/tests_folder/unit/path/to/unit/fileTest.php', self.window)

        self.assertEqual('C:/path/to/root/path/to/unit/file.php', self.window.file_to_open)

    def test_if_source_file_exists_return_true(self):
        self.settings.tests_folder = 'tests/unit'
        self.os_path.is_file_returns = True

        actual = self.command.source_file_exists('C:\\path\\to\\root\\tests\\unit\\path\\to\\fileTest.php')

        self.assertTrue(actual)
        self.assertEqual('C:/path/to/root/path/to/file.php', self.os_path.isfile_received_filepath)

    def test_source_file_does_not_exist_if_file_already_is_a_source_file(self):
        self.settings.tests_folder = 'tests/unit'
        self.os_path.is_file_returns = True

        # Raw string avoids invalid escape sequences such as \p and \s.
        actual = self.command.source_file_exists(r'root\path\src\Gallery\ImageType.php')

        self.assertFalse(actual)

    def test_if_source_file_does_not_exist_return_false(self):
        self.settings.tests_folder = 'tests/unit'
        self.os_path.is_file_returns = False

        self.assertFalse(self.command.source_file_exists('C:/path/to/root/path/to/fileTest.php'))
        self.assertEqual('C:/path/to/root/path/to/file.php', self.os_path.isfile_received_filepath)

    def 
test_if_source_file_is_none_return_false(self): """ This case is possible when currently opened tab in sublime is untitled (i.e. not yet created) file """ self.assertFalse(self.command.source_file_exists(None)) def test_if_test_file_is_none_return_false(self): """ This case is possible when currently opened tab in sublime is untitled (i.e. not yet created) file """ self.settings.root = 'C:/path/to/root' self.settings.tests_folder = 'tests/unit' self.assertFalse(self.command.test_file_exists(None, self.window)) def test_open_file(self): self.settings.root = 'C:/path/to/root' self.settings.tests_folder = 'tests/unit' self.command.open_test_file('C:/path/to/root/path/to/file.php', self.window) self.assertEqual('C:/path/to/root/tests/unit/path/to/fileTest.php', self.window.file_to_open) def test_correct_file_name_sent_to_os_is_file_method(self): self.window.project_root = 'C:/path/to/root' self.settings.root = '' self.settings.tests_folder = 'tests/unit' self.command.test_file_exists('C:/path/to/root/path/to/file.php', self.window) self.assertEqual('C:/path/to/root/tests/unit/path/to/fileTest.php', self.os_path.isfile_received_filepath) def test_file_exists_ignores_trailing_slash_in_root_path(self): self.window.project_root = 'C:/path/to/root/' self.settings.root = '' self.settings.tests_folder = 'tests/unit' self.command.test_file_exists('C:/path/to/root/path/to/file.php', self.window) self.assertEqual('C:/path/to/root/tests/unit/path/to/fileTest.php', self.os_path.isfile_received_filepath) def test_if_test_file_exists_return_true(self): self.settings.root = 'C:/path/to/root/' self.settings.tests_folder = 'tests/unit' self.os_path.is_file_returns = True self.assertTrue(self.command.test_file_exists('C:/path/to/root/path/to/file.php', self.window)) def test_test_file_exists_returns_true_if_test_file_is_input(self): self.settings.root = 'C:/path/to/root/' self.settings.tests_folder = 'tests/unit' self.os_path.is_file_returns = True self.assertTrue(self.command.test_file_exists('C:/path/to/root/tests/unit/path/to/fileTest.php', self.window)) self.assertEqual('C:/path/to/root/tests/unit/path/to/fileTest.php', self.os_path.isfile_received_filepath, 'Expected test file filepath as parameter to isfile') def test_if_test_file_does_not_exist_return_false(self): self.settings.root = 'C:/path/to/root/' self.settings.tests_folder = 'tests/unit' self.os_path.is_file_returns = False self.assertFalse(self.command.test_file_exists('C:/path/to/root/path/to/file.php', self.window)) def test_replace_back_slashes_with_forward_slashes(self): self.window.project_root = 'C:\\path\\to\\root' self.settings.root = '' self.settings.tests_folder = 'tests\\unit' self.command.test_file_exists('C:\\path\\to\\root\\path\\to\\file.php', self.window) self.assertEqual('C:/path/to/root/tests/unit/path/to/fileTest.php', self.os_path.isfile_received_filepath) class PluginSettingsStub: pass class WindowSpy: def __init__(self): self.file_to_open = None self.project_root = None def folders(self): return [self.project_root] def open_file(self, file_to_open): self.file_to_open = file_to_open class OsPathSpy: def __init__(self): self.is_file_returns = None self.isfile_received_filepath = None def isfile(self, filepath): self.isfile_received_filepath = filepath return self.is_file_returns class SublimeSpy: pass
ldgit/remote-phpunit
tests/app/commands/test_file_command.py
Python
mit
7,821
# -*- coding: utf-8 -*- from flask import Blueprint from jotonce.api import route from jotonce.passphrases.models import Passphrase bp = Blueprint('passphrase', __name__, url_prefix='/passphrase') @route(bp, '/') def get(): p = Passphrase.get_random() return {"results": p}
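# Illustrative note (app wiring assumed, not shown in this file): the blueprint
# only serves /passphrase/ once it is attached to the application, e.g.:
#   from jotonce.api.passphrases import bp
#   app.register_blueprint(bp)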
nficano/jotonce.com
jotonce/api/passphrases.py
Python
mit
285
import numpy as np
from pycqed.analysis import measurement_analysis as ma
from pycqed.analysis_v2 import measurement_analysis as ma2
from pycqed.measurement import sweep_functions as swf
from pycqed.measurement import detector_functions as det

MC_instr = None
VNA_instr = None


def acquire_single_linear_frequency_span(file_name, start_freq=None,
                                         stop_freq=None, center_freq=None,
                                         span=None, nr_avg=1, sweep_mode='auto',
                                         nbr_points=101, power=-20,
                                         bandwidth=100, measure='S21',
                                         options_dict=None):
    """
    Acquires a single trace from the VNA.
    Inputs:
        file_name (str), name of the output file.
        start_freq (float), starting frequency of the trace.
        stop_freq (float), stopping frequency of the trace.
        center_freq (float), central frequency of the trace.
        span (float), span of the trace.
        nbr_points (int), Number of points within the trace.
        power (float), power in dBm.
        bandwidth (float), bandwidth in Hz.
        measure (str), scattering parameter to measure (ie. 'S21').
    Output:
        NONE

    Beware that start/stop and center/span are just two ways of configuring the
    same thing.
    """
    # set VNA sweep function
    if start_freq != None and stop_freq != None:
        MC_instr.set_sweep_function(swf.ZNB_VNA_sweep(VNA_instr,
                                                      start_freq=start_freq,
                                                      stop_freq=stop_freq,
                                                      npts=nbr_points,
                                                      force_reset=True))
    elif center_freq != None and span != None:
        MC_instr.set_sweep_function(swf.ZNB_VNA_sweep(VNA_instr,
                                                      center_freq=center_freq,
                                                      span=span,
                                                      npts=nbr_points,
                                                      force_reset=True))
    # set VNA detector function
    MC_instr.set_detector_function(det.ZNB_VNA_detector(VNA_instr))

    # VNA settings
    # VNA_instr.average_state('off')
    VNA_instr.bandwidth(bandwidth)

    # hack to measure S parameters different from S21
    str_to_write = "calc:par:meas 'trc1', '%s'" % measure
    print(str_to_write)
    VNA_instr.visa_handle.write(str_to_write)

    VNA_instr.avg(nr_avg)
    VNA_instr.number_sweeps_all(nr_avg)
    VNA_instr.average_mode(sweep_mode)

    VNA_instr.power(power)
    VNA_instr.timeout(10**4)

    t_start = ma.a_tools.current_timestamp()
    MC_instr.run(name=file_name)
    t_stop = ma.a_tools.current_timestamp()
    t_meas = ma.a_tools.get_timestamps_in_range(t_start, t_stop,
                                                label=file_name)
    assert len(t_meas) == 1, "Multiple timestamps found for this measurement"
    t_meas = t_meas[0]
    # ma.Homodyne_Analysis(auto=True, label=file_name, fitting_model='hanger')
    # ma.VNA_analysis(auto=True, label=file_name)
    ma2.VNA_analysis(auto=True, t_start=None,
                     options_dict=options_dict)


def acquire_current_trace(file_name):
    """
    Acquires the trace currently displayed on VNA.
    Inputs:
        file_name (str), name of the output file.
    Output:
        NONE
    """
    # get values from VNA
    start_freq = VNA_instr.start_frequency()
    stop_freq = VNA_instr.stop_frequency()
    nbr_points = VNA_instr.npts()
    power = VNA_instr.power()
    bandwidth = VNA_instr.bandwidth()

    current_sweep_time = VNA_instr.sweep_time()
    print(current_sweep_time)

    acquire_single_linear_frequency_span(file_name, start_freq=start_freq,
                                         stop_freq=stop_freq,
                                         nbr_points=nbr_points,
                                         power=power, bandwidth=bandwidth)


def acquire_linear_frequency_span_vs_power(file_name, start_freq=None,
                                           stop_freq=None, center_freq=None,
                                           start_power=None, stop_power=None,
                                           step_power=2, span=None,
                                           nbr_points=101, bandwidth=100,
                                           measure='S21'):
    """
    Acquires a frequency trace from the VNA for each power in the given range.
    Inputs:
        file_name (str), name of the output file.
        start_freq (float), starting frequency of the trace.
        stop_freq (float), stopping frequency of the trace.
        center_freq (float), central frequency of the trace.
        span (float), span of the trace.
        nbr_points (int), Number of points within the trace.
        start_power, stop_power, step_power (float), power range in dBm.
        bandwidth (float), bandwidth in Hz.
        measure (str), scattering parameter to measure (ie. 'S21').
    Output:
        NONE

    Beware that start/stop and center/span are just two ways of configuring the
    same thing.
    """
    # set VNA sweep function
    if start_freq != None and stop_freq != None:
        swf_fct_1D = swf.ZNB_VNA_sweep(VNA_instr,
                                       start_freq=start_freq,
                                       stop_freq=stop_freq,
                                       npts=nbr_points,
                                       force_reset=True)
        MC_instr.set_sweep_function(swf_fct_1D)
    elif center_freq != None and span != None:
        swf_fct_1D = swf.ZNB_VNA_sweep(VNA_instr,
                                       center_freq=center_freq,
                                       span=span,
                                       npts=nbr_points,
                                       force_reset=True)
        MC_instr.set_sweep_function(swf_fct_1D)

    if start_power != None and stop_power != None:
        # it prepares the sweep_points, such that it does not complain.
        swf_fct_1D.prepare()
        MC_instr.set_sweep_points(swf_fct_1D.sweep_points)
        MC_instr.set_sweep_function_2D(VNA_instr.power)
        MC_instr.set_sweep_points_2D(np.arange(start_power,
                                               stop_power + step_power / 2.,
                                               step_power))
    else:
        raise ValueError('Need to define power range.')
    # set VNA detector function
    MC_instr.set_detector_function(det.ZNB_VNA_detector(VNA_instr))

    # VNA settings
    VNA_instr.average_state('off')
    VNA_instr.bandwidth(bandwidth)

    # hack to measure S parameters different from S21
    str_to_write = "calc:par:meas 'trc1', '%s'" % measure
    print(str_to_write)
    VNA_instr.visa_handle.write(str_to_write)

    VNA_instr.timeout(600)

    MC_instr.run(name=file_name, mode='2D')
    # ma.Homodyne_Analysis(auto=True, label=file_name, fitting_model='hanger')
    ma.TwoD_Analysis(auto=True, label=file_name)


def acquire_2D_linear_frequency_span_vs_param(file_name, start_freq=None,
                                              stop_freq=None, center_freq=None,
                                              parameter=None,
                                              sweep_vector=None,
                                              span=None, nbr_points=101,
                                              power=-20, bandwidth=100,
                                              measure='S21'):
    """
    Acquires a frequency trace from the VNA for each value of a swept parameter.
    Inputs:
        file_name (str), name of the output file.
        start_freq (float), starting frequency of the trace.
        stop_freq (float), stopping frequency of the trace.
        center_freq (float), central frequency of the trace.
        span (float), span of the trace.
        nbr_points (int), Number of points within the trace.
        parameter, sweep_vector: parameter swept in the second dimension
            and the values it takes.
        power (float), power in dBm.
        bandwidth (float), bandwidth in Hz.
        measure (str), scattering parameter to measure (ie. 'S21').
    Output:
        NONE

    Beware that start/stop and center/span are just two ways of configuring the
    same thing.
    """
    # set VNA sweep function
    if start_freq != None and stop_freq != None:
        swf_fct_1D = swf.ZNB_VNA_sweep(VNA_instr,
                                       start_freq=start_freq,
                                       stop_freq=stop_freq,
                                       npts=nbr_points,
                                       force_reset=True)
        MC_instr.set_sweep_function(swf_fct_1D)
    elif center_freq != None and span != None:
        swf_fct_1D = swf.ZNB_VNA_sweep(VNA_instr,
                                       center_freq=center_freq,
                                       span=span,
                                       npts=nbr_points,
                                       force_reset=True)
        MC_instr.set_sweep_function(swf_fct_1D)

    if parameter is not None and sweep_vector is not None:
        # it prepares the sweep_points, such that it does not complain.
        swf_fct_1D.prepare()
        MC_instr.set_sweep_points(swf_fct_1D.sweep_points)
        MC_instr.set_sweep_function_2D(parameter)
        MC_instr.set_sweep_points_2D(sweep_vector)
    else:
        raise ValueError('Need to define parameter and its range.')
    # set VNA detector function
    MC_instr.set_detector_function(det.ZNB_VNA_detector(VNA_instr))

    # VNA settings
    VNA_instr.power(power)
    VNA_instr.average_state('off')
    VNA_instr.bandwidth(bandwidth)

    # hack to measure S parameters different from S21
    str_to_write = "calc:par:meas 'trc1', '%s'" % measure
    print(str_to_write)
    VNA_instr.visa_handle.write(str_to_write)

    VNA_instr.timeout(600)

    MC_instr.run(name=file_name, mode='2D')
    # ma.Homodyne_Analysis(auto=True, label=file_name, fitting_model='hanger')
    ma.TwoD_Analysis(auto=True, label=file_name)


def acquire_disjoint_frequency_traces(file_name, list_freq_ranges,
                                      power=-20, bandwidth=100,
                                      measure='S21'):
    """
    list_freq_ranges is a list that contains the arrays defining all the
    ranges of interest.
    """
    for idx, range_array in enumerate(list_freq_ranges):
        this_file = file_name + '_%d' % idx
        acquire_single_linear_frequency_span(file_name=this_file,
                                             start_freq=range_array[0],
                                             stop_freq=range_array[-1],
                                             nbr_points=len(range_array),
                                             power=power, bandwidth=bandwidth,
                                             measure=measure)
    packing_mmts(file_name, labels=file_name + '__', N=len(list_freq_ranges))


def packing_mmts(file_name, labels, N):
    # imports data
    # stacks it together
    for idx in range(N):
        this_file = file_name + '_%d' % idx
        ma_obj = ma.MeasurementAnalysis(label=this_file, auto=False)
        ma_obj.get_naming_and_values()
        if idx == 0:
            data_points = ma_obj.sweep_points
            data_vector = ma_obj.measured_values
        else:
            data_points = np.hstack((data_points, ma_obj.sweep_points))
            data_vector = np.hstack((data_vector, ma_obj.measured_values))
        del ma_obj
    data_vector = np.array(data_vector)
    data_points = np.array(data_points)

    sort_mask = np.argsort(data_points)
    data_points = data_points[sort_mask]
    data_vector = data_vector[:, sort_mask]

    # keeps track of sweep-points calls
    class call_counter:
        def __init__(self):
            self.counter = 0
    swp_points_counter = call_counter()

    def wrapped_data_importing(counter):
        counter.counter += 1
        return data_vector[:, counter.counter - 1]

    detector_inst = det.Function_Detector_list(
        sweep_function=wrapped_data_importing,
        result_keys=['Amp', 'phase', 'I', 'Q', 'Amp_dB'],
        msmt_kw={'counter': swp_points_counter})
    MC_instr.set_sweep_function(swf.None_Sweep(name='VNA sweep'))
    MC_instr.set_sweep_points(data_points.flatten())
    MC_instr.set_detector_function(detector_inst)
    MC_instr.run(name=file_name)
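# Illustrative setup sketch (instrument instances are assumed to exist
# elsewhere): the module-level handles must be bound before calling any
# acquire_* helper.
#
#   import pycqed.measurement.VNA_module as VNA_module
#   VNA_module.MC_instr = MC      # MeasurementControl instance
#   VNA_module.VNA_instr = VNA    # ZNB VNA driver instance
#   VNA_module.acquire_single_linear_frequency_span(
#       'resonator_scan', start_freq=4e9, stop_freq=8e9, power=-30)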
DiCarloLab-Delft/PycQED_py3
pycqed/measurement/VNA_module.py
Python
mit
12,353
from asyncio import coroutine

import pytest
from aiohttp import HttpBadRequest, HttpMethodNotAllowed
from fluentmock import create_mock

from aiohttp_rest import RestEndpoint


class CustomEndpoint(RestEndpoint):
    def get(self):
        pass

    def patch(self):
        pass


@pytest.fixture
def endpoint():
    return RestEndpoint()


@pytest.fixture
def custom_endpoint():
    return CustomEndpoint()


def test_existing_methods_are_registered_during_initialisation(custom_endpoint: CustomEndpoint):
    assert len(custom_endpoint.methods) == 2
    assert ('GET', custom_endpoint.get) in custom_endpoint.methods.items()
    assert ('PATCH', custom_endpoint.patch) in custom_endpoint.methods.items()


def test_register_method(endpoint: RestEndpoint):
    def sample_method():
        pass

    endpoint.register_method('verb', sample_method)

    assert ('VERB', sample_method) in endpoint.methods.items()


@pytest.mark.asyncio
async def test_dispatch_uses_correct_handler_for_verb(endpoint: RestEndpoint):
    endpoint.register_method('VERB1', coroutine(lambda: 5))
    endpoint.register_method('VERB2', coroutine(lambda: 17))

    assert await endpoint.dispatch(create_mock(method='VERB1', match_info={})) == 5
    assert await endpoint.dispatch(create_mock(method='VERB2', match_info={})) == 17


@pytest.mark.asyncio
async def test_dispatch_passes_request_when_required(endpoint: RestEndpoint):
    endpoint.register_method('REQUEST', coroutine(lambda request: request))
    request = create_mock(method='REQUEST', match_info={})

    assert await endpoint.dispatch(request) == request


@pytest.mark.asyncio
async def test_dispatch_passes_match_info_when_required(endpoint: RestEndpoint):
    endpoint.register_method('MATCH_INFO', coroutine(lambda prop1, prop2: (prop2, prop1)))
    request = create_mock(method='MATCH_INFO', match_info={'prop1': 1, 'prop2': 2})

    assert await endpoint.dispatch(request) == (2, 1)


@pytest.mark.asyncio
async def test_dispatch_raises_bad_request_when_match_info_does_not_exist(endpoint: RestEndpoint):
    endpoint.register_method('BAD_MATCH_INFO', coroutine(lambda no_match: no_match))
    request = create_mock(method='BAD_MATCH_INFO', match_info={})

    with pytest.raises(HttpBadRequest):
        await endpoint.dispatch(request)


@pytest.mark.asyncio
async def test_dispatch_raises_method_not_allowed_when_verb_not_matched(endpoint: RestEndpoint):
    request = create_mock(method='NO_METHOD')

    with pytest.raises(HttpMethodNotAllowed):
        await endpoint.dispatch(request)
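# Illustrative sketch (a hypothetical concrete endpoint, inferred from the
# tests above): dispatch() routes by HTTP verb and injects request/match_info
# values by parameter name, so a subclass only defines the verbs it supports.
#
#   class NoteEndpoint(RestEndpoint):
#       async def get(self, note_id):      # note_id supplied via match_info
#           return web.json_response({'id': note_id})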
atbentley/aiohttp-rest
tests/test_endpoint.py
Python
mit
2,542